1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2Client"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <inttypes.h>
22 #include <utils/Log.h>
23 #include <utils/Trace.h>
24
25 #include <camera/CameraUtils.h>
26 #include <cutils/properties.h>
27 #include <gui/Surface.h>
28 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
29
30 #include "api1/Camera2Client.h"
31
32 #include "api1/client2/StreamingProcessor.h"
33 #include "api1/client2/JpegProcessor.h"
34 #include "api1/client2/CaptureSequencer.h"
35 #include "api1/client2/CallbackProcessor.h"
36 #include "api1/client2/ZslProcessor.h"
37 #include "device3/RotateAndCropMapper.h"
38 #include "utils/CameraThreadState.h"
39 #include "utils/CameraServiceProxyWrapper.h"
40
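// Verbose log helpers: ALOG1/ALOG2 messages are compiled in but only emitted
// when the service's runtime log level (gLogLevel) is raised to 1 or 2.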
41 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
42 #define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
43
44 #ifndef FALLTHROUGH_INTENDED
45 #define FALLTHROUGH_INTENDED [[fallthrough]]
46 #endif
47
48 namespace android {
49 using namespace camera2;
50
51 // Interface used by CameraService
52
53 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
54 const sp<hardware::ICameraClient>& cameraClient,
55 std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
56 const String16& clientPackageName,
57 const std::optional<String16>& clientFeatureId,
58 const String8& cameraDeviceId,
59 int api1CameraId,
60 int cameraFacing,
61 int sensorOrientation,
62 int clientPid,
63 uid_t clientUid,
64 int servicePid,
65 bool overrideForPerfClass,
66 bool overrideToPortrait,
67 bool forceSlowJpegMode):
68 Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
69 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
70 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
71 clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
72 /*legacyClient*/ true),
73 mParameters(api1CameraId, cameraFacing),
74 mLatestRequestIds(kMaxRequestIds),
75 mLatestFailedRequestIds(kMaxRequestIds)
76 {
77 ATRACE_CALL();
78
79 mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
80 mRotateAndCropIsSupported = false;
81 mRotateAndCropPreviewTransform = 0;
82
83 SharedParameters::Lock l(mParameters);
84 l.mParameters.state = Parameters::DISCONNECTED;
85 l.mParameters.isSlowJpegModeForced = forceSlowJpegMode;
86 }
87
88 status_t Camera2Client::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
89 return initializeImpl(manager, monitorTags);
90 }
91
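// Returns true when the device's default STILL_CAPTURE request template
// reports ANDROID_CONTROL_ENABLE_ZSL as TRUE.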
92 bool Camera2Client::isZslEnabledInStillTemplate() {
93 bool zslEnabled = false;
94 CameraMetadata stillTemplate;
95 status_t res = mDevice->createDefaultRequest(
96 camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE, &stillTemplate);
97 if (res == OK) {
98 camera_metadata_entry_t enableZsl = stillTemplate.find(ANDROID_CONTROL_ENABLE_ZSL);
99 if (enableZsl.count == 1) {
100 zslEnabled = (enableZsl.data.u8[0] == ANDROID_CONTROL_ENABLE_ZSL_TRUE);
101 }
102 }
103
104 return zslEnabled;
105 }
106
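// Common initialization path: initializes the base class and default
// Parameters from the device, checks rotate-and-crop support, and creates
// the streaming processor plus the frame, capture-sequencer, JPEG, ZSL,
// and callback processor threads.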
107 template<typename TProviderPtr>
108 status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const String8& monitorTags)
109 {
110 ATRACE_CALL();
111 ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
112 status_t res;
113
114 res = Camera2ClientBase::initialize(providerPtr, monitorTags);
115 if (res != OK) {
116 return res;
117 }
118
119 {
120 SharedParameters::Lock l(mParameters);
121
122 res = l.mParameters.initialize(mDevice.get());
123 if (res != OK) {
124 ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
125 __FUNCTION__, mCameraId, strerror(-res), res);
126 return NO_INIT;
127 }
128
129 l.mParameters.isDeviceZslSupported = isZslEnabledInStillTemplate();
130 }
131
132 const CameraMetadata& staticInfo = mDevice->info();
133 mRotateAndCropIsSupported = camera3::RotateAndCropMapper::isNeeded(&staticInfo);
134 // The 'mRotateAndCropMode' value only accounts for the necessary adjustment
135 // when the display rotates. The sensor orientation still needs to be calculated
136 // and applied similar to the Camera2 path.
137 CameraUtils::getRotationTransform(staticInfo, OutputConfiguration::MIRROR_MODE_AUTO,
138 &mRotateAndCropPreviewTransform);
139
140 String8 threadName;
141
142 mStreamingProcessor = new StreamingProcessor(this);
143 threadName = String8::format("C2-%d-StreamProc",
144 mCameraId);
145
146 mFrameProcessor = new FrameProcessor(mDevice, this);
147 threadName = String8::format("C2-%d-FrameProc",
148 mCameraId);
149 res = mFrameProcessor->run(threadName.string());
150 if (res != OK) {
151 ALOGE("%s: Unable to start frame processor thread: %s (%d)",
152 __FUNCTION__, strerror(-res), res);
153 return res;
154 }
155
156 mCaptureSequencer = new CaptureSequencer(this);
157 threadName = String8::format("C2-%d-CaptureSeq",
158 mCameraId);
159 res = mCaptureSequencer->run(threadName.string());
160 if (res != OK) {
161 ALOGE("%s: Unable to start capture sequencer thread: %s (%d)",
162 __FUNCTION__, strerror(-res), res);
163 return res;
164 }
165
166 mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
167 threadName = String8::format("C2-%d-JpegProc",
168 mCameraId);
169 res = mJpegProcessor->run(threadName.string());
170 if (res != OK) {
171 ALOGE("%s: Unable to start jpeg processor thread: %s (%d)",
172 __FUNCTION__, strerror(-res), res);
173 return res;
174 }
175
176 mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
177
178 threadName = String8::format("C2-%d-ZslProc",
179 mCameraId);
180 res = mZslProcessor->run(threadName.string());
181 if (res != OK) {
182 ALOGE("%s: Unable to start zsl processor thread: %s (%d)",
183 __FUNCTION__, strerror(-res), res);
184 return res;
185 }
186
187 mCallbackProcessor = new CallbackProcessor(this);
188 threadName = String8::format("C2-%d-CallbkProc",
189 mCameraId);
190 res = mCallbackProcessor->run(threadName.string());
191 if (res != OK) {
192 ALOGE("%s: Unable to start callback processor thread: %s (%d)",
193 __FUNCTION__, strerror(-res), res);
194 return res;
195 }
196
197 if (gLogLevel >= 1) {
198 SharedParameters::Lock l(mParameters);
199 ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
200 mCameraId);
201 ALOGD("%s", l.mParameters.paramsFlattened.string());
202 }
203
204 return OK;
205 }
206
207 Camera2Client::~Camera2Client() {
208 ATRACE_CALL();
209 ALOGV("~Camera2Client");
210
211 mDestructionStarted = true;
212
213 disconnect();
214
215 ALOGI("Camera %d: Closed", mCameraId);
216 }
217
218 status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
219 return BasicClient::dump(fd, args);
220 }
221
222 status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
223 String8 result;
224 result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
225 (getRemoteCallback() != NULL ?
226 (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
227 mClientPid);
228 result.append(" State: ");
229 #define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break;
230
231 const Parameters& p = mParameters.unsafeAccess();
232
233 result.append(Parameters::getStateName(p.state));
234
235 result.append("\n Current parameters:\n");
236 result.appendFormat(" Preview size: %d x %d\n",
237 p.previewWidth, p.previewHeight);
238 result.appendFormat(" Preview FPS range: %d - %d\n",
239 p.previewFpsRange[0], p.previewFpsRange[1]);
240 result.appendFormat(" Preview HAL pixel format: 0x%x\n",
241 p.previewFormat);
242 result.appendFormat(" Preview transform: %x\n",
243 p.previewTransform);
244 result.appendFormat(" Picture size: %d x %d\n",
245 p.pictureWidth, p.pictureHeight);
246 result.appendFormat(" Jpeg thumbnail size: %d x %d\n",
247 p.jpegThumbSize[0], p.jpegThumbSize[1]);
248 result.appendFormat(" Jpeg quality: %d, thumbnail quality: %d\n",
249 p.jpegQuality, p.jpegThumbQuality);
250 result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation);
251 result.appendFormat(" GPS tags %s\n",
252 p.gpsEnabled ? "enabled" : "disabled");
253 if (p.gpsEnabled) {
254 result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n",
255 p.gpsCoordinates[0], p.gpsCoordinates[1],
256 p.gpsCoordinates[2]);
257 result.appendFormat(" GPS timestamp: %" PRId64 "\n",
258 p.gpsTimestamp);
259 result.appendFormat(" GPS processing method: %s\n",
260 p.gpsProcessingMethod.string());
261 }
262
263 result.append(" White balance mode: ");
264 switch (p.wbMode) {
265 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
266 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
267 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT)
268 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT)
269 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT)
270 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
271 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
272 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
273 default: result.append("UNKNOWN\n");
274 }
275
276 result.append(" Effect mode: ");
277 switch (p.effectMode) {
278 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
279 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
280 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE)
281 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE)
282 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA)
283 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE)
284 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
285 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
286 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
287 default: result.append("UNKNOWN\n");
288 }
289
290 result.append(" Antibanding mode: ");
291 switch (p.antibandingMode) {
292 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
293 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
294 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
295 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
296 default: result.append("UNKNOWN\n");
297 }
298
299 result.append(" Scene mode: ");
300 switch (p.sceneMode) {
301 case ANDROID_CONTROL_SCENE_MODE_DISABLED:
302 result.append("AUTO\n"); break;
303 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY)
304 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
305 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
306 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE)
307 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT)
308 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT)
309 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE)
310 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH)
311 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW)
312 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET)
313 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO)
314 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS)
315 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS)
316 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
317 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
318 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
319 default: result.append("UNKNOWN\n");
320 }
321
322 result.append(" Flash mode: ");
323 switch (p.flashMode) {
324 CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
325 CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
326 CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON)
327 CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
328 CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
329 CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
330 default: result.append("UNKNOWN\n");
331 }
332
333 result.append(" Focus mode: ");
334 switch (p.focusMode) {
335 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
336 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
337 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO)
338 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE)
339 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF)
340 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
341 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
342 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
343 default: result.append("UNKNOWN\n");
344 }
345
346 result.append(" Focus state: ");
347 switch (p.focusState) {
348 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
349 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
350 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)
351 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
352 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
353 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
354 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
355 default: result.append("UNKNOWN\n");
356 }
357
358 result.append(" Focusing areas:\n");
359 for (size_t i = 0; i < p.focusingAreas.size(); i++) {
360 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
361 p.focusingAreas[i].left,
362 p.focusingAreas[i].top,
363 p.focusingAreas[i].right,
364 p.focusingAreas[i].bottom,
365 p.focusingAreas[i].weight);
366 }
367
368 result.appendFormat(" Exposure compensation index: %d\n",
369 p.exposureCompensation);
370
371 result.appendFormat(" AE lock %s, AWB lock %s\n",
372 p.autoExposureLock ? "enabled" : "disabled",
373 p.autoWhiteBalanceLock ? "enabled" : "disabled" );
374
375 result.appendFormat(" Metering areas:\n");
376 for (size_t i = 0; i < p.meteringAreas.size(); i++) {
377 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
378 p.meteringAreas[i].left,
379 p.meteringAreas[i].top,
380 p.meteringAreas[i].right,
381 p.meteringAreas[i].bottom,
382 p.meteringAreas[i].weight);
383 }
384
385 result.appendFormat(" Zoom index: %d\n", p.zoom);
386 result.appendFormat(" Video size: %d x %d\n", p.videoWidth,
387 p.videoHeight);
388
389 result.appendFormat(" Recording hint is %s\n",
390 p.recordingHint ? "set" : "not set");
391
392 result.appendFormat(" Video stabilization is %s\n",
393 p.videoStabilization ? "enabled" : "disabled");
394
395 result.appendFormat(" Selected still capture FPS range: %d - %d\n",
396 p.fastInfo.bestStillCaptureFpsRange[0],
397 p.fastInfo.bestStillCaptureFpsRange[1]);
398
399 result.appendFormat(" Use zero shutter lag: %s\n",
400 p.useZeroShutterLag() ? "yes" : "no");
401
402 result.append(" Current streams:\n");
403 result.appendFormat(" Preview stream ID: %d\n",
404 getPreviewStreamId());
405 result.appendFormat(" Capture stream ID: %d\n",
406 getCaptureStreamId());
407 result.appendFormat(" Recording stream ID: %d\n",
408 getRecordingStreamId());
409
410 result.append(" Quirks for this camera:\n");
411 bool haveQuirk = false;
412 if (p.quirks.triggerAfWithAuto) {
413 result.appendFormat(" triggerAfWithAuto\n");
414 haveQuirk = true;
415 }
416 if (p.quirks.useZslFormat) {
417 result.appendFormat(" useZslFormat\n");
418 haveQuirk = true;
419 }
420 if (p.quirks.meteringCropRegion) {
421 result.appendFormat(" meteringCropRegion\n");
422 haveQuirk = true;
423 }
424 if (p.quirks.partialResults) {
425 result.appendFormat(" usePartialResult\n");
426 haveQuirk = true;
427 }
428 if (!haveQuirk) {
429 result.appendFormat(" none\n");
430 }
431
432 write(fd, result.string(), result.size());
433
434 mStreamingProcessor->dump(fd, args);
435
436 mCaptureSequencer->dump(fd, args);
437
438 mFrameProcessor->dump(fd, args);
439
440 mZslProcessor->dump(fd, args);
441
442 return dumpDevice(fd, args);
443 #undef CASE_APPEND_ENUM
444 }
445
446 // ICamera interface
447
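// Shuts the client down: stops preview, asks every processor thread to exit
// and joins it (with the binder lock dropped), deletes all streams, and
// finally disconnects the device. Only the owning client or the camera
// service itself may trigger this.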
448 binder::Status Camera2Client::disconnect() {
449 ATRACE_CALL();
450 nsecs_t startTime = systemTime();
451 Mutex::Autolock icl(mBinderSerializationLock);
452
453 binder::Status res = binder::Status::ok();
454 // Allow both client and the cameraserver to disconnect at all times
455 int callingPid = CameraThreadState::getCallingPid();
456 if (callingPid != mClientPid && callingPid != mServicePid) return res;
457
458 if (mDevice == 0) return res;
459
460 ALOGV("Camera %d: Shutting down", mCameraId);
461
462 /**
463 * disconnect() cannot call any methods that might need to promote a
464 * wp<Camera2Client>, since disconnect can be called from the destructor, at
465 * which point all such promotions will fail.
466 */
467
468 stopPreviewL();
469
470 {
471 SharedParameters::Lock l(mParameters);
472 if (l.mParameters.state == Parameters::DISCONNECTED) return res;
473 l.mParameters.state = Parameters::DISCONNECTED;
474 }
475
476 mFrameProcessor->requestExit();
477 mCaptureSequencer->requestExit();
478 mJpegProcessor->requestExit();
479 mZslProcessor->requestExit();
480 mCallbackProcessor->requestExit();
481
482 ALOGV("Camera %d: Waiting for threads", mCameraId);
483
484 {
485 // Don't wait with lock held, in case the other threads need to
486 // complete callbacks that re-enter Camera2Client
487 mBinderSerializationLock.unlock();
488
489 mFrameProcessor->join();
490 mCaptureSequencer->join();
491 mJpegProcessor->join();
492 mZslProcessor->join();
493 mCallbackProcessor->join();
494
495 mBinderSerializationLock.lock();
496 }
497
498 ALOGV("Camera %d: Deleting streams", mCameraId);
499
500 mStreamingProcessor->deletePreviewStream();
501 mStreamingProcessor->deleteRecordingStream();
502 mJpegProcessor->deleteStream();
503 mCallbackProcessor->deleteStream();
504 mZslProcessor->deleteStream();
505
506 ALOGV("Camera %d: Disconnecting device", mCameraId);
507
508 bool hasDeviceError = mDevice->hasDeviceError();
509 mDevice->disconnect();
510
511 CameraService::Client::disconnect();
512
513 int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
514 mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
515
516 return res;
517 }
518
519 status_t Camera2Client::connect(const sp<hardware::ICameraClient>& client) {
520 ATRACE_CALL();
521 ALOGV("%s: E", __FUNCTION__);
522 Mutex::Autolock icl(mBinderSerializationLock);
523
524 if (mClientPid != 0 && CameraThreadState::getCallingPid() != mClientPid) {
525 ALOGE("%s: Camera %d: Connection attempt from pid %d; "
526 "current locked to pid %d", __FUNCTION__,
527 mCameraId, CameraThreadState::getCallingPid(), mClientPid);
528 return BAD_VALUE;
529 }
530
531 mClientPid = CameraThreadState::getCallingPid();
532
533 mRemoteCallback = client;
534 mSharedCameraCallbacks = client;
535
536 return OK;
537 }
538
539 status_t Camera2Client::lock() {
540 ATRACE_CALL();
541 ALOGV("%s: E", __FUNCTION__);
542 Mutex::Autolock icl(mBinderSerializationLock);
543 ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
544 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
545
546 if (mClientPid == 0) {
547 mClientPid = CameraThreadState::getCallingPid();
548 return OK;
549 }
550
551 if (mClientPid != CameraThreadState::getCallingPid()) {
552 ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
553 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
554 return EBUSY;
555 }
556
557 return OK;
558 }
559
560 status_t Camera2Client::unlock() {
561 ATRACE_CALL();
562 ALOGV("%s: E", __FUNCTION__);
563 Mutex::Autolock icl(mBinderSerializationLock);
564 ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
565 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
566
567 if (mClientPid == CameraThreadState::getCallingPid()) {
568 SharedParameters::Lock l(mParameters);
569 if (l.mParameters.state == Parameters::RECORD ||
570 l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
571 ALOGD("Not allowed to unlock camera during recording.");
572 return INVALID_OPERATION;
573 }
574 mClientPid = 0;
575 mRemoteCallback.clear();
576 mSharedCameraCallbacks.clear();
577 return OK;
578 }
579
580 ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
581 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
582 return EBUSY;
583 }
584
585 status_t Camera2Client::setPreviewTarget(
586 const sp<IGraphicBufferProducer>& bufferProducer) {
587 ATRACE_CALL();
588 ALOGV("%s: E", __FUNCTION__);
589 Mutex::Autolock icl(mBinderSerializationLock);
590 status_t res;
591 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
592
593 sp<IBinder> binder;
594 sp<Surface> window;
595 if (bufferProducer != 0) {
596 binder = IInterface::asBinder(bufferProducer);
597 // Using controlledByApp flag to ensure that the buffer queue remains in
598 // async mode for the old camera API, where many applications depend
599 // on that behavior.
600 window = new Surface(bufferProducer, /*controlledByApp*/ true);
601 }
602 return setPreviewWindowL(binder, window);
603 }
604
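// Swaps in a new preview surface. If preview is currently running, the
// active stream is stopped first and preview is restarted on the new window.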
605 status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
606 const sp<Surface>& window) {
607 ATRACE_CALL();
608 status_t res;
609
610 if (binder == mPreviewSurface) {
611 ALOGV("%s: Camera %d: New window is same as old window",
612 __FUNCTION__, mCameraId);
613 return NO_ERROR;
614 }
615
616 Parameters::State state;
617 {
618 SharedParameters::Lock l(mParameters);
619 state = l.mParameters.state;
620 }
621 switch (state) {
622 case Parameters::DISCONNECTED:
623 case Parameters::RECORD:
624 case Parameters::STILL_CAPTURE:
625 case Parameters::VIDEO_SNAPSHOT:
626 ALOGE("%s: Camera %d: Cannot set preview display while in state %s",
627 __FUNCTION__, mCameraId,
628 Parameters::getStateName(state));
629 return INVALID_OPERATION;
630 case Parameters::STOPPED:
631 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
632 // OK
633 break;
634 case Parameters::PREVIEW:
635 // Already running preview - need to stop and create a new stream
636 res = stopStream();
637 if (res != OK) {
638 ALOGE("%s: Unable to stop preview to swap windows: %s (%d)",
639 __FUNCTION__, strerror(-res), res);
640 return res;
641 }
642 state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
643 break;
644 }
645
646 mPreviewSurface = binder;
647 res = mStreamingProcessor->setPreviewWindow(window);
648 if (res != OK) {
649 ALOGE("%s: Unable to set new preview window: %s (%d)",
650 __FUNCTION__, strerror(-res), res);
651 return res;
652 }
653
654 if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) {
655 SharedParameters::Lock l(mParameters);
656 l.mParameters.state = state;
657 return startPreviewL(l.mParameters, false);
658 }
659
660 return OK;
661 }
662
663 void Camera2Client::setPreviewCallbackFlag(int flag) {
664 ATRACE_CALL();
665 ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag);
666 Mutex::Autolock icl(mBinderSerializationLock);
667
668 if ( checkPid(__FUNCTION__) != OK) return;
669
670 SharedParameters::Lock l(mParameters);
671 setPreviewCallbackFlagL(l.mParameters, flag);
672 }
673
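// Applies updated preview callback flags; any existing callback surface is
// cleared when legacy callback flags are enabled, and the preview request is
// refreshed if preview is already running.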
674 void Camera2Client::setPreviewCallbackFlagL(Parameters &params, int flag) {
675 status_t res = OK;
676
677 switch(params.state) {
678 case Parameters::STOPPED:
679 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
680 case Parameters::PREVIEW:
681 case Parameters::STILL_CAPTURE:
682 // OK
683 break;
684 default:
685 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
686 ALOGE("%s: Camera %d: Can't use preview callbacks "
687 "in state %d", __FUNCTION__, mCameraId, params.state);
688 return;
689 }
690 }
691
692 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) {
693 ALOGV("%s: setting oneshot", __FUNCTION__);
694 params.previewCallbackOneShot = true;
695 }
696 if (params.previewCallbackFlags != (uint32_t)flag) {
697
698 if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
699 // Disable any existing preview callback window when enabling
700 // preview callback flags
701 res = mCallbackProcessor->setCallbackWindow(NULL);
702 if (res != OK) {
703 ALOGE("%s: Camera %d: Unable to clear preview callback surface:"
704 " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
705 return;
706 }
707 params.previewCallbackSurface = false;
708 }
709
710 params.previewCallbackFlags = flag;
711
712 if (params.state == Parameters::PREVIEW) {
713 res = startPreviewL(params, true);
714 if (res != OK) {
715 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
716 __FUNCTION__, mCameraId,
717 Parameters::getStateName(params.state));
718 }
719 }
720 }
721 }
722
723 status_t Camera2Client::setPreviewCallbackTarget(
724 const sp<IGraphicBufferProducer>& callbackProducer) {
725 ATRACE_CALL();
726 ALOGV("%s: E", __FUNCTION__);
727 Mutex::Autolock icl(mBinderSerializationLock);
728 status_t res;
729 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
730
731 sp<Surface> window;
732 if (callbackProducer != 0) {
733 window = new Surface(callbackProducer);
734 }
735
736 res = mCallbackProcessor->setCallbackWindow(window);
737 if (res != OK) {
738 ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)",
739 __FUNCTION__, mCameraId, strerror(-res), res);
740 return res;
741 }
742
743 SharedParameters::Lock l(mParameters);
744
745 if (window != NULL) {
746 // Disable traditional callbacks when a valid callback target is given
747 l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
748 l.mParameters.previewCallbackOneShot = false;
749 l.mParameters.previewCallbackSurface = true;
750 } else {
751 // Disable callback target if given a NULL interface.
752 l.mParameters.previewCallbackSurface = false;
753 }
754
755 switch(l.mParameters.state) {
756 case Parameters::PREVIEW:
757 res = startPreviewL(l.mParameters, true);
758 break;
759 case Parameters::RECORD:
760 case Parameters::VIDEO_SNAPSHOT:
761 res = startRecordingL(l.mParameters, true);
762 break;
763 default:
764 break;
765 }
766 if (res != OK) {
767 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
768 __FUNCTION__, mCameraId,
769 Parameters::getStateName(l.mParameters.state));
770 }
771
772 return OK;
773 }
774
775
776 status_t Camera2Client::startPreview() {
777 ATRACE_CALL();
778 ALOGV("%s: E", __FUNCTION__);
779 Mutex::Autolock icl(mBinderSerializationLock);
780 status_t res;
781 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
782 SharedParameters::Lock l(mParameters);
783 return startPreviewL(l.mParameters, false);
784 }
785
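// Core preview (re)start logic: updates the preview, JPEG, callback and ZSL
// streams as required by the current parameters, then starts streaming with
// either a PREVIEW or RECORD request depending on the recording hint.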
786 status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
787 ATRACE_CALL();
788 status_t res;
789
790 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
791
792 if (params.state == Parameters::DISCONNECTED) {
793 ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
794 return INVALID_OPERATION;
795 }
796 if ( (params.state == Parameters::PREVIEW ||
797 params.state == Parameters::RECORD ||
798 params.state == Parameters::VIDEO_SNAPSHOT)
799 && !restart) {
800 // Succeed attempt to re-enter a streaming state
801 ALOGI("%s: Camera %d: Preview already active, ignoring restart",
802 __FUNCTION__, mCameraId);
803 return OK;
804 }
805 if (params.state > Parameters::PREVIEW && !restart) {
806 ALOGE("%s: Can't start preview in state %s",
807 __FUNCTION__,
808 Parameters::getStateName(params.state));
809 return INVALID_OPERATION;
810 }
811
812 if (!mStreamingProcessor->haveValidPreviewWindow()) {
813 params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
814 return OK;
815 }
816 params.state = Parameters::STOPPED;
817 int lastPreviewStreamId = mStreamingProcessor->getPreviewStreamId();
818
819 res = mStreamingProcessor->updatePreviewStream(params);
820 if (res != OK) {
821 ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)",
822 __FUNCTION__, mCameraId, strerror(-res), res);
823 return res;
824 }
825
826 bool previewStreamChanged = mStreamingProcessor->getPreviewStreamId() != lastPreviewStreamId;
827
828 // We could wait to create the JPEG output stream until first actual use
829 // (first takePicture call). However, this would substantially increase the
830 // first capture latency on HAL3 devices.
831 // So create it unconditionally at preview start. As a drawback,
832 // this increases gralloc memory consumption for applications that don't
833 // ever take a picture. Do not enter this mode when jpeg stream will slow
834 // down preview.
835 // TODO: Find a better compromise, though this likely would involve HAL
836 // changes.
837 int lastJpegStreamId = mJpegProcessor->getStreamId();
838 // If jpeg stream will slow down preview, make sure we remove it before starting preview
839 if (params.slowJpegMode) {
840 if (lastJpegStreamId != NO_STREAM) {
841 // Pause preview if we are streaming
842 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
843 if (activeRequestId != 0) {
844 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
845 if (res != OK) {
846 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
847 __FUNCTION__, mCameraId, strerror(-res), res);
848 }
849 res = mDevice->waitUntilDrained();
850 if (res != OK) {
851 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
852 __FUNCTION__, mCameraId, strerror(-res), res);
853 }
854 }
855
856 res = mJpegProcessor->deleteStream();
857
858 if (res != OK) {
859 ALOGE("%s: Camera %d: delete Jpeg stream failed: %s (%d)",
860 __FUNCTION__, mCameraId, strerror(-res), res);
861 }
862
863 if (activeRequestId != 0) {
864 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
865 if (res != OK) {
866 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
867 __FUNCTION__, mCameraId, strerror(-res), res);
868 }
869 }
870 }
871 } else {
872 res = updateProcessorStream(mJpegProcessor, params);
873 if (res != OK) {
874 ALOGE("%s: Camera %d: Can't pre-configure still image "
875 "stream: %s (%d)",
876 __FUNCTION__, mCameraId, strerror(-res), res);
877 return res;
878 }
879 }
880 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
881
882 Vector<int32_t> outputStreams;
883 bool callbacksEnabled = (params.previewCallbackFlags &
884 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ||
885 params.previewCallbackSurface;
886
887 if (callbacksEnabled) {
888 // Can't have recording stream hanging around when enabling callbacks,
889 // since it exceeds the max stream count on some devices.
890 if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
891 ALOGV("%s: Camera %d: Clearing out recording stream before "
892 "creating callback stream", __FUNCTION__, mCameraId);
893 res = mStreamingProcessor->stopStream();
894 if (res != OK) {
895 ALOGE("%s: Camera %d: Can't stop streaming to delete "
896 "recording stream", __FUNCTION__, mCameraId);
897 return res;
898 }
899 res = mStreamingProcessor->deleteRecordingStream();
900 if (res != OK) {
901 ALOGE("%s: Camera %d: Unable to delete recording stream before "
902 "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId,
903 strerror(-res), res);
904 return res;
905 }
906 }
907
908 res = mCallbackProcessor->updateStream(params);
909 if (res != OK) {
910 ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)",
911 __FUNCTION__, mCameraId, strerror(-res), res);
912 return res;
913 }
914 outputStreams.push(getCallbackStreamId());
915 } else if (previewStreamChanged && mCallbackProcessor->getStreamId() != NO_STREAM) {
916 /**
917 * Delete the unused callback stream when preview stream is changed and
918 * preview is not enabled. There is no need to stop the preview stream, as
919 * preview is in the STOPPED state now.
920 */
921 ALOGV("%s: Camera %d: Delete unused preview callback stream.", __FUNCTION__, mCameraId);
922 res = mCallbackProcessor->deleteStream();
923 if (res != OK) {
924 ALOGE("%s: Camera %d: Unable to delete callback stream %s (%d)",
925 __FUNCTION__, mCameraId, strerror(-res), res);
926 return res;
927 }
928 }
929
930 if (params.useZeroShutterLag() &&
931 getRecordingStreamId() == NO_STREAM) {
932 res = updateProcessorStream(mZslProcessor, params);
933 if (res != OK) {
934 ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)",
935 __FUNCTION__, mCameraId, strerror(-res), res);
936 return res;
937 }
938
939 if (jpegStreamChanged) {
940 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
941 __FUNCTION__, mCameraId);
942 mZslProcessor->clearZslQueue();
943 }
944 outputStreams.push(getZslStreamId());
945 } else {
946 mZslProcessor->deleteStream();
947 }
948
949 outputStreams.push(getPreviewStreamId());
950
951 if (params.isDeviceZslSupported) {
952 // If device ZSL is supported, resume preview buffers that may be paused
953 // during last takePicture().
954 mDevice->dropStreamBuffers(false, getPreviewStreamId());
955 }
956
957 if (!params.recordingHint) {
958 if (!restart) {
959 res = mStreamingProcessor->updatePreviewRequest(params);
960 if (res != OK) {
961 ALOGE("%s: Camera %d: Can't set up preview request: "
962 "%s (%d)", __FUNCTION__, mCameraId,
963 strerror(-res), res);
964 return res;
965 }
966 }
967 res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW,
968 outputStreams);
969 } else {
970 if (!restart) {
971 res = mStreamingProcessor->updateRecordingRequest(params);
972 if (res != OK) {
973 ALOGE("%s: Camera %d: Can't set up preview request with "
974 "record hint: %s (%d)", __FUNCTION__, mCameraId,
975 strerror(-res), res);
976 return res;
977 }
978 }
979 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
980 outputStreams);
981 }
982 if (res != OK) {
983 ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)",
984 __FUNCTION__, mCameraId, strerror(-res), res);
985 return res;
986 }
987
988 mCallbackProcessor->unpauseCallback();
989 params.state = Parameters::PREVIEW;
990 return OK;
991 }
992
993 void Camera2Client::stopPreview() {
994 ATRACE_CALL();
995 ALOGV("%s: E", __FUNCTION__);
996 Mutex::Autolock icl(mBinderSerializationLock);
997 status_t res;
998 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
999 stopPreviewL();
1000 }
1001
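// Stops streaming based on the current state: waits for any in-flight still
// capture, pauses callbacks, flushes and drains the device, deletes the
// recording stream, and moves the parameters to STOPPED.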
1002 void Camera2Client::stopPreviewL() {
1003 ATRACE_CALL();
1004 status_t res;
1005 const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds
1006 Parameters::State state;
1007 {
1008 SharedParameters::Lock l(mParameters);
1009 state = l.mParameters.state;
1010 }
1011
1012 switch (state) {
1013 case Parameters::DISCONNECTED:
1014 // Nothing to do.
1015 break;
1016 case Parameters::STOPPED:
1017 case Parameters::VIDEO_SNAPSHOT:
1018 case Parameters::STILL_CAPTURE:
1019 mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout);
1020 FALLTHROUGH_INTENDED;
1021 case Parameters::RECORD:
1022 case Parameters::PREVIEW:
1023 mCallbackProcessor->pauseCallback();
1024 syncWithDevice();
1025 // Due to flush, a camera device sync is not a sufficient
1026 // guarantee that the current client parameters are
1027 // correctly applied. To resolve this, wait for the current
1028 // request id to return in the results.
1029 waitUntilCurrentRequestIdLocked();
1030 res = stopStream();
1031 if (res != OK) {
1032 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
1033 __FUNCTION__, mCameraId, strerror(-res), res);
1034 }
1035
1036 // Flush all in-process captures and buffer in order to stop
1037 // preview faster.
1038 res = mDevice->flush();
1039 if (res != OK) {
1040 ALOGE("%s: Camera %d: Unable to flush pending requests: %s (%d)",
1041 __FUNCTION__, mCameraId, strerror(-res), res);
1042 }
1043
1044 res = mDevice->waitUntilDrained();
1045 if (res != OK) {
1046 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1047 __FUNCTION__, mCameraId, strerror(-res), res);
1048 }
1049 // Clean up recording stream
1050 res = mStreamingProcessor->deleteRecordingStream();
1051 if (res != OK) {
1052 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1053 "stop preview: %s (%d)",
1054 __FUNCTION__, mCameraId, strerror(-res), res);
1055 }
1056 FALLTHROUGH_INTENDED;
1057 case Parameters::WAITING_FOR_PREVIEW_WINDOW: {
1058 SharedParameters::Lock l(mParameters);
1059 l.mParameters.state = Parameters::STOPPED;
1060 commandStopFaceDetectionL(l.mParameters);
1061 break;
1062 }
1063 default:
1064 ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId,
1065 state);
1066 }
1067 }
1068
1069 bool Camera2Client::previewEnabled() {
1070 ATRACE_CALL();
1071 Mutex::Autolock icl(mBinderSerializationLock);
1072 status_t res;
1073 if ( (res = checkPid(__FUNCTION__) ) != OK) return false;
1074
1075 SharedParameters::Lock l(mParameters);
1076 return l.mParameters.state == Parameters::PREVIEW;
1077 }
1078
1079 status_t Camera2Client::setVideoBufferMode(int32_t videoBufferMode) {
1080 ATRACE_CALL();
1081 Mutex::Autolock icl(mBinderSerializationLock);
1082 status_t res;
1083 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1084
1085 SharedParameters::Lock l(mParameters);
1086 switch (l.mParameters.state) {
1087 case Parameters::RECORD:
1088 case Parameters::VIDEO_SNAPSHOT:
1089 ALOGE("%s: Camera %d: Can't be called in state %s",
1090 __FUNCTION__, mCameraId,
1091 Parameters::getStateName(l.mParameters.state));
1092 return INVALID_OPERATION;
1093 default:
1094 // OK
1095 break;
1096 }
1097
1098 if (videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1099 ALOGE("%s: %d: Only video buffer queue is supported", __FUNCTION__, __LINE__);
1100 return BAD_VALUE;
1101 }
1102
1103 l.mParameters.videoBufferMode = videoBufferMode;
1104
1105 return OK;
1106 }
1107
1108 status_t Camera2Client::startRecording() {
1109 ATRACE_CALL();
1110 ALOGV("%s: E", __FUNCTION__);
1111 Mutex::Autolock icl(mBinderSerializationLock);
1112 status_t res;
1113 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1114 SharedParameters::Lock l(mParameters);
1115
1116 return startRecordingL(l.mParameters, false);
1117 }
1118
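// Transitions into recording: starts preview first if needed, tears down the
// callback and ZSL streams (not all devices can keep them alongside a
// recording stream), reconfigures the recording stream when its parameters
// changed, and starts a RECORD streaming request.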
1119 status_t Camera2Client::startRecordingL(Parameters &params, bool restart) {
1120 status_t res = OK;
1121
1122 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
1123
1124 switch (params.state) {
1125 case Parameters::STOPPED:
1126 res = startPreviewL(params, false);
1127 if (res != OK) return res;
1128 // Make sure first preview request is submitted to the HAL device to avoid
1129 // two consecutive set of configure_streams being called into the HAL.
1130 // TODO: Refactor this to avoid initial preview configuration.
1131 syncWithDevice();
1132 break;
1133 case Parameters::PREVIEW:
1134 // Ready to go
1135 break;
1136 case Parameters::RECORD:
1137 case Parameters::VIDEO_SNAPSHOT:
1138 // OK to call this when recording is already on, just skip unless
1139 // we're looking to restart
1140 if (!restart) return OK;
1141 break;
1142 default:
1143 ALOGE("%s: Camera %d: Can't start recording in state %s",
1144 __FUNCTION__, mCameraId,
1145 Parameters::getStateName(params.state));
1146 return INVALID_OPERATION;
1147 };
1148
1149 if (params.videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1150 ALOGE("%s: Camera %d: Recording only supported buffer queue mode, but "
1151 "mode %d is requested!", __FUNCTION__, mCameraId, params.videoBufferMode);
1152 return INVALID_OPERATION;
1153 }
1154
1155 if (!mStreamingProcessor->haveValidRecordingWindow()) {
1156 ALOGE("%s: No valid recording window", __FUNCTION__);
1157 return INVALID_OPERATION;
1158 }
1159
1160 if (!restart) {
1161 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1162 res = mStreamingProcessor->updateRecordingRequest(params);
1163 if (res != OK) {
1164 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
1165 __FUNCTION__, mCameraId, strerror(-res), res);
1166 return res;
1167 }
1168 }
1169
1170 // Not all devices can support a preview callback stream and a recording
1171 // stream at the same time, so assume none of them can.
1172 if (mCallbackProcessor->getStreamId() != NO_STREAM) {
1173 ALOGV("%s: Camera %d: Clearing out callback stream before "
1174 "creating recording stream", __FUNCTION__, mCameraId);
1175 res = mStreamingProcessor->stopStream();
1176 if (res != OK) {
1177 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1178 __FUNCTION__, mCameraId);
1179 return res;
1180 }
1181 res = mCallbackProcessor->deleteStream();
1182 if (res != OK) {
1183 ALOGE("%s: Camera %d: Unable to delete callback stream before "
1184 "record: %s (%d)", __FUNCTION__, mCameraId,
1185 strerror(-res), res);
1186 return res;
1187 }
1188 }
1189
1190 // Clean up ZSL before transitioning into recording
1191 if (mZslProcessor->getStreamId() != NO_STREAM) {
1192 ALOGV("%s: Camera %d: Clearing out zsl stream before "
1193 "creating recording stream", __FUNCTION__, mCameraId);
1194 res = mStreamingProcessor->stopStream();
1195 if (res != OK) {
1196 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1197 __FUNCTION__, mCameraId);
1198 return res;
1199 }
1200 res = mDevice->waitUntilDrained();
1201 if (res != OK) {
1202 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1203 __FUNCTION__, mCameraId, strerror(-res), res);
1204 }
1205 res = mZslProcessor->clearZslQueue();
1206 if (res != OK) {
1207 ALOGE("%s: Camera %d: Can't clear zsl queue",
1208 __FUNCTION__, mCameraId);
1209 return res;
1210 }
1211 res = mZslProcessor->deleteStream();
1212 if (res != OK) {
1213 ALOGE("%s: Camera %d: Unable to delete zsl stream before "
1214 "record: %s (%d)", __FUNCTION__, mCameraId,
1215 strerror(-res), res);
1216 return res;
1217 }
1218 }
1219
1220 // Disable callbacks if they're enabled; can't record and use callbacks,
1221 // and we can't fail record start without stagefright asserting.
1222 params.previewCallbackFlags = 0;
1223
1224 // May need to reconfigure video snapshot JPEG sizes
1225 // during recording startup, so need a more complex sequence here to
1226 // ensure an early stream reconfiguration doesn't happen
1227 bool recordingStreamNeedsUpdate;
1228 res = mStreamingProcessor->recordingStreamNeedsUpdate(params, &recordingStreamNeedsUpdate);
1229 if (res != OK) {
1230 ALOGE("%s: Camera %d: Can't query recording stream",
1231 __FUNCTION__, mCameraId);
1232 return res;
1233 }
1234
1235 if (recordingStreamNeedsUpdate) {
1236 // Need to stop stream here so updateProcessorStream won't trigger configureStream
1237 // Right now camera device cannot handle configureStream failure gracefully
1238 // when device is streaming
1239 res = mStreamingProcessor->stopStream();
1240 if (res != OK) {
1241 ALOGE("%s: Camera %d: Can't stop streaming to update record "
1242 "stream", __FUNCTION__, mCameraId);
1243 return res;
1244 }
1245 res = mDevice->waitUntilDrained();
1246 if (res != OK) {
1247 ALOGE("%s: Camera %d: Waiting to stop streaming failed: "
1248 "%s (%d)", __FUNCTION__, mCameraId,
1249 strerror(-res), res);
1250 }
1251
1252 res = updateProcessorStream<
1253 StreamingProcessor,
1254 &StreamingProcessor::updateRecordingStream>(
1255 mStreamingProcessor,
1256 params);
1257 if (res != OK) {
1258 ALOGE("%s: Camera %d: Unable to update recording stream: "
1259 "%s (%d)", __FUNCTION__, mCameraId,
1260 strerror(-res), res);
1261 return res;
1262 }
1263 }
1264
1265 Vector<int32_t> outputStreams;
1266 outputStreams.push(getPreviewStreamId());
1267 outputStreams.push(getRecordingStreamId());
1268
1269 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1270 outputStreams);
1271
1272 // startStream might trigger a configureStream call and device might fail
1273 // configureStream due to jpeg size > video size. Try again with jpeg size overridden
1274 // to video size.
1275 if (res == BAD_VALUE) {
1276 overrideVideoSnapshotSize(params);
1277 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1278 outputStreams);
1279 }
1280
1281 if (res != OK) {
1282 ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)",
1283 __FUNCTION__, mCameraId, strerror(-res), res);
1284 return res;
1285 }
1286
1287 if (params.state < Parameters::RECORD) {
1288 params.state = Parameters::RECORD;
1289 }
1290
1291 return OK;
1292 }
1293
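// Stops an active recording: plays the stop sound, removes the recording
// stream (the video target may be abandoned soon), and restarts preview.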
1294 void Camera2Client::stopRecording() {
1295 ATRACE_CALL();
1296 ALOGV("%s: E", __FUNCTION__);
1297 Mutex::Autolock icl(mBinderSerializationLock);
1298 SharedParameters::Lock l(mParameters);
1299
1300 status_t res;
1301 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
1302
1303 switch (l.mParameters.state) {
1304 case Parameters::RECORD:
1305 // OK to stop
1306 break;
1307 case Parameters::STOPPED:
1308 case Parameters::PREVIEW:
1309 case Parameters::STILL_CAPTURE:
1310 case Parameters::VIDEO_SNAPSHOT:
1311 default:
1312 ALOGE("%s: Camera %d: Can't stop recording in state %s",
1313 __FUNCTION__, mCameraId,
1314 Parameters::getStateName(l.mParameters.state));
1315 return;
1316 };
1317
1318 sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
1319
1320 // Remove recording stream because the video target may be abandoned soon.
1321 res = stopStream();
1322 if (res != OK) {
1323 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
1324 __FUNCTION__, mCameraId, strerror(-res), res);
1325 }
1326
1327 res = mDevice->waitUntilDrained();
1328 if (res != OK) {
1329 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1330 __FUNCTION__, mCameraId, strerror(-res), res);
1331 }
1332 // Clean up recording stream
1333 res = mStreamingProcessor->deleteRecordingStream();
1334 if (res != OK) {
1335 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1336 "stop preview: %s (%d)",
1337 __FUNCTION__, mCameraId, strerror(-res), res);
1338 }
1339 l.mParameters.recoverOverriddenJpegSize();
1340
1341 // Restart preview
1342 res = startPreviewL(l.mParameters, true);
1343 if (res != OK) {
1344 ALOGE("%s: Camera %d: Unable to return to preview",
1345 __FUNCTION__, mCameraId);
1346 }
1347 }
1348
1349 bool Camera2Client::recordingEnabled() {
1350 ATRACE_CALL();
1351 Mutex::Autolock icl(mBinderSerializationLock);
1352
1353 if ( checkPid(__FUNCTION__) != OK) return false;
1354
1355 return recordingEnabledL();
1356 }
1357
1358 bool Camera2Client::recordingEnabledL() {
1359 ATRACE_CALL();
1360 SharedParameters::Lock l(mParameters);
1361
1362 return (l.mParameters.state == Parameters::RECORD
1363 || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
1364 }
1365
1366 void Camera2Client::releaseRecordingFrame([[maybe_unused]] const sp<IMemory>& mem) {
1367 ATRACE_CALL();
1368 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1369 }
1370
1371 void Camera2Client::releaseRecordingFrameHandle([[maybe_unused]] native_handle_t *handle) {
1372 ATRACE_CALL();
1373 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1374 }
1375
1376 void Camera2Client::releaseRecordingFrameHandleBatch(
1377 [[maybe_unused]] const std::vector<native_handle_t*>& handles) {
1378 ATRACE_CALL();
1379 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1380 }
1381
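// Triggers an autofocus scan. Fixed/infinity focus modes, or a CAF mode that
// is already focused and locked, are answered immediately without sending a
// trigger to the HAL.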
1382 status_t Camera2Client::autoFocus() {
1383 ATRACE_CALL();
1384 Mutex::Autolock icl(mBinderSerializationLock);
1385 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1386 status_t res;
1387 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1388
1389 int triggerId;
1390 bool notifyImmediately = false;
1391 bool notifySuccess = false;
1392 {
1393 SharedParameters::Lock l(mParameters);
1394 if (l.mParameters.state < Parameters::PREVIEW) {
1395 ALOGE("%s: Camera %d: Call autoFocus when preview is inactive (state = %d).",
1396 __FUNCTION__, mCameraId, l.mParameters.state);
1397 return INVALID_OPERATION;
1398 }
1399
1400 /**
1401 * If the camera does not support auto-focus, it is a no-op and
1402 * onAutoFocus(boolean, Camera) callback will be called immediately
1403 * with a fake value of success set to true.
1404 *
1405 * Similarly, if focus mode is set to INFINITY, there's no reason to
1406 * bother the HAL.
1407 */
1408 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1409 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1410 notifyImmediately = true;
1411 notifySuccess = true;
1412 }
1413 /**
1414 * If we're in CAF mode, and AF has already been locked, just fire back
1415 * the callback right away; the HAL would not send a notification since
1416 * no state change would happen on a AF trigger.
1417 */
1418 if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE ||
1419 l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) &&
1420 l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) {
1421 notifyImmediately = true;
1422 notifySuccess = true;
1423 }
1424 /**
1425 * Send immediate notification back to client
1426 */
1427 if (notifyImmediately) {
1428 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1429 if (l.mRemoteCallback != 0) {
1430 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1431 notifySuccess ? 1 : 0, 0);
1432 }
1433 return OK;
1434 }
1435 /**
1436 * Handle quirk mode for AF in scene modes
1437 */
1438 if (l.mParameters.quirks.triggerAfWithAuto &&
1439 l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED &&
1440 l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO &&
1441 !l.mParameters.focusingAreas[0].isEmpty()) {
1442 ALOGV("%s: Quirk: Switching from focusMode %d to AUTO",
1443 __FUNCTION__, l.mParameters.focusMode);
1444 l.mParameters.shadowFocusMode = l.mParameters.focusMode;
1445 l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO;
1446 updateRequests(l.mParameters);
1447 }
1448
1449 l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter;
1450 triggerId = l.mParameters.currentAfTriggerId;
1451 }
1452 ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId);
1453
1454 syncWithDevice();
1455
1456 mDevice->triggerAutofocus(triggerId);
1457
1458 return OK;
1459 }
1460
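// Cancels a pending autofocus trigger and, when the triggerAfWithAuto quirk
// temporarily switched the focus mode to AUTO, restores the original mode.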
1461 status_t Camera2Client::cancelAutoFocus() {
1462 ATRACE_CALL();
1463 Mutex::Autolock icl(mBinderSerializationLock);
1464 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1465 status_t res;
1466 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1467
1468 int triggerId;
1469 {
1470 SharedParameters::Lock l(mParameters);
1471 // Canceling does nothing in FIXED or INFINITY modes
1472 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1473 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1474 return OK;
1475 }
1476
1477 // An active AF trigger is canceled
1478 if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) {
1479 ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId);
1480 }
1481
1482 triggerId = ++l.mParameters.afTriggerCounter;
1483
1484 // When using triggerAfWithAuto quirk, may need to reset focus mode to
1485 // the real state at this point. No need to cancel explicitly if
1486 // changing the AF mode.
1487 if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) {
1488 ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__,
1489 l.mParameters.shadowFocusMode);
1490 l.mParameters.focusMode = l.mParameters.shadowFocusMode;
1491 l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID;
1492 updateRequests(l.mParameters);
1493
1494 return OK;
1495 }
1496 if (l.mParameters.allowZslMode) {
1497 mZslProcessor->clearZslQueue();
1498 }
1499 }
1500 syncWithDevice();
1501
1502 mDevice->triggerCancelAutofocus(triggerId);
1503
1504 return OK;
1505 }
1506
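// Still-capture entry point: configures the JPEG stream (unless slowJpegMode
// defers that to the CaptureSequencer), optionally syncs with the device so a
// precapture trigger sees the latest settings, then starts the capture
// sequence.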
1507 status_t Camera2Client::takePicture(int /*msgType*/) {
1508 ATRACE_CALL();
1509 Mutex::Autolock icl(mBinderSerializationLock);
1510 status_t res;
1511 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1512
1513 int takePictureCounter;
1514 bool shouldSyncWithDevice = true;
1515 {
1516 SharedParameters::Lock l(mParameters);
1517 switch (l.mParameters.state) {
1518 case Parameters::DISCONNECTED:
1519 case Parameters::STOPPED:
1520 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1521 ALOGE("%s: Camera %d: Cannot take picture without preview enabled",
1522 __FUNCTION__, mCameraId);
1523 return INVALID_OPERATION;
1524 case Parameters::PREVIEW:
1525 // Good to go for takePicture
1526 res = commandStopFaceDetectionL(l.mParameters);
1527 if (res != OK) {
1528 ALOGE("%s: Camera %d: Unable to stop face detection for still capture",
1529 __FUNCTION__, mCameraId);
1530 return res;
1531 }
1532 l.mParameters.state = Parameters::STILL_CAPTURE;
1533
1534 // Remove recording stream to prevent video snapshot jpeg logic kicking in
1535 if (l.mParameters.isJpegSizeOverridden() &&
1536 mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
1537 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
1538 if (res != OK) {
1539 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
1540 __FUNCTION__, mCameraId, strerror(-res), res);
1541 }
1542 res = mDevice->waitUntilDrained();
1543 if (res != OK) {
1544 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1545 __FUNCTION__, mCameraId, strerror(-res), res);
1546 }
1547 // Clean up recording stream
1548 res = mStreamingProcessor->deleteRecordingStream();
1549 if (res != OK) {
1550 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1551 "stop preview: %s (%d)",
1552 __FUNCTION__, mCameraId, strerror(-res), res);
1553 }
1554 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
1555 if (res != OK) {
1556 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
1557 __FUNCTION__, mCameraId, strerror(-res), res);
1558 }
1559 l.mParameters.recoverOverriddenJpegSize();
1560 }
1561 break;
1562 case Parameters::RECORD:
1563 // Good to go for video snapshot
1564 l.mParameters.state = Parameters::VIDEO_SNAPSHOT;
1565 break;
1566 case Parameters::STILL_CAPTURE:
1567 case Parameters::VIDEO_SNAPSHOT:
1568 ALOGE("%s: Camera %d: Already taking a picture",
1569 __FUNCTION__, mCameraId);
1570 return INVALID_OPERATION;
1571 }
1572
1573 ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId);
1574 int lastJpegStreamId = mJpegProcessor->getStreamId();
1575 // slowJpegMode will create jpeg stream in CaptureSequencer before capturing
1576 if (!l.mParameters.slowJpegMode) {
1577 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1578 }
1579
1580 // If video snapshot fail to configureStream, try override video snapshot size to
1581 // video size
1582 if (res == BAD_VALUE && l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
1583 overrideVideoSnapshotSize(l.mParameters);
1584 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1585 }
1586 if (res != OK) {
1587 ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)",
1588 __FUNCTION__, mCameraId, strerror(-res), res);
1589 return res;
1590 }
1591 takePictureCounter = ++l.mParameters.takePictureCounter;
1592
1593 // Clear ZSL buffer queue when Jpeg size is changed.
1594 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
1595 if (l.mParameters.allowZslMode && jpegStreamChanged) {
1596 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
1597 __FUNCTION__, mCameraId);
1598 mZslProcessor->clearZslQueue();
1599 }
1600
1601 // We should always sync with the device in case flash is turned on,
1602 // the camera device suggests that flash is needed (AE state FLASH_REQUIRED)
1603 // or we are in some other AE state different from CONVERGED that may need
1604 // precapture trigger.
1605 if (l.mParameters.flashMode != Parameters::FLASH_MODE_ON &&
1606 (l.mParameters.aeState == ANDROID_CONTROL_AE_STATE_CONVERGED)) {
1607 shouldSyncWithDevice = false;
1608 }
1609 }
1610
1611 ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter);
1612
1613 // Make sure HAL has correct settings in case precapture trigger is needed.
1614 if (shouldSyncWithDevice) {
1615 syncWithDevice();
1616 }
1617
1618 res = mCaptureSequencer->startCapture();
1619 if (res != OK) {
1620 ALOGE("%s: Camera %d: Unable to start capture: %s (%d)",
1621 __FUNCTION__, mCameraId, strerror(-res), res);
1622 }
1623
1624 return res;
1625 }
1626
1627 status_t Camera2Client::setParameters(const String8& params) {
1628 ATRACE_CALL();
1629 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1630 Mutex::Autolock icl(mBinderSerializationLock);
1631 status_t res;
1632 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1633
1634 SharedParameters::Lock l(mParameters);
1635
1636 Parameters::focusMode_t focusModeBefore = l.mParameters.focusMode;
1637 res = l.mParameters.set(params);
1638 if (res != OK) return res;
1639 Parameters::focusMode_t focusModeAfter = l.mParameters.focusMode;
1640
1641 if (l.mParameters.allowZslMode && focusModeAfter != focusModeBefore) {
1642 mZslProcessor->clearZslQueue();
1643 }
1644
1645 res = updateRequests(l.mParameters);
1646
1647 return res;
1648 }
1649
1650 String8 Camera2Client::getParameters() const {
1651 ATRACE_CALL();
1652 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1653 Mutex::Autolock icl(mBinderSerializationLock);
1654 // The camera service itself is allowed to get the parameters at any time
1655 if (CameraThreadState::getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
1656
1657 SharedParameters::ReadLock l(mParameters);
1658
1659 return l.mParameters.get();
1660 }
1661
1662 status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
1663 ATRACE_CALL();
1664 Mutex::Autolock icl(mBinderSerializationLock);
1665 status_t res;
1666 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1667
1668 ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId,
1669 cmd, arg1, arg2);
1670
1671 switch (cmd) {
1672 case CAMERA_CMD_START_SMOOTH_ZOOM:
1673 return commandStartSmoothZoomL();
1674 case CAMERA_CMD_STOP_SMOOTH_ZOOM:
1675 return commandStopSmoothZoomL();
1676 case CAMERA_CMD_SET_DISPLAY_ORIENTATION:
1677 return commandSetDisplayOrientationL(arg1);
1678 case CAMERA_CMD_ENABLE_SHUTTER_SOUND:
1679 return commandEnableShutterSoundL(arg1 == 1);
1680 case CAMERA_CMD_PLAY_RECORDING_SOUND:
1681 return commandPlayRecordingSoundL();
1682 case CAMERA_CMD_START_FACE_DETECTION:
1683 return commandStartFaceDetectionL(arg1);
1684 case CAMERA_CMD_STOP_FACE_DETECTION: {
1685 SharedParameters::Lock l(mParameters);
1686 return commandStopFaceDetectionL(l.mParameters);
1687 }
1688 case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
1689 return commandEnableFocusMoveMsgL(arg1 == 1);
1690 case CAMERA_CMD_PING:
1691 return commandPingL();
1692 case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
1693 case CAMERA_CMD_SET_VIDEO_FORMAT:
1694 ALOGE("%s: command %d (arguments %d, %d) is not supported.",
1695 __FUNCTION__, cmd, arg1, arg2);
1696 return BAD_VALUE;
1697 default:
1698 ALOGE("%s: Unknown command %d (arguments %d, %d)",
1699 __FUNCTION__, cmd, arg1, arg2);
1700 return BAD_VALUE;
1701 }
1702 }
1703
1704 status_t Camera2Client::commandStartSmoothZoomL() {
1705 ALOGE("%s: Unimplemented!", __FUNCTION__);
1706 return OK;
1707 }
1708
1709 status_t Camera2Client::commandStopSmoothZoomL() {
1710 ALOGE("%s: Unimplemented!", __FUNCTION__);
1711 return OK;
1712 }
1713
1714 status_t Camera2Client::commandSetDisplayOrientationL(int degrees) {
1715 int transform = Parameters::degToTransform(degrees,
1716 mCameraFacing == CAMERA_FACING_FRONT);
1717 if (transform == -1) {
1718 ALOGE("%s: Camera %d: Error setting %d as display orientation value",
1719 __FUNCTION__, mCameraId, degrees);
1720 return BAD_VALUE;
1721 }
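// When a rotate-and-crop override is active (see setRotateAndCropOverride below),
// the preview transform derived from that override takes precedence over the
// display orientation requested by the app.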
1722 {
1723 Mutex::Autolock icl(mRotateAndCropLock);
1724 if (mRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_NONE) {
1725 ALOGI("%s: Rotate and crop set to: %d, skipping display orientation!", __FUNCTION__,
1726 mRotateAndCropMode);
1727 transform = mRotateAndCropPreviewTransform;
1728 }
1729 }
1730 SharedParameters::Lock l(mParameters);
1731 if (transform != l.mParameters.previewTransform &&
1732 getPreviewStreamId() != NO_STREAM) {
1733 mDevice->setStreamTransform(getPreviewStreamId(), transform);
1734 }
1735 l.mParameters.previewTransform = transform;
1736 return OK;
1737 }
1738
1739 status_t Camera2Client::commandEnableShutterSoundL(bool enable) {
1740 SharedParameters::Lock l(mParameters);
1741 if (enable) {
1742 l.mParameters.playShutterSound = true;
1743 return OK;
1744 }
1745
1746 l.mParameters.playShutterSound = false;
1747 return OK;
1748 }
1749
1750 status_t Camera2Client::commandPlayRecordingSoundL() {
1751 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1752 return OK;
1753 }
1754
1755 status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) {
1756 ALOGV("%s: Camera %d: Starting face detection",
1757 __FUNCTION__, mCameraId);
1758 status_t res;
1759 SharedParameters::Lock l(mParameters);
1760 switch (l.mParameters.state) {
1761 case Parameters::DISCONNECTED:
1762 case Parameters::STOPPED:
1763 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1764 case Parameters::STILL_CAPTURE:
1765 ALOGE("%s: Camera %d: Cannot start face detection without preview active",
1766 __FUNCTION__, mCameraId);
1767 return INVALID_OPERATION;
1768 case Parameters::PREVIEW:
1769 case Parameters::RECORD:
1770 case Parameters::VIDEO_SNAPSHOT:
1771 // Good to go for starting face detect
1772 break;
1773 }
1774 // Ignoring type
1775 if (l.mParameters.fastInfo.bestFaceDetectMode ==
1776 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
1777 ALOGE("%s: Camera %d: Face detection not supported",
1778 __FUNCTION__, mCameraId);
1779 return BAD_VALUE;
1780 }
1781 if (l.mParameters.enableFaceDetect) return OK;
1782
1783 l.mParameters.enableFaceDetect = true;
1784
1785 res = updateRequests(l.mParameters);
1786
1787 return res;
1788 }
1789
1790 status_t Camera2Client::commandStopFaceDetectionL(Parameters &params) {
1791 status_t res = OK;
1792 ALOGV("%s: Camera %d: Stopping face detection",
1793 __FUNCTION__, mCameraId);
1794
1795 if (!params.enableFaceDetect) return OK;
1796
1797 params.enableFaceDetect = false;
1798
1799 if (params.state == Parameters::PREVIEW
1800 || params.state == Parameters::RECORD
1801 || params.state == Parameters::VIDEO_SNAPSHOT) {
1802 res = updateRequests(params);
1803 }
1804
1805 return res;
1806 }
1807
1808 status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) {
1809 SharedParameters::Lock l(mParameters);
1810 l.mParameters.enableFocusMoveMessages = enable;
1811
1812 return OK;
1813 }
1814
1815 status_t Camera2Client::commandPingL() {
1816 // Always ping back if access is proper and device is alive
1817 SharedParameters::Lock l(mParameters);
1818 if (l.mParameters.state != Parameters::DISCONNECTED) {
1819 return OK;
1820 } else {
1821 return NO_INIT;
1822 }
1823 }
1824
1825 void Camera2Client::notifyError(int32_t errorCode,
1826 const CaptureResultExtras& resultExtras) {
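// Translate the camera2 callback error code into the closest legacy camera1
// CAMERA_ERROR_* value; recoverable per-request errors are handled in the
// switch below and are not forwarded to the application.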
1827 int32_t err = CAMERA_ERROR_UNKNOWN;
1828 switch(errorCode) {
1829 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED:
1830 err = CAMERA_ERROR_RELEASED;
1831 break;
1832 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
1833 err = CAMERA_ERROR_UNKNOWN;
1834 break;
1835 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE:
1836 err = CAMERA_ERROR_SERVER_DIED;
1837 break;
1838 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
1839 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
1840 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
1841 ALOGW("%s: Received recoverable error %d from HAL - ignoring, requestId %" PRId32,
1842 __FUNCTION__, errorCode, resultExtras.requestId);
1843
1844 if ((hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST == errorCode) ||
1845 (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT == errorCode)) {
1846 Mutex::Autolock al(mLatestRequestMutex);
1847
1848 mLatestFailedRequestIds.add(resultExtras.requestId);
1849 mLatestRequestSignal.signal();
1850 }
1851 mCaptureSequencer->notifyError(errorCode, resultExtras);
1852 return;
1853 default:
1854 err = CAMERA_ERROR_UNKNOWN;
1855 break;
1856 }
1857
1858 ALOGE("%s: Error condition %d reported by HAL, requestId %" PRId32, __FUNCTION__, errorCode,
1859 resultExtras.requestId);
1860
1861 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1862 if (l.mRemoteCallback != nullptr) {
1863 l.mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, err, 0);
1864 }
1865 }
1866
1867
1868 /** Device-related methods */
1869 void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
1870 ALOGV("%s: Autofocus state now %d, last trigger %d",
1871 __FUNCTION__, newState, triggerId);
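// Translate camera2 AF state transitions into the legacy CAMERA_MSG_FOCUS
// ("AF completed") and CAMERA_MSG_FOCUS_MOVE ("lens started/stopped moving")
// callbacks, depending on the current focus mode and AF trigger ID.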
1872 bool sendCompletedMessage = false;
1873 bool sendMovingMessage = false;
1874
1875 bool success = false;
1876 bool afInMotion = false;
1877 {
1878 SharedParameters::Lock l(mParameters);
1879 // Trace end of AF state
1880 char tmp[32];
1881 if (l.mParameters.afStateCounter > 0) {
1882 camera_metadata_enum_snprint(
1883 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1884 ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter);
1885 }
1886
1887 // Update state
1888 l.mParameters.focusState = newState;
1889 l.mParameters.afStateCounter++;
1890
1891 // Trace start of AF state
1892
1893 camera_metadata_enum_snprint(
1894 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1895 ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter);
1896
1897 switch (l.mParameters.focusMode) {
1898 case Parameters::FOCUS_MODE_AUTO:
1899 case Parameters::FOCUS_MODE_MACRO:
1900 // Don't send notifications upstream if they're not for the current AF
1901 // trigger. For example, if cancel was called in between, or if we
1902 // already sent a notification about this AF call.
1903 if (triggerId != l.mParameters.currentAfTriggerId) break;
1904 switch (newState) {
1905 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1906 success = true;
1907 FALLTHROUGH_INTENDED;
1908 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1909 sendCompletedMessage = true;
1910 l.mParameters.currentAfTriggerId = -1;
1911 break;
1912 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1913 // Just starting focusing, ignore
1914 break;
1915 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1916 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1917 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1918 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1919 default:
1920 // Unexpected in AUTO/MACRO mode
1921 ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
1922 __FUNCTION__, newState);
1923 break;
1924 }
1925 break;
1926 case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
1927 case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
1928 switch (newState) {
1929 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1930 success = true;
1931 FALLTHROUGH_INTENDED;
1932 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1933 // Don't send notifications upstream if they're not for
1934 // the current AF trigger. For example, if cancel was
1935 // called in between, or if we already sent a
1936 // notification about this AF call.
1937 // Send both a 'AF done' callback and a 'AF move' callback
1938 if (triggerId != l.mParameters.currentAfTriggerId) break;
1939 sendCompletedMessage = true;
1940 afInMotion = false;
1941 if (l.mParameters.enableFocusMoveMessages &&
1942 l.mParameters.afInMotion) {
1943 sendMovingMessage = true;
1944 }
1945 l.mParameters.currentAfTriggerId = -1;
1946 break;
1947 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1948 // Cancel was called, or we switched state; only notify
1949 // upstream if the lens was currently moving
1950 afInMotion = false;
1951 if (l.mParameters.enableFocusMoveMessages &&
1952 l.mParameters.afInMotion) {
1953 sendMovingMessage = true;
1954 }
1955 break;
1956 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1957 // Start passive scan, inform upstream
1958 afInMotion = true;
1959 FALLTHROUGH_INTENDED;
1960 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1961 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1962 // Stop passive scan, inform upstream
1963 if (l.mParameters.enableFocusMoveMessages) {
1964 sendMovingMessage = true;
1965 }
1966 break;
1967 }
1968 l.mParameters.afInMotion = afInMotion;
1969 break;
1970 case Parameters::FOCUS_MODE_EDOF:
1971 case Parameters::FOCUS_MODE_INFINITY:
1972 case Parameters::FOCUS_MODE_FIXED:
1973 default:
1974 if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
1975 ALOGE("%s: Unexpected AF state change %d "
1976 "(ID %d) in focus mode %d",
1977 __FUNCTION__, newState, triggerId,
1978 l.mParameters.focusMode);
1979 }
1980 }
1981 }
1982 if (sendMovingMessage) {
1983 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1984 if (l.mRemoteCallback != 0) {
1985 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
1986 afInMotion ? 1 : 0, 0);
1987 }
1988 }
1989 if (sendCompletedMessage) {
1990 ATRACE_ASYNC_END(kAutofocusLabel, triggerId);
1991 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1992 if (l.mRemoteCallback != 0) {
1993 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1994 success ? 1 : 0, 0);
1995 }
1996 }
1997 }
1998
1999 void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) {
2000 ALOGV("%s: Autoexposure state now %d, last trigger %d",
2001 __FUNCTION__, newState, triggerId);
2002 {
2003 SharedParameters::Lock l(mParameters);
2004 // Update state
2005 l.mParameters.aeState = newState;
2006 }
2007 mCaptureSequencer->notifyAutoExposure(newState, triggerId);
2008 }
2009
2010 void Camera2Client::notifyShutter(const CaptureResultExtras& resultExtras,
2011 nsecs_t timestamp) {
2012 ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
2013 __FUNCTION__, resultExtras.requestId, timestamp);
2014 mCaptureSequencer->notifyShutter(resultExtras, timestamp);
2015
2016 Camera2ClientBase::notifyShutter(resultExtras, timestamp);
2017 }
2018
2019 camera2::SharedParameters& Camera2Client::getParameters() {
2020 return mParameters;
2021 }
2022
2023 int Camera2Client::getPreviewStreamId() const {
2024 return mStreamingProcessor->getPreviewStreamId();
2025 }
2026
2027 int Camera2Client::getCaptureStreamId() const {
2028 return mJpegProcessor->getStreamId();
2029 }
2030
2031 int Camera2Client::getCallbackStreamId() const {
2032 return mCallbackProcessor->getStreamId();
2033 }
2034
2035 int Camera2Client::getRecordingStreamId() const {
2036 return mStreamingProcessor->getRecordingStreamId();
2037 }
2038
2039 int Camera2Client::getZslStreamId() const {
2040 return mZslProcessor->getStreamId();
2041 }
2042
2043 status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId,
2044 const wp<camera2::FrameProcessor::FilteredListener>& listener, bool sendPartials) {
2045 return mFrameProcessor->registerListener(minId, maxId, listener, sendPartials);
2046 }
2047
2048 status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId,
2049 const wp<camera2::FrameProcessor::FilteredListener>& listener) {
2050 return mFrameProcessor->removeListener(minId, maxId, listener);
2051 }
2052
2053 status_t Camera2Client::stopStream() {
2054 return mStreamingProcessor->stopStream();
2055 }
2056
2057 status_t Camera2Client::createJpegStreamL(Parameters &params) {
2058 status_t res = OK;
2059 int lastJpegStreamId = mJpegProcessor->getStreamId();
2060 if (lastJpegStreamId != NO_STREAM) {
2061 return INVALID_OPERATION;
2062 }
2063
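// Pause the repeating streaming request and drain the device so the JPEG
// stream below can be configured while no requests are in flight.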
2064 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2065 if (res != OK) {
2066 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2067 __FUNCTION__, mCameraId, strerror(-res), res);
2068 return res;
2069 }
2070
2071 res = mDevice->flush();
2072 if (res != OK) {
2073 ALOGE("%s: Camera %d: Unable to flush device: %s (%d)",
2074 __FUNCTION__, mCameraId, strerror(-res), res);
2075 return res;
2076 }
2077
2078 // Ideally we don't need this, but the current camera device
2079 // status tracking mechanism demands it.
2080 res = mDevice->waitUntilDrained();
2081 if (res != OK) {
2082 ALOGE("%s: Camera %d: Waiting device drain failed: %s (%d)",
2083 __FUNCTION__, mCameraId, strerror(-res), res);
2084 }
2085
2086 res = updateProcessorStream(mJpegProcessor, params);
2087 return res;
2088 }
2089
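// Out-of-line definitions for the static const request-ID range members
// (declared in Camera2Client.h, presumably with in-class initializers);
// needed so the constants can be ODR-used, e.g. taken by reference.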
2090 const int32_t Camera2Client::kPreviewRequestIdStart;
2091 const int32_t Camera2Client::kPreviewRequestIdEnd;
2092 const int32_t Camera2Client::kRecordingRequestIdStart;
2093 const int32_t Camera2Client::kRecordingRequestIdEnd;
2094 const int32_t Camera2Client::kCaptureRequestIdStart;
2095 const int32_t Camera2Client::kCaptureRequestIdEnd;
2096
2097 /** Utility methods */
2098
2099 status_t Camera2Client::updateRequests(Parameters &params) {
2100 status_t res;
2101
2102 ALOGV("%s: Camera %d: state = %d", __FUNCTION__, getCameraId(), params.state);
2103
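// Bump the streaming (preview/recording) request IDs so results produced with
// the new parameters can be told apart from those of the old requests.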
2104 res = mStreamingProcessor->incrementStreamingIds();
2105 if (res != OK) {
2106 ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)",
2107 __FUNCTION__, mCameraId, strerror(-res), res);
2108 return res;
2109 }
2110
2111 res = mStreamingProcessor->updatePreviewRequest(params);
2112 if (res != OK) {
2113 ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)",
2114 __FUNCTION__, mCameraId, strerror(-res), res);
2115 return res;
2116 }
2117 res = mStreamingProcessor->updateRecordingRequest(params);
2118 if (res != OK) {
2119 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
2120 __FUNCTION__, mCameraId, strerror(-res), res);
2121 return res;
2122 }
2123
2124 if (params.state == Parameters::PREVIEW) {
2125 res = startPreviewL(params, true);
2126 if (res != OK) {
2127 ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)",
2128 __FUNCTION__, mCameraId, strerror(-res), res);
2129 return res;
2130 }
2131 } else if (params.state == Parameters::RECORD ||
2132 params.state == Parameters::VIDEO_SNAPSHOT) {
2133 res = startRecordingL(params, true);
2134 if (res != OK) {
2135 ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)",
2136 __FUNCTION__, mCameraId, strerror(-res), res);
2137 return res;
2138 }
2139 }
2140 return res;
2141 }
2142
2143
2144 size_t Camera2Client::calculateBufferSize(int width, int height,
2145 int format, int stride) {
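// Compute the size in bytes of a single image buffer of the given format;
// logs an error and returns 0 for formats that are not handled below.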
2146 switch (format) {
2147 case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16
2148 return width * height * 2;
2149 case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21
2150 return width * height * 3 / 2;
2151 case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2
2152 return width * height * 2;
2153 case HAL_PIXEL_FORMAT_YV12: { // YV12
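// YV12: a full-resolution Y plane followed by V and U planes; the chroma
// stride is half the luma stride, rounded up to a 16-byte boundary.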
2154 size_t ySize = stride * height;
2155 size_t uvStride = (stride / 2 + 0xF) & ~0xF;
2156 size_t uvSize = uvStride * height / 2;
2157 return ySize + uvSize * 2;
2158 }
2159 case HAL_PIXEL_FORMAT_RGB_565:
2160 return width * height * 2;
2161 case HAL_PIXEL_FORMAT_RGBA_8888:
2162 return width * height * 4;
2163 case HAL_PIXEL_FORMAT_RAW16:
2164 return width * height * 2;
2165 default:
2166 ALOGE("%s: Unknown preview format: %x",
2167 __FUNCTION__, format);
2168 return 0;
2169 }
2170 }
2171
2172 status_t Camera2Client::syncWithDevice() {
2173 ATRACE_CALL();
2174 const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms
2175 status_t res;
2176
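// An active request ID of 0 is treated as "no streaming request submitted
// yet", in which case there is nothing to synchronize with.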
2177 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
2178 if (activeRequestId == 0) return OK;
2179
2180 res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout);
2181 if (res == TIMED_OUT) {
2182 ALOGE("%s: Camera %d: Timed out waiting to sync with HAL",
2183 __FUNCTION__, mCameraId);
2184 } else if (res != OK) {
2185 ALOGE("%s: Camera %d: Error while waiting to sync with HAL",
2186 __FUNCTION__, mCameraId);
2187 }
2188 return res;
2189 }
2190
2191 template <typename ProcessorT>
2192 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2193 camera2::Parameters params) {
2194 // No default template arguments until C++11, so we need this overload
2195 return updateProcessorStream<ProcessorT, &ProcessorT::updateStream>(
2196 processor, params);
2197 }
2198
2199 template <typename ProcessorT,
2200 status_t (ProcessorT::*updateStreamF)(const Parameters &)>
2201 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2202 Parameters params) {
2203 status_t res;
2204
2205 // Get raw pointer since sp<T> doesn't have operator->*
2206 ProcessorT *processorPtr = processor.get();
2207 res = (processorPtr->*updateStreamF)(params);
2208
2209 /**
2210 * Can't update the stream if it's busy?
2211 *
2212 * Then we need to stop the device (by temporarily clearing the request
2213 * queue) and then try again. Resume streaming once we're done.
2214 */
2215 if (res == -EBUSY) {
2216 ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__,
2217 mCameraId);
2218 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2219 if (res != OK) {
2220 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2221 __FUNCTION__, mCameraId, strerror(-res), res);
2222 }
2223
2224 res = mDevice->waitUntilDrained();
2225 if (res != OK) {
2226 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
2227 __FUNCTION__, mCameraId, strerror(-res), res);
2228 }
2229
2230 res = (processorPtr->*updateStreamF)(params);
2231 if (res != OK) {
2232 ALOGE("%s: Camera %d: Failed to update processing stream "
2233 "despite having halted streaming first: %s (%d)",
2234 __FUNCTION__, mCameraId, strerror(-res), res);
2235 }
2236
2237 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
2238 if (res != OK) {
2239 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
2240 __FUNCTION__, mCameraId, strerror(-res), res);
2241 }
2242 }
2243
2244 return res;
2245 }
2246
2247 status_t Camera2Client::overrideVideoSnapshotSize(Parameters &params) {
2248 ALOGV("%s: Camera %d: Overriding still capture size to the video size"
2249 , __FUNCTION__, mCameraId);
2250 params.overrideJpegSizeByVideoSize();
2251 status_t res = updateProcessorStream(mJpegProcessor, params);
2252 if (res != OK) {
2253 ALOGE("%s: Camera %d: Can't override video snapshot size to video size: %s (%d)",
2254 __FUNCTION__, mCameraId, strerror(-res), res);
2255 }
2256 return res;
2257 }
2258
2259 status_t Camera2Client::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
2260 ATRACE_CALL();
2261 ALOGV("%s: E", __FUNCTION__);
2262 Mutex::Autolock icl(mBinderSerializationLock);
2263 status_t res;
2264 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
2265
2266 sp<IBinder> binder = IInterface::asBinder(bufferProducer);
2267 if (binder == mVideoSurface) {
2268 ALOGV("%s: Camera %d: New video window is same as old video window",
2269 __FUNCTION__, mCameraId);
2270 return NO_ERROR;
2271 }
2272
2273 sp<Surface> window;
2274 int format;
2275 android_dataspace dataSpace;
2276
2277 if (bufferProducer != nullptr) {
2278 // Using controlledByApp flag to ensure that the buffer queue remains in
2279 // async mode for the old camera API, where many applications depend
2280 // on that behavior.
2281 window = new Surface(bufferProducer, /*controlledByApp*/ true);
2282
2283 ANativeWindow *anw = window.get();
2284
2285 if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
2286 ALOGE("%s: Failed to query Surface format", __FUNCTION__);
2287 return res;
2288 }
2289
2290 if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
2291 reinterpret_cast<int*>(&dataSpace))) != OK) {
2292 ALOGE("%s: Failed to query Surface dataSpace", __FUNCTION__);
2293 return res;
2294 }
2295 }
2296
2297 Parameters::State state;
2298 {
2299 SharedParameters::Lock l(mParameters);
2300 state = l.mParameters.state;
2301 }
2302
2303 switch (state) {
2304 case Parameters::STOPPED:
2305 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
2306 case Parameters::PREVIEW:
2307 // OK
2308 break;
2309 case Parameters::DISCONNECTED:
2310 case Parameters::RECORD:
2311 case Parameters::STILL_CAPTURE:
2312 case Parameters::VIDEO_SNAPSHOT:
2313 default:
2314 ALOGE("%s: Camera %d: Cannot set video target while in state %s",
2315 __FUNCTION__, mCameraId,
2316 Parameters::getStateName(state));
2317 return INVALID_OPERATION;
2318 }
2319
2320 mVideoSurface = binder;
2321 res = mStreamingProcessor->setRecordingWindow(window);
2322 if (res != OK) {
2323 ALOGE("%s: Unable to set new recording window: %s (%d)",
2324 __FUNCTION__, strerror(-res), res);
2325 return res;
2326 }
2327
2328 {
2329 SharedParameters::Lock l(mParameters);
2330 l.mParameters.videoFormat = format;
2331 l.mParameters.videoDataSpace = dataSpace;
2332 }
2333
2334 return OK;
2335 }
2336
2337 status_t Camera2Client::setAudioRestriction(int /*mode*/) {
2338 // Empty implementation. setAudioRestriction is a hidden interface and is not
2339 // supported by the android.hardware.Camera API
2340 return INVALID_OPERATION;
2341 }
2342
2343 int32_t Camera2Client::getGlobalAudioRestriction() {
2344 // Empty implementation. getGlobalAudioRestriction is a hidden interface and is not
2345 // supported by the android.hardware.Camera API
2346 return INVALID_OPERATION;
2347 }
2348
2349 status_t Camera2Client::setCameraServiceWatchdog(bool enabled) {
2350 return mDevice->setCameraServiceWatchdog(enabled);
2351 }
2352
2353 status_t Camera2Client::setRotateAndCropOverride(uint8_t rotateAndCrop, bool fromHal) {
2354 if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
2355
2356 {
2357 Mutex::Autolock icl(mRotateAndCropLock);
2358 if (mRotateAndCropIsSupported) {
2359 mRotateAndCropMode = rotateAndCrop;
2360 } else {
2361 mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
2362 return OK;
2363 }
2364 }
2365
2366 return mDevice->setRotateAndCropAutoBehavior(
2367 static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop), fromHal);
2368 }
2369
2370 status_t Camera2Client::setAutoframingOverride(uint8_t autoframingValue) {
2371 if (autoframingValue > ANDROID_CONTROL_AUTOFRAMING_AUTO) return BAD_VALUE;
2372
2373 return mDevice->setAutoframingAutoBehavior(
2374 static_cast<camera_metadata_enum_android_control_autoframing_t>(autoframingValue));
2375 }
2376
2377 bool Camera2Client::supportsCameraMute() {
2378 return mDevice->supportsCameraMute();
2379 }
2380
2381 status_t Camera2Client::setCameraMute(bool enabled) {
2382 return mDevice->setCameraMute(enabled);
2383 }
2384
2385 void Camera2Client::setStreamUseCaseOverrides(
2386 const std::vector<int64_t>& useCaseOverrides) {
2387 mDevice->setStreamUseCaseOverrides(useCaseOverrides);
2388 }
2389
2390 void Camera2Client::clearStreamUseCaseOverrides() {
2391 mDevice->clearStreamUseCaseOverrides();
2392 }
2393
2394 bool Camera2Client::supportsZoomOverride() {
2395 return mDevice->supportsZoomOverride();
2396 }
2397
2398 status_t Camera2Client::setZoomOverride(int zoomOverride) {
2399 return mDevice->setZoomOverride(zoomOverride);
2400 }
2401
2402 status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
2403 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
2404 if (activeRequestId != 0) {
2405 auto res = waitUntilRequestIdApplied(activeRequestId,
2406 mDevice->getExpectedInFlightDuration());
2407 if (res == TIMED_OUT) {
2408 ALOGE("%s: Camera %d: Timed out waiting for current request id to return in results!",
2409 __FUNCTION__, mCameraId);
2410 return res;
2411 } else if (res != OK) {
2412 ALOGE("%s: Camera %d: Error while waiting for current request id to return in results!",
2413 __FUNCTION__, mCameraId);
2414 return res;
2415 }
2416 }
2417
2418 return OK;
2419 }
2420
2421 status_t Camera2Client::waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout) {
2422 Mutex::Autolock l(mLatestRequestMutex);
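// Wait until the request ID shows up in either the completed or the failed
// ring buffer, re-arming the wait with the remaining time after each wake-up
// so unrelated signals do not extend the overall timeout.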
2423 while ((std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) ==
2424 mLatestRequestIds.end()) &&
2425 (std::find(mLatestFailedRequestIds.begin(), mLatestFailedRequestIds.end(), requestId) ==
2426 mLatestFailedRequestIds.end())) {
2427 nsecs_t startTime = systemTime();
2428
2429 auto res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
2430 if (res != OK) return res;
2431
2432 timeout -= (systemTime() - startTime);
2433 }
2434
2435 return (std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) !=
2436 mLatestRequestIds.end()) ? OK : DEAD_OBJECT;
2437 }
2438
2439 void Camera2Client::notifyRequestId(int32_t requestId) {
2440 Mutex::Autolock al(mLatestRequestMutex);
2441
2442 mLatestRequestIds.add(requestId);
2443 mLatestRequestSignal.signal();
2444 }
2445
2446 const char* Camera2Client::kAutofocusLabel = "autofocus";
2447 const char* Camera2Client::kTakepictureLabel = "take_picture";
2448
2449 } // namespace android
2450