1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2Client"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <inttypes.h>
22 #include <utils/Log.h>
23 #include <utils/Trace.h>
24
25 #include <camera/CameraUtils.h>
26 #include <cutils/properties.h>
27 #include <gui/Surface.h>
28 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
29
30 #include "api1/Camera2Client.h"
31
32 #include "api1/client2/StreamingProcessor.h"
33 #include "api1/client2/JpegProcessor.h"
34 #include "api1/client2/CaptureSequencer.h"
35 #include "api1/client2/CallbackProcessor.h"
36 #include "api1/client2/ZslProcessor.h"
37 #include "device3/RotateAndCropMapper.h"
38 #include "utils/CameraThreadState.h"
39 #include "utils/CameraServiceProxyWrapper.h"
40
41 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
42 #define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
43
44 #ifndef FALLTHROUGH_INTENDED
45 #define FALLTHROUGH_INTENDED [[fallthrough]]
46 #endif
47
48 namespace android {
49 using namespace camera2;
50
51 // Interface used by CameraService
52
53 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
54 const sp<hardware::ICameraClient>& cameraClient,
55 const String16& clientPackageName,
56 const std::optional<String16>& clientFeatureId,
57 const String8& cameraDeviceId,
58 int api1CameraId,
59 int cameraFacing,
60 int sensorOrientation,
61 int clientPid,
62 uid_t clientUid,
63 int servicePid,
64 bool overrideForPerfClass,
65 bool overrideToPortrait,
66 bool forceSlowJpegMode):
67 Camera2ClientBase(cameraService, cameraClient, clientPackageName,
68 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
69 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
70 clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
71 /*legacyClient*/ true),
72 mParameters(api1CameraId, cameraFacing)
73 {
74 ATRACE_CALL();
75
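// Rotate-and-crop defaults; whether the device supports rotate-and-crop and
// the matching preview transform are derived from static metadata in
// initializeImpl().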
76 mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
77 mRotateAndCropIsSupported = false;
78 mRotateAndCropPreviewTransform = 0;
79
80 SharedParameters::Lock l(mParameters);
81 l.mParameters.state = Parameters::DISCONNECTED;
82 if (forceSlowJpegMode) {
83 l.mParameters.isSlowJpegModeForced = true;
84 }
85 }
86
87 status_t Camera2Client::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
88 return initializeImpl(manager, monitorTags);
89 }
90
91 bool Camera2Client::isZslEnabledInStillTemplate() {
92 bool zslEnabled = false;
93 CameraMetadata stillTemplate;
94 status_t res = mDevice->createDefaultRequest(
95 camera_request_template_t::CAMERA_TEMPLATE_STILL_CAPTURE, &stillTemplate);
96 if (res == OK) {
97 camera_metadata_entry_t enableZsl = stillTemplate.find(ANDROID_CONTROL_ENABLE_ZSL);
98 if (enableZsl.count == 1) {
99 zslEnabled = (enableZsl.data.u8[0] == ANDROID_CONTROL_ENABLE_ZSL_TRUE);
100 }
101 }
102
103 return zslEnabled;
104 }
105
106 template<typename TProviderPtr>
107 status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const String8& monitorTags)
108 {
109 ATRACE_CALL();
110 ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
111 status_t res;
112
113 res = Camera2ClientBase::initialize(providerPtr, monitorTags);
114 if (res != OK) {
115 return res;
116 }
117
118 {
119 SharedParameters::Lock l(mParameters);
120
121 res = l.mParameters.initialize(mDevice.get());
122 if (res != OK) {
123 ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
124 __FUNCTION__, mCameraId, strerror(-res), res);
125 return NO_INIT;
126 }
127
128 l.mParameters.isDeviceZslSupported = isZslEnabledInStillTemplate();
129 }
130
131 const CameraMetadata& staticInfo = mDevice->info();
132 mRotateAndCropIsSupported = camera3::RotateAndCropMapper::isNeeded(&staticInfo);
133 // The 'mRotateAndCropMode' value only accounts for the necessary adjustment
134 // when the display rotates. The sensor orientation still needs to be calculated
135 // and applied similar to the Camera2 path.
136 CameraUtils::getRotationTransform(staticInfo, OutputConfiguration::MIRROR_MODE_AUTO,
137 &mRotateAndCropPreviewTransform);
138
139 String8 threadName;
140
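// Create the client-side processing pipeline. Each processor below, except
// StreamingProcessor, runs on its own named thread ("C2-<id>-...").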
141 mStreamingProcessor = new StreamingProcessor(this);
142 threadName = String8::format("C2-%d-StreamProc",
143 mCameraId);
144
145 mFrameProcessor = new FrameProcessor(mDevice, this);
146 threadName = String8::format("C2-%d-FrameProc",
147 mCameraId);
148 mFrameProcessor->run(threadName.string());
149
150 mCaptureSequencer = new CaptureSequencer(this);
151 threadName = String8::format("C2-%d-CaptureSeq",
152 mCameraId);
153 mCaptureSequencer->run(threadName.string());
154
155 mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
156 threadName = String8::format("C2-%d-JpegProc",
157 mCameraId);
158 mJpegProcessor->run(threadName.string());
159
160 mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
161
162 threadName = String8::format("C2-%d-ZslProc",
163 mCameraId);
164 mZslProcessor->run(threadName.string());
165
166 mCallbackProcessor = new CallbackProcessor(this);
167 threadName = String8::format("C2-%d-CallbkProc",
168 mCameraId);
169 mCallbackProcessor->run(threadName.string());
170
171 if (gLogLevel >= 1) {
172 SharedParameters::Lock l(mParameters);
173 ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
174 mCameraId);
175 ALOGD("%s", l.mParameters.paramsFlattened.string());
176 }
177
178 return OK;
179 }
180
181 Camera2Client::~Camera2Client() {
182 ATRACE_CALL();
183 ALOGV("~Camera2Client");
184
185 mDestructionStarted = true;
186
187 disconnect();
188
189 ALOGI("Camera %d: Closed", mCameraId);
190 }
191
192 status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
193 return BasicClient::dump(fd, args);
194 }
195
196 status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
197 String8 result;
198 result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
199 (getRemoteCallback() != NULL ?
200 (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
201 mClientPid);
202 result.append(" State: ");
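// Helper macro: appends the name of the matching enum constant in the switch
// statements below.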
203 #define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break;
204
205 const Parameters& p = mParameters.unsafeAccess();
206
207 result.append(Parameters::getStateName(p.state));
208
209 result.append("\n Current parameters:\n");
210 result.appendFormat(" Preview size: %d x %d\n",
211 p.previewWidth, p.previewHeight);
212 result.appendFormat(" Preview FPS range: %d - %d\n",
213 p.previewFpsRange[0], p.previewFpsRange[1]);
214 result.appendFormat(" Preview HAL pixel format: 0x%x\n",
215 p.previewFormat);
216 result.appendFormat(" Preview transform: %x\n",
217 p.previewTransform);
218 result.appendFormat(" Picture size: %d x %d\n",
219 p.pictureWidth, p.pictureHeight);
220 result.appendFormat(" Jpeg thumbnail size: %d x %d\n",
221 p.jpegThumbSize[0], p.jpegThumbSize[1]);
222 result.appendFormat(" Jpeg quality: %d, thumbnail quality: %d\n",
223 p.jpegQuality, p.jpegThumbQuality);
224 result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation);
225 result.appendFormat(" GPS tags %s\n",
226 p.gpsEnabled ? "enabled" : "disabled");
227 if (p.gpsEnabled) {
228 result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n",
229 p.gpsCoordinates[0], p.gpsCoordinates[1],
230 p.gpsCoordinates[2]);
231 result.appendFormat(" GPS timestamp: %" PRId64 "\n",
232 p.gpsTimestamp);
233 result.appendFormat(" GPS processing method: %s\n",
234 p.gpsProcessingMethod.string());
235 }
236
237 result.append(" White balance mode: ");
238 switch (p.wbMode) {
239 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
240 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
241 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT)
242 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT)
243 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT)
244 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
245 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
246 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
247 default: result.append("UNKNOWN\n");
248 }
249
250 result.append(" Effect mode: ");
251 switch (p.effectMode) {
252 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
253 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
254 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE)
255 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE)
256 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA)
257 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE)
258 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
259 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
260 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
261 default: result.append("UNKNOWN\n");
262 }
263
264 result.append(" Antibanding mode: ");
265 switch (p.antibandingMode) {
266 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
267 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
268 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
269 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
270 default: result.append("UNKNOWN\n");
271 }
272
273 result.append(" Scene mode: ");
274 switch (p.sceneMode) {
275 case ANDROID_CONTROL_SCENE_MODE_DISABLED:
276 result.append("AUTO\n"); break;
277 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY)
278 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
279 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
280 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE)
281 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT)
282 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT)
283 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE)
284 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH)
285 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW)
286 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET)
287 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO)
288 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS)
289 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS)
290 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
291 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
292 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
293 default: result.append("UNKNOWN\n");
294 }
295
296 result.append(" Flash mode: ");
297 switch (p.flashMode) {
298 CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
299 CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
300 CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON)
301 CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
302 CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
303 CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
304 default: result.append("UNKNOWN\n");
305 }
306
307 result.append(" Focus mode: ");
308 switch (p.focusMode) {
309 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
310 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
311 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO)
312 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE)
313 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF)
314 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
315 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
316 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
317 default: result.append("UNKNOWN\n");
318 }
319
320 result.append(" Focus state: ");
321 switch (p.focusState) {
322 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
323 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
324 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)
325 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED)
326 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
327 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
328 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
329 default: result.append("UNKNOWN\n");
330 }
331
332 result.append(" Focusing areas:\n");
333 for (size_t i = 0; i < p.focusingAreas.size(); i++) {
334 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
335 p.focusingAreas[i].left,
336 p.focusingAreas[i].top,
337 p.focusingAreas[i].right,
338 p.focusingAreas[i].bottom,
339 p.focusingAreas[i].weight);
340 }
341
342 result.appendFormat(" Exposure compensation index: %d\n",
343 p.exposureCompensation);
344
345 result.appendFormat(" AE lock %s, AWB lock %s\n",
346 p.autoExposureLock ? "enabled" : "disabled",
347 p.autoWhiteBalanceLock ? "enabled" : "disabled" );
348
349 result.appendFormat(" Metering areas:\n");
350 for (size_t i = 0; i < p.meteringAreas.size(); i++) {
351 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
352 p.meteringAreas[i].left,
353 p.meteringAreas[i].top,
354 p.meteringAreas[i].right,
355 p.meteringAreas[i].bottom,
356 p.meteringAreas[i].weight);
357 }
358
359 result.appendFormat(" Zoom index: %d\n", p.zoom);
360 result.appendFormat(" Video size: %d x %d\n", p.videoWidth,
361 p.videoHeight);
362
363 result.appendFormat(" Recording hint is %s\n",
364 p.recordingHint ? "set" : "not set");
365
366 result.appendFormat(" Video stabilization is %s\n",
367 p.videoStabilization ? "enabled" : "disabled");
368
369 result.appendFormat(" Selected still capture FPS range: %d - %d\n",
370 p.fastInfo.bestStillCaptureFpsRange[0],
371 p.fastInfo.bestStillCaptureFpsRange[1]);
372
373 result.appendFormat(" Use zero shutter lag: %s\n",
374 p.useZeroShutterLag() ? "yes" : "no");
375
376 result.append(" Current streams:\n");
377 result.appendFormat(" Preview stream ID: %d\n",
378 getPreviewStreamId());
379 result.appendFormat(" Capture stream ID: %d\n",
380 getCaptureStreamId());
381 result.appendFormat(" Recording stream ID: %d\n",
382 getRecordingStreamId());
383
384 result.append(" Quirks for this camera:\n");
385 bool haveQuirk = false;
386 if (p.quirks.triggerAfWithAuto) {
387 result.appendFormat(" triggerAfWithAuto\n");
388 haveQuirk = true;
389 }
390 if (p.quirks.useZslFormat) {
391 result.appendFormat(" useZslFormat\n");
392 haveQuirk = true;
393 }
394 if (p.quirks.meteringCropRegion) {
395 result.appendFormat(" meteringCropRegion\n");
396 haveQuirk = true;
397 }
398 if (p.quirks.partialResults) {
399 result.appendFormat(" usePartialResult\n");
400 haveQuirk = true;
401 }
402 if (!haveQuirk) {
403 result.appendFormat(" none\n");
404 }
405
406 write(fd, result.string(), result.size());
407
408 mStreamingProcessor->dump(fd, args);
409
410 mCaptureSequencer->dump(fd, args);
411
412 mFrameProcessor->dump(fd, args);
413
414 mZslProcessor->dump(fd, args);
415
416 return dumpDevice(fd, args);
417 #undef CASE_APPEND_ENUM
418 }
419
420 // ICamera interface
421
422 binder::Status Camera2Client::disconnect() {
423 ATRACE_CALL();
424 nsecs_t startTime = systemTime();
425 Mutex::Autolock icl(mBinderSerializationLock);
426
427 binder::Status res = binder::Status::ok();
428 // Allow both client and the cameraserver to disconnect at all times
429 int callingPid = CameraThreadState::getCallingPid();
430 if (callingPid != mClientPid && callingPid != mServicePid) return res;
431
432 if (mDevice == 0) return res;
433
434 ALOGV("Camera %d: Shutting down", mCameraId);
435
436 /**
437 * disconnect() cannot call any methods that might need to promote a
438 * wp<Camera2Client>, since disconnect can be called from the destructor, at
439 * which point all such promotions will fail.
440 */
441
442 stopPreviewL();
443
444 {
445 SharedParameters::Lock l(mParameters);
446 if (l.mParameters.state == Parameters::DISCONNECTED) return res;
447 l.mParameters.state = Parameters::DISCONNECTED;
448 }
449
450 mFrameProcessor->requestExit();
451 mCaptureSequencer->requestExit();
452 mJpegProcessor->requestExit();
453 mZslProcessor->requestExit();
454 mCallbackProcessor->requestExit();
455
456 ALOGV("Camera %d: Waiting for threads", mCameraId);
457
458 {
459 // Don't wait with lock held, in case the other threads need to
460 // complete callbacks that re-enter Camera2Client
461 mBinderSerializationLock.unlock();
462
463 mFrameProcessor->join();
464 mCaptureSequencer->join();
465 mJpegProcessor->join();
466 mZslProcessor->join();
467 mCallbackProcessor->join();
468
469 mBinderSerializationLock.lock();
470 }
471
472 ALOGV("Camera %d: Deleting streams", mCameraId);
473
474 mStreamingProcessor->deletePreviewStream();
475 mStreamingProcessor->deleteRecordingStream();
476 mJpegProcessor->deleteStream();
477 mCallbackProcessor->deleteStream();
478 mZslProcessor->deleteStream();
479
480 ALOGV("Camera %d: Disconnecting device", mCameraId);
481
482 mDevice->disconnect();
483
484 CameraService::Client::disconnect();
485
486 int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
487 CameraServiceProxyWrapper::logClose(mCameraIdStr, closeLatencyMs);
488
489 return res;
490 }
491
492 status_t Camera2Client::connect(const sp<hardware::ICameraClient>& client) {
493 ATRACE_CALL();
494 ALOGV("%s: E", __FUNCTION__);
495 Mutex::Autolock icl(mBinderSerializationLock);
496
497 if (mClientPid != 0 && CameraThreadState::getCallingPid() != mClientPid) {
498 ALOGE("%s: Camera %d: Connection attempt from pid %d; "
499 "current locked to pid %d", __FUNCTION__,
500 mCameraId, CameraThreadState::getCallingPid(), mClientPid);
501 return BAD_VALUE;
502 }
503
504 mClientPid = CameraThreadState::getCallingPid();
505
506 mRemoteCallback = client;
507 mSharedCameraCallbacks = client;
508
509 return OK;
510 }
511
512 status_t Camera2Client::lock() {
513 ATRACE_CALL();
514 ALOGV("%s: E", __FUNCTION__);
515 Mutex::Autolock icl(mBinderSerializationLock);
516 ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
517 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
518
519 if (mClientPid == 0) {
520 mClientPid = CameraThreadState::getCallingPid();
521 return OK;
522 }
523
524 if (mClientPid != CameraThreadState::getCallingPid()) {
525 ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
526 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
527 return EBUSY;
528 }
529
530 return OK;
531 }
532
533 status_t Camera2Client::unlock() {
534 ATRACE_CALL();
535 ALOGV("%s: E", __FUNCTION__);
536 Mutex::Autolock icl(mBinderSerializationLock);
537 ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
538 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
539
540 if (mClientPid == CameraThreadState::getCallingPid()) {
541 SharedParameters::Lock l(mParameters);
542 if (l.mParameters.state == Parameters::RECORD ||
543 l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
544 ALOGD("Not allowed to unlock camera during recording.");
545 return INVALID_OPERATION;
546 }
547 mClientPid = 0;
548 mRemoteCallback.clear();
549 mSharedCameraCallbacks.clear();
550 return OK;
551 }
552
553 ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
554 __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
555 return EBUSY;
556 }
557
558 status_t Camera2Client::setPreviewTarget(
559 const sp<IGraphicBufferProducer>& bufferProducer) {
560 ATRACE_CALL();
561 ALOGV("%s: E", __FUNCTION__);
562 Mutex::Autolock icl(mBinderSerializationLock);
563 status_t res;
564 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
565
566 sp<IBinder> binder;
567 sp<Surface> window;
568 if (bufferProducer != 0) {
569 binder = IInterface::asBinder(bufferProducer);
570 // Using controlledByApp flag to ensure that the buffer queue remains in
571 // async mode for the old camera API, where many applications depend
572 // on that behavior.
573 window = new Surface(bufferProducer, /*controlledByApp*/ true);
574 }
575 return setPreviewWindowL(binder, window);
576 }
577
578 status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder,
579 const sp<Surface>& window) {
580 ATRACE_CALL();
581 status_t res;
582
583 if (binder == mPreviewSurface) {
584 ALOGV("%s: Camera %d: New window is same as old window",
585 __FUNCTION__, mCameraId);
586 return NO_ERROR;
587 }
588
589 Parameters::State state;
590 {
591 SharedParameters::Lock l(mParameters);
592 state = l.mParameters.state;
593 }
594 switch (state) {
595 case Parameters::DISCONNECTED:
596 case Parameters::RECORD:
597 case Parameters::STILL_CAPTURE:
598 case Parameters::VIDEO_SNAPSHOT:
599 ALOGE("%s: Camera %d: Cannot set preview display while in state %s",
600 __FUNCTION__, mCameraId,
601 Parameters::getStateName(state));
602 return INVALID_OPERATION;
603 case Parameters::STOPPED:
604 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
605 // OK
606 break;
607 case Parameters::PREVIEW:
608 // Already running preview - need to stop and create a new stream
609 res = stopStream();
610 if (res != OK) {
611 ALOGE("%s: Unable to stop preview to swap windows: %s (%d)",
612 __FUNCTION__, strerror(-res), res);
613 return res;
614 }
615 state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
616 break;
617 }
618
619 mPreviewSurface = binder;
620 res = mStreamingProcessor->setPreviewWindow(window);
621 if (res != OK) {
622 ALOGE("%s: Unable to set new preview window: %s (%d)",
623 __FUNCTION__, strerror(-res), res);
624 return res;
625 }
626
627 if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) {
628 SharedParameters::Lock l(mParameters);
629 l.mParameters.state = state;
630 return startPreviewL(l.mParameters, false);
631 }
632
633 return OK;
634 }
635
636 void Camera2Client::setPreviewCallbackFlag(int flag) {
637 ATRACE_CALL();
638 ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag);
639 Mutex::Autolock icl(mBinderSerializationLock);
640
641 if ( checkPid(__FUNCTION__) != OK) return;
642
643 SharedParameters::Lock l(mParameters);
644 setPreviewCallbackFlagL(l.mParameters, flag);
645 }
646
647 void Camera2Client::setPreviewCallbackFlagL(Parameters &params, int flag) {
648 status_t res = OK;
649
650 switch(params.state) {
651 case Parameters::STOPPED:
652 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
653 case Parameters::PREVIEW:
654 case Parameters::STILL_CAPTURE:
655 // OK
656 break;
657 default:
658 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
659 ALOGE("%s: Camera %d: Can't use preview callbacks "
660 "in state %d", __FUNCTION__, mCameraId, params.state);
661 return;
662 }
663 }
664
665 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) {
666 ALOGV("%s: setting oneshot", __FUNCTION__);
667 params.previewCallbackOneShot = true;
668 }
669 if (params.previewCallbackFlags != (uint32_t)flag) {
670
671 if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) {
672 // Disable any existing preview callback window when enabling
673 // preview callback flags
674 res = mCallbackProcessor->setCallbackWindow(NULL);
675 if (res != OK) {
676 ALOGE("%s: Camera %d: Unable to clear preview callback surface:"
677 " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
678 return;
679 }
680 params.previewCallbackSurface = false;
681 }
682
683 params.previewCallbackFlags = flag;
684
685 if (params.state == Parameters::PREVIEW) {
686 res = startPreviewL(params, true);
687 if (res != OK) {
688 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
689 __FUNCTION__, mCameraId,
690 Parameters::getStateName(params.state));
691 }
692 }
693 }
694 }
695
696 status_t Camera2Client::setPreviewCallbackTarget(
697 const sp<IGraphicBufferProducer>& callbackProducer) {
698 ATRACE_CALL();
699 ALOGV("%s: E", __FUNCTION__);
700 Mutex::Autolock icl(mBinderSerializationLock);
701 status_t res;
702 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
703
704 sp<Surface> window;
705 if (callbackProducer != 0) {
706 window = new Surface(callbackProducer);
707 }
708
709 res = mCallbackProcessor->setCallbackWindow(window);
710 if (res != OK) {
711 ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)",
712 __FUNCTION__, mCameraId, strerror(-res), res);
713 return res;
714 }
715
716 SharedParameters::Lock l(mParameters);
717
718 if (window != NULL) {
719 // Disable traditional callbacks when a valid callback target is given
720 l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP;
721 l.mParameters.previewCallbackOneShot = false;
722 l.mParameters.previewCallbackSurface = true;
723 } else {
724 // Disable callback target if given a NULL interface.
725 l.mParameters.previewCallbackSurface = false;
726 }
727
728 switch(l.mParameters.state) {
729 case Parameters::PREVIEW:
730 res = startPreviewL(l.mParameters, true);
731 break;
732 case Parameters::RECORD:
733 case Parameters::VIDEO_SNAPSHOT:
734 res = startRecordingL(l.mParameters, true);
735 break;
736 default:
737 break;
738 }
739 if (res != OK) {
740 ALOGE("%s: Camera %d: Unable to refresh request in state %s",
741 __FUNCTION__, mCameraId,
742 Parameters::getStateName(l.mParameters.state));
743 }
744
745 return OK;
746 }
747
748
749 status_t Camera2Client::startPreview() {
750 ATRACE_CALL();
751 ALOGV("%s: E", __FUNCTION__);
752 Mutex::Autolock icl(mBinderSerializationLock);
753 status_t res;
754 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
755 SharedParameters::Lock l(mParameters);
756 return startPreviewL(l.mParameters, false);
757 }
758
759 status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
760 ATRACE_CALL();
761 status_t res;
762
763 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
764
765 if (params.state == Parameters::DISCONNECTED) {
766 ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
767 return INVALID_OPERATION;
768 }
769 if ( (params.state == Parameters::PREVIEW ||
770 params.state == Parameters::RECORD ||
771 params.state == Parameters::VIDEO_SNAPSHOT)
772 && !restart) {
773 // Treat an attempt to re-enter an active streaming state as success
774 ALOGI("%s: Camera %d: Preview already active, ignoring restart",
775 __FUNCTION__, mCameraId);
776 return OK;
777 }
778 if (params.state > Parameters::PREVIEW && !restart) {
779 ALOGE("%s: Can't start preview in state %s",
780 __FUNCTION__,
781 Parameters::getStateName(params.state));
782 return INVALID_OPERATION;
783 }
784
785 if (!mStreamingProcessor->haveValidPreviewWindow()) {
786 params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW;
787 return OK;
788 }
789 params.state = Parameters::STOPPED;
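// Remember the current preview stream so we can detect below whether
// updatePreviewStream() replaced it.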
790 int lastPreviewStreamId = mStreamingProcessor->getPreviewStreamId();
791
792 res = mStreamingProcessor->updatePreviewStream(params);
793 if (res != OK) {
794 ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)",
795 __FUNCTION__, mCameraId, strerror(-res), res);
796 return res;
797 }
798
799 bool previewStreamChanged = mStreamingProcessor->getPreviewStreamId() != lastPreviewStreamId;
800
801 // We could wait to create the JPEG output stream until first actual use
802 // (first takePicture call). However, this would substantially increase the
803 // first capture latency on HAL3 devices.
804 // So create it unconditionally at preview start. As a drawback,
805 // this increases gralloc memory consumption for applications that don't
806 // ever take a picture. Do not enter this mode when jpeg stream will slow
807 // down preview.
808 // TODO: Find a better compromise, though this likely would involve HAL
809 // changes.
810 int lastJpegStreamId = mJpegProcessor->getStreamId();
811 // If jpeg stream will slow down preview, make sure we remove it before starting preview
812 if (params.slowJpegMode) {
813 if (lastJpegStreamId != NO_STREAM) {
814 // Pause preview if we are streaming
815 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
816 if (activeRequestId != 0) {
817 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
818 if (res != OK) {
819 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
820 __FUNCTION__, mCameraId, strerror(-res), res);
821 }
822 res = mDevice->waitUntilDrained();
823 if (res != OK) {
824 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
825 __FUNCTION__, mCameraId, strerror(-res), res);
826 }
827 }
828
829 res = mJpegProcessor->deleteStream();
830
831 if (res != OK) {
832 ALOGE("%s: Camera %d: delete Jpeg stream failed: %s (%d)",
833 __FUNCTION__, mCameraId, strerror(-res), res);
834 }
835
836 if (activeRequestId != 0) {
837 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
838 if (res != OK) {
839 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
840 __FUNCTION__, mCameraId, strerror(-res), res);
841 }
842 }
843 }
844 } else {
845 res = updateProcessorStream(mJpegProcessor, params);
846 if (res != OK) {
847 ALOGE("%s: Camera %d: Can't pre-configure still image "
848 "stream: %s (%d)",
849 __FUNCTION__, mCameraId, strerror(-res), res);
850 return res;
851 }
852 }
853 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
854
855 Vector<int32_t> outputStreams;
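// Preview callbacks are considered enabled if the app either set the legacy
// callback flags or provided a dedicated callback surface.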
856 bool callbacksEnabled = (params.previewCallbackFlags &
857 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ||
858 params.previewCallbackSurface;
859
860 if (callbacksEnabled) {
861 // Can't have recording stream hanging around when enabling callbacks,
862 // since it exceeds the max stream count on some devices.
863 if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
864 ALOGV("%s: Camera %d: Clearing out recording stream before "
865 "creating callback stream", __FUNCTION__, mCameraId);
866 res = mStreamingProcessor->stopStream();
867 if (res != OK) {
868 ALOGE("%s: Camera %d: Can't stop streaming to delete "
869 "recording stream", __FUNCTION__, mCameraId);
870 return res;
871 }
872 res = mStreamingProcessor->deleteRecordingStream();
873 if (res != OK) {
874 ALOGE("%s: Camera %d: Unable to delete recording stream before "
875 "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId,
876 strerror(-res), res);
877 return res;
878 }
879 }
880
881 res = mCallbackProcessor->updateStream(params);
882 if (res != OK) {
883 ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)",
884 __FUNCTION__, mCameraId, strerror(-res), res);
885 return res;
886 }
887 outputStreams.push(getCallbackStreamId());
888 } else if (previewStreamChanged && mCallbackProcessor->getStreamId() != NO_STREAM) {
889 /**
890 * Delete the now-unused callback stream when the preview stream has changed
891 * and preview callbacks are not enabled. There is no need to stop the preview
892 * stream first, since preview is in the STOPPED state at this point.
893 */
894 ALOGV("%s: Camera %d: Delete unused preview callback stream.", __FUNCTION__, mCameraId);
895 res = mCallbackProcessor->deleteStream();
896 if (res != OK) {
897 ALOGE("%s: Camera %d: Unable to delete callback stream %s (%d)",
898 __FUNCTION__, mCameraId, strerror(-res), res);
899 return res;
900 }
901 }
902
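// Keep a ZSL stream only while zero-shutter-lag is in use and no recording
// stream exists; otherwise tear the ZSL stream down.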
903 if (params.useZeroShutterLag() &&
904 getRecordingStreamId() == NO_STREAM) {
905 res = updateProcessorStream(mZslProcessor, params);
906 if (res != OK) {
907 ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)",
908 __FUNCTION__, mCameraId, strerror(-res), res);
909 return res;
910 }
911
912 if (jpegStreamChanged) {
913 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
914 __FUNCTION__, mCameraId);
915 mZslProcessor->clearZslQueue();
916 }
917 outputStreams.push(getZslStreamId());
918 } else {
919 mZslProcessor->deleteStream();
920 }
921
922 outputStreams.push(getPreviewStreamId());
923
924 if (params.isDeviceZslSupported) {
925 // If device ZSL is supported, resume preview buffers that may be paused
926 // during last takePicture().
927 mDevice->dropStreamBuffers(false, getPreviewStreamId());
928 }
929
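// Choose the request template: plain preview requests by default, or
// recording-style requests when the app has set the recording hint.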
930 if (!params.recordingHint) {
931 if (!restart) {
932 res = mStreamingProcessor->updatePreviewRequest(params);
933 if (res != OK) {
934 ALOGE("%s: Camera %d: Can't set up preview request: "
935 "%s (%d)", __FUNCTION__, mCameraId,
936 strerror(-res), res);
937 return res;
938 }
939 }
940 res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW,
941 outputStreams);
942 } else {
943 if (!restart) {
944 res = mStreamingProcessor->updateRecordingRequest(params);
945 if (res != OK) {
946 ALOGE("%s: Camera %d: Can't set up preview request with "
947 "record hint: %s (%d)", __FUNCTION__, mCameraId,
948 strerror(-res), res);
949 return res;
950 }
951 }
952 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
953 outputStreams);
954 }
955 if (res != OK) {
956 ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)",
957 __FUNCTION__, mCameraId, strerror(-res), res);
958 return res;
959 }
960
961 mCallbackProcessor->unpauseCallback();
962 params.state = Parameters::PREVIEW;
963 return OK;
964 }
965
966 void Camera2Client::stopPreview() {
967 ATRACE_CALL();
968 ALOGV("%s: E", __FUNCTION__);
969 Mutex::Autolock icl(mBinderSerializationLock);
970 status_t res;
971 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
972 stopPreviewL();
973 }
974
975 void Camera2Client::stopPreviewL() {
976 ATRACE_CALL();
977 status_t res;
978 const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds
979 Parameters::State state;
980 {
981 SharedParameters::Lock l(mParameters);
982 state = l.mParameters.state;
983 }
984
985 switch (state) {
986 case Parameters::DISCONNECTED:
987 // Nothing to do.
988 break;
989 case Parameters::STOPPED:
990 case Parameters::VIDEO_SNAPSHOT:
991 case Parameters::STILL_CAPTURE:
992 mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout);
993 FALLTHROUGH_INTENDED;
994 case Parameters::RECORD:
995 case Parameters::PREVIEW:
996 mCallbackProcessor->pauseCallback();
997 syncWithDevice();
998 // Due to flush, a camera device sync is not a sufficient
999 // guarantee that the current client parameters are
1000 // correctly applied. To resolve this, wait for the current
1001 // request id to return in the results.
1002 waitUntilCurrentRequestIdLocked();
1003 res = stopStream();
1004 if (res != OK) {
1005 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
1006 __FUNCTION__, mCameraId, strerror(-res), res);
1007 }
1008
1009 // Flush all in-process captures and buffers in order to stop
1010 // preview faster.
1011 res = mDevice->flush();
1012 if (res != OK) {
1013 ALOGE("%s: Camera %d: Unable to flush pending requests: %s (%d)",
1014 __FUNCTION__, mCameraId, strerror(-res), res);
1015 }
1016
1017 res = mDevice->waitUntilDrained();
1018 if (res != OK) {
1019 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1020 __FUNCTION__, mCameraId, strerror(-res), res);
1021 }
1022 // Clean up recording stream
1023 res = mStreamingProcessor->deleteRecordingStream();
1024 if (res != OK) {
1025 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1026 "stop preview: %s (%d)",
1027 __FUNCTION__, mCameraId, strerror(-res), res);
1028 }
1029 FALLTHROUGH_INTENDED;
1030 case Parameters::WAITING_FOR_PREVIEW_WINDOW: {
1031 SharedParameters::Lock l(mParameters);
1032 l.mParameters.state = Parameters::STOPPED;
1033 commandStopFaceDetectionL(l.mParameters);
1034 break;
1035 }
1036 default:
1037 ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId,
1038 state);
1039 }
1040 }
1041
1042 bool Camera2Client::previewEnabled() {
1043 ATRACE_CALL();
1044 Mutex::Autolock icl(mBinderSerializationLock);
1045 status_t res;
1046 if ( (res = checkPid(__FUNCTION__) ) != OK) return false;
1047
1048 SharedParameters::Lock l(mParameters);
1049 return l.mParameters.state == Parameters::PREVIEW;
1050 }
1051
1052 status_t Camera2Client::setVideoBufferMode(int32_t videoBufferMode) {
1053 ATRACE_CALL();
1054 Mutex::Autolock icl(mBinderSerializationLock);
1055 status_t res;
1056 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1057
1058 SharedParameters::Lock l(mParameters);
1059 switch (l.mParameters.state) {
1060 case Parameters::RECORD:
1061 case Parameters::VIDEO_SNAPSHOT:
1062 ALOGE("%s: Camera %d: Can't be called in state %s",
1063 __FUNCTION__, mCameraId,
1064 Parameters::getStateName(l.mParameters.state));
1065 return INVALID_OPERATION;
1066 default:
1067 // OK
1068 break;
1069 }
1070
1071 if (videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1072 ALOGE("%s: %d: Only video buffer queue is supported", __FUNCTION__, __LINE__);
1073 return BAD_VALUE;
1074 }
1075
1076 l.mParameters.videoBufferMode = videoBufferMode;
1077
1078 return OK;
1079 }
1080
1081 status_t Camera2Client::startRecording() {
1082 ATRACE_CALL();
1083 ALOGV("%s: E", __FUNCTION__);
1084 Mutex::Autolock icl(mBinderSerializationLock);
1085 status_t res;
1086 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1087 SharedParameters::Lock l(mParameters);
1088
1089 return startRecordingL(l.mParameters, false);
1090 }
1091
1092 status_t Camera2Client::startRecordingL(Parameters &params, bool restart) {
1093 status_t res = OK;
1094
1095 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart);
1096
1097 switch (params.state) {
1098 case Parameters::STOPPED:
1099 res = startPreviewL(params, false);
1100 if (res != OK) return res;
1101 // Make sure the first preview request is submitted to the HAL device to avoid
1102 // two consecutive sets of configure_streams calls into the HAL.
1103 // TODO: Refactor this to avoid initial preview configuration.
1104 syncWithDevice();
1105 break;
1106 case Parameters::PREVIEW:
1107 // Ready to go
1108 break;
1109 case Parameters::RECORD:
1110 case Parameters::VIDEO_SNAPSHOT:
1111 // OK to call this when recording is already on, just skip unless
1112 // we're looking to restart
1113 if (!restart) return OK;
1114 break;
1115 default:
1116 ALOGE("%s: Camera %d: Can't start recording in state %s",
1117 __FUNCTION__, mCameraId,
1118 Parameters::getStateName(params.state));
1119 return INVALID_OPERATION;
1120 };
1121
1122 if (params.videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
1123 ALOGE("%s: Camera %d: Recording only supports buffer queue mode, but "
1124 "mode %d was requested!", __FUNCTION__, mCameraId, params.videoBufferMode);
1125 return INVALID_OPERATION;
1126 }
1127
1128 if (!mStreamingProcessor->haveValidRecordingWindow()) {
1129 ALOGE("%s: No valid recording window", __FUNCTION__);
1130 return INVALID_OPERATION;
1131 }
1132
1133 if (!restart) {
1134 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1135 res = mStreamingProcessor->updateRecordingRequest(params);
1136 if (res != OK) {
1137 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
1138 __FUNCTION__, mCameraId, strerror(-res), res);
1139 return res;
1140 }
1141 }
1142
1143 // Not all devices can support a preview callback stream and a recording
1144 // stream at the same time, so assume none of them can.
1145 if (mCallbackProcessor->getStreamId() != NO_STREAM) {
1146 ALOGV("%s: Camera %d: Clearing out callback stream before "
1147 "creating recording stream", __FUNCTION__, mCameraId);
1148 res = mStreamingProcessor->stopStream();
1149 if (res != OK) {
1150 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream",
1151 __FUNCTION__, mCameraId);
1152 return res;
1153 }
1154 res = mCallbackProcessor->deleteStream();
1155 if (res != OK) {
1156 ALOGE("%s: Camera %d: Unable to delete callback stream before "
1157 "record: %s (%d)", __FUNCTION__, mCameraId,
1158 strerror(-res), res);
1159 return res;
1160 }
1161 }
1162
1163 // Clean up ZSL before transitioning into recording
1164 if (mZslProcessor->getStreamId() != NO_STREAM) {
1165 ALOGV("%s: Camera %d: Clearing out zsl stream before "
1166 "creating recording stream", __FUNCTION__, mCameraId);
1167 res = mStreamingProcessor->stopStream();
1168 if (res != OK) {
1169 ALOGE("%s: Camera %d: Can't stop streaming to delete zsl stream",
1170 __FUNCTION__, mCameraId);
1171 return res;
1172 }
1173 res = mDevice->waitUntilDrained();
1174 if (res != OK) {
1175 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1176 __FUNCTION__, mCameraId, strerror(-res), res);
1177 }
1178 res = mZslProcessor->clearZslQueue();
1179 if (res != OK) {
1180 ALOGE("%s: Camera %d: Can't clear zsl queue",
1181 __FUNCTION__, mCameraId);
1182 return res;
1183 }
1184 res = mZslProcessor->deleteStream();
1185 if (res != OK) {
1186 ALOGE("%s: Camera %d: Unable to delete zsl stream before "
1187 "record: %s (%d)", __FUNCTION__, mCameraId,
1188 strerror(-res), res);
1189 return res;
1190 }
1191 }
1192
1193 // Disable callbacks if they're enabled; can't record and use callbacks,
1194 // and we can't fail record start without stagefright asserting.
1195 params.previewCallbackFlags = 0;
1196
1197 // May need to reconfigure video snapshot JPEG sizes
1198 // during recording startup, so need a more complex sequence here to
1199 // ensure an early stream reconfiguration doesn't happen
1200 bool recordingStreamNeedsUpdate;
1201 res = mStreamingProcessor->recordingStreamNeedsUpdate(params, &recordingStreamNeedsUpdate);
1202 if (res != OK) {
1203 ALOGE("%s: Camera %d: Can't query recording stream",
1204 __FUNCTION__, mCameraId);
1205 return res;
1206 }
1207
1208 if (recordingStreamNeedsUpdate) {
1209 // Need to stop stream here so updateProcessorStream won't trigger configureStream
1210 // Right now camera device cannot handle configureStream failure gracefully
1211 // when device is streaming
1212 res = mStreamingProcessor->stopStream();
1213 if (res != OK) {
1214 ALOGE("%s: Camera %d: Can't stop streaming to update record "
1215 "stream", __FUNCTION__, mCameraId);
1216 return res;
1217 }
1218 res = mDevice->waitUntilDrained();
1219 if (res != OK) {
1220 ALOGE("%s: Camera %d: Waiting to stop streaming failed: "
1221 "%s (%d)", __FUNCTION__, mCameraId,
1222 strerror(-res), res);
1223 }
1224
1225 res = updateProcessorStream<
1226 StreamingProcessor,
1227 &StreamingProcessor::updateRecordingStream>(
1228 mStreamingProcessor,
1229 params);
1230 if (res != OK) {
1231 ALOGE("%s: Camera %d: Unable to update recording stream: "
1232 "%s (%d)", __FUNCTION__, mCameraId,
1233 strerror(-res), res);
1234 return res;
1235 }
1236 }
1237
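// Recording streams to both the preview and the recording outputs.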
1238 Vector<int32_t> outputStreams;
1239 outputStreams.push(getPreviewStreamId());
1240 outputStreams.push(getRecordingStreamId());
1241
1242 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1243 outputStreams);
1244
1245 // startStream might trigger a configureStream call and device might fail
1246 // configureStream due to jpeg size > video size. Try again with jpeg size overridden
1247 // to video size.
1248 if (res == BAD_VALUE) {
1249 overrideVideoSnapshotSize(params);
1250 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD,
1251 outputStreams);
1252 }
1253
1254 if (res != OK) {
1255 ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)",
1256 __FUNCTION__, mCameraId, strerror(-res), res);
1257 return res;
1258 }
1259
1260 if (params.state < Parameters::RECORD) {
1261 params.state = Parameters::RECORD;
1262 }
1263
1264 return OK;
1265 }
1266
1267 void Camera2Client::stopRecording() {
1268 ATRACE_CALL();
1269 ALOGV("%s: E", __FUNCTION__);
1270 Mutex::Autolock icl(mBinderSerializationLock);
1271 SharedParameters::Lock l(mParameters);
1272
1273 status_t res;
1274 if ( (res = checkPid(__FUNCTION__) ) != OK) return;
1275
1276 switch (l.mParameters.state) {
1277 case Parameters::RECORD:
1278 // OK to stop
1279 break;
1280 case Parameters::STOPPED:
1281 case Parameters::PREVIEW:
1282 case Parameters::STILL_CAPTURE:
1283 case Parameters::VIDEO_SNAPSHOT:
1284 default:
1285 ALOGE("%s: Camera %d: Can't stop recording in state %s",
1286 __FUNCTION__, mCameraId,
1287 Parameters::getStateName(l.mParameters.state));
1288 return;
1289 };
1290
1291 sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
1292
1293 // Remove recording stream because the video target may be abandoned soon.
1294 res = stopStream();
1295 if (res != OK) {
1296 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
1297 __FUNCTION__, mCameraId, strerror(-res), res);
1298 }
1299
1300 res = mDevice->waitUntilDrained();
1301 if (res != OK) {
1302 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1303 __FUNCTION__, mCameraId, strerror(-res), res);
1304 }
1305 // Clean up recording stream
1306 res = mStreamingProcessor->deleteRecordingStream();
1307 if (res != OK) {
1308 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1309 "stop preview: %s (%d)",
1310 __FUNCTION__, mCameraId, strerror(-res), res);
1311 }
1312 l.mParameters.recoverOverriddenJpegSize();
1313
1314 // Restart preview
1315 res = startPreviewL(l.mParameters, true);
1316 if (res != OK) {
1317 ALOGE("%s: Camera %d: Unable to return to preview",
1318 __FUNCTION__, mCameraId);
1319 }
1320 }
1321
1322 bool Camera2Client::recordingEnabled() {
1323 ATRACE_CALL();
1324 Mutex::Autolock icl(mBinderSerializationLock);
1325
1326 if ( checkPid(__FUNCTION__) != OK) return false;
1327
1328 return recordingEnabledL();
1329 }
1330
1331 bool Camera2Client::recordingEnabledL() {
1332 ATRACE_CALL();
1333 SharedParameters::Lock l(mParameters);
1334
1335 return (l.mParameters.state == Parameters::RECORD
1336 || l.mParameters.state == Parameters::VIDEO_SNAPSHOT);
1337 }
1338
1339 void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
1340 (void)mem;
1341 ATRACE_CALL();
1342 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1343 }
1344
1345 void Camera2Client::releaseRecordingFrameHandle(native_handle_t *handle) {
1346 (void)handle;
1347 ATRACE_CALL();
1348 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1349 }
1350
1351 void Camera2Client::releaseRecordingFrameHandleBatch(
1352 const std::vector<native_handle_t*>& handles) {
1353 (void)handles;
1354 ATRACE_CALL();
1355 ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
1356 }
1357
1358 status_t Camera2Client::autoFocus() {
1359 ATRACE_CALL();
1360 Mutex::Autolock icl(mBinderSerializationLock);
1361 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1362 status_t res;
1363 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1364
1365 int triggerId;
1366 bool notifyImmediately = false;
1367 bool notifySuccess = false;
1368 {
1369 SharedParameters::Lock l(mParameters);
1370 if (l.mParameters.state < Parameters::PREVIEW) {
1371 ALOGE("%s: Camera %d: autoFocus called while preview is inactive (state = %d).",
1372 __FUNCTION__, mCameraId, l.mParameters.state);
1373 return INVALID_OPERATION;
1374 }
1375
1376 /**
1377 * If the camera does not support auto-focus, it is a no-op and
1378 * onAutoFocus(boolean, Camera) callback will be called immediately
1379 * with a fake value of success set to true.
1380 *
1381 * Similarly, if focus mode is set to INFINITY, there's no reason to
1382 * bother the HAL.
1383 */
1384 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1385 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1386 notifyImmediately = true;
1387 notifySuccess = true;
1388 }
1389 /**
1390 * If we're in CAF mode, and AF has already been locked, just fire back
1391 * the callback right away; the HAL would not send a notification since
1392 * no state change would happen on a AF trigger.
1393 */
1394 if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE ||
1395 l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) &&
1396 l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) {
1397 notifyImmediately = true;
1398 notifySuccess = true;
1399 }
1400 /**
1401 * Send immediate notification back to client
1402 */
1403 if (notifyImmediately) {
1404 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1405 if (l.mRemoteCallback != 0) {
1406 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1407 notifySuccess ? 1 : 0, 0);
1408 }
1409 return OK;
1410 }
1411 /**
1412 * Handle quirk mode for AF in scene modes
1413 */
1414 if (l.mParameters.quirks.triggerAfWithAuto &&
1415 l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED &&
1416 l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO &&
1417 !l.mParameters.focusingAreas[0].isEmpty()) {
1418 ALOGV("%s: Quirk: Switching from focusMode %d to AUTO",
1419 __FUNCTION__, l.mParameters.focusMode);
1420 l.mParameters.shadowFocusMode = l.mParameters.focusMode;
1421 l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO;
1422 updateRequests(l.mParameters);
1423 }
1424
1425 l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter;
1426 triggerId = l.mParameters.currentAfTriggerId;
1427 }
1428 ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId);
1429
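// Make sure the device has received the latest request settings (including
// any quirk-driven focus mode change above) before triggering autofocus.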
1430 syncWithDevice();
1431
1432 mDevice->triggerAutofocus(triggerId);
1433
1434 return OK;
1435 }
1436
1437 status_t Camera2Client::cancelAutoFocus() {
1438 ATRACE_CALL();
1439 Mutex::Autolock icl(mBinderSerializationLock);
1440 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1441 status_t res;
1442 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1443
1444 int triggerId;
1445 {
1446 SharedParameters::Lock l(mParameters);
1447 // Canceling does nothing in FIXED or INFINITY modes
1448 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
1449 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
1450 return OK;
1451 }
1452
1453 // An active AF trigger is canceled
1454 if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) {
1455 ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId);
1456 }
1457
1458 triggerId = ++l.mParameters.afTriggerCounter;
1459
1460 // When using triggerAfWithAuto quirk, may need to reset focus mode to
1461 // the real state at this point. No need to cancel explicitly if
1462 // changing the AF mode.
1463 if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) {
1464 ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__,
1465 l.mParameters.shadowFocusMode);
1466 l.mParameters.focusMode = l.mParameters.shadowFocusMode;
1467 l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID;
1468 updateRequests(l.mParameters);
1469
1470 return OK;
1471 }
1472 if (l.mParameters.allowZslMode) {
1473 mZslProcessor->clearZslQueue();
1474 }
1475 }
1476 syncWithDevice();
1477
1478 mDevice->triggerCancelAutofocus(triggerId);
1479
1480 return OK;
1481 }
1482
1483 status_t Camera2Client::takePicture(int /*msgType*/) {
1484 ATRACE_CALL();
1485 Mutex::Autolock icl(mBinderSerializationLock);
1486 status_t res;
1487 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1488
1489 int takePictureCounter;
1490 bool shouldSyncWithDevice = true;
1491 {
1492 SharedParameters::Lock l(mParameters);
1493 switch (l.mParameters.state) {
1494 case Parameters::DISCONNECTED:
1495 case Parameters::STOPPED:
1496 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1497 ALOGE("%s: Camera %d: Cannot take picture without preview enabled",
1498 __FUNCTION__, mCameraId);
1499 return INVALID_OPERATION;
1500 case Parameters::PREVIEW:
1501 // Good to go for takePicture
1502 res = commandStopFaceDetectionL(l.mParameters);
1503 if (res != OK) {
1504 ALOGE("%s: Camera %d: Unable to stop face detection for still capture",
1505 __FUNCTION__, mCameraId);
1506 return res;
1507 }
1508 l.mParameters.state = Parameters::STILL_CAPTURE;
1509
1510 // Remove recording stream to prevent video snapshot jpeg logic kicking in
1511 if (l.mParameters.isJpegSizeOverridden() &&
1512 mStreamingProcessor->getRecordingStreamId() != NO_STREAM) {
1513 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
1514 if (res != OK) {
1515 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
1516 __FUNCTION__, mCameraId, strerror(-res), res);
1517 }
1518 res = mDevice->waitUntilDrained();
1519 if (res != OK) {
1520 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
1521 __FUNCTION__, mCameraId, strerror(-res), res);
1522 }
1523 // Clean up recording stream
1524 res = mStreamingProcessor->deleteRecordingStream();
1525 if (res != OK) {
1526 ALOGE("%s: Camera %d: Unable to delete recording stream before "
1527 "stop preview: %s (%d)",
1528 __FUNCTION__, mCameraId, strerror(-res), res);
1529 }
1530 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
1531 if (res != OK) {
1532 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
1533 __FUNCTION__, mCameraId, strerror(-res), res);
1534 }
1535 l.mParameters.recoverOverriddenJpegSize();
1536 }
1537 break;
1538 case Parameters::RECORD:
1539 // Good to go for video snapshot
1540 l.mParameters.state = Parameters::VIDEO_SNAPSHOT;
1541 break;
1542 case Parameters::STILL_CAPTURE:
1543 case Parameters::VIDEO_SNAPSHOT:
1544 ALOGE("%s: Camera %d: Already taking a picture",
1545 __FUNCTION__, mCameraId);
1546 return INVALID_OPERATION;
1547 }
1548
1549 ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId);
1550 int lastJpegStreamId = mJpegProcessor->getStreamId();
1551 // slowJpegMode will create jpeg stream in CaptureSequencer before capturing
1552 if (!l.mParameters.slowJpegMode) {
1553 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1554 }
1555
1556 // If video snapshot fails to configure the stream, try overriding the video
1557 // snapshot size to the video size
1558 if (res == BAD_VALUE && l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
1559 overrideVideoSnapshotSize(l.mParameters);
1560 res = updateProcessorStream(mJpegProcessor, l.mParameters);
1561 }
1562 if (res != OK) {
1563 ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)",
1564 __FUNCTION__, mCameraId, strerror(-res), res);
1565 return res;
1566 }
1567 takePictureCounter = ++l.mParameters.takePictureCounter;
1568
1569 // Clear ZSL buffer queue when Jpeg size is changed.
1570 bool jpegStreamChanged = mJpegProcessor->getStreamId() != lastJpegStreamId;
1571 if (l.mParameters.allowZslMode && jpegStreamChanged) {
1572 ALOGV("%s: Camera %d: Clear ZSL buffer queue when Jpeg size is changed",
1573 __FUNCTION__, mCameraId);
1574 mZslProcessor->clearZslQueue();
1575 }
1576
1577 // We should always sync with the device if flash is turned on, if the
1578 // camera device suggests that flash is needed (AE state FLASH_REQUIRED),
1579 // or if we are in some AE state other than CONVERGED that may need a
1580 // precapture trigger.
1581 if (l.mParameters.flashMode != Parameters::FLASH_MODE_ON &&
1582 (l.mParameters.aeState == ANDROID_CONTROL_AE_STATE_CONVERGED)) {
1583 shouldSyncWithDevice = false;
1584 }
1585 }
1586
1587 ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter);
1588
1589 // Make sure HAL has correct settings in case precapture trigger is needed.
1590 if (shouldSyncWithDevice) {
1591 syncWithDevice();
1592 }
1593
1594 res = mCaptureSequencer->startCapture();
1595 if (res != OK) {
1596 ALOGE("%s: Camera %d: Unable to start capture: %s (%d)",
1597 __FUNCTION__, mCameraId, strerror(-res), res);
1598 }
1599
1600 return res;
1601 }
1602
1603 status_t Camera2Client::setParameters(const String8& params) {
1604 ATRACE_CALL();
1605 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1606 Mutex::Autolock icl(mBinderSerializationLock);
1607 status_t res;
1608 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1609
1610 SharedParameters::Lock l(mParameters);
1611
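// Track whether the focus mode changes as part of this update; buffered ZSL
// frames are dropped below if it does.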
1612 Parameters::focusMode_t focusModeBefore = l.mParameters.focusMode;
1613 res = l.mParameters.set(params);
1614 if (res != OK) return res;
1615 Parameters::focusMode_t focusModeAfter = l.mParameters.focusMode;
1616
1617 if (l.mParameters.allowZslMode && focusModeAfter != focusModeBefore) {
1618 mZslProcessor->clearZslQueue();
1619 }
1620
1621 res = updateRequests(l.mParameters);
1622
1623 return res;
1624 }
1625
1626 String8 Camera2Client::getParameters() const {
1627 ATRACE_CALL();
1628 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
1629 Mutex::Autolock icl(mBinderSerializationLock);
1630 // The camera service itself may always read the parameters
1631 if (CameraThreadState::getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
1632
1633 SharedParameters::ReadLock l(mParameters);
1634
1635 return l.mParameters.get();
1636 }
1637
1638 status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
1639 ATRACE_CALL();
1640 Mutex::Autolock icl(mBinderSerializationLock);
1641 status_t res;
1642 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
1643
1644 ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId,
1645 cmd, arg1, arg2);
1646
1647 switch (cmd) {
1648 case CAMERA_CMD_START_SMOOTH_ZOOM:
1649 return commandStartSmoothZoomL();
1650 case CAMERA_CMD_STOP_SMOOTH_ZOOM:
1651 return commandStopSmoothZoomL();
1652 case CAMERA_CMD_SET_DISPLAY_ORIENTATION:
1653 return commandSetDisplayOrientationL(arg1);
1654 case CAMERA_CMD_ENABLE_SHUTTER_SOUND:
1655 return commandEnableShutterSoundL(arg1 == 1);
1656 case CAMERA_CMD_PLAY_RECORDING_SOUND:
1657 return commandPlayRecordingSoundL();
1658 case CAMERA_CMD_START_FACE_DETECTION:
1659 return commandStartFaceDetectionL(arg1);
1660 case CAMERA_CMD_STOP_FACE_DETECTION: {
1661 SharedParameters::Lock l(mParameters);
1662 return commandStopFaceDetectionL(l.mParameters);
1663 }
1664 case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG:
1665 return commandEnableFocusMoveMsgL(arg1 == 1);
1666 case CAMERA_CMD_PING:
1667 return commandPingL();
1668 case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
1669 case CAMERA_CMD_SET_VIDEO_FORMAT:
1670 ALOGE("%s: command %d (arguments %d, %d) is not supported.",
1671 __FUNCTION__, cmd, arg1, arg2);
1672 return BAD_VALUE;
1673 default:
1674 ALOGE("%s: Unknown command %d (arguments %d, %d)",
1675 __FUNCTION__, cmd, arg1, arg2);
1676 return BAD_VALUE;
1677 }
1678 }
1679
1680 status_t Camera2Client::commandStartSmoothZoomL() {
1681 ALOGE("%s: Unimplemented!", __FUNCTION__);
1682 return OK;
1683 }
1684
1685 status_t Camera2Client::commandStopSmoothZoomL() {
1686 ALOGE("%s: Unimplemented!", __FUNCTION__);
1687 return OK;
1688 }
1689
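// Maps the requested display orientation (in degrees) to a preview stream
// transform. If a rotate-and-crop override is active, the override's preview
// transform takes precedence over the app-requested orientation.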
1690 status_t Camera2Client::commandSetDisplayOrientationL(int degrees) {
1691 int transform = Parameters::degToTransform(degrees,
1692 mCameraFacing == CAMERA_FACING_FRONT);
1693 if (transform == -1) {
1694 ALOGE("%s: Camera %d: Error setting %d as display orientation value",
1695 __FUNCTION__, mCameraId, degrees);
1696 return BAD_VALUE;
1697 }
1698 {
1699 Mutex::Autolock icl(mRotateAndCropLock);
1700 if (mRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_NONE) {
1701 ALOGI("%s: Rotate and crop set to: %d, skipping display orientation!", __FUNCTION__,
1702 mRotateAndCropMode);
1703 transform = mRotateAndCropPreviewTransform;
1704 }
1705 }
1706 SharedParameters::Lock l(mParameters);
1707 if (transform != l.mParameters.previewTransform &&
1708 getPreviewStreamId() != NO_STREAM) {
1709 mDevice->setStreamTransform(getPreviewStreamId(), transform);
1710 }
1711 l.mParameters.previewTransform = transform;
1712 return OK;
1713 }
1714
1715 status_t Camera2Client::commandEnableShutterSoundL(bool enable) {
1716 SharedParameters::Lock l(mParameters);
1717 if (enable) {
1718 l.mParameters.playShutterSound = true;
1719 return OK;
1720 }
1721
1722 l.mParameters.playShutterSound = false;
1723 return OK;
1724 }
1725
1726 status_t Camera2Client::commandPlayRecordingSoundL() {
1727 sCameraService->playSound(CameraService::SOUND_RECORDING_START);
1728 return OK;
1729 }
1730
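// Face detection can only be started while preview, recording, or video
// snapshot is active, and only if the static info advertises a face detect
// mode other than OFF. The api1 'type' argument is ignored.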
1731 status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) {
1732 ALOGV("%s: Camera %d: Starting face detection",
1733 __FUNCTION__, mCameraId);
1734 status_t res;
1735 SharedParameters::Lock l(mParameters);
1736 switch (l.mParameters.state) {
1737 case Parameters::DISCONNECTED:
1738 case Parameters::STOPPED:
1739 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
1740 case Parameters::STILL_CAPTURE:
1741 ALOGE("%s: Camera %d: Cannot start face detection without preview active",
1742 __FUNCTION__, mCameraId);
1743 return INVALID_OPERATION;
1744 case Parameters::PREVIEW:
1745 case Parameters::RECORD:
1746 case Parameters::VIDEO_SNAPSHOT:
1747 // Good to go for starting face detect
1748 break;
1749 }
1750 // Ignoring type
1751 if (l.mParameters.fastInfo.bestFaceDetectMode ==
1752 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
1753 ALOGE("%s: Camera %d: Face detection not supported",
1754 __FUNCTION__, mCameraId);
1755 return BAD_VALUE;
1756 }
1757 if (l.mParameters.enableFaceDetect) return OK;
1758
1759 l.mParameters.enableFaceDetect = true;
1760
1761 res = updateRequests(l.mParameters);
1762
1763 return res;
1764 }
1765
1766 status_t Camera2Client::commandStopFaceDetectionL(Parameters &params) {
1767 status_t res = OK;
1768 ALOGV("%s: Camera %d: Stopping face detection",
1769 __FUNCTION__, mCameraId);
1770
1771 if (!params.enableFaceDetect) return OK;
1772
1773 params.enableFaceDetect = false;
1774
1775 if (params.state == Parameters::PREVIEW
1776 || params.state == Parameters::RECORD
1777 || params.state == Parameters::VIDEO_SNAPSHOT) {
1778 res = updateRequests(params);
1779 }
1780
1781 return res;
1782 }
1783
1784 status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) {
1785 SharedParameters::Lock l(mParameters);
1786 l.mParameters.enableFocusMoveMessages = enable;
1787
1788 return OK;
1789 }
1790
1791 status_t Camera2Client::commandPingL() {
1792 // Always ping back if access is proper and device is alive
1793 SharedParameters::Lock l(mParameters);
1794 if (l.mParameters.state != Parameters::DISCONNECTED) {
1795 return OK;
1796 } else {
1797 return NO_INIT;
1798 }
1799 }
1800
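// Maps camera2 device error codes to api1 error callbacks. Recoverable
// request/result/buffer errors are not forwarded to the app; they are logged,
// handed to the capture sequencer, and (for request/result errors) recorded so
// waitUntilRequestIdApplied() can bail out on the failed request id.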
1801 void Camera2Client::notifyError(int32_t errorCode,
1802 const CaptureResultExtras& resultExtras) {
1803 int32_t err = CAMERA_ERROR_UNKNOWN;
1804 switch(errorCode) {
1805 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED:
1806 err = CAMERA_ERROR_RELEASED;
1807 break;
1808 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
1809 err = CAMERA_ERROR_UNKNOWN;
1810 break;
1811 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE:
1812 err = CAMERA_ERROR_SERVER_DIED;
1813 break;
1814 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
1815 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
1816 case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
1817 ALOGW("%s: Received recoverable error %d from HAL - ignoring, requestId %" PRId32,
1818 __FUNCTION__, errorCode, resultExtras.requestId);
1819
1820 if ((hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST == errorCode) ||
1821 (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT == errorCode)) {
1822 Mutex::Autolock al(mLatestRequestMutex);
1823
1824 mLatestFailedRequestId = resultExtras.requestId;
1825 mLatestRequestSignal.signal();
1826 }
1827 mCaptureSequencer->notifyError(errorCode, resultExtras);
1828 return;
1829 default:
1830 err = CAMERA_ERROR_UNKNOWN;
1831 break;
1832 }
1833
1834 ALOGE("%s: Error condition %d reported by HAL, requestId %" PRId32, __FUNCTION__, errorCode,
1835 resultExtras.requestId);
1836
1837 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1838 if (l.mRemoteCallback != nullptr) {
1839 l.mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, err, 0);
1840 }
1841 }
1842
1843
1844 /** Device-related methods */
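// Translates AF state transitions from the device into api1 focus callbacks:
// CAMERA_MSG_FOCUS when an AUTO/MACRO scan or a continuous-AF trigger
// completes, and CAMERA_MSG_FOCUS_MOVE when passive scanning starts or stops
// (only if focus move messages are enabled).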
1845 void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
1846 ALOGV("%s: Autofocus state now %d, last trigger %d",
1847 __FUNCTION__, newState, triggerId);
1848 bool sendCompletedMessage = false;
1849 bool sendMovingMessage = false;
1850
1851 bool success = false;
1852 bool afInMotion = false;
1853 {
1854 SharedParameters::Lock l(mParameters);
1855 // Trace end of AF state
1856 char tmp[32];
1857 if (l.mParameters.afStateCounter > 0) {
1858 camera_metadata_enum_snprint(
1859 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1860 ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter);
1861 }
1862
1863 // Update state
1864 l.mParameters.focusState = newState;
1865 l.mParameters.afStateCounter++;
1866
1867 // Trace start of AF state
1868
1869 camera_metadata_enum_snprint(
1870 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp));
1871 ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter);
1872
1873 switch (l.mParameters.focusMode) {
1874 case Parameters::FOCUS_MODE_AUTO:
1875 case Parameters::FOCUS_MODE_MACRO:
1876 // Don't send notifications upstream if they're not for the current AF
1877 // trigger. For example, if cancel was called in between, or if we
1878 // already sent a notification about this AF call.
1879 if (triggerId != l.mParameters.currentAfTriggerId) break;
1880 switch (newState) {
1881 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1882 success = true;
1883 FALLTHROUGH_INTENDED;
1884 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1885 sendCompletedMessage = true;
1886 l.mParameters.currentAfTriggerId = -1;
1887 break;
1888 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1889 // Just starting focusing, ignore
1890 break;
1891 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1892 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1893 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1894 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1895 default:
1896 // Unexpected in AUTO/MACRO mode
1897 ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
1898 __FUNCTION__, newState);
1899 break;
1900 }
1901 break;
1902 case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
1903 case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
1904 switch (newState) {
1905 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1906 success = true;
1907 FALLTHROUGH_INTENDED;
1908 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1909 // Don't send notifications upstream if they're not for
1910 // the current AF trigger. For example, if cancel was
1911 // called in between, or if we already sent a
1912 // notification about this AF call.
1913 // Send both an 'AF done' callback and an 'AF move' callback
1914 if (triggerId != l.mParameters.currentAfTriggerId) break;
1915 sendCompletedMessage = true;
1916 afInMotion = false;
1917 if (l.mParameters.enableFocusMoveMessages &&
1918 l.mParameters.afInMotion) {
1919 sendMovingMessage = true;
1920 }
1921 l.mParameters.currentAfTriggerId = -1;
1922 break;
1923 case ANDROID_CONTROL_AF_STATE_INACTIVE:
1924 // Cancel was called, or we switched state; this only
1925 // matters if we are currently moving
1926 afInMotion = false;
1927 if (l.mParameters.enableFocusMoveMessages &&
1928 l.mParameters.afInMotion) {
1929 sendMovingMessage = true;
1930 }
1931 break;
1932 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1933 // Start passive scan, inform upstream
1934 afInMotion = true;
1935 FALLTHROUGH_INTENDED;
1936 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1937 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
1938 // Stop passive scan, inform upstream
1939 if (l.mParameters.enableFocusMoveMessages) {
1940 sendMovingMessage = true;
1941 }
1942 break;
1943 }
1944 l.mParameters.afInMotion = afInMotion;
1945 break;
1946 case Parameters::FOCUS_MODE_EDOF:
1947 case Parameters::FOCUS_MODE_INFINITY:
1948 case Parameters::FOCUS_MODE_FIXED:
1949 default:
1950 if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
1951 ALOGE("%s: Unexpected AF state change %d "
1952 "(ID %d) in focus mode %d",
1953 __FUNCTION__, newState, triggerId,
1954 l.mParameters.focusMode);
1955 }
1956 }
1957 }
1958 if (sendMovingMessage) {
1959 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1960 if (l.mRemoteCallback != 0) {
1961 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
1962 afInMotion ? 1 : 0, 0);
1963 }
1964 }
1965 if (sendCompletedMessage) {
1966 ATRACE_ASYNC_END(kAutofocusLabel, triggerId);
1967 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
1968 if (l.mRemoteCallback != 0) {
1969 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS,
1970 success ? 1 : 0, 0);
1971 }
1972 }
1973 }
1974
1975 void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) {
1976 ALOGV("%s: Autoexposure state now %d, last trigger %d",
1977 __FUNCTION__, newState, triggerId);
1978 {
1979 SharedParameters::Lock l(mParameters);
1980 // Update state
1981 l.mParameters.aeState = newState;
1982 }
1983 mCaptureSequencer->notifyAutoExposure(newState, triggerId);
1984 }
1985
1986 void Camera2Client::notifyShutter(const CaptureResultExtras& resultExtras,
1987 nsecs_t timestamp) {
1988 ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
1989 __FUNCTION__, resultExtras.requestId, timestamp);
1990 mCaptureSequencer->notifyShutter(resultExtras, timestamp);
1991
1992 Camera2ClientBase::notifyShutter(resultExtras, timestamp);
1993 }
1994
1995 camera2::SharedParameters& Camera2Client::getParameters() {
1996 return mParameters;
1997 }
1998
1999 int Camera2Client::getPreviewStreamId() const {
2000 return mStreamingProcessor->getPreviewStreamId();
2001 }
2002
2003 int Camera2Client::getCaptureStreamId() const {
2004 return mJpegProcessor->getStreamId();
2005 }
2006
2007 int Camera2Client::getCallbackStreamId() const {
2008 return mCallbackProcessor->getStreamId();
2009 }
2010
2011 int Camera2Client::getRecordingStreamId() const {
2012 return mStreamingProcessor->getRecordingStreamId();
2013 }
2014
2015 int Camera2Client::getZslStreamId() const {
2016 return mZslProcessor->getStreamId();
2017 }
2018
2019 status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId,
2020 const wp<camera2::FrameProcessor::FilteredListener>& listener, bool sendPartials) {
2021 return mFrameProcessor->registerListener(minId, maxId, listener, sendPartials);
2022 }
2023
2024 status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId,
2025 const wp<camera2::FrameProcessor::FilteredListener>& listener) {
2026 return mFrameProcessor->removeListener(minId, maxId, listener);
2027 }
2028
2029 status_t Camera2Client::stopStream() {
2030 return mStreamingProcessor->stopStream();
2031 }
2032
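// Creates the JPEG stream while no capture is in progress: pauses streaming,
// flushes and drains the device, then configures the JPEG processor stream.
// Fails with INVALID_OPERATION if a JPEG stream already exists.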
2033 status_t Camera2Client::createJpegStreamL(Parameters &params) {
2034 status_t res = OK;
2035 int lastJpegStreamId = mJpegProcessor->getStreamId();
2036 if (lastJpegStreamId != NO_STREAM) {
2037 return INVALID_OPERATION;
2038 }
2039
2040 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2041 if (res != OK) {
2042 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2043 __FUNCTION__, mCameraId, strerror(-res), res);
2044 return res;
2045 }
2046
2047 res = mDevice->flush();
2048 if (res != OK) {
2049 ALOGE("%s: Camera %d: Unable to flush device: %s (%d)",
2050 __FUNCTION__, mCameraId, strerror(-res), res);
2051 return res;
2052 }
2053
2054 // Ideally we don't need this, but the current camera device
2055 // status tracking mechanism demands it.
2056 res = mDevice->waitUntilDrained();
2057 if (res != OK) {
2058 ALOGE("%s: Camera %d: Waiting for device drain failed: %s (%d)",
2059 __FUNCTION__, mCameraId, strerror(-res), res);
2060 }
2061
2062 res = updateProcessorStream(mJpegProcessor, params);
2063 return res;
2064 }
2065
2066 const int32_t Camera2Client::kPreviewRequestIdStart;
2067 const int32_t Camera2Client::kPreviewRequestIdEnd;
2068 const int32_t Camera2Client::kRecordingRequestIdStart;
2069 const int32_t Camera2Client::kRecordingRequestIdEnd;
2070 const int32_t Camera2Client::kCaptureRequestIdStart;
2071 const int32_t Camera2Client::kCaptureRequestIdEnd;
2072
2073 /** Utility methods */
2074
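// Rebuilds the preview and recording request metadata from the current
// parameters and, if preview or recording is active, restarts the
// corresponding stream so the new settings take effect.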
2075 status_t Camera2Client::updateRequests(Parameters &params) {
2076 status_t res;
2077
2078 ALOGV("%s: Camera %d: state = %d", __FUNCTION__, getCameraId(), params.state);
2079
2080 res = mStreamingProcessor->incrementStreamingIds();
2081 if (res != OK) {
2082 ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)",
2083 __FUNCTION__, mCameraId, strerror(-res), res);
2084 return res;
2085 }
2086
2087 res = mStreamingProcessor->updatePreviewRequest(params);
2088 if (res != OK) {
2089 ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)",
2090 __FUNCTION__, mCameraId, strerror(-res), res);
2091 return res;
2092 }
2093 res = mStreamingProcessor->updateRecordingRequest(params);
2094 if (res != OK) {
2095 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
2096 __FUNCTION__, mCameraId, strerror(-res), res);
2097 return res;
2098 }
2099
2100 if (params.state == Parameters::PREVIEW) {
2101 res = startPreviewL(params, true);
2102 if (res != OK) {
2103 ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)",
2104 __FUNCTION__, mCameraId, strerror(-res), res);
2105 return res;
2106 }
2107 } else if (params.state == Parameters::RECORD ||
2108 params.state == Parameters::VIDEO_SNAPSHOT) {
2109 res = startRecordingL(params, true);
2110 if (res != OK) {
2111 ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)",
2112 __FUNCTION__, mCameraId, strerror(-res), res);
2113 return res;
2114 }
2115 }
2116 return res;
2117 }
2118
2119
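// Computes the callback buffer size for the given preview format. Only YV12
// uses the row stride; its chroma stride is the luma stride halved and then
// aligned up to 16, per the YV12 layout definition.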
2120 size_t Camera2Client::calculateBufferSize(int width, int height,
2121 int format, int stride) {
2122 switch (format) {
2123 case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16
2124 return width * height * 2;
2125 case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21
2126 return width * height * 3 / 2;
2127 case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2
2128 return width * height * 2;
2129 case HAL_PIXEL_FORMAT_YV12: { // YV12
2130 size_t ySize = stride * height;
2131 size_t uvStride = (stride / 2 + 0xF) & ~0xF;
2132 size_t uvSize = uvStride * height / 2;
2133 return ySize + uvSize * 2;
2134 }
2135 case HAL_PIXEL_FORMAT_RGB_565:
2136 return width * height * 2;
2137 case HAL_PIXEL_FORMAT_RGBA_8888:
2138 return width * height * 4;
2139 case HAL_PIXEL_FORMAT_RAW16:
2140 return width * height * 2;
2141 default:
2142 ALOGE("%s: Unknown preview format: %x",
2143 __FUNCTION__, format);
2144 return 0;
2145 }
2146 }
2147
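// Blocks until the device has accepted the currently active streaming request
// (up to 500 ms), so that subsequent triggers see up-to-date settings. No-op
// if there is no active streaming request.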
2148 status_t Camera2Client::syncWithDevice() {
2149 ATRACE_CALL();
2150 const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms
2151 status_t res;
2152
2153 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
2154 if (activeRequestId == 0) return OK;
2155
2156 res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout);
2157 if (res == TIMED_OUT) {
2158 ALOGE("%s: Camera %d: Timed out waiting sync with HAL",
2159 __FUNCTION__, mCameraId);
2160 } else if (res != OK) {
2161 ALOGE("%s: Camera %d: Error while waiting to sync with HAL",
2162 __FUNCTION__, mCameraId);
2163 }
2164 return res;
2165 }
2166
2167 template <typename ProcessorT>
2168 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2169 camera2::Parameters params) {
2170 // No default template arguments until C++11, so we need this overload
2171 return updateProcessorStream<ProcessorT, &ProcessorT::updateStream>(
2172 processor, params);
2173 }
2174
2175 template <typename ProcessorT,
2176 status_t (ProcessorT::*updateStreamF)(const Parameters &)>
2177 status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor,
2178 Parameters params) {
2179 status_t res;
2180
2181 // Get raw pointer since sp<T> doesn't have operator->*
2182 ProcessorT *processorPtr = processor.get();
2183 res = (processorPtr->*updateStreamF)(params);
2184
2185 /**
2186 * If the stream can't be updated because it's busy, we need to stop
2187 * the device (by temporarily clearing the request queue) and then
2188 * try again. Resume streaming once we're done, whether or not the
2189 * second attempt succeeded.
2190 */
2191 if (res == -EBUSY) {
2192 ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__,
2193 mCameraId);
2194 res = mStreamingProcessor->togglePauseStream(/*pause*/true);
2195 if (res != OK) {
2196 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)",
2197 __FUNCTION__, mCameraId, strerror(-res), res);
2198 }
2199
2200 res = mDevice->waitUntilDrained();
2201 if (res != OK) {
2202 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
2203 __FUNCTION__, mCameraId, strerror(-res), res);
2204 }
2205
2206 res = (processorPtr->*updateStreamF)(params);
2207 if (res != OK) {
2208 ALOGE("%s: Camera %d: Failed to update processing stream "
2209 " despite having halted streaming first: %s (%d)",
2210 __FUNCTION__, mCameraId, strerror(-res), res);
2211 }
2212
2213 res = mStreamingProcessor->togglePauseStream(/*pause*/false);
2214 if (res != OK) {
2215 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)",
2216 __FUNCTION__, mCameraId, strerror(-res), res);
2217 }
2218 }
2219
2220 return res;
2221 }
2222
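// Overrides the JPEG (still) size with the current video size and
// reconfigures the JPEG stream; used when the requested still size cannot be
// configured while recording or taking a video snapshot.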
2223 status_t Camera2Client::overrideVideoSnapshotSize(Parameters &params) {
2224 ALOGV("%s: Camera %d: configure still size to video size before recording"
2225 , __FUNCTION__, mCameraId);
2226 params.overrideJpegSizeByVideoSize();
2227 status_t res = updateProcessorStream(mJpegProcessor, params);
2228 if (res != OK) {
2229 ALOGE("%s: Camera %d: Can't override video snapshot size to video size: %s (%d)",
2230 __FUNCTION__, mCameraId, strerror(-res), res);
2231 }
2232 return res;
2233 }
2234
2235 status_t Camera2Client::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
2236 ATRACE_CALL();
2237 ALOGV("%s: E", __FUNCTION__);
2238 Mutex::Autolock icl(mBinderSerializationLock);
2239 status_t res;
2240 if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
2241
2242 sp<IBinder> binder = IInterface::asBinder(bufferProducer);
2243 if (binder == mVideoSurface) {
2244 ALOGV("%s: Camera %d: New video window is the same as the old video window",
2245 __FUNCTION__, mCameraId);
2246 return NO_ERROR;
2247 }
2248
2249 sp<Surface> window;
2250 int format;
2251 android_dataspace dataSpace;
2252
2253 if (bufferProducer != nullptr) {
2254 // Using controlledByApp flag to ensure that the buffer queue remains in
2255 // async mode for the old camera API, where many applications depend
2256 // on that behavior.
2257 window = new Surface(bufferProducer, /*controlledByApp*/ true);
2258
2259 ANativeWindow *anw = window.get();
2260
2261 if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
2262 ALOGE("%s: Failed to query Surface format", __FUNCTION__);
2263 return res;
2264 }
2265
2266 if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
2267 reinterpret_cast<int*>(&dataSpace))) != OK) {
2268 ALOGE("%s: Failed to query Surface dataSpace", __FUNCTION__);
2269 return res;
2270 }
2271 }
2272
2273 Parameters::State state;
2274 {
2275 SharedParameters::Lock l(mParameters);
2276 state = l.mParameters.state;
2277 }
2278
2279 switch (state) {
2280 case Parameters::STOPPED:
2281 case Parameters::WAITING_FOR_PREVIEW_WINDOW:
2282 case Parameters::PREVIEW:
2283 // OK
2284 break;
2285 case Parameters::DISCONNECTED:
2286 case Parameters::RECORD:
2287 case Parameters::STILL_CAPTURE:
2288 case Parameters::VIDEO_SNAPSHOT:
2289 default:
2290 ALOGE("%s: Camera %d: Cannot set video target while in state %s",
2291 __FUNCTION__, mCameraId,
2292 Parameters::getStateName(state));
2293 return INVALID_OPERATION;
2294 }
2295
2296 mVideoSurface = binder;
2297 res = mStreamingProcessor->setRecordingWindow(window);
2298 if (res != OK) {
2299 ALOGE("%s: Unable to set new recording window: %s (%d)",
2300 __FUNCTION__, strerror(-res), res);
2301 return res;
2302 }
2303
2304 {
2305 SharedParameters::Lock l(mParameters);
2306 l.mParameters.videoFormat = format;
2307 l.mParameters.videoDataSpace = dataSpace;
2308 }
2309
2310 return OK;
2311 }
2312
2313 status_t Camera2Client::setAudioRestriction(int /*mode*/) {
2314 // Empty implementation. setAudioRestriction is a hidden interface and is
2315 // not supported by the android.hardware.Camera API
2316 return INVALID_OPERATION;
2317 }
2318
2319 int32_t Camera2Client::getGlobalAudioRestriction() {
2320 // Empty implementation. getGlobalAudioRestriction is a hidden interface and
2321 // is not supported by the android.hardware.Camera API
2322 return INVALID_OPERATION;
2323 }
2324
2325 status_t Camera2Client::setCameraServiceWatchdog(bool enabled) {
2326 return mDevice->setCameraServiceWatchdog(enabled);
2327 }
2328
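// Records the rotate-and-crop override (falling back to NONE if the device
// does not support rotate-and-crop) and, when supported, forwards it to the
// device as the AUTO rotate-and-crop behavior.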
2329 status_t Camera2Client::setRotateAndCropOverride(uint8_t rotateAndCrop) {
2330 if (rotateAndCrop > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
2331
2332 {
2333 Mutex::Autolock icl(mRotateAndCropLock);
2334 if (mRotateAndCropIsSupported) {
2335 mRotateAndCropMode = rotateAndCrop;
2336 } else {
2337 mRotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
2338 return OK;
2339 }
2340 }
2341
2342 return mDevice->setRotateAndCropAutoBehavior(
2343 static_cast<camera_metadata_enum_android_scaler_rotate_and_crop_t>(rotateAndCrop));
2344 }
2345
2346 bool Camera2Client::supportsCameraMute() {
2347 return mDevice->supportsCameraMute();
2348 }
2349
2350 status_t Camera2Client::setCameraMute(bool enabled) {
2351 return mDevice->setCameraMute(enabled);
2352 }
2353
2354 void Camera2Client::setStreamUseCaseOverrides(
2355 const std::vector<int64_t>& useCaseOverrides) {
2356 mDevice->setStreamUseCaseOverrides(useCaseOverrides);
2357 }
2358
2359 void Camera2Client::clearStreamUseCaseOverrides() {
2360 mDevice->clearStreamUseCaseOverrides();
2361 }
2362
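// Waits for the currently active streaming request id to show up in capture
// results (or be reported as failed), bounded by the device's expected
// in-flight duration. No-op if there is no active streaming request.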
2363 status_t Camera2Client::waitUntilCurrentRequestIdLocked() {
2364 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId();
2365 if (activeRequestId != 0) {
2366 auto res = waitUntilRequestIdApplied(activeRequestId,
2367 mDevice->getExpectedInFlightDuration());
2368 if (res == TIMED_OUT) {
2369 ALOGE("%s: Camera %d: Timed out waiting for current request id to return in results!",
2370 __FUNCTION__, mCameraId);
2371 return res;
2372 } else if (res != OK) {
2373 ALOGE("%s: Camera %d: Error while waiting for current request id to return in results!",
2374 __FUNCTION__, mCameraId);
2375 return res;
2376 }
2377 }
2378
2379 return OK;
2380 }
2381
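// Waits on the latest-request condition until the given request id has either
// been applied or reported as failed; returns DEAD_OBJECT in the failed case
// and propagates TIMED_OUT (or other errors) from the condition wait.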
2382 status_t Camera2Client::waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout) {
2383 Mutex::Autolock l(mLatestRequestMutex);
2384 while ((mLatestRequestId != requestId) && (mLatestFailedRequestId != requestId)) {
2385 nsecs_t startTime = systemTime();
2386
2387 auto res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
2388 if (res != OK) return res;
2389
2390 timeout -= (systemTime() - startTime);
2391 }
2392
2393 return (mLatestRequestId == requestId) ? OK : DEAD_OBJECT;
2394 }
2395
2396 void Camera2Client::notifyRequestId(int32_t requestId) {
2397 Mutex::Autolock al(mLatestRequestMutex);
2398
2399 mLatestRequestId = requestId;
2400 mLatestRequestSignal.signal();
2401 }
2402
2403 const char* Camera2Client::kAutofocusLabel = "autofocus";
2404 const char* Camera2Client::kTakepictureLabel = "take_picture";
2405
2406 } // namespace android
2407