1 /*
2 * Copyright (C) 2013-2018 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2-ZslProcessor"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20 //#define LOG_NNDEBUG 0
21
22 #ifdef LOG_NNDEBUG
23 #define ALOGVV(...) ALOGV(__VA_ARGS__)
24 #else
25 #define ALOGVV(...) if (0) ALOGV(__VA_ARGS__)
26 #endif
27
28 #include <inttypes.h>
29
30 #include <utils/Log.h>
31 #include <utils/Trace.h>
32 #include <gui/Surface.h>
33
34 #include "common/CameraDeviceBase.h"
35 #include "api1/Camera2Client.h"
36 #include "api1/client2/CaptureSequencer.h"
37 #include "api1/client2/ZslProcessor.h"
38 #include "device3/Camera3Device.h"
39
40 typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
41
42 namespace android {
43 namespace camera2 {
44
45 using android::camera3::CAMERA_STREAM_ROTATION_0;
46 using android::camera3::CAMERA_TEMPLATE_STILL_CAPTURE;
47
48 namespace {
49 struct TimestampFinder : public RingBufferConsumer::RingBufferComparator {
50 typedef RingBufferConsumer::BufferInfo BufferInfo;
51
52 enum {
53 SELECT_I1 = -1,
54 SELECT_I2 = 1,
55 SELECT_NEITHER = 0,
56 };
57
TimestampFinderandroid::camera2::__anon7ddac16b0111::TimestampFinder58 explicit TimestampFinder(nsecs_t timestamp) : mTimestamp(timestamp) {}
~TimestampFinderandroid::camera2::__anon7ddac16b0111::TimestampFinder59 ~TimestampFinder() {}
60
61 template <typename T>
swapandroid::camera2::__anon7ddac16b0111::TimestampFinder62 static void swap(T& a, T& b) {
63 T tmp = a;
64 a = b;
65 b = tmp;
66 }
67
68 /**
69 * Try to find the best candidate for a ZSL buffer.
70 * Match priority from best to worst:
71 * 1) Timestamps match.
72 * 2) Timestamp is closest to the needle (and lower).
73 * 3) Timestamp is closest to the needle (and higher).
74 *
75 */
compareandroid::camera2::__anon7ddac16b0111::TimestampFinder76 virtual int compare(const BufferInfo *i1,
77 const BufferInfo *i2) const {
78 // Try to select non-null object first.
79 if (i1 == NULL) {
80 return SELECT_I2;
81 } else if (i2 == NULL) {
82 return SELECT_I1;
83 }
84
85 // Best result: timestamp is identical
86 if (i1->mTimestamp == mTimestamp) {
87 return SELECT_I1;
88 } else if (i2->mTimestamp == mTimestamp) {
89 return SELECT_I2;
90 }
91
92 const BufferInfo* infoPtrs[2] = {
93 i1,
94 i2
95 };
96 int infoSelectors[2] = {
97 SELECT_I1,
98 SELECT_I2
99 };
100
101 // Order i1,i2 so that always i1.timestamp < i2.timestamp
102 if (i1->mTimestamp > i2->mTimestamp) {
103 swap(infoPtrs[0], infoPtrs[1]);
104 swap(infoSelectors[0], infoSelectors[1]);
105 }
106
107 // Second best: closest (lower) timestamp
108 if (infoPtrs[1]->mTimestamp < mTimestamp) {
109 return infoSelectors[1];
110 } else if (infoPtrs[0]->mTimestamp < mTimestamp) {
111 return infoSelectors[0];
112 }
113
114 // Worst: closest (higher) timestamp
115 return infoSelectors[0];
116
117 /**
118 * The above cases should cover all the possibilities,
119 * and we get an 'empty' result only if the ring buffer
120 * was empty itself
121 */
122 }
123
124 const nsecs_t mTimestamp;
125 }; // struct TimestampFinder
126 } // namespace anonymous
127
/**
 * Construct the ZSL processor for the given client.
 *
 * Queries the device static metadata for the pipeline max depth (used to
 * size the buffer/metadata queues) and for the presence of a focuser
 * (minimum focus distance != 0), then registers itself with the capture
 * sequencer so ZSL captures can be triggered.
 *
 * @param client    owning API1 client; also supplies the camera device.
 * @param sequencer capture sequencer that will drive pushToReprocess().
 */
ZslProcessor::ZslProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mLatestClearedBufferTimestamp(0),
        mState(RUNNING),
        mClient(client),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mZslStreamId(NO_STREAM),
        mInputStreamId(NO_STREAM),
        mFrameListHead(0),
        mHasFocuser(false),
        mInputBuffer(nullptr),
        mProducer(nullptr),
        mInputProducer(nullptr),
        mInputProducerSlot(-1),
        mBuffersToDetach(0) {
    // Initialize buffer queue and frame list based on pipeline max depth.
    size_t pipelineMaxDepth = kDefaultMaxPipelineDepth;
    if (client != 0) {
        sp<Camera3Device> device =
            static_cast<Camera3Device*>(client->getCameraDevice().get());
        if (device != 0) {
            camera_metadata_ro_entry_t entry =
                device->info().find(ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
            if (entry.count == 1) {
                pipelineMaxDepth = entry.data.u8[0];
            } else {
                ALOGW("%s: Unable to find the android.request.pipelineMaxDepth,"
                        " use default pipeline max depth %d", __FUNCTION__,
                        kDefaultMaxPipelineDepth);
            }

            // A non-zero minimum focus distance means the lens can move,
            // so AF state must be checked before accepting a ZSL frame.
            entry = device->info().find(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
            if (entry.count > 0 && entry.data.f[0] != 0.) {
                mHasFocuser = true;
            }
        }
    }

    ALOGV("%s: Initialize buffer queue and frame list depth based on max pipeline depth (%zu)",
          __FUNCTION__, pipelineMaxDepth);
    // Need to keep buffer queue longer than metadata queue because sometimes buffer arrives
    // earlier than metadata which causes the buffer corresponding to oldest metadata being
    // removed.
    mFrameListDepth = pipelineMaxDepth;
    mBufferQueueDepth = mFrameListDepth + 1;

    mZslQueue.insertAt(0, mBufferQueueDepth);
    mFrameList.resize(mFrameListDepth);
    sp<CaptureSequencer> captureSequencer = mSequencer.promote();
    if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
}
182
// Destructor: tear down the ZSL output/input streams and disconnect the
// input producer (all handled by deleteStream()).
ZslProcessor::~ZslProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}
187
onResultAvailable(const CaptureResult & result)188 void ZslProcessor::onResultAvailable(const CaptureResult &result) {
189 ATRACE_CALL();
190 ALOGV("%s:", __FUNCTION__);
191 Mutex::Autolock l(mInputMutex);
192 camera_metadata_ro_entry_t entry;
193 entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
194 nsecs_t timestamp = entry.data.i64[0];
195 if (entry.count == 0) {
196 ALOGE("%s: metadata doesn't have timestamp, skip this result", __FUNCTION__);
197 return;
198 }
199
200 entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
201 if (entry.count == 0) {
202 ALOGE("%s: metadata doesn't have frame number, skip this result", __FUNCTION__);
203 return;
204 }
205 int32_t frameNumber = entry.data.i32[0];
206
207 ALOGVV("Got preview metadata for frame %d with timestamp %" PRId64, frameNumber, timestamp);
208
209 if (mState != RUNNING) return;
210
211 // Corresponding buffer has been cleared. No need to push into mFrameList
212 if (timestamp <= mLatestClearedBufferTimestamp) return;
213
214 mFrameList[mFrameListHead] = result.mMetadata;
215 mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
216 }
217
/**
 * Create the ZSL streams on the device if they do not exist yet:
 *  - an input stream used to hand a pinned buffer back for reprocessing, and
 *  - an output stream backed by a RingBufferConsumer (depth
 *    mBufferQueueDepth) that the HAL fills with ZSL candidate buffers.
 * Also registers this processor as a listener for preview request results.
 *
 * @param params current client parameters; provides the ZSL stream size.
 * @return OK on success, INVALID_OPERATION if client/device are gone, or
 *         the stream-creation error code.
 */
status_t ZslProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<Camera2Client> client = mClient.promote();
    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }
    sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    if (mInputStreamId == NO_STREAM) {
        res = device->createInputStream(params.fastInfo.usedZslSize.width,
            params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            /*isMultiResolution*/false, &mInputStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create input stream: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    if (mZslStreamId == NO_STREAM) {
        // Create stream for HAL production
        // TODO: Sort out better way to select resolution for ZSL

        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        // The ring buffer consumer retains the last mBufferQueueDepth frames
        // so one can later be pinned by timestamp for reprocessing.
        mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_ZSL,
            mBufferQueueDepth);
        mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
        sp<Surface> outSurface = new Surface(producer);

        res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
            params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0, &mZslStreamId,
            String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create ZSL stream: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    // Receive preview results so buffers can be matched to their metadata.
    client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
            Camera2Client::kPreviewRequestIdEnd,
            this,
            /*sendPartials*/false);

    return OK;
}
280
deleteStream()281 status_t ZslProcessor::deleteStream() {
282 ATRACE_CALL();
283 status_t res;
284 sp<Camera3Device> device = nullptr;
285 sp<Camera2Client> client = nullptr;
286
287 Mutex::Autolock l(mInputMutex);
288
289 if ((mZslStreamId != NO_STREAM) || (mInputStreamId != NO_STREAM)) {
290 client = mClient.promote();
291 if (client == 0) {
292 ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
293 return INVALID_OPERATION;
294 }
295
296 device =
297 reinterpret_cast<Camera3Device*>(client->getCameraDevice().get());
298 if (device == 0) {
299 ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
300 return INVALID_OPERATION;
301 }
302 }
303
304 if (mZslStreamId != NO_STREAM) {
305 res = device->deleteStream(mZslStreamId);
306 if (res != OK) {
307 ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
308 "%s (%d)", __FUNCTION__, client->getCameraId(),
309 mZslStreamId, strerror(-res), res);
310 return res;
311 }
312
313 mZslStreamId = NO_STREAM;
314 }
315 if (mInputStreamId != NO_STREAM) {
316 res = device->deleteStream(mInputStreamId);
317 if (res != OK) {
318 ALOGE("%s: Camera %d: Cannot delete input stream %d: "
319 "%s (%d)", __FUNCTION__, client->getCameraId(),
320 mInputStreamId, strerror(-res), res);
321 return res;
322 }
323
324 mInputStreamId = NO_STREAM;
325 }
326
327 if (nullptr != mInputProducer.get()) {
328 mInputProducer->disconnect(NATIVE_WINDOW_API_CPU);
329 mInputProducer.clear();
330 }
331
332 return OK;
333 }
334
getStreamId() const335 int ZslProcessor::getStreamId() const {
336 Mutex::Autolock l(mInputMutex);
337 return mZslStreamId;
338 }
339
updateRequestWithDefaultStillRequest(CameraMetadata & request) const340 status_t ZslProcessor::updateRequestWithDefaultStillRequest(CameraMetadata &request) const {
341 sp<Camera2Client> client = mClient.promote();
342 if (client == 0) {
343 ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
344 return INVALID_OPERATION;
345 }
346 sp<Camera3Device> device =
347 static_cast<Camera3Device*>(client->getCameraDevice().get());
348 if (device == 0) {
349 ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
350 return INVALID_OPERATION;
351 }
352
353 CameraMetadata stillTemplate;
354 device->createDefaultRequest(CAMERA_TEMPLATE_STILL_CAPTURE, &stillTemplate);
355
356 // Find some of the post-processing tags, and assign the value from template to the request.
357 // Only check the aberration mode and noise reduction mode for now, as they are very important
358 // for image quality.
359 uint32_t postProcessingTags[] = {
360 ANDROID_NOISE_REDUCTION_MODE,
361 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
362 ANDROID_COLOR_CORRECTION_MODE,
363 ANDROID_TONEMAP_MODE,
364 ANDROID_SHADING_MODE,
365 ANDROID_HOT_PIXEL_MODE,
366 ANDROID_EDGE_MODE
367 };
368
369 camera_metadata_entry_t entry;
370 for (size_t i = 0; i < sizeof(postProcessingTags) / sizeof(uint32_t); i++) {
371 entry = stillTemplate.find(postProcessingTags[i]);
372 if (entry.count > 0) {
373 request.update(postProcessingTags[i], entry.data.u8, 1);
374 }
375 }
376
377 return OK;
378 }
379
notifyInputReleased()380 void ZslProcessor::notifyInputReleased() {
381 Mutex::Autolock l(mInputMutex);
382
383 mBuffersToDetach++;
384 mBuffersToDetachSignal.signal();
385 }
386
doNotifyInputReleasedLocked()387 void ZslProcessor::doNotifyInputReleasedLocked() {
388 assert(nullptr != mInputBuffer.get());
389 assert(nullptr != mInputProducer.get());
390
391 sp<GraphicBuffer> gb;
392 sp<Fence> fence;
393 auto rc = mInputProducer->detachNextBuffer(&gb, &fence);
394 if (NO_ERROR != rc) {
395 ALOGE("%s: Failed to detach buffer from input producer: %d",
396 __FUNCTION__, rc);
397 return;
398 }
399
400 BufferItem &item = mInputBuffer->getBufferItem();
401 sp<GraphicBuffer> inputBuffer = item.mGraphicBuffer;
402 if (gb->handle != inputBuffer->handle) {
403 ALOGE("%s: Input mismatch, expected buffer %p received %p", __FUNCTION__,
404 inputBuffer->handle, gb->handle);
405 return;
406 }
407
408 mInputBuffer.clear();
409 ALOGV("%s: Memory optimization, clearing ZSL queue",
410 __FUNCTION__);
411 clearZslResultQueueLocked();
412
413 // Required so we accept more ZSL requests
414 mState = RUNNING;
415 }
416
onBufferReleased()417 void ZslProcessor::InputProducerListener::onBufferReleased() {
418 sp<ZslProcessor> parent = mParent.promote();
419 if (nullptr != parent.get()) {
420 parent->notifyInputReleased();
421 }
422 }
423
/**
 * Submit a ZSL reprocess capture for the given request id.
 *
 * Picks the best candidate frame from the cached metadata list (AE
 * converged/locked, acceptable AF), pins and enqueues the matching
 * ring-buffer entry into the device's input stream, rewrites the cached
 * preview metadata into a reprocess still-capture request, stops the
 * preview stream, and submits the capture. On success the processor
 * enters LOCKED state until the input buffer is released.
 *
 * @param requestId id to stamp on the submitted reprocess request.
 * @return OK on success; NOT_ENOUGH_DATA when no suitable candidate or
 *         buffer exists; BAD_VALUE / INVALID_OPERATION / other errors on
 *         failure.
 */
status_t ZslProcessor::pushToReprocess(int32_t requestId) {
    ALOGV("%s: Send in reprocess request with id %d",
            __FUNCTION__, requestId);
    Mutex::Autolock l(mInputMutex);
    status_t res;
    sp<Camera2Client> client = mClient.promote();

    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    IF_ALOGV() {
        dumpZslQueue(-1);
    }

    size_t metadataIdx;
    nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx);

    if (candidateTimestamp == -1) {
        ALOGV("%s: Could not find good candidate for ZSL reprocessing",
              __FUNCTION__);
        return NOT_ENOUGH_DATA;
    } else {
        ALOGV("%s: Found good ZSL candidate idx: %u",
            __FUNCTION__, (unsigned int) metadataIdx);
    }

    // Lazily connect to the device's input stream producer on first use.
    if (nullptr == mInputProducer.get()) {
        res = client->getCameraDevice()->getInputBufferProducer(
            &mInputProducer);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to retrieve input producer: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }

        IGraphicBufferProducer::QueueBufferOutput output;
        res = mInputProducer->connect(new InputProducerListener(this),
                NATIVE_WINDOW_API_CPU, false, &output);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to connect to input producer: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    // Pin the ring-buffer entry matching the candidate timestamp and queue
    // it into the input stream.
    res = enqueueInputBufferByTimestamp(candidateTimestamp,
        /*actualTimestamp*/NULL);
    if (res == NO_BUFFER_AVAILABLE) {
        ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
        return NOT_ENOUGH_DATA;
    } else if (res != OK) {
        ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    {
        CameraMetadata request = mFrameList[metadataIdx];

        // Verify that the frame is reasonable for reprocessing

        camera_metadata_entry_t entry;
        entry = request.find(ANDROID_CONTROL_AE_STATE);
        if (entry.count == 0) {
            ALOGE("%s: ZSL queue frame has no AE state field!",
                    __FUNCTION__);
            return BAD_VALUE;
        }
        if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
            ALOGV("%s: ZSL queue frame AE state is %d, need full capture",
                    __FUNCTION__, entry.data.u8[0]);
            return NOT_ENOUGH_DATA;
        }

        // Mark the request as a reprocess request so the device consumes
        // the queued input buffer instead of a fresh sensor frame.
        uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
        res = request.update(ANDROID_REQUEST_TYPE,
                &requestType, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request type",
                    __FUNCTION__);
            return INVALID_OPERATION;
        }

        int32_t inputStreams[1] =
                { mInputStreamId };
        res = request.update(ANDROID_REQUEST_INPUT_STREAMS,
                inputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request input streams",
                    __FUNCTION__);
            return INVALID_OPERATION;
        }

        uint8_t captureIntent =
                static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
        res = request.update(ANDROID_CONTROL_CAPTURE_INTENT,
                &captureIntent, 1);
        if (res != OK ) {
            ALOGE("%s: Unable to update request capture intent",
                    __FUNCTION__);
            return INVALID_OPERATION;
        }

        // TODO: Shouldn't we also update the latest preview frame?
        int32_t outputStreams[1] =
                { client->getCaptureStreamId() };
        res = request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                outputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request output streams",
                    __FUNCTION__);
            return INVALID_OPERATION;
        }

        res = request.update(ANDROID_REQUEST_ID,
                &requestId, 1);
        if (res != OK ) {
            ALOGE("%s: Unable to update frame to a reprocess request",
                    __FUNCTION__);
            return INVALID_OPERATION;
        }

        // Preview must be stopped before the reprocess capture is issued.
        res = client->stopStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
                "%s (%d)",
                __FUNCTION__, client->getCameraId(), strerror(-res), res);
            return INVALID_OPERATION;
        }

        // Update JPEG settings
        {
            SharedParameters::Lock l(client->getParameters());
            res = l.mParameters.updateRequestJpeg(&request);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
                        "capture request: %s (%d)", __FUNCTION__,
                        client->getCameraId(),
                        strerror(-res), res);
                return res;
            }
        }

        // Update post-processing settings
        res = updateRequestWithDefaultStillRequest(request);
        if (res != OK) {
            ALOGW("%s: Unable to update post-processing tags, the reprocessed image quality "
                    "may be compromised", __FUNCTION__);
        }

        mLatestCapturedRequest = request;
        res = client->getCameraDevice()->capture(request);
        if (res != OK ) {
            ALOGE("%s: Unable to send ZSL reprocess request to capture: %s"
                    " (%d)", __FUNCTION__, strerror(-res), res);
            return res;
        }

        // Stay LOCKED until doNotifyInputReleasedLocked() runs.
        mState = LOCKED;
    }

    return OK;
}
592
/**
 * Pin the ring-buffer entry whose timestamp best matches the requested one
 * (matching policy: see TimestampFinder) and hand it to the device's input
 * stream producer via attach + queue.
 *
 * @param timestamp desired sensor timestamp of the candidate buffer.
 * @param actualTimestamp optional out-param receiving the timestamp of the
 *        buffer actually selected (may differ from the request).
 * @return OK on success, NO_BUFFER_AVAILABLE when the ring buffer is
 *         empty, or the error from the producer operations.
 */
status_t ZslProcessor::enqueueInputBufferByTimestamp(
        nsecs_t timestamp,
        nsecs_t* actualTimestamp) {

    TimestampFinder timestampFinder = TimestampFinder(timestamp);

    // Pin the selected buffer so the consumer can't recycle it while the
    // reprocess request is in flight.
    mInputBuffer = mProducer->pinSelectedBuffer(timestampFinder,
        /*waitForFence*/false);

    if (nullptr == mInputBuffer.get()) {
        ALOGE("%s: No ZSL buffers were available yet", __FUNCTION__);
        return NO_BUFFER_AVAILABLE;
    }

    nsecs_t actual = mInputBuffer->getBufferItem().mTimestamp;

    if (actual != timestamp) {
        // TODO: This is problematic, the metadata queue timestamp should
        //       usually have a corresponding ZSL buffer with the same timestamp.
        //       If this is not the case, then it is possible that we will use
        //       a ZSL buffer from a different request, which can result in
        //       side effects during the reprocess pass.
        ALOGW("%s: ZSL buffer candidate search didn't find an exact match --"
              " requested timestamp = %" PRId64 ", actual timestamp = %" PRId64,
              __FUNCTION__, timestamp, actual);
    }

    if (nullptr != actualTimestamp) {
        *actualTimestamp = actual;
    }

    // Attach the pinned graphic buffer to the input producer and queue it
    // with its original metadata (timestamp, crop, transform, fence, ...).
    BufferItem &item = mInputBuffer->getBufferItem();
    auto rc = mInputProducer->attachBuffer(&mInputProducerSlot,
        item.mGraphicBuffer);
    if (OK != rc) {
        ALOGE("%s: Failed to attach input ZSL buffer to producer: %d",
            __FUNCTION__, rc);
        return rc;
    }

    IGraphicBufferProducer::QueueBufferOutput output;
    IGraphicBufferProducer::QueueBufferInput input(item.mTimestamp,
            item.mIsAutoTimestamp, item.mDataSpace, item.mCrop,
            item.mScalingMode, item.mTransform, item.mFence);
    rc = mInputProducer->queueBuffer(mInputProducerSlot, input, &output);
    if (OK != rc) {
        ALOGE("%s: Failed to queue ZSL buffer to producer: %d",
            __FUNCTION__, rc);
        return rc;
    }

    return rc;
}
646
clearInputRingBufferLocked(nsecs_t * latestTimestamp)647 status_t ZslProcessor::clearInputRingBufferLocked(nsecs_t* latestTimestamp) {
648
649 if (nullptr != latestTimestamp) {
650 *latestTimestamp = mProducer->getLatestTimestamp();
651 }
652 mInputBuffer.clear();
653
654 return mProducer->clear();
655 }
656
clearZslQueue()657 status_t ZslProcessor::clearZslQueue() {
658 Mutex::Autolock l(mInputMutex);
659 // If in middle of capture, can't clear out queue
660 if (mState == LOCKED) return OK;
661
662 return clearZslQueueLocked();
663 }
664
clearZslQueueLocked()665 status_t ZslProcessor::clearZslQueueLocked() {
666 if (NO_STREAM != mZslStreamId) {
667 // clear result metadata list first.
668 clearZslResultQueueLocked();
669 return clearInputRingBufferLocked(&mLatestClearedBufferTimestamp);
670 }
671 return OK;
672 }
673
// Drop all cached result metadata and reset the circular list back to its
// configured depth (resize() after clear() re-creates empty entries).
// Caller must hold mInputMutex.
void ZslProcessor::clearZslResultQueueLocked() {
    mFrameList.clear();
    mFrameListHead = 0;
    mFrameList.resize(mFrameListDepth);
}
679
// Dump the latest submitted ZSL capture request (if any) and the current
// ZSL queue contents to the given file descriptor.
void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
    Mutex::Autolock l(mInputMutex);
    if (!mLatestCapturedRequest.isEmpty()) {
        String8 result("    Latest ZSL capture request:\n");
        write(fd, result.string(), result.size());
        mLatestCapturedRequest.dump(fd, 2, 6);
    } else {
        String8 result("    Latest ZSL capture request: none yet\n");
        write(fd, result.string(), result.size());
    }
    dumpZslQueue(fd);
}
692
threadLoop()693 bool ZslProcessor::threadLoop() {
694 Mutex::Autolock l(mInputMutex);
695
696 if (mBuffersToDetach == 0) {
697 status_t res = mBuffersToDetachSignal.waitRelative(mInputMutex, kWaitDuration);
698 if (res == TIMED_OUT) return true;
699 }
700 while (mBuffersToDetach > 0) {
701 doNotifyInputReleasedLocked();
702 mBuffersToDetach--;
703 }
704
705 return true;
706 }
707
/**
 * Log (and, when fd != -1, also write to fd) one line per ZSL queue entry:
 * buffer timestamp, matched frame timestamp, and frame AE state.
 * Entries without metadata print frame timestamp 0 and AE state -1.
 */
void ZslProcessor::dumpZslQueue(int fd) const {
    String8 header("ZSL queue contents:");
    String8 indent("    ");
    ALOGV("%s", header.string());
    if (fd != -1) {
        header = indent + header + "\n";
        write(fd, header.string(), header.size());
    }
    for (size_t i = 0; i < mZslQueue.size(); i++) {
        const ZslPair &queueEntry = mZslQueue[i];
        nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
        camera_metadata_ro_entry_t entry;
        nsecs_t frameTimestamp = 0;
        int frameAeState = -1;
        if (!queueEntry.frame.isEmpty()) {
            entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (entry.count > 0) frameTimestamp = entry.data.i64[0];
            entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
            if (entry.count > 0) frameAeState = entry.data.u8[0];
        }
        String8 result =
                String8::format("   %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
                        bufferTimestamp, frameTimestamp, frameAeState);
        ALOGV("%s", result.string());
        if (fd != -1) {
            result = indent + result + "\n";
            write(fd, result.string(), result.size());
        }

    }
}
739
isFixedFocusMode(uint8_t afMode) const740 bool ZslProcessor::isFixedFocusMode(uint8_t afMode) const {
741 switch (afMode) {
742 case ANDROID_CONTROL_AF_MODE_AUTO:
743 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
744 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
745 case ANDROID_CONTROL_AF_MODE_MACRO:
746 return false;
747 break;
748 case ANDROID_CONTROL_AF_MODE_OFF:
749 case ANDROID_CONTROL_AF_MODE_EDOF:
750 return true;
751 default:
752 ALOGE("%s: unknown focus mode %d", __FUNCTION__, afMode);
753 return false;
754 }
755 }
756
/**
 * Scan the cached metadata list for the best ZSL reprocess candidate and
 * return its sensor timestamp (-1 if none qualifies).
 *
 * A frame qualifies when it has the smallest timestamp seen so far AND:
 *  - AE state is CONVERGED or LOCKED, and
 *  - if the device has a focuser and the AF mode is not fixed, AF state is
 *    PASSIVE_FOCUSED, FOCUSED_LOCKED, or NOT_FOCUSED_LOCKED.
 *
 * @param metadataIdx optional out-param; receives the index of the chosen
 *        frame within mFrameList (0 when no candidate was found).
 * @return candidate timestamp, or -1 when no suitable frame exists.
 *
 * Caller must hold mInputMutex.
 */
nsecs_t ZslProcessor::getCandidateTimestampLocked(size_t* metadataIdx) const {
    /**
     * Find the smallest timestamp we know about so far
     * - ensure that aeState is either converged or locked
     */

    size_t idx = 0;
    nsecs_t minTimestamp = -1;

    // Track how many slots are empty so the "no metadata at all" corner
    // case can be reported below.
    size_t emptyCount = mFrameList.size();

    for (size_t j = 0; j < mFrameList.size(); j++) {
        const CameraMetadata &frame = mFrameList[j];
        if (!frame.isEmpty()) {

            emptyCount--;

            camera_metadata_ro_entry_t entry;
            entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (entry.count == 0) {
                ALOGE("%s: Can't find timestamp in frame!",
                        __FUNCTION__);
                continue;
            }
            nsecs_t frameTimestamp = entry.data.i64[0];
            // Only consider frames older than the current best candidate.
            if (minTimestamp > frameTimestamp || minTimestamp == -1) {

                entry = frame.find(ANDROID_CONTROL_AE_STATE);

                if (entry.count == 0) {
                    /**
                     * This is most likely a HAL bug. The aeState field is
                     * mandatory, so it should always be in a metadata packet.
                     */
                    ALOGW("%s: ZSL queue frame has no AE state field!",
                            __FUNCTION__);
                    continue;
                }
                if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                        entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
                    ALOGVV("%s: ZSL queue frame AE state is %d, need "
                           "full capture",  __FUNCTION__, entry.data.u8[0]);
                    continue;
                }

                entry = frame.find(ANDROID_CONTROL_AF_MODE);
                if (entry.count == 0) {
                    ALOGW("%s: ZSL queue frame has no AF mode field!",
                            __FUNCTION__);
                    continue;
                }
                // Check AF state if device has focuser and focus mode isn't fixed
                if (mHasFocuser) {
                    uint8_t afMode = entry.data.u8[0];
                    if (!isFixedFocusMode(afMode)) {
                        // Make sure the candidate frame has good focus.
                        entry = frame.find(ANDROID_CONTROL_AF_STATE);
                        if (entry.count == 0) {
                            ALOGW("%s: ZSL queue frame has no AF state field!",
                                    __FUNCTION__);
                            continue;
                        }
                        uint8_t afState = entry.data.u8[0];
                        if (afState != ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
                                afState != ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
                                afState != ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
                            ALOGVV("%s: ZSL queue frame AF state is %d is not good for capture,"
                                    " skip it", __FUNCTION__, afState);
                            continue;
                        }
                    }
                }

                // Frame passed all checks; adopt it as the new candidate.
                minTimestamp = frameTimestamp;
                idx = j;
            }

            ALOGVV("%s: Saw timestamp %" PRId64, __FUNCTION__, frameTimestamp);
        }
    }

    if (emptyCount == mFrameList.size()) {
        /**
         * This could be mildly bad and means our ZSL was triggered before
         * there were any frames yet received by the camera framework.
         *
         * This is a fairly corner case which can happen under:
         * + a user presses the shutter button real fast when the camera starts
         * (startPreview followed immediately by takePicture).
         * + burst capture case (hitting shutter button as fast possible)
         *
         * If this happens in steady case (preview running for a while, call
         * a single takePicture) then this might be a fwk bug.
         */
        ALOGW("%s: ZSL queue has no metadata frames", __FUNCTION__);
    }

    ALOGV("%s: Candidate timestamp %" PRId64 " (idx %zu), empty frames: %zu",
          __FUNCTION__, minTimestamp, idx, emptyCount);

    if (metadataIdx) {
        *metadataIdx = idx;
    }

    return minTimestamp;
}
863
864 }; // namespace camera2
865 }; // namespace android
866