1 /*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2-ZslProcessor"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20 //#define LOG_NNDEBUG 0
21
22 #ifdef LOG_NNDEBUG
23 #define ALOGVV(...) ALOGV(__VA_ARGS__)
24 #else
25 #define ALOGVV(...) if (0) ALOGV(__VA_ARGS__)
26 #endif
27
28 #include <inttypes.h>
29
30 #include <utils/Log.h>
31 #include <utils/Trace.h>
32 #include <gui/Surface.h>
33
34 #include "common/CameraDeviceBase.h"
35 #include "api1/Camera2Client.h"
36 #include "api1/client2/CaptureSequencer.h"
37 #include "api1/client2/ZslProcessor.h"
38 #include "device3/Camera3Device.h"
39
40 typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
41
42 namespace android {
43 namespace camera2 {
44
namespace {
/**
 * Ring-buffer comparator used to pick the ZSL buffer whose timestamp best
 * matches a requested capture timestamp (the "needle", mTimestamp).
 * Passed to RingBufferConsumer::pinSelectedBuffer().
 */
struct TimestampFinder : public RingBufferConsumer::RingBufferComparator {
    typedef RingBufferConsumer::BufferInfo BufferInfo;

    // compare() return values: which of the two candidates is preferred.
    enum {
        SELECT_I1 = -1,
        SELECT_I2 = 1,
        SELECT_NEITHER = 0,
    };

    explicit TimestampFinder(nsecs_t timestamp) : mTimestamp(timestamp) {}
    ~TimestampFinder() {}

    // Local value-swap helper used below to reorder the candidate arrays.
    template <typename T>
    static void swap(T& a, T& b) {
        T tmp = a;
        a = b;
        b = tmp;
    }

    /**
     * Try to find the best candidate for a ZSL buffer.
     * Match priority from best to worst:
     *  1) Timestamps match.
     *  2) Timestamp is closest to the needle (and lower).
     *  3) Timestamp is closest to the needle (and higher).
     *
     */
    virtual int compare(const BufferInfo *i1,
                        const BufferInfo *i2) const {
        // Try to select non-null object first.
        if (i1 == NULL) {
            return SELECT_I2;
        } else if (i2 == NULL) {
            return SELECT_I1;
        }

        // Best result: timestamp is identical
        if (i1->mTimestamp == mTimestamp) {
            return SELECT_I1;
        } else if (i2->mTimestamp == mTimestamp) {
            return SELECT_I2;
        }

        const BufferInfo* infoPtrs[2] = {
            i1,
            i2
        };
        int infoSelectors[2] = {
            SELECT_I1,
            SELECT_I2
        };

        // Order i1,i2 so that always i1.timestamp < i2.timestamp
        if (i1->mTimestamp > i2->mTimestamp) {
            swap(infoPtrs[0], infoPtrs[1]);
            swap(infoSelectors[0], infoSelectors[1]);
        }

        // Second best: closest (lower) timestamp
        if (infoPtrs[1]->mTimestamp < mTimestamp) {
            return infoSelectors[1];
        } else if (infoPtrs[0]->mTimestamp < mTimestamp) {
            return infoSelectors[0];
        }

        // Worst: closest (higher) timestamp. At this point both candidate
        // timestamps are >= the needle, and the arrays are ordered so [0]
        // holds the smaller (closer) one.
        return infoSelectors[0];

        /**
         * The above cases should cover all the possibilities,
         * and we get an 'empty' result only if the ring buffer
         * was empty itself
         */
    }

    // The needle: the capture timestamp we are trying to match.
    const nsecs_t mTimestamp;
}; // struct TimestampFinder
} // namespace anonymous
124
/**
 * Construct the ZSL processor for |client|.
 *
 * Sizes the metadata frame list from the device's reported
 * android.request.pipelineMaxDepth (falling back to kDefaultMaxPipelineDepth
 * when the tag is missing), sizes the buffer queue one deeper than the frame
 * list, detects whether the device has a focuser, and registers itself with
 * the capture sequencer.
 */
ZslProcessor::ZslProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mLatestClearedBufferTimestamp(0),
        mState(RUNNING),
        mClient(client),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mZslStreamId(NO_STREAM),
        mInputStreamId(NO_STREAM),
        mFrameListHead(0),
        mHasFocuser(false),
        mInputBuffer(nullptr),
        mProducer(nullptr),
        mInputProducer(nullptr),
        mInputProducerSlot(-1),
        mBuffersToDetach(0) {
    // Initialize buffer queue and frame list based on pipeline max depth.
    size_t pipelineMaxDepth = kDefaultMaxPipelineDepth;
    if (client != 0) {
        sp<Camera3Device> device =
            static_cast<Camera3Device*>(client->getCameraDevice().get());
        if (device != 0) {
            camera_metadata_ro_entry_t entry =
                device->info().find(ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
            if (entry.count == 1) {
                pipelineMaxDepth = entry.data.u8[0];
            } else {
                ALOGW("%s: Unable to find the android.request.pipelineMaxDepth,"
                        " use default pipeline max depth %d", __FUNCTION__,
                        kDefaultMaxPipelineDepth);
            }

            // A nonzero minimum focus distance implies the lens can move,
            // i.e. the device has a focuser (AF state becomes relevant).
            entry = device->info().find(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
            if (entry.count > 0 && entry.data.f[0] != 0.) {
                mHasFocuser = true;
            }
        }
    }

    ALOGV("%s: Initialize buffer queue and frame list depth based on max pipeline depth (%zu)",
          __FUNCTION__, pipelineMaxDepth);
    // Need to keep buffer queue longer than metadata queue because sometimes buffer arrives
    // earlier than metadata which causes the buffer corresponding to oldest metadata being
    // removed.
    mFrameListDepth = pipelineMaxDepth;
    mBufferQueueDepth = mFrameListDepth + 1;

    mZslQueue.insertAt(0, mBufferQueueDepth);
    mFrameList.insertAt(0, mFrameListDepth);
    sp<CaptureSequencer> captureSequencer = mSequencer.promote();
    if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
}
179
/**
 * Destructor: tears down any remaining ZSL/input streams and disconnects
 * from the input producer via deleteStream().
 */
ZslProcessor::~ZslProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}
184
onResultAvailable(const CaptureResult & result)185 void ZslProcessor::onResultAvailable(const CaptureResult &result) {
186 ATRACE_CALL();
187 ALOGV("%s:", __FUNCTION__);
188 Mutex::Autolock l(mInputMutex);
189 camera_metadata_ro_entry_t entry;
190 entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
191 nsecs_t timestamp = entry.data.i64[0];
192 if (entry.count == 0) {
193 ALOGE("%s: metadata doesn't have timestamp, skip this result", __FUNCTION__);
194 return;
195 }
196
197 entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
198 if (entry.count == 0) {
199 ALOGE("%s: metadata doesn't have frame number, skip this result", __FUNCTION__);
200 return;
201 }
202 int32_t frameNumber = entry.data.i32[0];
203
204 ALOGVV("Got preview metadata for frame %d with timestamp %" PRId64, frameNumber, timestamp);
205
206 if (mState != RUNNING) return;
207
208 // Corresponding buffer has been cleared. No need to push into mFrameList
209 if (timestamp <= mLatestClearedBufferTimestamp) return;
210
211 mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
212 mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
213 }
214
/**
 * (Re)configure the ZSL streams for the given parameters.
 *
 * If existing streams no longer match the sensor active-array dimensions,
 * both the output and input streams are deleted (an -EBUSY from the device
 * is returned unchanged so the caller can retry after the device idles) and
 * the input producer connection is dropped. Missing streams are then
 * (re)created: a reprocess input stream, and an implementation-defined
 * output stream backed by a RingBufferConsumer that retains the last
 * mBufferQueueDepth frames. Finally registers this processor as a frame
 * listener for preview request IDs.
 */
status_t ZslProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<Camera2Client> client = mClient.promote();
    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }
    sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    if ((mZslStreamId != NO_STREAM) || (mInputStreamId != NO_STREAM)) {
        // Check if stream parameters have to change.
        // NOTE(review): info is queried for mZslStreamId even when only the
        // input stream exists — confirm getStreamInfo handles NO_STREAM.
        CameraDeviceBase::StreamInfo streamInfo;
        res = device->getStreamInfo(mZslStreamId, &streamInfo);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    client->getCameraId(), strerror(-res), res);
            return res;
        }
        if (streamInfo.width != (uint32_t)params.fastInfo.arrayWidth ||
                streamInfo.height != (uint32_t)params.fastInfo.arrayHeight) {
            if (mZslStreamId != NO_STREAM) {
                ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
                      "dimensions changed",
                    __FUNCTION__, client->getCameraId(), mZslStreamId);
                res = device->deleteStream(mZslStreamId);
                if (res == -EBUSY) {
                    // Propagate -EBUSY untouched; caller retries when idle.
                    ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                          " after it becomes idle", __FUNCTION__, mId);
                    return res;
                } else if(res != OK) {
                    ALOGE("%s: Camera %d: Unable to delete old output stream "
                            "for ZSL: %s (%d)", __FUNCTION__,
                            client->getCameraId(), strerror(-res), res);
                    return res;
                }
                mZslStreamId = NO_STREAM;
            }

            if (mInputStreamId != NO_STREAM) {
                ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
                      "dimensions changed",
                    __FUNCTION__, client->getCameraId(), mInputStreamId);
                res = device->deleteStream(mInputStreamId);
                if (res == -EBUSY) {
                    ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                          " after it becomes idle", __FUNCTION__, mId);
                    return res;
                } else if(res != OK) {
                    ALOGE("%s: Camera %d: Unable to delete old output stream "
                            "for ZSL: %s (%d)", __FUNCTION__,
                            client->getCameraId(), strerror(-res), res);
                    return res;
                }
                mInputStreamId = NO_STREAM;
            }
            // Streams are gone; the old producer connection is stale too.
            if (nullptr != mInputProducer.get()) {
                mInputProducer->disconnect(NATIVE_WINDOW_API_CPU);
                mInputProducer.clear();
            }
        }
    }

    if (mInputStreamId == NO_STREAM) {
        // Input stream sized to the full active array for reprocessing.
        res = device->createInputStream(params.fastInfo.arrayWidth,
            params.fastInfo.arrayHeight, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            &mInputStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create input stream: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    if (mZslStreamId == NO_STREAM) {
        // Create stream for HAL production
        // TODO: Sort out better way to select resolution for ZSL

        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_ZSL,
            mBufferQueueDepth);
        mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
        sp<Surface> outSurface = new Surface(producer);

        res = device->createStream(outSurface, params.fastInfo.arrayWidth,
            params.fastInfo.arrayHeight, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0, &mZslStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create ZSL stream: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    // Receive preview results (no partials) so onResultAvailable can cache
    // metadata for later timestamp matching.
    client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
            Camera2Client::kPreviewRequestIdEnd,
            this,
            /*sendPartials*/false);

    return OK;
}
330
deleteStream()331 status_t ZslProcessor::deleteStream() {
332 ATRACE_CALL();
333 status_t res;
334 sp<Camera3Device> device = nullptr;
335 sp<Camera2Client> client = nullptr;
336
337 Mutex::Autolock l(mInputMutex);
338
339 if ((mZslStreamId != NO_STREAM) || (mInputStreamId != NO_STREAM)) {
340 client = mClient.promote();
341 if (client == 0) {
342 ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
343 return INVALID_OPERATION;
344 }
345
346 device =
347 reinterpret_cast<Camera3Device*>(client->getCameraDevice().get());
348 if (device == 0) {
349 ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
350 return INVALID_OPERATION;
351 }
352 }
353
354 if (mZslStreamId != NO_STREAM) {
355 res = device->deleteStream(mZslStreamId);
356 if (res != OK) {
357 ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
358 "%s (%d)", __FUNCTION__, client->getCameraId(),
359 mZslStreamId, strerror(-res), res);
360 return res;
361 }
362
363 mZslStreamId = NO_STREAM;
364 }
365 if (mInputStreamId != NO_STREAM) {
366 res = device->deleteStream(mInputStreamId);
367 if (res != OK) {
368 ALOGE("%s: Camera %d: Cannot delete input stream %d: "
369 "%s (%d)", __FUNCTION__, client->getCameraId(),
370 mInputStreamId, strerror(-res), res);
371 return res;
372 }
373
374 mInputStreamId = NO_STREAM;
375 }
376
377 if (nullptr != mInputProducer.get()) {
378 mInputProducer->disconnect(NATIVE_WINDOW_API_CPU);
379 mInputProducer.clear();
380 }
381
382 return OK;
383 }
384
/**
 * Return the ZSL output stream ID (NO_STREAM if not configured).
 * Thread-safe: takes mInputMutex.
 */
int ZslProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mZslStreamId;
}
389
updateRequestWithDefaultStillRequest(CameraMetadata & request) const390 status_t ZslProcessor::updateRequestWithDefaultStillRequest(CameraMetadata &request) const {
391 sp<Camera2Client> client = mClient.promote();
392 if (client == 0) {
393 ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
394 return INVALID_OPERATION;
395 }
396 sp<Camera3Device> device =
397 static_cast<Camera3Device*>(client->getCameraDevice().get());
398 if (device == 0) {
399 ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
400 return INVALID_OPERATION;
401 }
402
403 CameraMetadata stillTemplate;
404 device->createDefaultRequest(CAMERA3_TEMPLATE_STILL_CAPTURE, &stillTemplate);
405
406 // Find some of the post-processing tags, and assign the value from template to the request.
407 // Only check the aberration mode and noise reduction mode for now, as they are very important
408 // for image quality.
409 uint32_t postProcessingTags[] = {
410 ANDROID_NOISE_REDUCTION_MODE,
411 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
412 ANDROID_COLOR_CORRECTION_MODE,
413 ANDROID_TONEMAP_MODE,
414 ANDROID_SHADING_MODE,
415 ANDROID_HOT_PIXEL_MODE,
416 ANDROID_EDGE_MODE
417 };
418
419 camera_metadata_entry_t entry;
420 for (size_t i = 0; i < sizeof(postProcessingTags) / sizeof(uint32_t); i++) {
421 entry = stillTemplate.find(postProcessingTags[i]);
422 if (entry.count > 0) {
423 request.update(postProcessingTags[i], entry.data.u8, 1);
424 }
425 }
426
427 return OK;
428 }
429
/**
 * Record that the device released a reprocess input buffer and wake the
 * worker thread (threadLoop) so it can detach the buffer from the producer.
 */
void ZslProcessor::notifyInputReleased() {
    Mutex::Autolock l(mInputMutex);

    mBuffersToDetach++;
    mBuffersToDetachSignal.signal();
}
436
/**
 * Detach the just-released buffer from the input producer, verify it is the
 * buffer we queued for reprocessing, then unpin it and clear the ZSL result
 * queue. Must be called with mInputMutex held (worker thread only).
 *
 * Sets mState back to RUNNING so new ZSL requests are accepted again.
 */
void ZslProcessor::doNotifyInputReleasedLocked() {
    assert(nullptr != mInputBuffer.get());
    assert(nullptr != mInputProducer.get());

    sp<GraphicBuffer> gb;
    sp<Fence> fence;
    auto rc = mInputProducer->detachNextBuffer(&gb, &fence);
    if (NO_ERROR != rc) {
        ALOGE("%s: Failed to detach buffer from input producer: %d",
            __FUNCTION__, rc);
        return;
    }

    // Sanity-check by native handle: the detached buffer must be the same
    // one pinned in mInputBuffer when the reprocess request was queued.
    BufferItem &item = mInputBuffer->getBufferItem();
    sp<GraphicBuffer> inputBuffer = item.mGraphicBuffer;
    if (gb->handle != inputBuffer->handle) {
        ALOGE("%s: Input mismatch, expected buffer %p received %p", __FUNCTION__,
            inputBuffer->handle, gb->handle);
        return;
    }

    // Unpin the ring-buffer entry and release the cached metadata.
    mInputBuffer.clear();
    ALOGV("%s: Memory optimization, clearing ZSL queue",
          __FUNCTION__);
    clearZslResultQueueLocked();

    // Required so we accept more ZSL requests
    mState = RUNNING;
}
466
onBufferReleased()467 void ZslProcessor::InputProducerListener::onBufferReleased() {
468 sp<ZslProcessor> parent = mParent.promote();
469 if (nullptr != parent.get()) {
470 parent->notifyInputReleased();
471 }
472 }
473
/**
 * Submit a ZSL reprocess capture for |requestId|.
 *
 * Picks the best cached metadata frame (getCandidateTimestampLocked),
 * lazily connects to the device's input buffer producer, queues the
 * matching ring-buffer frame as reprocess input, then builds a reprocess
 * request from the cached metadata (request type, input/output streams,
 * still-capture intent, JPEG and post-processing settings) and submits it.
 *
 * Returns NOT_ENOUGH_DATA when no suitable candidate/buffer exists or AE is
 * not converged/locked, BAD_VALUE on malformed metadata, INVALID_OPERATION
 * on request-assembly failures, or the device error. On success sets mState
 * to LOCKED until the input buffer is returned.
 */
status_t ZslProcessor::pushToReprocess(int32_t requestId) {
    ALOGV("%s: Send in reprocess request with id %d",
            __FUNCTION__, requestId);
    Mutex::Autolock l(mInputMutex);
    status_t res;
    sp<Camera2Client> client = mClient.promote();

    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    IF_ALOGV() {
        dumpZslQueue(-1);
    }

    size_t metadataIdx;
    nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx);

    if (candidateTimestamp == -1) {
        ALOGV("%s: Could not find good candidate for ZSL reprocessing",
              __FUNCTION__);
        return NOT_ENOUGH_DATA;
    } else {
        ALOGV("%s: Found good ZSL candidate idx: %u",
            __FUNCTION__, (unsigned int) metadataIdx);
    }

    // Lazily connect to the reprocess input producer on first use.
    if (nullptr == mInputProducer.get()) {
        res = client->getCameraDevice()->getInputBufferProducer(
            &mInputProducer);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to retrieve input producer: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }

        IGraphicBufferProducer::QueueBufferOutput output;
        res = mInputProducer->connect(new InputProducerListener(this),
                NATIVE_WINDOW_API_CPU, false, &output);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to connect to input producer: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    // Pin and queue the ring-buffer frame matching the candidate timestamp.
    res = enqueueInputBufferByTimestamp(candidateTimestamp,
        /*actualTimestamp*/NULL);
    if (res == NO_BUFFER_AVAILABLE) {
        ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
        return NOT_ENOUGH_DATA;
    } else if (res != OK) {
        ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    {
        CameraMetadata request = mFrameList[metadataIdx];

        // Verify that the frame is reasonable for reprocessing

        camera_metadata_entry_t entry;
        entry = request.find(ANDROID_CONTROL_AE_STATE);
        if (entry.count == 0) {
            ALOGE("%s: ZSL queue frame has no AE state field!",
                    __FUNCTION__);
            return BAD_VALUE;
        }
        if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
            ALOGV("%s: ZSL queue frame AE state is %d, need full capture",
                    __FUNCTION__, entry.data.u8[0]);
            return NOT_ENOUGH_DATA;
        }

        // Mark the request as a reprocess of the queued input buffer.
        uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
        res = request.update(ANDROID_REQUEST_TYPE,
                &requestType, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request type",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        int32_t inputStreams[1] =
                { mInputStreamId };
        res = request.update(ANDROID_REQUEST_INPUT_STREAMS,
                inputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request input streams",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        uint8_t captureIntent =
                static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
        res = request.update(ANDROID_CONTROL_CAPTURE_INTENT,
                &captureIntent, 1);
        if (res != OK ) {
            ALOGE("%s: Unable to update request capture intent",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        // TODO: Shouldn't we also update the latest preview frame?
        int32_t outputStreams[1] =
                { client->getCaptureStreamId() };
        res = request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                outputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request output streams",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        res = request.update(ANDROID_REQUEST_ID,
                &requestId, 1);
        if (res != OK ) {
            ALOGE("%s: Unable to update frame to a reprocess request",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        // Preview must stop before a reprocess capture is issued.
        res = client->stopStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
                "%s (%d)",
                __FUNCTION__, client->getCameraId(), strerror(-res), res);
            return INVALID_OPERATION;
        }

        // Update JPEG settings
        {
            SharedParameters::Lock l(client->getParameters());
            res = l.mParameters.updateRequestJpeg(&request);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
                        "capture request: %s (%d)", __FUNCTION__,
                        client->getCameraId(),
                        strerror(-res), res);
                return res;
            }
        }

        // Update post-processing settings
        res = updateRequestWithDefaultStillRequest(request);
        if (res != OK) {
            // Best-effort: proceed with the capture even if this fails.
            ALOGW("%s: Unable to update post-processing tags, the reprocessed image quality "
                    "may be compromised", __FUNCTION__);
        }

        mLatestCapturedRequest = request;
        res = client->getCameraDevice()->capture(request);
        if (res != OK ) {
            ALOGE("%s: Unable to send ZSL reprocess request to capture: %s"
                  " (%d)", __FUNCTION__, strerror(-res), res);
            return res;
        }

        // Locked until doNotifyInputReleasedLocked() returns the buffer.
        mState = LOCKED;
    }

    return OK;
}
642
/**
 * Pin the ring-buffer frame whose timestamp best matches |timestamp| (via
 * TimestampFinder) and queue its graphic buffer into the reprocess input
 * producer.
 *
 * On success the pinned buffer is held in mInputBuffer until the device
 * releases it. If |actualTimestamp| is non-null it receives the timestamp of
 * the buffer actually selected, which may differ from the request (logged as
 * a warning, since a mismatched buffer can cause reprocess side effects).
 *
 * Returns NO_BUFFER_AVAILABLE when the ring buffer is empty, or the
 * producer attach/queue error.
 */
status_t ZslProcessor::enqueueInputBufferByTimestamp(
        nsecs_t timestamp,
        nsecs_t* actualTimestamp) {

    TimestampFinder timestampFinder = TimestampFinder(timestamp);

    mInputBuffer = mProducer->pinSelectedBuffer(timestampFinder,
        /*waitForFence*/false);

    if (nullptr == mInputBuffer.get()) {
        ALOGE("%s: No ZSL buffers were available yet", __FUNCTION__);
        return NO_BUFFER_AVAILABLE;
    }

    nsecs_t actual = mInputBuffer->getBufferItem().mTimestamp;

    if (actual != timestamp) {
        // TODO: This is problematic, the metadata queue timestamp should
        //       usually have a corresponding ZSL buffer with the same timestamp.
        //       If this is not the case, then it is possible that we will use
        //       a ZSL buffer from a different request, which can result in
        //       side effects during the reprocess pass.
        ALOGW("%s: ZSL buffer candidate search didn't find an exact match --"
              " requested timestamp = %" PRId64 ", actual timestamp = %" PRId64,
              __FUNCTION__, timestamp, actual);
    }

    if (nullptr != actualTimestamp) {
        *actualTimestamp = actual;
    }

    // Attach the pinned buffer to the input producer and queue it with the
    // original buffer item's timing/crop/transform metadata preserved.
    BufferItem &item = mInputBuffer->getBufferItem();
    auto rc = mInputProducer->attachBuffer(&mInputProducerSlot,
        item.mGraphicBuffer);
    if (OK != rc) {
        ALOGE("%s: Failed to attach input ZSL buffer to producer: %d",
            __FUNCTION__, rc);
        return rc;
    }

    IGraphicBufferProducer::QueueBufferOutput output;
    IGraphicBufferProducer::QueueBufferInput input(item.mTimestamp,
            item.mIsAutoTimestamp, item.mDataSpace, item.mCrop,
            item.mScalingMode, item.mTransform, item.mFence);
    rc = mInputProducer->queueBuffer(mInputProducerSlot, input, &output);
    if (OK != rc) {
        ALOGE("%s: Failed to queue ZSL buffer to producer: %d",
            __FUNCTION__, rc);
        return rc;
    }

    return rc;
}
696
clearInputRingBufferLocked(nsecs_t * latestTimestamp)697 status_t ZslProcessor::clearInputRingBufferLocked(nsecs_t* latestTimestamp) {
698
699 if (nullptr != latestTimestamp) {
700 *latestTimestamp = mProducer->getLatestTimestamp();
701 }
702 mInputBuffer.clear();
703
704 return mProducer->clear();
705 }
706
clearZslQueue()707 status_t ZslProcessor::clearZslQueue() {
708 Mutex::Autolock l(mInputMutex);
709 // If in middle of capture, can't clear out queue
710 if (mState == LOCKED) return OK;
711
712 return clearZslQueueLocked();
713 }
714
clearZslQueueLocked()715 status_t ZslProcessor::clearZslQueueLocked() {
716 if (NO_STREAM != mZslStreamId) {
717 // clear result metadata list first.
718 clearZslResultQueueLocked();
719 return clearInputRingBufferLocked(&mLatestClearedBufferTimestamp);
720 }
721 return OK;
722 }
723
/**
 * Reset the circular metadata frame list to mFrameListDepth empty entries
 * and rewind the head index. Caller must hold mInputMutex.
 */
void ZslProcessor::clearZslResultQueueLocked() {
    mFrameList.clear();
    mFrameListHead = 0;
    mFrameList.insertAt(0, mFrameListDepth);
}
729
dump(int fd,const Vector<String16> &) const730 void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
731 Mutex::Autolock l(mInputMutex);
732 if (!mLatestCapturedRequest.isEmpty()) {
733 String8 result(" Latest ZSL capture request:\n");
734 write(fd, result.string(), result.size());
735 mLatestCapturedRequest.dump(fd, 2, 6);
736 } else {
737 String8 result(" Latest ZSL capture request: none yet\n");
738 write(fd, result.string(), result.size());
739 }
740 dumpZslQueue(fd);
741 }
742
/**
 * Worker loop: wait (up to kWaitDuration) for released input buffers, then
 * detach each pending one via doNotifyInputReleasedLocked(). Always returns
 * true so the Thread keeps looping.
 */
bool ZslProcessor::threadLoop() {
    Mutex::Autolock l(mInputMutex);

    if (mBuffersToDetach == 0) {
        // Wait for notifyInputReleased() to signal; mutex is released while
        // waiting and re-acquired on wakeup.
        status_t res = mBuffersToDetachSignal.waitRelative(mInputMutex, kWaitDuration);
        if (res == TIMED_OUT) return true;
    }
    while (mBuffersToDetach > 0) {
        doNotifyInputReleasedLocked();
        mBuffersToDetach--;
    }

    return true;
}
757
/**
 * Log (and, when fd != -1, also write to |fd|) each ZSL queue entry's buffer
 * timestamp, matched frame timestamp, and AE state. Entries without matched
 * metadata show a zero timestamp and AE state -1.
 */
void ZslProcessor::dumpZslQueue(int fd) const {
    String8 header("ZSL queue contents:");
    String8 indent(" ");
    ALOGV("%s", header.string());
    if (fd != -1) {
        header = indent + header + "\n";
        write(fd, header.string(), header.size());
    }
    for (size_t i = 0; i < mZslQueue.size(); i++) {
        const ZslPair &queueEntry = mZslQueue[i];
        nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
        camera_metadata_ro_entry_t entry;
        nsecs_t frameTimestamp = 0;
        int frameAeState = -1;
        if (!queueEntry.frame.isEmpty()) {
            entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (entry.count > 0) frameTimestamp = entry.data.i64[0];
            entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
            if (entry.count > 0) frameAeState = entry.data.u8[0];
        }
        String8 result =
                String8::format(" %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
                        bufferTimestamp, frameTimestamp, frameAeState);
        ALOGV("%s", result.string());
        if (fd != -1) {
            result = indent + result + "\n";
            write(fd, result.string(), result.size());
        }

    }
}
789
isFixedFocusMode(uint8_t afMode) const790 bool ZslProcessor::isFixedFocusMode(uint8_t afMode) const {
791 switch (afMode) {
792 case ANDROID_CONTROL_AF_MODE_AUTO:
793 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
794 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
795 case ANDROID_CONTROL_AF_MODE_MACRO:
796 return false;
797 break;
798 case ANDROID_CONTROL_AF_MODE_OFF:
799 case ANDROID_CONTROL_AF_MODE_EDOF:
800 return true;
801 default:
802 ALOGE("%s: unknown focus mode %d", __FUNCTION__, afMode);
803 return false;
804 }
805 }
806
getCandidateTimestampLocked(size_t * metadataIdx) const807 nsecs_t ZslProcessor::getCandidateTimestampLocked(size_t* metadataIdx) const {
808 /**
809 * Find the smallest timestamp we know about so far
810 * - ensure that aeState is either converged or locked
811 */
812
813 size_t idx = 0;
814 nsecs_t minTimestamp = -1;
815
816 size_t emptyCount = mFrameList.size();
817
818 for (size_t j = 0; j < mFrameList.size(); j++) {
819 const CameraMetadata &frame = mFrameList[j];
820 if (!frame.isEmpty()) {
821
822 emptyCount--;
823
824 camera_metadata_ro_entry_t entry;
825 entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
826 if (entry.count == 0) {
827 ALOGE("%s: Can't find timestamp in frame!",
828 __FUNCTION__);
829 continue;
830 }
831 nsecs_t frameTimestamp = entry.data.i64[0];
832 if (minTimestamp > frameTimestamp || minTimestamp == -1) {
833
834 entry = frame.find(ANDROID_CONTROL_AE_STATE);
835
836 if (entry.count == 0) {
837 /**
838 * This is most likely a HAL bug. The aeState field is
839 * mandatory, so it should always be in a metadata packet.
840 */
841 ALOGW("%s: ZSL queue frame has no AE state field!",
842 __FUNCTION__);
843 continue;
844 }
845 if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
846 entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
847 ALOGVV("%s: ZSL queue frame AE state is %d, need "
848 "full capture", __FUNCTION__, entry.data.u8[0]);
849 continue;
850 }
851
852 entry = frame.find(ANDROID_CONTROL_AF_MODE);
853 if (entry.count == 0) {
854 ALOGW("%s: ZSL queue frame has no AF mode field!",
855 __FUNCTION__);
856 continue;
857 }
858 uint8_t afMode = entry.data.u8[0];
859 if (afMode == ANDROID_CONTROL_AF_MODE_OFF) {
860 // Skip all the ZSL buffer for manual AF mode, as we don't really
861 // know the af state.
862 continue;
863 }
864
865 // Check AF state if device has focuser and focus mode isn't fixed
866 if (mHasFocuser && !isFixedFocusMode(afMode)) {
867 // Make sure the candidate frame has good focus.
868 entry = frame.find(ANDROID_CONTROL_AF_STATE);
869 if (entry.count == 0) {
870 ALOGW("%s: ZSL queue frame has no AF state field!",
871 __FUNCTION__);
872 continue;
873 }
874 uint8_t afState = entry.data.u8[0];
875 if (afState != ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
876 afState != ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
877 afState != ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
878 ALOGVV("%s: ZSL queue frame AF state is %d is not good for capture, skip it",
879 __FUNCTION__, afState);
880 continue;
881 }
882 }
883
884 minTimestamp = frameTimestamp;
885 idx = j;
886 }
887
888 ALOGVV("%s: Saw timestamp %" PRId64, __FUNCTION__, frameTimestamp);
889 }
890 }
891
892 if (emptyCount == mFrameList.size()) {
893 /**
894 * This could be mildly bad and means our ZSL was triggered before
895 * there were any frames yet received by the camera framework.
896 *
897 * This is a fairly corner case which can happen under:
898 * + a user presses the shutter button real fast when the camera starts
899 * (startPreview followed immediately by takePicture).
900 * + burst capture case (hitting shutter button as fast possible)
901 *
902 * If this happens in steady case (preview running for a while, call
903 * a single takePicture) then this might be a fwk bug.
904 */
905 ALOGW("%s: ZSL queue has no metadata frames", __FUNCTION__);
906 }
907
908 ALOGV("%s: Candidate timestamp %" PRId64 " (idx %zu), empty frames: %zu",
909 __FUNCTION__, minTimestamp, idx, emptyCount);
910
911 if (metadataIdx) {
912 *metadataIdx = idx;
913 }
914
915 return minTimestamp;
916 }
917
918 }; // namespace camera2
919 }; // namespace android
920