1 /*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera3-OutputUtils"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20 //#define LOG_NNDEBUG 0 // Per-frame verbose logging
21
22 #ifdef LOG_NNDEBUG
23 #define ALOGVV(...) ALOGV(__VA_ARGS__)
24 #else
25 #define ALOGVV(...) ((void)0)
26 #endif
27
28 // Convenience macros for transitioning to the error state
29 #define SET_ERR(fmt, ...) states.setErrIntf.setErrorState( \
30 "%s: " fmt, __FUNCTION__, \
31 ##__VA_ARGS__)
32
33 #include <inttypes.h>
34
35 #include <utils/Log.h>
36 #include <utils/SortedVector.h>
37 #include <utils/Trace.h>
38
39 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
40
41 #include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
42 #include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
43 #include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
44
45 #include <camera/CameraUtils.h>
46 #include <camera_metadata_hidden.h>
47
48 #include "device3/Camera3OutputUtils.h"
49
50 #include "system/camera_metadata.h"
51
52 using namespace android::camera3;
53 using namespace android::hardware::camera;
54
55 namespace android {
56 namespace camera3 {
57
/**
 * Rewrite color-dependent result metadata for a monochrome camera.
 *
 * No-op (returns OK) unless states.needFixupMonoChrome is set. Otherwise this
 * removes tags that are meaningless without a color filter array, and collapses
 * per-color-channel entries so every channel reports the same (first/red
 * channel) value. The metadata is modified in place, partly by writing through
 * the entry's data pointer.
 *
 * @param states         Capture output state; only needFixupMonoChrome is read.
 * @param deviceInfo     Static info of the camera this metadata belongs to
 *                       (used for the lens shading map dimensions).
 * @param resultMetadata Result metadata to fix up in place.
 * @return OK on success, or the error from the failing erase()/update() call.
 */
status_t fixupMonochromeTags(
        CaptureOutputStates& states,
        const CameraMetadata& deviceInfo,
        CameraMetadata& resultMetadata) {
    status_t res = OK;
    if (!states.needFixupMonoChrome) {
        return res;
    }

    // Remove tags that are not applicable to monochrome camera.
    int32_t tagsToRemove[] = {
           ANDROID_SENSOR_GREEN_SPLIT,
           ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
           ANDROID_COLOR_CORRECTION_MODE,
           ANDROID_COLOR_CORRECTION_TRANSFORM,
           ANDROID_COLOR_CORRECTION_GAINS,
    };
    for (auto tag : tagsToRemove) {
        res = resultMetadata.erase(tag);
        if (res != OK) {
            ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
            return res;
        }
    }

    // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
    // Replicate the first channel's black level into the remaining channels,
    // writing directly into the metadata buffer through the entry pointer.
    camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
    for (size_t i = 1; i < blEntry.count; i++) {
        blEntry.data.f[i] = blEntry.data.f[0];
    }

    // ANDROID_SENSOR_NOISE_PROFILE
    // Keep only the first (S, O) noise-model pair; a mono sensor has one channel.
    camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
    if (npEntry.count > 0 && npEntry.count % 2 == 0) {
        double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
        res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
        if (res != OK) {
            ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    // ANDROID_STATISTICS_LENS_SHADING_MAP
    // The map stores 4 gain factors (R, Geven, Godd, B) per grid cell; copy the
    // first channel's gain into the other three. Only done when the entry size
    // matches the advertised shading-map dimensions.
    camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
    camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
    if (lsSizeEntry.count == 2 && lsEntry.count > 0
            && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
        for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
            lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
            lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
            lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
        }
    }

    // ANDROID_TONEMAP_CURVE_BLUE
    // ANDROID_TONEMAP_CURVE_GREEN
    // ANDROID_TONEMAP_CURVE_RED
    // Make the blue/green tonemap curves identical to the red curve, but only
    // when all three curves are present with matching lengths.
    camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
    camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
    camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
    if (tcbEntry.count > 0
            && tcbEntry.count == tcgEntry.count
            && tcbEntry.count == tcrEntry.count) {
        for (size_t i = 0; i < tcbEntry.count; i++) {
            tcbEntry.data.f[i] = tcrEntry.data.f[i];
            tcgEntry.data.f[i] = tcrEntry.data.f[i];
        }
    }

    return res;
}
130
insertResultLocked(CaptureOutputStates & states,CaptureResult * result,uint32_t frameNumber)131 void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
132 if (result == nullptr) return;
133
134 camera_metadata_t *meta = const_cast<camera_metadata_t *>(
135 result->mMetadata.getAndLock());
136 set_camera_metadata_vendor_id(meta, states.vendorTagId);
137 result->mMetadata.unlock(meta);
138
139 if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
140 (int32_t*)&frameNumber, 1) != OK) {
141 SET_ERR("Failed to set frame number %d in metadata", frameNumber);
142 return;
143 }
144
145 if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
146 SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
147 return;
148 }
149
150 // Update vendor tag id for physical metadata
151 for (auto& physicalMetadata : result->mPhysicalMetadatas) {
152 camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
153 physicalMetadata.mPhysicalCameraMetadata.getAndLock());
154 set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
155 physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
156 }
157
158 // Valid result, insert into queue
159 std::list<CaptureResult>::iterator queuedResult =
160 states.resultQueue.insert(states.resultQueue.end(), CaptureResult(*result));
161 ALOGV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
162 ", burstId = %" PRId32, __FUNCTION__,
163 queuedResult->mResultExtras.requestId,
164 queuedResult->mResultExtras.frameNumber,
165 queuedResult->mResultExtras.burstId);
166
167 states.resultSignal.notify_one();
168 }
169
170
sendPartialCaptureResult(CaptureOutputStates & states,const camera_metadata_t * partialResult,const CaptureResultExtras & resultExtras,uint32_t frameNumber)171 void sendPartialCaptureResult(CaptureOutputStates& states,
172 const camera_metadata_t * partialResult,
173 const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
174 ATRACE_CALL();
175 std::lock_guard<std::mutex> l(states.outputLock);
176
177 CaptureResult captureResult;
178 captureResult.mResultExtras = resultExtras;
179 captureResult.mMetadata = partialResult;
180
181 // Fix up result metadata for monochrome camera.
182 status_t res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
183 if (res != OK) {
184 SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
185 return;
186 }
187
188 // Update partial result by removing keys remapped by DistortionCorrection, ZoomRatio,
189 // and RotationAndCrop mappers.
190 std::set<uint32_t> keysToRemove;
191
192 auto iter = states.distortionMappers.find(states.cameraId.c_str());
193 if (iter != states.distortionMappers.end()) {
194 const auto& remappedKeys = iter->second.getRemappedKeys();
195 keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
196 }
197
198 const auto& remappedKeys = states.zoomRatioMappers[states.cameraId.c_str()].getRemappedKeys();
199 keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
200
201 auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
202 if (mapper != states.rotateAndCropMappers.end()) {
203 const auto& remappedKeys = iter->second.getRemappedKeys();
204 keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
205 }
206
207 for (uint32_t key : keysToRemove) {
208 captureResult.mMetadata.erase(key);
209 }
210
211 // Send partial result
212 if (captureResult.mMetadata.entryCount() > 0) {
213 insertResultLocked(states, &captureResult, frameNumber);
214 }
215 }
216
/**
 * Assemble the final capture result for a frame and queue it to clients.
 *
 * Enforces per-stream (regular / reprocess / ZSL-still) frame-number ordering,
 * merges previously collected partial results, validates that a sensor
 * timestamp is present, runs the distortion / zoom-ratio / rotate-and-crop /
 * monochrome metadata fix-ups on both the logical and physical metadata, feeds
 * the tag monitor, and finally inserts the result into the result queue.
 * Takes states.outputLock for the duration. Any failure transitions the device
 * to the error state via SET_ERR and drops the result.
 *
 * @param pendingMetadata        Final metadata from the HAL; if empty, no-op.
 * @param collectedPartialResult Aggregated earlier partials to append.
 * @param reprocess / zslStillCapture  Select which frame-number sequence to check.
 * @param rotateAndCropAuto      Whether ROTATE_AND_CROP was AUTO for this request.
 * @param cameraIdsWithZoom      Ids whose results carry a non-identity zoom ratio.
 */
void sendCaptureResult(
        CaptureOutputStates& states,
        CameraMetadata &pendingMetadata,
        CaptureResultExtras &resultExtras,
        CameraMetadata &collectedPartialResult,
        uint32_t frameNumber,
        bool reprocess, bool zslStillCapture, bool rotateAndCropAuto,
        const std::set<std::string>& cameraIdsWithZoom,
        const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
    ATRACE_CALL();
    if (pendingMetadata.isEmpty())
        return;

    std::lock_guard<std::mutex> l(states.outputLock);

    // TODO: need to track errors for tighter bounds on expected frame number
    // Reprocess, ZSL-still, and regular captures each keep an independent
    // monotonically increasing "next expected" frame number.
    if (reprocess) {
        if (frameNumber < states.nextReprocResultFrameNum) {
            SET_ERR("Out-of-order reprocess capture result metadata submitted! "
                    "(got frame number %d, expecting %d)",
                    frameNumber, states.nextReprocResultFrameNum);
            return;
        }
        states.nextReprocResultFrameNum = frameNumber + 1;
    } else if (zslStillCapture) {
        if (frameNumber < states.nextZslResultFrameNum) {
            SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
                    "(got frame number %d, expecting %d)",
                    frameNumber, states.nextZslResultFrameNum);
            return;
        }
        states.nextZslResultFrameNum = frameNumber + 1;
    } else {
        if (frameNumber < states.nextResultFrameNum) {
            SET_ERR("Out-of-order capture result metadata submitted! "
                    "(got frame number %d, expecting %d)",
                    frameNumber, states.nextResultFrameNum);
            return;
        }
        states.nextResultFrameNum = frameNumber + 1;
    }

    CaptureResult captureResult;
    captureResult.mResultExtras = resultExtras;
    captureResult.mMetadata = pendingMetadata;
    captureResult.mPhysicalMetadatas = physicalMetadatas;

    // Append any previous partials to form a complete result
    if (states.usePartialResult && !collectedPartialResult.isEmpty()) {
        captureResult.mMetadata.append(collectedPartialResult);
    }

    captureResult.mMetadata.sort();

    // Check that there's a timestamp in the result metadata
    camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
    if (timestamp.count == 0) {
        SET_ERR("No timestamp provided by HAL for frame %d!",
                frameNumber);
        return;
    }
    nsecs_t sensorTimestamp = timestamp.data.i64[0];

    // Every physical sub-result must also carry a sensor timestamp.
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        camera_metadata_entry timestamp =
                physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
        if (timestamp.count == 0) {
            SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
                    String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
            return;
        }
    }

    // Fix up some result metadata to account for HAL-level distortion correction
    status_t res = OK;
    auto iter = states.distortionMappers.find(states.cameraId.c_str());
    if (iter != states.distortionMappers.end()) {
        res = iter->second.correctCaptureResult(&captureResult.mMetadata);
        if (res != OK) {
            SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
                    frameNumber, strerror(-res), res);
            return;
        }
    }

    // Fix up result metadata to account for zoom ratio availabilities between
    // HAL and app.
    bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId.c_str()) == cameraIdsWithZoom.end();
    res = states.zoomRatioMappers[states.cameraId.c_str()].updateCaptureResult(
            &captureResult.mMetadata, zoomRatioIs1);
    if (res != OK) {
        SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
                frameNumber, strerror(-res), res);
        return;
    }

    // Fix up result metadata to account for rotateAndCrop in AUTO mode
    if (rotateAndCropAuto) {
        auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
        if (mapper != states.rotateAndCropMappers.end()) {
            res = mapper->second.updateCaptureResult(
                    &captureResult.mMetadata);
            if (res != OK) {
                SET_ERR("Unable to correct capture result rotate-and-crop for frame %d: %s (%d)",
                        frameNumber, strerror(-res), res);
                return;
            }
        }
    }

    // Apply the distortion and zoom-ratio fix-ups to each physical camera's
    // metadata as well, keyed by its own camera id.
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
        auto mapper = states.distortionMappers.find(cameraId8.c_str());
        if (mapper != states.distortionMappers.end()) {
            res = mapper->second.correctCaptureResult(
                    &physicalMetadata.mPhysicalCameraMetadata);
            if (res != OK) {
                SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
                        frameNumber, strerror(-res), res);
                return;
            }
        }

        zoomRatioIs1 = cameraIdsWithZoom.find(cameraId8.c_str()) == cameraIdsWithZoom.end();
        res = states.zoomRatioMappers[cameraId8.c_str()].updateCaptureResult(
                &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
        if (res != OK) {
            SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
                    "frame %d: %s(%d)", cameraId8.c_str(), frameNumber, strerror(-res), res);
            return;
        }
    }

    // Fix up result metadata for monochrome camera.
    res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
    if (res != OK) {
        SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
        return;
    }
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
        res = fixupMonochromeTags(states,
                states.physicalDeviceInfoMap.at(cameraId8.c_str()),
                physicalMetadata.mPhysicalCameraMetadata);
        if (res != OK) {
            SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
            return;
        }
    }

    // Feed the tag monitor (dumpsys tag watching) with the final logical and
    // physical metadata before queueing the result.
    std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
    for (auto& m : physicalMetadatas) {
        monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
                CameraMetadata(m.mPhysicalCameraMetadata));
    }
    states.tagMonitor.monitorMetadata(TagMonitor::RESULT,
            frameNumber, sensorTimestamp, captureResult.mMetadata,
            monitoredPhysicalMetadata);

    insertResultLocked(states, &captureResult, frameNumber);
}
378
removeInFlightMapEntryLocked(CaptureOutputStates & states,int idx)379 void removeInFlightMapEntryLocked(CaptureOutputStates& states, int idx) {
380 ATRACE_CALL();
381 InFlightRequestMap& inflightMap = states.inflightMap;
382 nsecs_t duration = inflightMap.valueAt(idx).maxExpectedDuration;
383 inflightMap.removeItemsAt(idx, 1);
384
385 states.inflightIntf.onInflightEntryRemovedLocked(duration);
386 }
387
/**
 * Remove the in-flight request at @p idx once every piece of it has arrived:
 * all buffers, result metadata (or a decision to skip it), and the shutter
 * notification. Returns any pending output buffers, records the completed
 * frame number, updates session statistics, and always re-checks the
 * in-flight map length afterwards. Caller must hold the in-flight lock.
 */
void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
    InFlightRequestMap& inflightMap = states.inflightMap;
    const InFlightRequest &request = inflightMap.valueAt(idx);
    const uint32_t frameNumber = inflightMap.keyAt(idx);
    SessionStatsBuilder& sessionStatsBuilder = states.sessionStatsBuilder;

    nsecs_t sensorTimestamp = request.sensorTimestamp;
    nsecs_t shutterTimestamp = request.shutterTimestamp;

    // Check if it's okay to remove the request from InFlightMap:
    // In the case of a successful request:
    //      all input and output buffers, all result metadata, shutter callback
    //      arrived.
    // In the case of an unsuccessful request:
    //      all input and output buffers, as well as request/result error notifications, arrived.
    if (request.numBuffersLeft == 0 &&
            (request.skipResultMetadata ||
            (request.haveResultMetadata && shutterTimestamp != 0))) {
        if (request.stillCapture) {
            ATRACE_ASYNC_END("still capture", frameNumber);
        }

        ATRACE_ASYNC_END("frame capture", frameNumber);

        // Validation check - if sensor timestamp matches shutter timestamp in the
        // case of request having callback.
        if (request.hasCallback && request.requestStatus == OK &&
                sensorTimestamp != shutterTimestamp) {
            SET_ERR("sensor timestamp (%" PRId64
                    ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
                    sensorTimestamp, frameNumber, shutterTimestamp);
        }

        // for an unsuccessful request, it may have pending output buffers to
        // return.
        assert(request.requestStatus != OK ||
                request.pendingOutputBuffers.size() == 0);

        // Flush any buffers still pending on this request. Timestamps are 0
        // here because a successful request has no pending buffers left, and
        // an errored one returns its buffers without valid timestamps.
        returnOutputBuffers(
                states.useHalBufManager, states.listener,
                request.pendingOutputBuffers.array(),
                request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
                /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
                /*timestampIncreasing*/true,
                request.outputSurfaces, request.resultExtras,
                request.errorBufStrategy, request.transform);

        // Note down the just completed frame number
        if (request.hasInputBuffer) {
            states.lastCompletedReprocessFrameNumber = frameNumber;
        } else if (request.zslCapture && request.stillCapture) {
            states.lastCompletedZslFrameNumber = frameNumber;
        } else {
            states.lastCompletedRegularFrameNumber = frameNumber;
        }

        sessionStatsBuilder.incResultCounter(request.skipResultMetadata);

        removeInFlightMapEntryLocked(states, idx);
        ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
    }

    states.inflightIntf.checkInflightMapLengthLocked();
}
452
453 // Erase the subset of physicalCameraIds that contains id
erasePhysicalCameraIdSet(std::set<std::set<String8>> & physicalCameraIds,const String8 & id)454 bool erasePhysicalCameraIdSet(
455 std::set<std::set<String8>>& physicalCameraIds, const String8& id) {
456 bool found = false;
457 for (auto iter = physicalCameraIds.begin(); iter != physicalCameraIds.end(); iter++) {
458 if (iter->count(id) == 1) {
459 physicalCameraIds.erase(iter);
460 found = true;
461 break;
462 }
463 }
464 return found;
465 }
466
/**
 * Main entry point for a HAL capture result (metadata, output buffers, and/or
 * input buffer) for one frame.
 *
 * Validates the result against the matching in-flight request, accumulates
 * partial metadata, tracks returned buffer counts, and — once the shutter has
 * been received — returns buffers and sends the final capture result. The
 * in-flight entry is removed when everything for the frame has arrived.
 * Malformed results move the device to the error state via SET_ERR.
 */
void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
    ATRACE_CALL();

    status_t res;

    uint32_t frameNumber = result->frame_number;
    // A result must carry at least one of: metadata, output buffers, input buffer.
    if (result->result == NULL && result->num_output_buffers == 0 &&
            result->input_buffer == NULL) {
        SET_ERR("No result data provided by HAL for frame %d",
                frameNumber);
        return;
    }

    // Without partial-result support, any metadata-bearing result must claim
    // partial_result == 1.
    if (!states.usePartialResult &&
            result->result != NULL &&
            result->partial_result != 1) {
        SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
                " if partial result is not supported",
                frameNumber, result->partial_result);
        return;
    }

    bool isPartialResult = false;
    CameraMetadata collectedPartialResult;
    bool hasInputBufferInRequest = false;

    // Get shutter timestamp and resultExtras from list of in-flight requests,
    // where it was added by the shutter notification for this frame. If the
    // shutter timestamp isn't received yet, append the output buffers to the
    // in-flight request and they will be returned when the shutter timestamp
    // arrives. Update the in-flight status and remove the in-flight entry if
    // all result data and shutter timestamp have been received.
    nsecs_t shutterTimestamp = 0;
    {
        std::lock_guard<std::mutex> l(states.inflightLock);
        ssize_t idx = states.inflightMap.indexOfKey(frameNumber);
        if (idx == NAME_NOT_FOUND) {
            SET_ERR("Unknown frame number for capture result: %d",
                    frameNumber);
            return;
        }
        InFlightRequest &request = states.inflightMap.editValueAt(idx);
        ALOGVV("%s: got InFlightRequest requestId = %" PRId32
                ", frameNumber = %" PRId64 ", burstId = %" PRId32
                ", partialResultCount = %d/%d, hasCallback = %d, num_output_buffers %d"
                ", usePartialResult = %d",
                __FUNCTION__, request.resultExtras.requestId,
                request.resultExtras.frameNumber, request.resultExtras.burstId,
                result->partial_result, states.numPartialResults,
                request.hasCallback, result->num_output_buffers,
                states.usePartialResult);
        // Always update the partial count to the latest one if it's not 0
        // (buffers only). When framework aggregates adjacent partial results
        // into one, the latest partial count will be used.
        if (result->partial_result != 0)
            request.resultExtras.partialResultCount = result->partial_result;

        // Track the active physical camera of a logical multi-camera: notify
        // the listener on a switch, and propagate the new physical device's
        // rotation transform to all in-flight requests at or after this one.
        if (result->result != nullptr) {
            camera_metadata_ro_entry entry;
            auto ret = find_camera_metadata_ro_entry(result->result,
                    ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, &entry);
            if ((ret == OK) && (entry.count > 0)) {
                std::string physicalId(reinterpret_cast<const char *>(entry.data.u8));
                if (!states.activePhysicalId.empty() && physicalId != states.activePhysicalId) {
                    states.listener->notifyPhysicalCameraChange(physicalId);
                }
                states.activePhysicalId = physicalId;

                if (!states.legacyClient && !states.overrideToPortrait) {
                    auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
                    if (deviceInfo != states.physicalDeviceInfoMap.end()) {
                        auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
                        if (orientation.count > 0) {
                            int32_t transform;
                            ret = CameraUtils::getRotationTransform(deviceInfo->second,
                                    OutputConfiguration::MIRROR_MODE_AUTO, &transform);
                            if (ret == OK) {
                                // It is possible for camera providers to return the capture
                                // results after the processed frames. In such scenario, we will
                                // not be able to set the output transformation before the frames
                                // return back to the consumer for the current capture request
                                // but we could still try and configure it for any future requests
                                // that are still in flight. The assumption is that the physical
                                // device id remains the same for the duration of the pending queue.
                                for (size_t i = 0; i < states.inflightMap.size(); i++) {
                                    auto &r = states.inflightMap.editValueAt(i);
                                    if (r.requestTimeNs >= request.requestTimeNs) {
                                        r.transform = transform;
                                    }
                                }
                            } else {
                                ALOGE("%s: Failed to calculate current stream transformation: %s "
                                        "(%d)", __FUNCTION__, strerror(-ret), ret);
                            }
                        } else {
                            ALOGE("%s: Physical device orientation absent!", __FUNCTION__);
                        }
                    } else {
                        ALOGE("%s: Physical device not found in device info map found!",
                                __FUNCTION__);
                    }
                }
            }
        }

        // Check if this result carries only partial metadata
        if (states.usePartialResult && result->result != NULL) {
            if (result->partial_result > states.numPartialResults || result->partial_result < 1) {
                SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
                        " the range of [1, %d] when metadata is included in the result",
                        frameNumber, result->partial_result, states.numPartialResults);
                return;
            }
            isPartialResult = (result->partial_result < states.numPartialResults);
            // Physical camera metadata may only arrive with the final result.
            if (isPartialResult && result->num_physcam_metadata) {
                SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
                        " physical camera result", frameNumber);
                return;
            }
            if (isPartialResult) {
                request.collectedPartialResult.append(result->result);
            }

            if (isPartialResult && request.hasCallback) {
                // Send partial capture result
                sendPartialCaptureResult(states, result->result, request.resultExtras,
                        frameNumber);
            }
        }

        shutterTimestamp = request.shutterTimestamp;
        hasInputBufferInRequest = request.hasInputBuffer;

        // Did we get the (final) result metadata for this capture?
        if (result->result != NULL && !isPartialResult) {
            // NOTE(review): size() is size_t but logged with %d; works in
            // practice on this platform but is technically mismatched.
            if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
                SET_ERR("Expected physical Camera metadata count %d not equal to actual count %d",
                        request.physicalCameraIds.size(), result->num_physcam_metadata);
                return;
            }
            if (request.haveResultMetadata) {
                SET_ERR("Called multiple times with metadata for frame %d",
                        frameNumber);
                return;
            }
            // Each physical camera id may contribute metadata exactly once.
            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                String8 physicalId(result->physcam_ids[i]);
                bool validPhysicalCameraMetadata =
                        erasePhysicalCameraIdSet(request.physicalCameraIds, physicalId);
                if (!validPhysicalCameraMetadata) {
                    SET_ERR("Unexpected total result for frame %d camera %s",
                            frameNumber, physicalId.c_str());
                    return;
                }
            }
            if (states.usePartialResult &&
                    !request.collectedPartialResult.isEmpty()) {
                collectedPartialResult.acquire(
                    request.collectedPartialResult);
            }
            request.haveResultMetadata = true;
            request.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
        }

        // The input buffer counts toward the total buffers owed for this request.
        uint32_t numBuffersReturned = result->num_output_buffers;
        if (result->input_buffer != NULL) {
            if (hasInputBufferInRequest) {
                numBuffersReturned += 1;
            } else {
                ALOGW("%s: Input buffer should be NULL if there is no input"
                        " buffer sent in the request",
                        __FUNCTION__);
            }
        }
        request.numBuffersLeft -= numBuffersReturned;
        if (request.numBuffersLeft < 0) {
            SET_ERR("Too many buffers returned for frame %d",
                    frameNumber);
            return;
        }

        // Cache the sensor timestamp if the (possibly partial) metadata has one.
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(result->result,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res == OK && entry.count == 1) {
            request.sensorTimestamp = entry.data.i64[0];
        }

        // If shutter event isn't received yet, do not return the pending output
        // buffers.
        request.pendingOutputBuffers.appendArray(result->output_buffers,
                result->num_output_buffers);
        if (shutterTimestamp != 0) {
            returnAndRemovePendingOutputBuffers(
                states.useHalBufManager, states.listener,
                request, states.sessionStatsBuilder);
        }

        // With the final metadata present, either stash it (shutter not seen
        // yet) or send the complete capture result now.
        if (result->result != NULL && !isPartialResult) {
            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                CameraMetadata physicalMetadata;
                physicalMetadata.append(result->physcam_metadata[i]);
                request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
                        physicalMetadata});
            }
            if (shutterTimestamp == 0) {
                request.pendingMetadata = result->result;
                request.collectedPartialResult = collectedPartialResult;
            } else if (request.hasCallback) {
                CameraMetadata metadata;
                metadata = result->result;
                sendCaptureResult(states, metadata, request.resultExtras,
                    collectedPartialResult, frameNumber,
                    hasInputBufferInRequest, request.zslCapture && request.stillCapture,
                    request.rotateAndCropAuto, request.cameraIdsWithZoom,
                    request.physicalMetadatas);
            }
        }
        removeInFlightRequestIfReadyLocked(states, idx);
    } // scope for states.inFlightLock

    // Return the input buffer outside the in-flight lock; the stream may be
    // released as a side effect.
    if (result->input_buffer != NULL) {
        if (hasInputBufferInRequest) {
            Camera3Stream *stream =
                    Camera3Stream::cast(result->input_buffer->stream);
            res = stream->returnInputBuffer(*(result->input_buffer));
            // Note: stream may be deallocated at this point, if this buffer was the
            // last reference to it.
            if (res != OK) {
                ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
                        "  its stream:%s (%d)",  __FUNCTION__,
                        frameNumber, strerror(-res), res);
            }
        } else {
            ALOGW("%s: Input buffer should be NULL if there is no input"
                    " buffer sent in the request, skipping input buffer return.",
                    __FUNCTION__);
        }
    }
}
707
/**
 * Return an array of output buffers to their streams.
 *
 * For each buffer: sends an ERROR_BUFFER notification when required by the
 * error-buffer strategy, returns the buffer to its stream (unless the
 * strategy says to keep errored buffers cached), records per-stream session
 * statistics, and handles the shared-stream returnBuffer timeout by
 * cancelling the buffer and notifying the client.
 *
 * @param useHalBufManager   True when HAL buffer management is in use; null
 *                           buffer handles are legal only in that mode.
 * @param timestamp / readoutTimestamp  Timestamps passed to the stream; 0 for
 *                           errored/flushed buffers.
 * @param requested          Whether these buffers were requested by the client
 *                           (drives capture-latency stats).
 * @param outputSurfaces     Per-stream surface id lists for shared streams.
 * @param errorBufStrategy   What to do with CAMERA_BUFFER_STATUS_ERROR buffers.
 */
void returnOutputBuffers(
        bool useHalBufManager,
        sp<NotificationListener> listener,
        const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
        nsecs_t timestamp, nsecs_t readoutTimestamp, bool requested,
        nsecs_t requestTimeNs, SessionStatsBuilder& sessionStatsBuilder,
        bool timestampIncreasing, const SurfaceMap& outputSurfaces,
        const CaptureResultExtras &inResultExtras,
        ERROR_BUF_STRATEGY errorBufStrategy, int32_t transform) {

    for (size_t i = 0; i < numBuffers; i++)
    {
        Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
        int streamId = stream->getId();

        // Call notify(ERROR_BUFFER) if necessary.
        if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR &&
                errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
            if (listener != nullptr) {
                CaptureResultExtras extras = inResultExtras;
                extras.errorStreamId = streamId;
                listener->notifyError(
                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                        extras);
            }
        }

        if (outputBuffers[i].buffer == nullptr) {
            if (!useHalBufManager) {
                // With HAL buffer management API, HAL sometimes will have to return buffers that
                // has not got a output buffer handle filled yet. This is though illegal if HAL
                // buffer management API is not being used.
                ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
            } else {
                if (requested) {
                    sessionStatsBuilder.incCounter(streamId, /*dropped*/true, 0);
                }
            }
            continue;
        }

        const auto& it = outputSurfaces.find(streamId);
        status_t res = OK;

        // Do not return the buffer if the buffer status is error, and the error
        // buffer strategy is CACHE.
        if (outputBuffers[i].status != CAMERA_BUFFER_STATUS_ERROR ||
                errorBufStrategy != ERROR_BUF_CACHE) {
            if (it != outputSurfaces.end()) {
                // Shared stream: return to the specific surfaces of this request.
                res = stream->returnBuffer(
                        outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
                        it->second, inResultExtras.frameNumber, transform);
            } else {
                res = stream->returnBuffer(
                        outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
                        std::vector<size_t> (), inResultExtras.frameNumber, transform);
            }
        }
        // Note: stream may be deallocated at this point, if this buffer was
        // the last reference to it.
        bool dropped = false;
        if (res == NO_INIT || res == DEAD_OBJECT) {
            // Stream is gone; stop counting stats for it rather than flagging drops.
            ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
            sessionStatsBuilder.stopCounter(streamId);
        } else if (res != OK) {
            ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
            dropped = true;
        } else {
            // A zero timestamp or an errored buffer counts as a dropped frame.
            if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR || timestamp == 0) {
                dropped = true;
            }
        }
        if (requested) {
            nsecs_t bufferTimeNs = systemTime();
            int32_t captureLatencyMs = ns2ms(bufferTimeNs - requestTimeNs);
            sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
        }

        // Long processing consumers can cause returnBuffer timeout for shared stream
        // If that happens, cancel the buffer and send a buffer error to client
        if (it != outputSurfaces.end() && res == TIMED_OUT &&
                outputBuffers[i].status == CAMERA_BUFFER_STATUS_OK) {
            // cancel the buffer
            camera_stream_buffer_t sb = outputBuffers[i];
            sb.status = CAMERA_BUFFER_STATUS_ERROR;
            stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
                    timestampIncreasing, std::vector<size_t> (),
                    inResultExtras.frameNumber, transform);

            if (listener != nullptr) {
                CaptureResultExtras extras = inResultExtras;
                extras.errorStreamId = streamId;
                listener->notifyError(
                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                        extras);
            }
        }
    }
}
807
returnAndRemovePendingOutputBuffers(bool useHalBufManager,sp<NotificationListener> listener,InFlightRequest & request,SessionStatsBuilder & sessionStatsBuilder)808 void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
809 sp<NotificationListener> listener, InFlightRequest& request,
810 SessionStatsBuilder& sessionStatsBuilder) {
811 bool timestampIncreasing =
812 !((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
813 nsecs_t readoutTimestamp = request.resultExtras.hasReadoutTimestamp ?
814 request.resultExtras.readoutTimestamp : 0;
815 returnOutputBuffers(useHalBufManager, listener,
816 request.pendingOutputBuffers.array(),
817 request.pendingOutputBuffers.size(),
818 request.shutterTimestamp, readoutTimestamp,
819 /*requested*/true, request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
820 request.outputSurfaces, request.resultExtras,
821 request.errorBufStrategy, request.transform);
822
823 // Remove error buffers that are not cached.
824 for (auto iter = request.pendingOutputBuffers.begin();
825 iter != request.pendingOutputBuffers.end(); ) {
826 if (request.errorBufStrategy != ERROR_BUF_CACHE ||
827 iter->status != CAMERA_BUFFER_STATUS_ERROR) {
828 iter = request.pendingOutputBuffers.erase(iter);
829 } else {
830 iter++;
831 }
832 }
833 }
834
// Handle a HAL shutter notification for one frame: record the (start-of-
// exposure and optional readout) timestamps on the matching in-flight
// request, verify shutter ordering, propagate min-frame-duration changes to
// the output streams, notify the client listener, and flush any pending
// result metadata and buffers for the frame.
void notifyShutter(CaptureOutputStates& states, const camera_shutter_msg_t &msg) {
    ATRACE_CALL();
    ssize_t idx;

    // Set timestamp for the request in the in-flight tracking
    // and get the request ID to send upstream
    {
        std::lock_guard<std::mutex> l(states.inflightLock);
        InFlightRequestMap& inflightMap = states.inflightMap;
        idx = inflightMap.indexOfKey(msg.frame_number);
        if (idx >= 0) {
            InFlightRequest &r = inflightMap.editValueAt(idx);

            // Verify ordering of shutter notifications
            {
                // outputLock nested inside inflightLock guards the
                // next*ShutterFrameNum counters below.
                std::lock_guard<std::mutex> l(states.outputLock);
                // TODO: need to track errors for tighter bounds on expected frame number.
                // Three independent monotonic sequences are tracked:
                // reprocess, ZSL-still, and regular captures.
                if (r.hasInputBuffer) {
                    if (msg.frame_number < states.nextReprocShutterFrameNum) {
                        SET_ERR("Reprocess shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextReprocShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextReprocShutterFrameNum = msg.frame_number + 1;
                } else if (r.zslCapture && r.stillCapture) {
                    if (msg.frame_number < states.nextZslShutterFrameNum) {
                        SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextZslShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextZslShutterFrameNum = msg.frame_number + 1;
                } else {
                    if (msg.frame_number < states.nextShutterFrameNum) {
                        SET_ERR("Shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextShutterFrameNum = msg.frame_number + 1;
                }
            }

            r.shutterTimestamp = msg.timestamp;
            // Readout timestamp is optional; only record it when the HAL
            // flagged it as valid.
            if (msg.readout_timestamp_valid) {
                r.resultExtras.hasReadoutTimestamp = true;
                r.resultExtras.readoutTimestamp = msg.readout_timestamp;
            }
            // Fan out min-duration / fixed-fps changes to every output stream
            // so they can adapt (only when either value actually changed).
            if (r.minExpectedDuration != states.minFrameDuration ||
                    r.isFixedFps != states.isFixedFps) {
                for (size_t i = 0; i < states.outputStreams.size(); i++) {
                    auto outputStream = states.outputStreams[i];
                    outputStream->onMinDurationChanged(r.minExpectedDuration, r.isFixedFps);
                }
                states.minFrameDuration = r.minExpectedDuration;
                states.isFixedFps = r.isFixedFps;
            }
            if (r.hasCallback) {
                ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
                    states.cameraId.string(), __FUNCTION__,
                    msg.frame_number, r.resultExtras.requestId, msg.timestamp);
                // Call listener, if any
                if (states.listener != nullptr) {
                    // Piggy-back the last-completed frame numbers on the
                    // shutter callback so the client can track completion.
                    r.resultExtras.lastCompletedRegularFrameNumber =
                            states.lastCompletedRegularFrameNumber;
                    r.resultExtras.lastCompletedReprocessFrameNumber =
                            states.lastCompletedReprocessFrameNumber;
                    r.resultExtras.lastCompletedZslFrameNumber =
                            states.lastCompletedZslFrameNumber;
                    states.listener->notifyShutter(r.resultExtras, msg.timestamp);
                }
                // send pending result and buffers
                sendCaptureResult(states,
                    r.pendingMetadata, r.resultExtras,
                    r.collectedPartialResult, msg.frame_number,
                    r.hasInputBuffer, r.zslCapture && r.stillCapture,
                    r.rotateAndCropAuto, r.cameraIdsWithZoom, r.physicalMetadatas);
            }
            // Buffers are returned after the result is sent so the shutter
            // timestamp recorded above is used for the buffer return.
            returnAndRemovePendingOutputBuffers(
                    states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);

            removeInFlightRequestIfReadyLocked(states, idx);
        }
    }
    // A shutter for a frame we are not tracking is a serious HAL error.
    if (idx < 0) {
        SET_ERR("Shutter notification for non-existent frame number %d",
                msg.frame_number);
    }
}
925
// Handle a HAL error notification: translate the HAL error code to the
// binder-facing ICameraDeviceCallbacks code, update the affected in-flight
// request's error-buffer strategy, and notify the client listener.
void notifyError(CaptureOutputStates& states, const camera_error_msg_t &msg) {
    ATRACE_CALL();
    // Map camera HAL error codes to ICameraDeviceCallback error codes
    // Index into this with the HAL error code
    static const int32_t halErrorMap[CAMERA_MSG_NUM_ERRORS] = {
        // 0 = Unused error code
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
        // 1 = CAMERA_MSG_ERROR_DEVICE
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
        // 2 = CAMERA_MSG_ERROR_REQUEST
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
        // 3 = CAMERA_MSG_ERROR_RESULT
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
        // 4 = CAMERA_MSG_ERROR_BUFFER
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
    };

    // Out-of-range HAL codes collapse to INVALID_ERROR rather than indexing
    // past the map.
    int32_t errorCode =
            ((msg.error_code >= 0) &&
                    (msg.error_code < CAMERA_MSG_NUM_ERRORS)) ?
            halErrorMap[msg.error_code] :
            hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;

    int streamId = 0;
    String16 physicalCameraId;
    // error_stream is optional; when present, resolve the logical stream id
    // and the physical camera the stream belongs to.
    if (msg.error_stream != nullptr) {
        Camera3Stream *stream =
                Camera3Stream::cast(msg.error_stream);
        streamId = stream->getId();
        physicalCameraId = String16(stream->physicalCameraId());
    }
    ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
            states.cameraId.string(), __FUNCTION__, msg.frame_number,
            streamId, msg.error_code);

    CaptureResultExtras resultExtras;
    switch (errorCode) {
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
            // SET_ERR calls into listener to notify application
            SET_ERR("Camera HAL reported serious device error");
            break;
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
            {
                std::lock_guard<std::mutex> l(states.inflightLock);
                ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
                if (idx >= 0) {
                    InFlightRequest &r = states.inflightMap.editValueAt(idx);
                    r.requestStatus = msg.error_code;
                    resultExtras = r.resultExtras;
                    bool physicalDeviceResultError = false;
                    if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
                            errorCode) {
                        if (physicalCameraId.size() > 0) {
                            String8 cameraId(physicalCameraId);
                            bool validPhysicalCameraId =
                                    erasePhysicalCameraIdSet(r.physicalCameraIds, cameraId);
                            if (!validPhysicalCameraId) {
                                // NOTE: this break exits the switch entirely,
                                // so no error is delivered to the listener for
                                // a physical camera that was not part of the
                                // request.
                                ALOGE("%s: Reported result failure for physical camera device: %s "
                                        " which is not part of the respective request!",
                                        __FUNCTION__, cameraId.string());
                                break;
                            }
                            resultExtras.errorPhysicalCameraId = physicalCameraId;
                            physicalDeviceResultError = true;
                        }
                    }

                    if (!physicalDeviceResultError) {
                        // Logical-device error: drop the result metadata and
                        // pick the buffer strategy based on the error type.
                        r.skipResultMetadata = true;
                        if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT
                                == errorCode) {
                            r.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
                        } else {
                            // errorCode is ERROR_CAMERA_REQUEST
                            r.errorBufStrategy = ERROR_BUF_RETURN;
                        }

                        // Check whether the buffers returned. If they returned,
                        // remove inflight request.
                        removeInFlightRequestIfReadyLocked(states, idx);
                    }
                } else {
                    // No matching in-flight request; still notify the client
                    // with the frame number from the message.
                    resultExtras.frameNumber = msg.frame_number;
                    ALOGE("Camera %s: %s: cannot find in-flight request on "
                            "frame %" PRId64 " error", states.cameraId.string(), __FUNCTION__,
                            resultExtras.frameNumber);
                }
            }
            resultExtras.errorStreamId = streamId;
            if (states.listener != nullptr) {
                states.listener->notifyError(errorCode, resultExtras);
            } else {
                ALOGE("Camera %s: %s: no listener available",
                        states.cameraId.string(), __FUNCTION__);
            }
            break;
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
            // Do not depend on HAL ERROR_CAMERA_BUFFER to send buffer error
            // callback to the app. Rather, use STATUS_ERROR of image buffers.
            break;
        default:
            // SET_ERR calls notifyError
            SET_ERR("Unknown error message from HAL: %d", msg.error_code);
            break;
    }
}
1033
notify(CaptureOutputStates & states,const camera_notify_msg * msg)1034 void notify(CaptureOutputStates& states, const camera_notify_msg *msg) {
1035 switch (msg->type) {
1036 case CAMERA_MSG_ERROR: {
1037 notifyError(states, msg->message.error);
1038 break;
1039 }
1040 case CAMERA_MSG_SHUTTER: {
1041 notifyShutter(states, msg->message.shutter);
1042 break;
1043 }
1044 default:
1045 SET_ERR("Unknown notify message from HAL: %d",
1046 msg->type);
1047 }
1048 }
1049
// Flush everything the device still holds for in-flight captures: first
// return the buffers cached on each in-flight request and clear the map,
// then pop and return (with error status) every buffer the HAL has not yet
// returned, including buffers handed out through the HAL buffer manager.
void flushInflightRequests(FlushInflightReqStates& states) {
    ATRACE_CALL();
    { // First return buffers cached in inFlightMap
        std::lock_guard<std::mutex> l(states.inflightLock);
        for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
            const InFlightRequest &request = states.inflightMap.valueAt(idx);
            returnOutputBuffers(
                states.useHalBufManager, states.listener,
                request.pendingOutputBuffers.array(),
                request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
                /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
                /*timestampIncreasing*/true, request.outputSurfaces, request.resultExtras,
                request.errorBufStrategy);
            ALOGW("%s: Frame %d |  Timestamp: %" PRId64 ", metadata"
                    " arrived: %s, buffers left: %d.\n", __FUNCTION__,
                    states.inflightMap.keyAt(idx), request.shutterTimestamp,
                    request.haveResultMetadata ? "true" : "false",
                    request.numBuffersLeft);
        }

        states.inflightMap.clear();
        states.inflightIntf.onInflightMapFlushedLocked();
    }

    // Then return all inflight buffers not returned by HAL
    std::vector<std::pair<int32_t, int32_t>> inflightKeys;
    states.flushBufferIntf.getInflightBufferKeys(&inflightKeys);

    // Inflight buffers for HAL buffer manager
    std::vector<uint64_t> inflightRequestBufferKeys;
    states.flushBufferIntf.getInflightRequestBufferKeys(&inflightRequestBufferKeys);

    // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
    // frameNumber will be -1 for buffers from HAL buffer manager
    std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
    inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());

    // Pop buffers tracked per (frame, stream) from the device's records.
    for (auto& pair : inflightKeys) {
        int32_t frameNumber = pair.first;
        int32_t streamId = pair.second;
        buffer_handle_t* buffer;
        status_t res = states.bufferRecordsIntf.popInflightBuffer(frameNumber, streamId, &buffer);
        if (res != OK) {
            ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
                    __FUNCTION__, frameNumber, streamId);
            continue;
        }
        inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
    }

    // Pop buffers tracked by buffer id (HAL buffer manager mode); these have
    // no associated frame number.
    for (auto& bufferId : inflightRequestBufferKeys) {
        int32_t streamId = -1;
        buffer_handle_t* buffer = nullptr;
        status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
                bufferId, &buffer, &streamId);
        if (res != OK) {
            ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
            continue;
        }
        inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
    }

    std::vector<sp<Camera3StreamInterface>> streams = states.flushBufferIntf.getAllStreams();

    // Return each popped buffer, with error status, to the stream it came
    // from; the return path differs for output vs. input streams.
    for (auto& tuple : inflightBuffers) {
        status_t res = OK;
        int32_t streamId = std::get<0>(tuple);
        int32_t frameNumber = std::get<1>(tuple);
        buffer_handle_t* buffer = std::get<2>(tuple);

        camera_stream_buffer_t streamBuffer;
        streamBuffer.buffer = buffer;
        streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
        streamBuffer.acquire_fence = -1;
        streamBuffer.release_fence = -1;

        for (auto& stream : streams) {
            if (streamId == stream->getId()) {
                // Return buffer to deleted stream
                camera_stream* halStream = stream->asHalStream();
                streamBuffer.stream = halStream;
                switch (halStream->stream_type) {
                    case CAMERA_STREAM_OUTPUT:
                        res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
                                /*readoutTimestamp*/0, /*timestampIncreasing*/true,
                                std::vector<size_t> (), frameNumber);
                        if (res != OK) {
                            ALOGE("%s: Can't return output buffer for frame %d to"
                                  " stream %d: %s (%d)",  __FUNCTION__,
                                  frameNumber, streamId, strerror(-res), res);
                        }
                        break;
                    case CAMERA_STREAM_INPUT:
                        res = stream->returnInputBuffer(streamBuffer);
                        if (res != OK) {
                            ALOGE("%s: Can't return input buffer for frame %d to"
                                  " stream %d: %s (%d)",  __FUNCTION__,
                                  frameNumber, streamId, strerror(-res), res);
                        }
                        break;
                    default: // Bi-direcitonal stream is deprecated
                        ALOGE("%s: stream %d has unknown stream type %d",
                                __FUNCTION__, streamId, halStream->stream_type);
                        break;
                }
                break;
            }
        }
    }
}
1160
1161 } // camera3
1162 } // namespace android
1163