1 /*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera3-OutputUtils"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20 //#define LOG_NNDEBUG 0 // Per-frame verbose logging
21
22 #ifdef LOG_NNDEBUG
23 #define ALOGVV(...) ALOGV(__VA_ARGS__)
24 #else
25 #define ALOGVV(...) ((void)0)
26 #endif
27
28 // Convenience macros for transitioning to the error state
29 #define SET_ERR(fmt, ...) states.setErrIntf.setErrorState( \
30 "%s: " fmt, __FUNCTION__, \
31 ##__VA_ARGS__)
32
33 #include <inttypes.h>
34
35 #include <utils/Log.h>
36 #include <utils/SortedVector.h>
37 #include <utils/Trace.h>
38
39 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
40
41 #include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
42 #include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
43 #include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
44
45 #include <camera/CameraUtils.h>
46 #include <camera_metadata_hidden.h>
47
48 #include "device3/Camera3OutputUtils.h"
49
50 #include "system/camera_metadata.h"
51
52 using namespace android::camera3;
53 using namespace android::hardware::camera;
54
55 namespace android {
56 namespace camera3 {
57
// Fix up capture result metadata for a monochrome camera.
//
// Removes color-only tags and collapses multi-channel entries so that all
// color channels carry the first (monochrome) channel's value. No-op unless
// states.needFixupMonoChrome is set.
//
// @param states         Output-path state; only needFixupMonoChrome is read.
// @param deviceInfo     Static camera characteristics (used for the lens
//                       shading map size).
// @param resultMetadata Result metadata to fix up in place.
// @return OK on success, or the error from the failing erase/update call.
status_t fixupMonochromeTags(
        CaptureOutputStates& states,
        const CameraMetadata& deviceInfo,
        CameraMetadata& resultMetadata) {
    status_t res = OK;
    if (!states.needFixupMonoChrome) {
        return res;
    }

    // Remove tags that are not applicable to monochrome camera.
    int32_t tagsToRemove[] = {
        ANDROID_SENSOR_GREEN_SPLIT,
        ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
        ANDROID_COLOR_CORRECTION_MODE,
        ANDROID_COLOR_CORRECTION_TRANSFORM,
        ANDROID_COLOR_CORRECTION_GAINS,
    };
    for (auto tag : tagsToRemove) {
        res = resultMetadata.erase(tag);
        if (res != OK) {
            ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
            return res;
        }
    }

    // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
    // Replicate the first black-level value into the remaining channels,
    // modifying the entry's backing storage in place.
    camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
    for (size_t i = 1; i < blEntry.count; i++) {
        blEntry.data.f[i] = blEntry.data.f[0];
    }

    // ANDROID_SENSOR_NOISE_PROFILE
    // Keep only the first (S, O) pair; a mono sensor has a single channel.
    camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
    if (npEntry.count > 0 && npEntry.count % 2 == 0) {
        double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
        res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
        if (res != OK) {
            ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    // ANDROID_STATISTICS_LENS_SHADING_MAP
    // The map stores 4 gains (R, Geven, Godd, B) per grid cell; copy the
    // first channel's gain into the other three for each cell. Only done
    // when the entry size matches the advertised shading-map dimensions.
    camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
    camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
    if (lsSizeEntry.count == 2 && lsEntry.count > 0
            && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
        for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
            lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
            lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
            lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
        }
    }

    // ANDROID_TONEMAP_CURVE_BLUE
    // ANDROID_TONEMAP_CURVE_GREEN
    // ANDROID_TONEMAP_CURVE_RED
    // Copy the red curve onto green and blue so all channels agree.
    camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
    camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
    camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
    if (tcbEntry.count > 0
            && tcbEntry.count == tcgEntry.count
            && tcbEntry.count == tcrEntry.count) {
        for (size_t i = 0; i < tcbEntry.count; i++) {
            tcbEntry.data.f[i] = tcrEntry.data.f[i];
            tcgEntry.data.f[i] = tcrEntry.data.f[i];
        }
    }

    return res;
}
130
fixupAutoframingTags(CameraMetadata & resultMetadata)131 status_t fixupAutoframingTags(CameraMetadata& resultMetadata) {
132 status_t res = OK;
133 camera_metadata_entry autoframingEntry =
134 resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING);
135 if (autoframingEntry.count == 0) {
136 const uint8_t defaultAutoframingEntry = ANDROID_CONTROL_AUTOFRAMING_OFF;
137 res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING, &defaultAutoframingEntry, 1);
138 if (res != OK) {
139 ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING: %s (%d)",
140 __FUNCTION__, strerror(-res), res);
141 return res;
142 }
143 }
144
145 camera_metadata_entry autoframingStateEntry =
146 resultMetadata.find(ANDROID_CONTROL_AUTOFRAMING_STATE);
147 if (autoframingStateEntry.count == 0) {
148 const uint8_t defaultAutoframingStateEntry = ANDROID_CONTROL_AUTOFRAMING_STATE_INACTIVE;
149 res = resultMetadata.update(ANDROID_CONTROL_AUTOFRAMING_STATE,
150 &defaultAutoframingStateEntry, 1);
151 if (res != OK) {
152 ALOGE("%s: Failed to update ANDROID_CONTROL_AUTOFRAMING_STATE: %s (%d)",
153 __FUNCTION__, strerror(-res), res);
154 return res;
155 }
156 }
157
158 return res;
159 }
160
correctMeteringRegions(camera_metadata_t * meta)161 void correctMeteringRegions(camera_metadata_t *meta) {
162 if (meta == nullptr) return;
163
164 uint32_t meteringRegionKeys[] = {
165 ANDROID_CONTROL_AE_REGIONS,
166 ANDROID_CONTROL_AWB_REGIONS,
167 ANDROID_CONTROL_AF_REGIONS };
168
169 for (uint32_t key : meteringRegionKeys) {
170 camera_metadata_entry_t entry;
171 int res = find_camera_metadata_entry(meta, key, &entry);
172 if (res != OK) continue;
173
174 for (size_t i = 0; i < entry.count; i += 5) {
175 if (entry.data.i32[0] > entry.data.i32[2]) {
176 ALOGW("%s: Invalid metering region (%d): left: %d, right: %d",
177 __FUNCTION__, key, entry.data.i32[0], entry.data.i32[2]);
178 entry.data.i32[2] = entry.data.i32[0];
179 }
180 if (entry.data.i32[1] > entry.data.i32[3]) {
181 ALOGW("%s: Invalid metering region (%d): top: %d, bottom: %d",
182 __FUNCTION__, key, entry.data.i32[1], entry.data.i32[3]);
183 entry.data.i32[3] = entry.data.i32[1];
184 }
185 }
186 }
187 }
188
// Stamp a capture result with identifying metadata and append a copy of it
// to the result queue. Caller must hold states.outputLock (both callers in
// this file acquire it before calling).
//
// Sets the vendor tag id and sanitizes metering regions on the logical and
// each physical camera's metadata, writes the frame number and request id
// into the result, then inserts it into states.resultQueue and wakes any
// thread waiting on states.resultSignal.
void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
    if (result == nullptr) return;

    // getAndLock() returns a const pointer; cast away constness to stamp the
    // vendor tag id and fix regions directly in the locked buffer.
    camera_metadata_t *meta = const_cast<camera_metadata_t *>(
            result->mMetadata.getAndLock());
    set_camera_metadata_vendor_id(meta, states.vendorTagId);
    correctMeteringRegions(meta);
    result->mMetadata.unlock(meta);

    if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
            (int32_t*)&frameNumber, 1) != OK) {
        SET_ERR("Failed to set frame number %d in metadata", frameNumber);
        return;
    }

    if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
        SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
        return;
    }

    // Update vendor tag id for physical metadata
    for (auto& physicalMetadata : result->mPhysicalMetadatas) {
        camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
                physicalMetadata.mPhysicalCameraMetadata.getAndLock());
        set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
        correctMeteringRegions(pmeta);
        physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
    }

    // Valid result, insert into queue
    std::list<CaptureResult>::iterator queuedResult =
            states.resultQueue.insert(states.resultQueue.end(), CaptureResult(*result));
    ALOGV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
            ", burstId = %" PRId32, __FUNCTION__,
            queuedResult->mResultExtras.requestId,
            queuedResult->mResultExtras.frameNumber,
            queuedResult->mResultExtras.burstId);

    states.resultSignal.notify_one();
}
229
230
sendPartialCaptureResult(CaptureOutputStates & states,const camera_metadata_t * partialResult,const CaptureResultExtras & resultExtras,uint32_t frameNumber)231 void sendPartialCaptureResult(CaptureOutputStates& states,
232 const camera_metadata_t * partialResult,
233 const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
234 ATRACE_CALL();
235 std::lock_guard<std::mutex> l(states.outputLock);
236
237 CaptureResult captureResult;
238 captureResult.mResultExtras = resultExtras;
239 captureResult.mMetadata = partialResult;
240
241 // Fix up result metadata for monochrome camera.
242 status_t res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
243 if (res != OK) {
244 SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
245 return;
246 }
247
248 // Update partial result by removing keys remapped by DistortionCorrection, ZoomRatio,
249 // and RotationAndCrop mappers.
250 std::set<uint32_t> keysToRemove;
251
252 auto iter = states.distortionMappers.find(states.cameraId.c_str());
253 if (iter != states.distortionMappers.end()) {
254 const auto& remappedKeys = iter->second.getRemappedKeys();
255 keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
256 }
257
258 const auto& remappedKeys = states.zoomRatioMappers[states.cameraId.c_str()].getRemappedKeys();
259 keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
260
261 auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
262 if (mapper != states.rotateAndCropMappers.end()) {
263 const auto& remappedKeys = iter->second.getRemappedKeys();
264 keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
265 }
266
267 for (uint32_t key : keysToRemove) {
268 captureResult.mMetadata.erase(key);
269 }
270
271 // Send partial result
272 if (captureResult.mMetadata.entryCount() > 0) {
273 insertResultLocked(states, &captureResult, frameNumber);
274 }
275 }
276
// Assemble and queue the final (complete) capture result for one frame.
//
// Merges pendingMetadata with any collected partial results, enforces
// frame-number ordering per stream class (reprocess / ZSL still / regular),
// requires a sensor timestamp on the logical and every physical metadata,
// then applies the fix-up pipeline — distortion correction, zoom-ratio
// remapping, rotate-and-crop (AUTO mode only), autoframing defaults, and
// monochrome tags — to both logical and physical metadata. Finally records
// the result with the tag monitor and inserts it into the result queue.
// Acquires states.outputLock internally; a failure at any step transitions
// the device to the error state via SET_ERR and drops the result.
void sendCaptureResult(
        CaptureOutputStates& states,
        CameraMetadata &pendingMetadata,
        CaptureResultExtras &resultExtras,
        CameraMetadata &collectedPartialResult,
        uint32_t frameNumber,
        bool reprocess, bool zslStillCapture, bool rotateAndCropAuto,
        const std::set<std::string>& cameraIdsWithZoom,
        const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
    ATRACE_CALL();
    if (pendingMetadata.isEmpty())
        return;

    std::lock_guard<std::mutex> l(states.outputLock);

    // TODO: need to track errors for tighter bounds on expected frame number
    // Reprocess, ZSL-still, and regular captures each keep an independent
    // monotonically increasing expected-frame-number counter.
    if (reprocess) {
        if (frameNumber < states.nextReprocResultFrameNum) {
            SET_ERR("Out-of-order reprocess capture result metadata submitted! "
                    "(got frame number %d, expecting %d)",
                    frameNumber, states.nextReprocResultFrameNum);
            return;
        }
        states.nextReprocResultFrameNum = frameNumber + 1;
    } else if (zslStillCapture) {
        if (frameNumber < states.nextZslResultFrameNum) {
            SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
                    "(got frame number %d, expecting %d)",
                    frameNumber, states.nextZslResultFrameNum);
            return;
        }
        states.nextZslResultFrameNum = frameNumber + 1;
    } else {
        if (frameNumber < states.nextResultFrameNum) {
            SET_ERR("Out-of-order capture result metadata submitted! "
                    "(got frame number %d, expecting %d)",
                    frameNumber, states.nextResultFrameNum);
            return;
        }
        states.nextResultFrameNum = frameNumber + 1;
    }

    CaptureResult captureResult;
    captureResult.mResultExtras = resultExtras;
    captureResult.mMetadata = pendingMetadata;
    captureResult.mPhysicalMetadatas = physicalMetadatas;

    // Append any previous partials to form a complete result
    if (states.usePartialResult && !collectedPartialResult.isEmpty()) {
        captureResult.mMetadata.append(collectedPartialResult);
    }

    captureResult.mMetadata.sort();

    // Check that there's a timestamp in the result metadata
    camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
    if (timestamp.count == 0) {
        SET_ERR("No timestamp provided by HAL for frame %d!",
                frameNumber);
        return;
    }
    nsecs_t sensorTimestamp = timestamp.data.i64[0];

    // Each physical camera metadata must also carry a timestamp. Note: the
    // loop variable intentionally shadows the outer `timestamp` entry.
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        camera_metadata_entry timestamp =
                physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
        if (timestamp.count == 0) {
            SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
                    String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
            return;
        }
    }

    // Fix up some result metadata to account for HAL-level distortion correction
    status_t res = OK;
    auto iter = states.distortionMappers.find(states.cameraId.c_str());
    if (iter != states.distortionMappers.end()) {
        res = iter->second.correctCaptureResult(&captureResult.mMetadata);
        if (res != OK) {
            SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
                    frameNumber, strerror(-res), res);
            return;
        }
    }

    // Fix up result metadata to account for zoom ratio availabilities between
    // HAL and app.
    bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId.c_str()) == cameraIdsWithZoom.end();
    res = states.zoomRatioMappers[states.cameraId.c_str()].updateCaptureResult(
            &captureResult.mMetadata, zoomRatioIs1);
    if (res != OK) {
        SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
                frameNumber, strerror(-res), res);
        return;
    }

    // Fix up result metadata to account for rotateAndCrop in AUTO mode
    if (rotateAndCropAuto) {
        auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
        if (mapper != states.rotateAndCropMappers.end()) {
            res = mapper->second.updateCaptureResult(
                    &captureResult.mMetadata);
            if (res != OK) {
                SET_ERR("Unable to correct capture result rotate-and-crop for frame %d: %s (%d)",
                        frameNumber, strerror(-res), res);
                return;
            }
        }
    }

    // Fix up autoframing metadata
    res = fixupAutoframingTags(captureResult.mMetadata);
    if (res != OK) {
        SET_ERR("Failed to set autoframing defaults in result metadata: %s (%d)",
                strerror(-res), res);
        return;
    }
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        res = fixupAutoframingTags(physicalMetadata.mPhysicalCameraMetadata);
        if (res != OK) {
            SET_ERR("Failed to set autoframing defaults in physical result metadata: %s (%d)",
                    strerror(-res), res);
            return;
        }
    }

    // Apply distortion correction and zoom-ratio remapping to each physical
    // camera's metadata, keyed by that physical camera's id.
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
        auto mapper = states.distortionMappers.find(cameraId8.c_str());
        if (mapper != states.distortionMappers.end()) {
            res = mapper->second.correctCaptureResult(
                    &physicalMetadata.mPhysicalCameraMetadata);
            if (res != OK) {
                SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
                        frameNumber, strerror(-res), res);
                return;
            }
        }

        zoomRatioIs1 = cameraIdsWithZoom.find(cameraId8.c_str()) == cameraIdsWithZoom.end();
        res = states.zoomRatioMappers[cameraId8.c_str()].updateCaptureResult(
                &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
        if (res != OK) {
            SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
                    "frame %d: %s(%d)", cameraId8.c_str(), frameNumber, strerror(-res), res);
            return;
        }
    }

    // Fix up result metadata for monochrome camera.
    res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
    if (res != OK) {
        SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
        return;
    }
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
        res = fixupMonochromeTags(states,
                states.physicalDeviceInfoMap.at(cameraId8.c_str()),
                physicalMetadata.mPhysicalCameraMetadata);
        if (res != OK) {
            SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
            return;
        }
    }

    // Record the fully fixed-up metadata (logical + physical) with the tag
    // monitor before queuing.
    std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
    for (auto& m : physicalMetadatas) {
        monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
                CameraMetadata(m.mPhysicalCameraMetadata));
    }
    states.tagMonitor.monitorMetadata(TagMonitor::RESULT,
            frameNumber, sensorTimestamp, captureResult.mMetadata,
            monitoredPhysicalMetadata);

    insertResultLocked(states, &captureResult, frameNumber);
}
454
removeInFlightMapEntryLocked(CaptureOutputStates & states,int idx)455 void removeInFlightMapEntryLocked(CaptureOutputStates& states, int idx) {
456 ATRACE_CALL();
457 InFlightRequestMap& inflightMap = states.inflightMap;
458 nsecs_t duration = inflightMap.valueAt(idx).maxExpectedDuration;
459 inflightMap.removeItemsAt(idx, 1);
460
461 states.inflightIntf.onInflightEntryRemovedLocked(duration);
462 }
463
// Remove the in-flight entry at `idx` if everything expected for that frame
// has arrived; otherwise leave it in place. Caller must hold the in-flight
// lock.
//
// On removal this also returns any pending output buffers, records the
// just-completed frame number by capture class, and updates session stats.
// Always re-checks the in-flight map length afterwards.
void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
    InFlightRequestMap& inflightMap = states.inflightMap;
    const InFlightRequest &request = inflightMap.valueAt(idx);
    const uint32_t frameNumber = inflightMap.keyAt(idx);
    SessionStatsBuilder& sessionStatsBuilder = states.sessionStatsBuilder;

    nsecs_t sensorTimestamp = request.sensorTimestamp;
    nsecs_t shutterTimestamp = request.shutterTimestamp;

    // Check if it's okay to remove the request from InFlightMap:
    // In the case of a successful request:
    //      all input and output buffers, all result metadata, shutter callback
    //      arrived.
    // In the case of an unsuccessful request:
    //      all input and output buffers, as well as request/result error notifications, arrived.
    if (request.numBuffersLeft == 0 &&
            (request.skipResultMetadata ||
            (request.haveResultMetadata && shutterTimestamp != 0))) {
        if (request.stillCapture) {
            ATRACE_ASYNC_END("still capture", frameNumber);
        }

        ATRACE_ASYNC_END("frame capture", frameNumber);

        // Validation check - if sensor timestamp matches shutter timestamp in the
        // case of request having callback.
        if (request.hasCallback && request.requestStatus == OK &&
                sensorTimestamp != shutterTimestamp) {
            SET_ERR("sensor timestamp (%" PRId64
                    ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
                    sensorTimestamp, frameNumber, shutterTimestamp);
        }

        // for an unsuccessful request, it may have pending output buffers to
        // return.
        assert(request.requestStatus != OK ||
                request.pendingOutputBuffers.size() == 0);

        // Return (and for errors, notify about) any buffers still pending.
        returnOutputBuffers(
                states.useHalBufManager, states.listener,
                request.pendingOutputBuffers.array(),
                request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
                /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
                /*timestampIncreasing*/true,
                request.outputSurfaces, request.resultExtras,
                request.errorBufStrategy, request.transform);

        // Note down the just completed frame number
        if (request.hasInputBuffer) {
            states.lastCompletedReprocessFrameNumber = frameNumber;
        } else if (request.zslCapture && request.stillCapture) {
            states.lastCompletedZslFrameNumber = frameNumber;
        } else {
            states.lastCompletedRegularFrameNumber = frameNumber;
        }

        sessionStatsBuilder.incResultCounter(request.skipResultMetadata);

        removeInFlightMapEntryLocked(states, idx);
        ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
    }

    states.inflightIntf.checkInflightMapLengthLocked();
}
528
529 // Erase the subset of physicalCameraIds that contains id
erasePhysicalCameraIdSet(std::set<std::set<String8>> & physicalCameraIds,const String8 & id)530 bool erasePhysicalCameraIdSet(
531 std::set<std::set<String8>>& physicalCameraIds, const String8& id) {
532 bool found = false;
533 for (auto iter = physicalCameraIds.begin(); iter != physicalCameraIds.end(); iter++) {
534 if (iter->count(id) == 1) {
535 physicalCameraIds.erase(iter);
536 found = true;
537 break;
538 }
539 }
540 return found;
541 }
542
getCameraIdsWithZoomLocked(const InFlightRequestMap & inflightMap,const CameraMetadata & metadata,const std::set<std::string> & cameraIdsWithZoom)543 const std::set<std::string>& getCameraIdsWithZoomLocked(
544 const InFlightRequestMap& inflightMap, const CameraMetadata& metadata,
545 const std::set<std::string>& cameraIdsWithZoom) {
546 camera_metadata_ro_entry overrideEntry =
547 metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDE);
548 camera_metadata_ro_entry frameNumberEntry =
549 metadata.find(ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER);
550 if (overrideEntry.count != 1
551 || overrideEntry.data.i32[0] != ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM
552 || frameNumberEntry.count != 1) {
553 // No valid overriding frame number, skip
554 return cameraIdsWithZoom;
555 }
556
557 uint32_t overridingFrameNumber = frameNumberEntry.data.i32[0];
558 ssize_t idx = inflightMap.indexOfKey(overridingFrameNumber);
559 if (idx < 0) {
560 ALOGE("%s: Failed to find pending request #%d in inflight map",
561 __FUNCTION__, overridingFrameNumber);
562 return cameraIdsWithZoom;
563 }
564
565 const InFlightRequest &r = inflightMap.valueFor(overridingFrameNumber);
566 return r.cameraIdsWithZoom;
567 }
568
// Handle one capture result (metadata, output buffers, and/or input buffer)
// reported by the HAL for a single frame.
//
// Looks up the matching in-flight request, validates partial-result counts
// and physical-camera metadata, aggregates partial metadata, tracks the
// active physical camera for logical multi-camera devices, returns output
// buffers once the shutter has been notified, sends the complete result when
// ready, and removes the in-flight entry when everything has arrived.
// A malformed result transitions the device to the error state via SET_ERR.
void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
    ATRACE_CALL();

    status_t res;

    uint32_t frameNumber = result->frame_number;
    // An entirely empty result is malformed.
    if (result->result == NULL && result->num_output_buffers == 0 &&
            result->input_buffer == NULL) {
        SET_ERR("No result data provided by HAL for frame %d",
                frameNumber);
        return;
    }

    if (!states.usePartialResult &&
            result->result != NULL &&
            result->partial_result != 1) {
        SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
                " if partial result is not supported",
                frameNumber, result->partial_result);
        return;
    }

    bool isPartialResult = false;
    CameraMetadata collectedPartialResult;
    bool hasInputBufferInRequest = false;

    // Get shutter timestamp and resultExtras from list of in-flight requests,
    // where it was added by the shutter notification for this frame. If the
    // shutter timestamp isn't received yet, append the output buffers to the
    // in-flight request and they will be returned when the shutter timestamp
    // arrives. Update the in-flight status and remove the in-flight entry if
    // all result data and shutter timestamp have been received.
    nsecs_t shutterTimestamp = 0;
    {
        std::lock_guard<std::mutex> l(states.inflightLock);
        ssize_t idx = states.inflightMap.indexOfKey(frameNumber);
        if (idx == NAME_NOT_FOUND) {
            SET_ERR("Unknown frame number for capture result: %d",
                    frameNumber);
            return;
        }
        InFlightRequest &request = states.inflightMap.editValueAt(idx);
        ALOGVV("%s: got InFlightRequest requestId = %" PRId32
                ", frameNumber = %" PRId64 ", burstId = %" PRId32
                ", partialResultCount = %d/%d, hasCallback = %d, num_output_buffers %d"
                ", usePartialResult = %d",
                __FUNCTION__, request.resultExtras.requestId,
                request.resultExtras.frameNumber, request.resultExtras.burstId,
                result->partial_result, states.numPartialResults,
                request.hasCallback, result->num_output_buffers,
                states.usePartialResult);
        // Always update the partial count to the latest one if it's not 0
        // (buffers only). When framework aggregates adjacent partial results
        // into one, the latest partial count will be used.
        if (result->partial_result != 0)
            request.resultExtras.partialResultCount = result->partial_result;

        // Track the active physical camera of a logical multi-camera: notify
        // listeners when it changes, and propagate the matching per-device
        // output transform to requests still in flight.
        if (result->result != nullptr) {
            camera_metadata_ro_entry entry;
            auto ret = find_camera_metadata_ro_entry(result->result,
                    ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, &entry);
            if ((ret == OK) && (entry.count > 0)) {
                std::string physicalId(reinterpret_cast<const char *>(entry.data.u8));
                if (!states.activePhysicalId.empty() && physicalId != states.activePhysicalId) {
                    states.listener->notifyPhysicalCameraChange(physicalId);
                }
                states.activePhysicalId = physicalId;

                if (!states.legacyClient && !states.overrideToPortrait) {
                    auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
                    if (deviceInfo != states.physicalDeviceInfoMap.end()) {
                        auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
                        if (orientation.count > 0) {
                            int32_t transform;
                            ret = CameraUtils::getRotationTransform(deviceInfo->second,
                                    OutputConfiguration::MIRROR_MODE_AUTO, &transform);
                            if (ret == OK) {
                                // It is possible for camera providers to return the capture
                                // results after the processed frames. In such scenario, we will
                                // not be able to set the output transformation before the frames
                                // return back to the consumer for the current capture request
                                // but we could still try and configure it for any future requests
                                // that are still in flight. The assumption is that the physical
                                // device id remains the same for the duration of the pending queue.
                                for (size_t i = 0; i < states.inflightMap.size(); i++) {
                                    auto &r = states.inflightMap.editValueAt(i);
                                    if (r.requestTimeNs >= request.requestTimeNs) {
                                        r.transform = transform;
                                    }
                                }
                            } else {
                                ALOGE("%s: Failed to calculate current stream transformation: %s "
                                        "(%d)", __FUNCTION__, strerror(-ret), ret);
                            }
                        } else {
                            ALOGE("%s: Physical device orientation absent!", __FUNCTION__);
                        }
                    } else {
                        ALOGE("%s: Physical device not found in device info map found!",
                                __FUNCTION__);
                    }
                }
            }
        }

        // Check if this result carries only partial metadata
        if (states.usePartialResult && result->result != NULL) {
            if (result->partial_result > states.numPartialResults || result->partial_result < 1) {
                SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
                        " the range of [1, %d] when metadata is included in the result",
                        frameNumber, result->partial_result, states.numPartialResults);
                return;
            }
            isPartialResult = (result->partial_result < states.numPartialResults);
            if (isPartialResult && result->num_physcam_metadata) {
                SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
                        " physical camera result", frameNumber);
                return;
            }
            if (isPartialResult) {
                request.collectedPartialResult.append(result->result);
            }

            if (isPartialResult && request.hasCallback) {
                // Send partial capture result
                sendPartialCaptureResult(states, result->result, request.resultExtras,
                        frameNumber);
            }
        }

        shutterTimestamp = request.shutterTimestamp;
        hasInputBufferInRequest = request.hasInputBuffer;

        // Did we get the (final) result metadata for this capture?
        if (result->result != NULL && !isPartialResult) {
            if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
                SET_ERR("Expected physical Camera metadata count %d not equal to actual count %d",
                        request.physicalCameraIds.size(), result->num_physcam_metadata);
                return;
            }
            if (request.haveResultMetadata) {
                SET_ERR("Called multiple times with metadata for frame %d",
                        frameNumber);
                return;
            }
            // Each reported physical-camera metadata must correspond to one
            // of the id-sets the request expected; erase as we match them.
            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                String8 physicalId(result->physcam_ids[i]);
                bool validPhysicalCameraMetadata =
                        erasePhysicalCameraIdSet(request.physicalCameraIds, physicalId);
                if (!validPhysicalCameraMetadata) {
                    SET_ERR("Unexpected total result for frame %d camera %s",
                            frameNumber, physicalId.c_str());
                    return;
                }
            }
            if (states.usePartialResult &&
                    !request.collectedPartialResult.isEmpty()) {
                collectedPartialResult.acquire(
                        request.collectedPartialResult);
            }
            request.haveResultMetadata = true;
            request.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
        }

        uint32_t numBuffersReturned = result->num_output_buffers;
        if (result->input_buffer != NULL) {
            if (hasInputBufferInRequest) {
                numBuffersReturned += 1;
            } else {
                ALOGW("%s: Input buffer should be NULL if there is no input"
                        " buffer sent in the request",
                        __FUNCTION__);
            }
        }
        request.numBuffersLeft -= numBuffersReturned;
        if (request.numBuffersLeft < 0) {
            SET_ERR("Too many buffers returned for frame %d",
                    frameNumber);
            return;
        }

        // Cache the sensor timestamp if this result carries one.
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(result->result,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res == OK && entry.count == 1) {
            request.sensorTimestamp = entry.data.i64[0];
        }

        // If shutter event isn't received yet, do not return the pending output
        // buffers.
        request.pendingOutputBuffers.appendArray(result->output_buffers,
                result->num_output_buffers);
        if (shutterTimestamp != 0) {
            returnAndRemovePendingOutputBuffers(
                    states.useHalBufManager, states.listener,
                    request, states.sessionStatsBuilder);
        }

        // Final metadata arrived: either stash it (shutter not yet seen) or
        // send the complete capture result now.
        if (result->result != NULL && !isPartialResult) {
            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                CameraMetadata physicalMetadata;
                physicalMetadata.append(result->physcam_metadata[i]);
                request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
                        physicalMetadata});
            }
            if (shutterTimestamp == 0) {
                request.pendingMetadata = result->result;
                request.collectedPartialResult = collectedPartialResult;
            } else if (request.hasCallback) {
                CameraMetadata metadata;
                metadata = result->result;
                auto cameraIdsWithZoom = getCameraIdsWithZoomLocked(
                        states.inflightMap, metadata, request.cameraIdsWithZoom);
                sendCaptureResult(states, metadata, request.resultExtras,
                        collectedPartialResult, frameNumber,
                        hasInputBufferInRequest, request.zslCapture && request.stillCapture,
                        request.rotateAndCropAuto, cameraIdsWithZoom,
                        request.physicalMetadatas);
            }
        }
        removeInFlightRequestIfReadyLocked(states, idx);
    } // scope for states.inFlightLock

    // Return the input buffer (outside the in-flight lock).
    if (result->input_buffer != NULL) {
        if (hasInputBufferInRequest) {
            Camera3Stream *stream =
                    Camera3Stream::cast(result->input_buffer->stream);
            res = stream->returnInputBuffer(*(result->input_buffer));
            // Note: stream may be deallocated at this point, if this buffer was the
            // last reference to it.
            if (res != OK) {
                ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
                        "  its stream:%s (%d)", __FUNCTION__,
                        frameNumber, strerror(-res), res);
            }
        } else {
            ALOGW("%s: Input buffer should be NULL if there is no input"
                    " buffer sent in the request, skipping input buffer return.",
                    __FUNCTION__);
        }
    }
}
811
/**
 * Return a batch of output buffers to their owning streams.
 *
 * For each buffer this:
 *  - sends a per-stream ERROR_CAMERA_BUFFER callback when the buffer is in
 *    error state and the strategy is ERROR_BUF_RETURN_NOTIFY,
 *  - returns the buffer to its stream (unless the buffer is an error buffer
 *    being cached under ERROR_BUF_CACHE),
 *  - updates per-stream session statistics when the buffer was requested,
 *  - cancels and reports shared-stream buffers whose return timed out.
 *
 * @param useHalBufManager true if the HAL buffer management API is in use;
 *        only then may a buffer arrive with a null handle.
 * @param listener receiver for ERROR_CAMERA_BUFFER callbacks; may be null.
 * @param outputBuffers / numBuffers the buffers to return.
 * @param timestamp / readoutTimestamp timestamps forwarded to the stream
 *        (0 timestamp is treated as a dropped frame for statistics).
 * @param requested whether these buffers were requested by the client
 *        (controls statistics accounting).
 * @param requestTimeNs request submission time, used for capture latency.
 * @param timestampIncreasing whether the stream should enforce monotonically
 *        increasing timestamps.
 * @param outputSurfaces per-stream surface id lists for shared streams.
 * @param inResultExtras extras propagated into error callbacks.
 * @param errorBufStrategy what to do with buffers in error state.
 * @param transform transform hint forwarded to the stream.
 */
void returnOutputBuffers(
        bool useHalBufManager,
        sp<NotificationListener> listener,
        const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
        nsecs_t timestamp, nsecs_t readoutTimestamp, bool requested,
        nsecs_t requestTimeNs, SessionStatsBuilder& sessionStatsBuilder,
        bool timestampIncreasing, const SurfaceMap& outputSurfaces,
        const CaptureResultExtras &inResultExtras,
        ERROR_BUF_STRATEGY errorBufStrategy, int32_t transform) {

    for (size_t i = 0; i < numBuffers; i++)
    {
        Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
        int streamId = stream->getId();

        // Call notify(ERROR_BUFFER) if necessary.
        if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR &&
                errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
            if (listener != nullptr) {
                CaptureResultExtras extras = inResultExtras;
                extras.errorStreamId = streamId;
                listener->notifyError(
                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                        extras);
            }
        }

        if (outputBuffers[i].buffer == nullptr) {
            if (!useHalBufManager) {
                // With HAL buffer management API, HAL sometimes will have to return buffers that
                // has not got a output buffer handle filled yet. This is though illegal if HAL
                // buffer management API is not being used.
                ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
            } else {
                if (requested) {
                    // Count the never-filled buffer as a dropped frame for the stream.
                    sessionStatsBuilder.incCounter(streamId, /*dropped*/true, 0);
                }
            }
            continue;
        }

        const auto& it = outputSurfaces.find(streamId);
        status_t res = OK;

        // Do not return the buffer if the buffer status is error, and the error
        // buffer strategy is CACHE.
        if (outputBuffers[i].status != CAMERA_BUFFER_STATUS_ERROR ||
                errorBufStrategy != ERROR_BUF_CACHE) {
            if (it != outputSurfaces.end()) {
                // Shared stream: return only to the surfaces listed for this request.
                res = stream->returnBuffer(
                        outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
                        it->second, inResultExtras.frameNumber, transform);
            } else {
                res = stream->returnBuffer(
                        outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
                        std::vector<size_t> (), inResultExtras.frameNumber, transform);
            }
        }
        // Note: stream may be deallocated at this point, if this buffer was
        // the last reference to it.
        bool dropped = false;
        if (res == NO_INIT || res == DEAD_OBJECT) {
            // Stream is gone; stop counting for it rather than flagging a drop.
            ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
            sessionStatsBuilder.stopCounter(streamId);
        } else if (res != OK) {
            ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
            dropped = true;
        } else {
            // A zero timestamp also marks the frame as dropped for statistics.
            if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR || timestamp == 0) {
                dropped = true;
            }
        }
        if (requested) {
            nsecs_t bufferTimeNs = systemTime();
            int32_t captureLatencyMs = ns2ms(bufferTimeNs - requestTimeNs);
            sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
        }

        // Long processing consumers can cause returnBuffer timeout for shared stream
        // If that happens, cancel the buffer and send a buffer error to client
        if (it != outputSurfaces.end() && res == TIMED_OUT &&
                outputBuffers[i].status == CAMERA_BUFFER_STATUS_OK) {
            // cancel the buffer
            camera_stream_buffer_t sb = outputBuffers[i];
            sb.status = CAMERA_BUFFER_STATUS_ERROR;
            stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
                    timestampIncreasing, std::vector<size_t> (),
                    inResultExtras.frameNumber, transform);

            if (listener != nullptr) {
                CaptureResultExtras extras = inResultExtras;
                extras.errorStreamId = streamId;
                listener->notifyError(
                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                        extras);
            }
        }
    }
}
911
returnAndRemovePendingOutputBuffers(bool useHalBufManager,sp<NotificationListener> listener,InFlightRequest & request,SessionStatsBuilder & sessionStatsBuilder)912 void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
913 sp<NotificationListener> listener, InFlightRequest& request,
914 SessionStatsBuilder& sessionStatsBuilder) {
915 bool timestampIncreasing =
916 !((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
917 nsecs_t readoutTimestamp = request.resultExtras.hasReadoutTimestamp ?
918 request.resultExtras.readoutTimestamp : 0;
919 returnOutputBuffers(useHalBufManager, listener,
920 request.pendingOutputBuffers.array(),
921 request.pendingOutputBuffers.size(),
922 request.shutterTimestamp, readoutTimestamp,
923 /*requested*/true, request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
924 request.outputSurfaces, request.resultExtras,
925 request.errorBufStrategy, request.transform);
926
927 // Remove error buffers that are not cached.
928 for (auto iter = request.pendingOutputBuffers.begin();
929 iter != request.pendingOutputBuffers.end(); ) {
930 if (request.errorBufStrategy != ERROR_BUF_CACHE ||
931 iter->status != CAMERA_BUFFER_STATUS_ERROR) {
932 iter = request.pendingOutputBuffers.erase(iter);
933 } else {
934 iter++;
935 }
936 }
937 }
938
/**
 * Handle a shutter notification from the HAL for one frame.
 *
 * Under the in-flight lock this verifies shutter ordering (separately for
 * reprocess, ZSL-still, and regular streams), records the (readout) timestamp
 * on the in-flight request, propagates frame-duration changes to output
 * streams, notifies the client listener, sends any pending capture result,
 * and returns pending output buffers. Reports an error for unknown frame
 * numbers.
 */
void notifyShutter(CaptureOutputStates& states, const camera_shutter_msg_t &msg) {
    ATRACE_CALL();
    ssize_t idx;

    // Set timestamp for the request in the in-flight tracking
    // and get the request ID to send upstream
    {
        std::lock_guard<std::mutex> l(states.inflightLock);
        InFlightRequestMap& inflightMap = states.inflightMap;
        idx = inflightMap.indexOfKey(msg.frame_number);
        if (idx >= 0) {
            InFlightRequest &r = inflightMap.editValueAt(idx);

            // Verify ordering of shutter notifications
            {
                std::lock_guard<std::mutex> l(states.outputLock);
                // TODO: need to track errors for tighter bounds on expected frame number.
                if (r.hasInputBuffer) {
                    // Reprocess requests have their own expected-frame counter.
                    if (msg.frame_number < states.nextReprocShutterFrameNum) {
                        SET_ERR("Reprocess shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextReprocShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextReprocShutterFrameNum = msg.frame_number + 1;
                } else if (r.zslCapture && r.stillCapture) {
                    // ZSL still captures are also ordered independently.
                    if (msg.frame_number < states.nextZslShutterFrameNum) {
                        SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextZslShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextZslShutterFrameNum = msg.frame_number + 1;
                } else {
                    if (msg.frame_number < states.nextShutterFrameNum) {
                        SET_ERR("Shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextShutterFrameNum = msg.frame_number + 1;
                }
            }

            r.shutterTimestamp = msg.timestamp;
            if (msg.readout_timestamp_valid) {
                // Readout timestamp is optional; record it only when the HAL
                // provided one.
                r.resultExtras.hasReadoutTimestamp = true;
                r.resultExtras.readoutTimestamp = msg.readout_timestamp;
            }
            if (r.minExpectedDuration != states.minFrameDuration ||
                    r.isFixedFps != states.isFixedFps) {
                // Frame duration or fixed-FPS mode changed: let each output
                // stream adapt before caching the new values.
                for (size_t i = 0; i < states.outputStreams.size(); i++) {
                    auto outputStream = states.outputStreams[i];
                    outputStream->onMinDurationChanged(r.minExpectedDuration, r.isFixedFps);
                }
                states.minFrameDuration = r.minExpectedDuration;
                states.isFixedFps = r.isFixedFps;
            }
            if (r.hasCallback) {
                ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
                    states.cameraId.string(), __FUNCTION__,
                    msg.frame_number, r.resultExtras.requestId, msg.timestamp);
                // Call listener, if any
                if (states.listener != nullptr) {
                    r.resultExtras.lastCompletedRegularFrameNumber =
                            states.lastCompletedRegularFrameNumber;
                    r.resultExtras.lastCompletedReprocessFrameNumber =
                            states.lastCompletedReprocessFrameNumber;
                    r.resultExtras.lastCompletedZslFrameNumber =
                            states.lastCompletedZslFrameNumber;
                    states.listener->notifyShutter(r.resultExtras, msg.timestamp);
                }
                // send pending result and buffers
                const auto& cameraIdsWithZoom = getCameraIdsWithZoomLocked(
                        inflightMap, r.pendingMetadata, r.cameraIdsWithZoom);
                sendCaptureResult(states,
                    r.pendingMetadata, r.resultExtras,
                    r.collectedPartialResult, msg.frame_number,
                    r.hasInputBuffer, r.zslCapture && r.stillCapture,
                    r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
            }
            returnAndRemovePendingOutputBuffers(
                    states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);

            // Drop the in-flight entry if both metadata and buffers are done.
            removeInFlightRequestIfReadyLocked(states, idx);
        }
    }
    if (idx < 0) {
        SET_ERR("Shutter notification for non-existent frame number %d",
                msg.frame_number);
    }
}
1031
/**
 * Handle an error notification from the HAL.
 *
 * Maps the HAL error code to the ICameraDeviceCallbacks error space, then:
 *  - DEVICE errors put the whole device into the error state (via SET_ERR),
 *  - REQUEST/RESULT errors update the matching in-flight request (marking
 *    metadata to be skipped, choosing an error-buffer strategy) and forward
 *    the error to the client listener,
 *  - BUFFER errors are ignored here (buffer status drives that callback),
 *  - anything else is treated as a serious device error.
 */
void notifyError(CaptureOutputStates& states, const camera_error_msg_t &msg) {
    ATRACE_CALL();
    // Map camera HAL error codes to ICameraDeviceCallback error codes
    // Index into this with the HAL error code
    static const int32_t halErrorMap[CAMERA_MSG_NUM_ERRORS] = {
        // 0 = Unused error code
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
        // 1 = CAMERA_MSG_ERROR_DEVICE
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
        // 2 = CAMERA_MSG_ERROR_REQUEST
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
        // 3 = CAMERA_MSG_ERROR_RESULT
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
        // 4 = CAMERA_MSG_ERROR_BUFFER
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
    };

    // Out-of-range HAL codes degrade to INVALID_ERROR rather than indexing
    // past the map.
    int32_t errorCode =
            ((msg.error_code >= 0) &&
                    (msg.error_code < CAMERA_MSG_NUM_ERRORS)) ?
            halErrorMap[msg.error_code] :
            hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;

    int streamId = 0;
    String16 physicalCameraId;
    if (msg.error_stream != nullptr) {
        Camera3Stream *stream =
                Camera3Stream::cast(msg.error_stream);
        streamId = stream->getId();
        physicalCameraId = String16(stream->physicalCameraId());
    }
    ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
            states.cameraId.string(), __FUNCTION__, msg.frame_number,
            streamId, msg.error_code);

    CaptureResultExtras resultExtras;
    switch (errorCode) {
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
            // SET_ERR calls into listener to notify application
            SET_ERR("Camera HAL reported serious device error");
            break;
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
            {
                std::lock_guard<std::mutex> l(states.inflightLock);
                ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
                if (idx >= 0) {
                    InFlightRequest &r = states.inflightMap.editValueAt(idx);
                    r.requestStatus = msg.error_code;
                    resultExtras = r.resultExtras;
                    bool physicalDeviceResultError = false;
                    if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
                            errorCode) {
                        if (physicalCameraId.size() > 0) {
                            // A RESULT error scoped to a physical sub-camera:
                            // it must belong to this request's physical id set.
                            String8 cameraId(physicalCameraId);
                            bool validPhysicalCameraId =
                                    erasePhysicalCameraIdSet(r.physicalCameraIds, cameraId);
                            if (!validPhysicalCameraId) {
                                ALOGE("%s: Reported result failure for physical camera device: %s "
                                        " which is not part of the respective request!",
                                        __FUNCTION__, cameraId.string());
                                // NOTE: this break leaves the switch entirely, so the
                                // client listener is NOT notified for this bogus id.
                                break;
                            }
                            resultExtras.errorPhysicalCameraId = physicalCameraId;
                            physicalDeviceResultError = true;
                        }
                    }

                    if (!physicalDeviceResultError) {
                        // Logical-device error: the result metadata will never
                        // arrive, so don't wait for it.
                        r.skipResultMetadata = true;
                        if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT
                                == errorCode) {
                            r.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
                        } else {
                            // errorCode is ERROR_CAMERA_REQUEST
                            r.errorBufStrategy = ERROR_BUF_RETURN;
                        }

                        // Check whether the buffers returned. If they returned,
                        // remove inflight request.
                        removeInFlightRequestIfReadyLocked(states, idx);
                    }
                } else {
                    resultExtras.frameNumber = msg.frame_number;
                    ALOGE("Camera %s: %s: cannot find in-flight request on "
                            "frame %" PRId64 " error", states.cameraId.string(), __FUNCTION__,
                            resultExtras.frameNumber);
                }
            }
            resultExtras.errorStreamId = streamId;
            if (states.listener != nullptr) {
                states.listener->notifyError(errorCode, resultExtras);
            } else {
                ALOGE("Camera %s: %s: no listener available",
                        states.cameraId.string(), __FUNCTION__);
            }
            break;
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
            // Do not depend on HAL ERROR_CAMERA_BUFFER to send buffer error
            // callback to the app. Rather, use STATUS_ERROR of image buffers.
            break;
        default:
            // SET_ERR calls notifyError
            SET_ERR("Unknown error message from HAL: %d", msg.error_code);
            break;
    }
}
1139
notify(CaptureOutputStates & states,const camera_notify_msg * msg)1140 void notify(CaptureOutputStates& states, const camera_notify_msg *msg) {
1141 switch (msg->type) {
1142 case CAMERA_MSG_ERROR: {
1143 notifyError(states, msg->message.error);
1144 break;
1145 }
1146 case CAMERA_MSG_SHUTTER: {
1147 notifyShutter(states, msg->message.shutter);
1148 break;
1149 }
1150 default:
1151 SET_ERR("Unknown notify message from HAL: %d",
1152 msg->type);
1153 }
1154 }
1155
/**
 * Flush everything the service still holds for in-flight requests.
 *
 * First returns all buffers cached in the in-flight map and clears it, then
 * pops every buffer the HAL still holds (both regular in-flight buffers and
 * HAL-buffer-manager request buffers) and returns each one, with error
 * status, to its stream.
 */
void flushInflightRequests(FlushInflightReqStates& states) {
    ATRACE_CALL();
    { // First return buffers cached in inFlightMap
        std::lock_guard<std::mutex> l(states.inflightLock);
        for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
            const InFlightRequest &request = states.inflightMap.valueAt(idx);
            // Zero timestamps: these frames are being dropped, not delivered.
            returnOutputBuffers(
                states.useHalBufManager, states.listener,
                request.pendingOutputBuffers.array(),
                request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
                /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
                /*timestampIncreasing*/true, request.outputSurfaces, request.resultExtras,
                request.errorBufStrategy);
            ALOGW("%s: Frame %d |  Timestamp: %" PRId64 ", metadata"
                    " arrived: %s, buffers left: %d.\n", __FUNCTION__,
                    states.inflightMap.keyAt(idx), request.shutterTimestamp,
                    request.haveResultMetadata ? "true" : "false",
                    request.numBuffersLeft);
        }

        states.inflightMap.clear();
        states.inflightIntf.onInflightMapFlushedLocked();
    }

    // Then return all inflight buffers not returned by HAL
    std::vector<std::pair<int32_t, int32_t>> inflightKeys;
    states.flushBufferIntf.getInflightBufferKeys(&inflightKeys);

    // Inflight buffers for HAL buffer manager
    std::vector<uint64_t> inflightRequestBufferKeys;
    states.flushBufferIntf.getInflightRequestBufferKeys(&inflightRequestBufferKeys);

    // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
    // frameNumber will be -1 for buffers from HAL buffer manager
    std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
    inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());

    for (auto& pair : inflightKeys) {
        int32_t frameNumber = pair.first;
        int32_t streamId = pair.second;
        buffer_handle_t* buffer;
        status_t res = states.bufferRecordsIntf.popInflightBuffer(frameNumber, streamId, &buffer);
        if (res != OK) {
            ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
                    __FUNCTION__, frameNumber, streamId);
            continue;
        }
        inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
    }

    for (auto& bufferId : inflightRequestBufferKeys) {
        int32_t streamId = -1;
        buffer_handle_t* buffer = nullptr;
        status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
                bufferId, &buffer, &streamId);
        if (res != OK) {
            ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
            continue;
        }
        inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
    }

    std::vector<sp<Camera3StreamInterface>> streams = states.flushBufferIntf.getAllStreams();

    for (auto& tuple : inflightBuffers) {
        status_t res = OK;
        int32_t streamId = std::get<0>(tuple);
        int32_t frameNumber = std::get<1>(tuple);
        buffer_handle_t* buffer = std::get<2>(tuple);

        // All flushed buffers are returned with error status to signal a drop.
        camera_stream_buffer_t streamBuffer;
        streamBuffer.buffer = buffer;
        streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
        streamBuffer.acquire_fence = -1;
        streamBuffer.release_fence = -1;

        // Find the owning stream by id; it may already have been deleted,
        // in which case the buffer is silently dropped with the tuple.
        for (auto& stream : streams) {
            if (streamId == stream->getId()) {
                // Return buffer to deleted stream
                camera_stream* halStream = stream->asHalStream();
                streamBuffer.stream = halStream;
                switch (halStream->stream_type) {
                    case CAMERA_STREAM_OUTPUT:
                        res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
                                /*readoutTimestamp*/0, /*timestampIncreasing*/true,
                                std::vector<size_t> (), frameNumber);
                        if (res != OK) {
                            ALOGE("%s: Can't return output buffer for frame %d to"
                                  " stream %d: %s (%d)",  __FUNCTION__,
                                  frameNumber, streamId, strerror(-res), res);
                        }
                        break;
                    case CAMERA_STREAM_INPUT:
                        res = stream->returnInputBuffer(streamBuffer);
                        if (res != OK) {
                            ALOGE("%s: Can't return input buffer for frame %d to"
                                  " stream %d: %s (%d)",  __FUNCTION__,
                                  frameNumber, streamId, strerror(-res), res);
                        }
                        break;
                    default: // Bi-direcitonal stream is deprecated
                        ALOGE("%s: stream %d has unknown stream type %d",
                                __FUNCTION__, streamId, halStream->stream_type);
                        break;
                }
                break;
            }
        }
    }
}
1266
1267 } // camera3
1268 } // namespace android
1269