1 /*
2  * Copyright (C) 2019 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "Camera3-OutputUtils"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20 //#define LOG_NNDEBUG 0  // Per-frame verbose logging
21 
22 #ifdef LOG_NNDEBUG
23 #define ALOGVV(...) ALOGV(__VA_ARGS__)
24 #else
25 #define ALOGVV(...) ((void)0)
26 #endif
27 
28 // Convenience macros for transitioning to the error state
29 #define SET_ERR(fmt, ...) states.setErrIntf.setErrorState(   \
30     "%s: " fmt, __FUNCTION__,                         \
31     ##__VA_ARGS__)
32 
33 #include <inttypes.h>
34 
35 #include <utils/Log.h>
36 #include <utils/SortedVector.h>
37 #include <utils/Trace.h>
38 
39 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
40 
41 #include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
42 #include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
43 #include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
44 
45 #include <camera_metadata_hidden.h>
46 
47 #include "device3/Camera3OutputUtils.h"
48 
49 using namespace android::camera3;
50 using namespace android::hardware::camera;
51 
52 namespace android {
53 namespace camera3 {
54 
55 status_t fixupMonochromeTags(
56         CaptureOutputStates& states,
57         const CameraMetadata& deviceInfo,
58         CameraMetadata& resultMetadata) {
59     status_t res = OK;
60     if (!states.needFixupMonoChrome) {
61         return res;
62     }
63 
64     // Remove tags that are not applicable to monochrome camera.
65     int32_t tagsToRemove[] = {
66            ANDROID_SENSOR_GREEN_SPLIT,
67            ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
68            ANDROID_COLOR_CORRECTION_MODE,
69            ANDROID_COLOR_CORRECTION_TRANSFORM,
70            ANDROID_COLOR_CORRECTION_GAINS,
71     };
72     for (auto tag : tagsToRemove) {
73         res = resultMetadata.erase(tag);
74         if (res != OK) {
75             ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
76             return res;
77         }
78     }
79 
80     // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
81     camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
82     for (size_t i = 1; i < blEntry.count; i++) {
83         blEntry.data.f[i] = blEntry.data.f[0];
84     }
85 
86     // ANDROID_SENSOR_NOISE_PROFILE
87     camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
88     if (npEntry.count > 0 && npEntry.count % 2 == 0) {
89         double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
90         res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
91         if (res != OK) {
92              ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
93                     __FUNCTION__, strerror(-res), res);
94             return res;
95         }
96     }
97 
98     // ANDROID_STATISTICS_LENS_SHADING_MAP
99     camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
100     camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
101     if (lsSizeEntry.count == 2 && lsEntry.count > 0
102             && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
103         for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
104             lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
105             lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
106             lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
107         }
108     }
109 
110     // ANDROID_TONEMAP_CURVE_BLUE
111     // ANDROID_TONEMAP_CURVE_GREEN
112     // ANDROID_TONEMAP_CURVE_RED
113     camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
114     camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
115     camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
116     if (tcbEntry.count > 0
117             && tcbEntry.count == tcgEntry.count
118             && tcbEntry.count == tcrEntry.count) {
119         for (size_t i = 0; i < tcbEntry.count; i++) {
120             tcbEntry.data.f[i] = tcrEntry.data.f[i];
121             tcgEntry.data.f[i] = tcrEntry.data.f[i];
122         }
123     }
124 
125     return res;
126 }
127 
128 void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
129     if (result == nullptr) return;
130 
131     camera_metadata_t *meta = const_cast<camera_metadata_t *>(
132             result->mMetadata.getAndLock());
133     set_camera_metadata_vendor_id(meta, states.vendorTagId);
134     result->mMetadata.unlock(meta);
135 
136     if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
137             (int32_t*)&frameNumber, 1) != OK) {
138         SET_ERR("Failed to set frame number %d in metadata", frameNumber);
139         return;
140     }
141 
142     if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
143         SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
144         return;
145     }
146 
147     // Update vendor tag id for physical metadata
148     for (auto& physicalMetadata : result->mPhysicalMetadatas) {
149         camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
150                 physicalMetadata.mPhysicalCameraMetadata.getAndLock());
151         set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
152         physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
153     }
154 
155     // Valid result, insert into queue
156     std::list<CaptureResult>::iterator queuedResult =
157             states.resultQueue.insert(states.resultQueue.end(), CaptureResult(*result));
158     ALOGV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
159            ", burstId = %" PRId32, __FUNCTION__,
160            queuedResult->mResultExtras.requestId,
161            queuedResult->mResultExtras.frameNumber,
162            queuedResult->mResultExtras.burstId);
163 
164     states.resultSignal.notify_one();
165 }
166 
167 
168 void sendPartialCaptureResult(CaptureOutputStates& states,
169         const camera_metadata_t * partialResult,
170         const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
171     ATRACE_CALL();
172     std::lock_guard<std::mutex> l(states.outputLock);
173 
174     CaptureResult captureResult;
175     captureResult.mResultExtras = resultExtras;
176     captureResult.mMetadata = partialResult;
177 
178     // Fix up result metadata for monochrome camera.
179     status_t res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
180     if (res != OK) {
181         SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
182         return;
183     }
184 
185     // Update partial result by removing keys remapped by DistortionCorrection, ZoomRatio,
186     // and RotationAndCrop mappers.
187     std::set<uint32_t> keysToRemove;
188 
189     auto iter = states.distortionMappers.find(states.cameraId.c_str());
190     if (iter != states.distortionMappers.end()) {
191         const auto& remappedKeys = iter->second.getRemappedKeys();
192         keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
193     }
194 
195     const auto& remappedKeys = states.zoomRatioMappers[states.cameraId.c_str()].getRemappedKeys();
196     keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
197 
198     auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
199     if (mapper != states.rotateAndCropMappers.end()) {
200         const auto& remappedKeys = mapper->second.getRemappedKeys();
201         keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
202     }
203 
204     for (uint32_t key : keysToRemove) {
205         captureResult.mMetadata.erase(key);
206     }
207 
208     // Send partial result
209     if (captureResult.mMetadata.entryCount() > 0) {
210         insertResultLocked(states, &captureResult, frameNumber);
211     }
212 }
213 
214 void sendCaptureResult(
215         CaptureOutputStates& states,
216         CameraMetadata &pendingMetadata,
217         CaptureResultExtras &resultExtras,
218         CameraMetadata &collectedPartialResult,
219         uint32_t frameNumber,
220         bool reprocess, bool zslStillCapture, bool rotateAndCropAuto,
221         const std::set<std::string>& cameraIdsWithZoom,
222         const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
223     ATRACE_CALL();
224     if (pendingMetadata.isEmpty())
225         return;
226 
227     std::lock_guard<std::mutex> l(states.outputLock);
228 
229     // TODO: need to track errors for tighter bounds on expected frame number
230     if (reprocess) {
231         if (frameNumber < states.nextReprocResultFrameNum) {
232             SET_ERR("Out-of-order reprocess capture result metadata submitted! "
233                 "(got frame number %d, expecting %d)",
234                 frameNumber, states.nextReprocResultFrameNum);
235             return;
236         }
237         states.nextReprocResultFrameNum = frameNumber + 1;
238     } else if (zslStillCapture) {
239         if (frameNumber < states.nextZslResultFrameNum) {
240             SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
241                 "(got frame number %d, expecting %d)",
242                 frameNumber, states.nextZslResultFrameNum);
243             return;
244         }
245         states.nextZslResultFrameNum = frameNumber + 1;
246     } else {
247         if (frameNumber < states.nextResultFrameNum) {
248             SET_ERR("Out-of-order capture result metadata submitted! "
249                     "(got frame number %d, expecting %d)",
250                     frameNumber, states.nextResultFrameNum);
251             return;
252         }
253         states.nextResultFrameNum = frameNumber + 1;
254     }
255 
256     CaptureResult captureResult;
257     captureResult.mResultExtras = resultExtras;
258     captureResult.mMetadata = pendingMetadata;
259     captureResult.mPhysicalMetadatas = physicalMetadatas;
260 
261     // Append any previous partials to form a complete result
262     if (states.usePartialResult && !collectedPartialResult.isEmpty()) {
263         captureResult.mMetadata.append(collectedPartialResult);
264     }
265 
266     captureResult.mMetadata.sort();
267 
268     // Check that there's a timestamp in the result metadata
269     camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
270     if (timestamp.count == 0) {
271         SET_ERR("No timestamp provided by HAL for frame %d!",
272                 frameNumber);
273         return;
274     }
275     nsecs_t sensorTimestamp = timestamp.data.i64[0];
276 
277     for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
278         camera_metadata_entry timestamp =
279                 physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
280         if (timestamp.count == 0) {
281             SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
282                     String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
283             return;
284         }
285     }
286 
287     // Fix up some result metadata to account for HAL-level distortion correction
288     status_t res = OK;
289     auto iter = states.distortionMappers.find(states.cameraId.c_str());
290     if (iter != states.distortionMappers.end()) {
291         res = iter->second.correctCaptureResult(&captureResult.mMetadata);
292         if (res != OK) {
293             SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
294                     frameNumber, strerror(-res), res);
295             return;
296         }
297     }
298 
299     // Fix up result metadata to account for zoom ratio availabilities between
300     // HAL and app.
301     bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId.c_str()) == cameraIdsWithZoom.end();
302     res = states.zoomRatioMappers[states.cameraId.c_str()].updateCaptureResult(
303             &captureResult.mMetadata, zoomRatioIs1);
304     if (res != OK) {
305         SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
306                 frameNumber, strerror(-res), res);
307         return;
308     }
309 
310     // Fix up result metadata to account for rotateAndCrop in AUTO mode
311     if (rotateAndCropAuto) {
312         auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
313         if (mapper != states.rotateAndCropMappers.end()) {
314             res = mapper->second.updateCaptureResult(
315                     &captureResult.mMetadata);
316             if (res != OK) {
317                 SET_ERR("Unable to correct capture result rotate-and-crop for frame %d: %s (%d)",
318                         frameNumber, strerror(-res), res);
319                 return;
320             }
321         }
322     }
323 
324     for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
325         String8 cameraId8(physicalMetadata.mPhysicalCameraId);
326         auto mapper = states.distortionMappers.find(cameraId8.c_str());
327         if (mapper != states.distortionMappers.end()) {
328             res = mapper->second.correctCaptureResult(
329                     &physicalMetadata.mPhysicalCameraMetadata);
330             if (res != OK) {
331                 SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
332                         frameNumber, strerror(-res), res);
333                 return;
334             }
335         }
336 
337         zoomRatioIs1 = cameraIdsWithZoom.find(cameraId8.c_str()) == cameraIdsWithZoom.end();
338         res = states.zoomRatioMappers[cameraId8.c_str()].updateCaptureResult(
339                 &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
340         if (res != OK) {
341             SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
342                     "frame %d: %s(%d)", cameraId8.c_str(), frameNumber, strerror(-res), res);
343             return;
344         }
345     }
346 
347     // Fix up result metadata for monochrome camera.
348     res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
349     if (res != OK) {
350         SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
351         return;
352     }
353     for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
354         String8 cameraId8(physicalMetadata.mPhysicalCameraId);
355         res = fixupMonochromeTags(states,
356                 states.physicalDeviceInfoMap.at(cameraId8.c_str()),
357                 physicalMetadata.mPhysicalCameraMetadata);
358         if (res != OK) {
359             SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
360             return;
361         }
362     }
363 
364     std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
365     for (auto& m : physicalMetadatas) {
366         monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
367                 CameraMetadata(m.mPhysicalCameraMetadata));
368     }
369     states.tagMonitor.monitorMetadata(TagMonitor::RESULT,
370             frameNumber, sensorTimestamp, captureResult.mMetadata,
371             monitoredPhysicalMetadata);
372 
373     insertResultLocked(states, &captureResult, frameNumber);
374 }
375 
376 // Read one camera metadata blob, either from the FMQ or from the result argument.
377 // Assumes the FMQ is already protected by a lock.
378 status_t readOneCameraMetadataLocked(
379         std::unique_ptr<ResultMetadataQueue>& fmq,
380         uint64_t fmqResultSize,
381         hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
382         const hardware::camera::device::V3_2::CameraMetadata& result) {
383     if (fmqResultSize > 0) {
384         resultMetadata.resize(fmqResultSize);
385         if (fmq == nullptr) {
386             return NO_MEMORY; // logged in initialize()
387         }
388         if (!fmq->read(resultMetadata.data(), fmqResultSize)) {
389             ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
390                     __FUNCTION__, fmqResultSize);
391             return INVALID_OPERATION;
392         }
393     } else {
394         resultMetadata.setToExternal(const_cast<uint8_t *>(result.data()),
395                 result.size());
396     }
397 
398     if (resultMetadata.size() != 0) {
399         status_t res;
400         const camera_metadata_t* metadata =
401                 reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
402         size_t expected_metadata_size = resultMetadata.size();
403         if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
404             ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
405                     __FUNCTION__, strerror(-res), res);
406             return INVALID_OPERATION;
407         }
408     }
409 
410     return OK;
411 }
412 
413 void removeInFlightMapEntryLocked(CaptureOutputStates& states, int idx) {
414     ATRACE_CALL();
415     InFlightRequestMap& inflightMap = states.inflightMap;
416     nsecs_t duration = inflightMap.valueAt(idx).maxExpectedDuration;
417     inflightMap.removeItemsAt(idx, 1);
418 
419     states.inflightIntf.onInflightEntryRemovedLocked(duration);
420 }
421 
422 void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
423     InFlightRequestMap& inflightMap = states.inflightMap;
424     const InFlightRequest &request = inflightMap.valueAt(idx);
425     const uint32_t frameNumber = inflightMap.keyAt(idx);
426     SessionStatsBuilder& sessionStatsBuilder = states.sessionStatsBuilder;
427 
428     nsecs_t sensorTimestamp = request.sensorTimestamp;
429     nsecs_t shutterTimestamp = request.shutterTimestamp;
430 
431     // Check if it's okay to remove the request from InFlightMap:
432     // In the case of a successful request:
433     //      all input and output buffers, all result metadata, shutter callback
434     //      arrived.
435     // In the case of an unsuccessful request:
436     //      all input and output buffers, as well as request/result error notifications, arrived.
437     if (request.numBuffersLeft == 0 &&
438             (request.skipResultMetadata ||
439             (request.haveResultMetadata && shutterTimestamp != 0))) {
440         if (request.stillCapture) {
441             ATRACE_ASYNC_END("still capture", frameNumber);
442         }
443 
444         ATRACE_ASYNC_END("frame capture", frameNumber);
445 
446         // Validation check - if sensor timestamp matches shutter timestamp in the
447         // case of request having callback.
448         if (request.hasCallback && request.requestStatus == OK &&
449                 sensorTimestamp != shutterTimestamp) {
450             SET_ERR("sensor timestamp (%" PRId64
451                 ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
452                 sensorTimestamp, frameNumber, shutterTimestamp);
453         }
454 
455         // for an unsuccessful request, it may have pending output buffers to
456         // return.
457         assert(request.requestStatus != OK ||
458                request.pendingOutputBuffers.size() == 0);
459 
460         returnOutputBuffers(
461             states.useHalBufManager, states.listener,
462             request.pendingOutputBuffers.array(),
463             request.pendingOutputBuffers.size(), 0,
464             /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
465             /*timestampIncreasing*/true,
466             request.outputSurfaces, request.resultExtras,
467             request.errorBufStrategy);
468 
469         // Note down the just completed frame number
470         if (request.hasInputBuffer) {
471             states.lastCompletedReprocessFrameNumber = frameNumber;
472         } else if (request.zslCapture) {
473             states.lastCompletedZslFrameNumber = frameNumber;
474         } else {
475             states.lastCompletedRegularFrameNumber = frameNumber;
476         }
477 
478         sessionStatsBuilder.incResultCounter(request.skipResultMetadata);
479 
480         removeInFlightMapEntryLocked(states, idx);
481         ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
482     }
483 
484     states.inflightIntf.checkInflightMapLengthLocked();
485 }
486 
487 // Erase the subset of physicalCameraIds that contains id
488 bool erasePhysicalCameraIdSet(
489         std::set<std::set<String8>>& physicalCameraIds, const String8& id) {
490     bool found = false;
491     for (auto iter = physicalCameraIds.begin(); iter != physicalCameraIds.end(); iter++) {
492         if (iter->count(id) == 1) {
493             physicalCameraIds.erase(iter);
494             found = true;
495             break;
496         }
497     }
498     return found;
499 }
500 
501 void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
502     ATRACE_CALL();
503 
504     status_t res;
505 
506     uint32_t frameNumber = result->frame_number;
507     if (result->result == NULL && result->num_output_buffers == 0 &&
508             result->input_buffer == NULL) {
509         SET_ERR("No result data provided by HAL for frame %d",
510                 frameNumber);
511         return;
512     }
513 
514     if (!states.usePartialResult &&
515             result->result != NULL &&
516             result->partial_result != 1) {
517         SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
518                 " if partial result is not supported",
519                 frameNumber, result->partial_result);
520         return;
521     }
522 
523     bool isPartialResult = false;
524     CameraMetadata collectedPartialResult;
525     bool hasInputBufferInRequest = false;
526 
527     // Get shutter timestamp and resultExtras from list of in-flight requests,
528     // where it was added by the shutter notification for this frame. If the
529     // shutter timestamp isn't received yet, append the output buffers to the
530     // in-flight request and they will be returned when the shutter timestamp
531     // arrives. Update the in-flight status and remove the in-flight entry if
532     // all result data and shutter timestamp have been received.
533     nsecs_t shutterTimestamp = 0;
534     {
535         std::lock_guard<std::mutex> l(states.inflightLock);
536         ssize_t idx = states.inflightMap.indexOfKey(frameNumber);
537         if (idx == NAME_NOT_FOUND) {
538             SET_ERR("Unknown frame number for capture result: %d",
539                     frameNumber);
540             return;
541         }
542         InFlightRequest &request = states.inflightMap.editValueAt(idx);
543         ALOGVV("%s: got InFlightRequest requestId = %" PRId32
544                 ", frameNumber = %" PRId64 ", burstId = %" PRId32
545                 ", partialResultCount = %d/%d, hasCallback = %d, num_output_buffers %d"
546                 ", usePartialResult = %d",
547                 __FUNCTION__, request.resultExtras.requestId,
548                 request.resultExtras.frameNumber, request.resultExtras.burstId,
549                 result->partial_result, states.numPartialResults,
550                 request.hasCallback, result->num_output_buffers,
551                 states.usePartialResult);
552         // Always update the partial count to the latest one if it's not 0
553         // (buffers only). When framework aggregates adjacent partial results
554         // into one, the latest partial count will be used.
555         if (result->partial_result != 0)
556             request.resultExtras.partialResultCount = result->partial_result;
557 
558         // Check if this result carries only partial metadata
559         if (states.usePartialResult && result->result != NULL) {
560             if (result->partial_result > states.numPartialResults || result->partial_result < 1) {
561                 SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
562                         " the range of [1, %d] when metadata is included in the result",
563                         frameNumber, result->partial_result, states.numPartialResults);
564                 return;
565             }
566             isPartialResult = (result->partial_result < states.numPartialResults);
567             if (isPartialResult && result->num_physcam_metadata) {
568                 SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
569                         " physical camera result", frameNumber);
570                 return;
571             }
572             if (isPartialResult) {
573                 request.collectedPartialResult.append(result->result);
574             }
575 
576             if (isPartialResult && request.hasCallback) {
577                 // Send partial capture result
578                 sendPartialCaptureResult(states, result->result, request.resultExtras,
579                         frameNumber);
580             }
581         }
582 
583         shutterTimestamp = request.shutterTimestamp;
584         hasInputBufferInRequest = request.hasInputBuffer;
585 
586         // Did we get the (final) result metadata for this capture?
587         if (result->result != NULL && !isPartialResult) {
588             if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
589                 SET_ERR("Expected physical Camera metadata count %d not equal to actual count %d",
590                         request.physicalCameraIds.size(), result->num_physcam_metadata);
591                 return;
592             }
593             if (request.haveResultMetadata) {
594                 SET_ERR("Called multiple times with metadata for frame %d",
595                         frameNumber);
596                 return;
597             }
598             for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
599                 String8 physicalId(result->physcam_ids[i]);
600                 bool validPhysicalCameraMetadata =
601                         erasePhysicalCameraIdSet(request.physicalCameraIds, physicalId);
602                 if (!validPhysicalCameraMetadata) {
603                     SET_ERR("Unexpected total result for frame %d camera %s",
604                             frameNumber, physicalId.c_str());
605                     return;
606                 }
607             }
608             if (states.usePartialResult &&
609                     !request.collectedPartialResult.isEmpty()) {
610                 collectedPartialResult.acquire(
611                     request.collectedPartialResult);
612             }
613             request.haveResultMetadata = true;
614             request.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
615         }
616 
617         uint32_t numBuffersReturned = result->num_output_buffers;
618         if (result->input_buffer != NULL) {
619             if (hasInputBufferInRequest) {
620                 numBuffersReturned += 1;
621             } else {
622                 ALOGW("%s: Input buffer should be NULL if there is no input"
623                         " buffer sent in the request",
624                         __FUNCTION__);
625             }
626         }
627         request.numBuffersLeft -= numBuffersReturned;
628         if (request.numBuffersLeft < 0) {
629             SET_ERR("Too many buffers returned for frame %d",
630                     frameNumber);
631             return;
632         }
633 
634         camera_metadata_ro_entry_t entry;
635         res = find_camera_metadata_ro_entry(result->result,
636                 ANDROID_SENSOR_TIMESTAMP, &entry);
637         if (res == OK && entry.count == 1) {
638             request.sensorTimestamp = entry.data.i64[0];
639         }
640 
641         // If shutter event isn't received yet, do not return the pending output
642         // buffers.
643         request.pendingOutputBuffers.appendArray(result->output_buffers,
644                 result->num_output_buffers);
645         if (shutterTimestamp != 0) {
646             returnAndRemovePendingOutputBuffers(
647                 states.useHalBufManager, states.listener,
648                 request, states.sessionStatsBuilder);
649         }
650 
651         if (result->result != NULL && !isPartialResult) {
652             for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
653                 CameraMetadata physicalMetadata;
654                 physicalMetadata.append(result->physcam_metadata[i]);
655                 request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
656                         physicalMetadata});
657             }
658             if (shutterTimestamp == 0) {
659                 request.pendingMetadata = result->result;
660                 request.collectedPartialResult = collectedPartialResult;
661             } else if (request.hasCallback) {
662                 CameraMetadata metadata;
663                 metadata = result->result;
664                 sendCaptureResult(states, metadata, request.resultExtras,
665                     collectedPartialResult, frameNumber,
666                     hasInputBufferInRequest, request.zslCapture && request.stillCapture,
667                     request.rotateAndCropAuto, request.cameraIdsWithZoom,
668                     request.physicalMetadatas);
669             }
670         }
671         removeInFlightRequestIfReadyLocked(states, idx);
672     } // scope for states.inFlightLock
673 
674     if (result->input_buffer != NULL) {
675         if (hasInputBufferInRequest) {
676             Camera3Stream *stream =
677                 Camera3Stream::cast(result->input_buffer->stream);
678             res = stream->returnInputBuffer(*(result->input_buffer));
679             // Note: stream may be deallocated at this point, if this buffer was the
680             // last reference to it.
681             if (res != OK) {
682                 ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
683                       " its stream: %s (%d)", __FUNCTION__,
684                       frameNumber, strerror(-res), res);
685             }
686         } else {
687             ALOGW("%s: Input buffer should be NULL if there is no input"
688                     " buffer sent in the request, skipping input buffer return.",
689                     __FUNCTION__);
690         }
691     }
692 }
693 
694 void processOneCaptureResultLocked(
695         CaptureOutputStates& states,
696         const hardware::camera::device::V3_2::CaptureResult& result,
697         const hardware::hidl_vec<
698                 hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
699     using hardware::camera::device::V3_2::StreamBuffer;
700     using hardware::camera::device::V3_2::BufferStatus;
701     std::unique_ptr<ResultMetadataQueue>& fmq = states.fmq;
702     BufferRecordsInterface& bufferRecords = states.bufferRecordsIntf;
703     camera_capture_result r;
704     status_t res;
705     r.frame_number = result.frameNumber;
706 
707     // Read and validate the result metadata.
708     hardware::camera::device::V3_2::CameraMetadata resultMetadata;
709     res = readOneCameraMetadataLocked(
710             fmq, result.fmqResultSize,
711             resultMetadata, result.result);
712     if (res != OK) {
713         ALOGE("%s: Frame %d: Failed to read capture result metadata",
714                 __FUNCTION__, result.frameNumber);
715         return;
716     }
717     r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
718 
719     // Read and validate physical camera metadata
720     size_t physResultCount = physicalCameraMetadata.size();
721     std::vector<const char*> physCamIds(physResultCount);
722     std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
723     std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
724     physResultMetadata.resize(physResultCount);
725     for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
726         res = readOneCameraMetadataLocked(fmq, physicalCameraMetadata[i].fmqMetadataSize,
727                 physResultMetadata[i], physicalCameraMetadata[i].metadata);
728         if (res != OK) {
729             ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
730                     __FUNCTION__, result.frameNumber,
731                     physicalCameraMetadata[i].physicalCameraId.c_str());
732             return;
733         }
734         physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
735         phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
736                 physResultMetadata[i].data());
737     }
738     r.num_physcam_metadata = physResultCount;
739     r.physcam_ids = physCamIds.data();
740     r.physcam_metadata = phyCamMetadatas.data();
741 
742     std::vector<camera_stream_buffer_t> outputBuffers(result.outputBuffers.size());
743     std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
744     for (size_t i = 0; i < result.outputBuffers.size(); i++) {
745         auto& bDst = outputBuffers[i];
746         const StreamBuffer &bSrc = result.outputBuffers[i];
747 
748         sp<Camera3StreamInterface> stream = states.outputStreams.get(bSrc.streamId);
749         if (stream == nullptr) {
750             ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
751                     __FUNCTION__, result.frameNumber, i, bSrc.streamId);
752             return;
753         }
754         bDst.stream = stream->asHalStream();
755 
756         bool noBufferReturned = false;
757         buffer_handle_t *buffer = nullptr;
758         if (states.useHalBufManager) {
759             // This is suspicious most of the time, but can be legitimate during flush, where the
760             // HAL has to return a capture result before a buffer has been requested.
761             if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
762                 if (bSrc.status == BufferStatus::OK) {
763                     ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
764                             __FUNCTION__, result.frameNumber, i, bSrc.streamId);
765                     // Still proceeds so other buffers can be returned
766                 }
767                 noBufferReturned = true;
768             }
769             if (noBufferReturned) {
770                 res = OK;
771             } else {
772                 res = bufferRecords.popInflightRequestBuffer(bSrc.bufferId, &buffer);
773             }
774         } else {
775             res = bufferRecords.popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
776         }
777 
778         if (res != OK) {
779             ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
780                     __FUNCTION__, result.frameNumber, i, bSrc.streamId);
781             return;
782         }
783 
784         bDst.buffer = buffer;
785         bDst.status = mapHidlBufferStatus(bSrc.status);
786         bDst.acquire_fence = -1;
787         if (bSrc.releaseFence == nullptr) {
788             bDst.release_fence = -1;
789         } else if (bSrc.releaseFence->numFds == 1) {
790             if (noBufferReturned) {
791                 ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
792             }
793             bDst.release_fence = dup(bSrc.releaseFence->data[0]);
794         } else {
795             ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
796                     __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
797             return;
798         }
799     }
800     r.num_output_buffers = outputBuffers.size();
801     r.output_buffers = outputBuffers.data();
802 
803     camera_stream_buffer_t inputBuffer;
804     if (result.inputBuffer.streamId == -1) {
805         r.input_buffer = nullptr;
806     } else {
807         if (states.inputStream->getId() != result.inputBuffer.streamId) {
808             ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
809                     result.frameNumber, result.inputBuffer.streamId);
810             return;
811         }
812         inputBuffer.stream = states.inputStream->asHalStream();
813         buffer_handle_t *buffer;
814         res = bufferRecords.popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
815                 &buffer);
816         if (res != OK) {
817             ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
818                     __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
819             return;
820         }
821         inputBuffer.buffer = buffer;
822         inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
823         inputBuffer.acquire_fence = -1;
824         if (result.inputBuffer.releaseFence == nullptr) {
825             inputBuffer.release_fence = -1;
826         } else if (result.inputBuffer.releaseFence->numFds == 1) {
827             inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
828         } else {
829             ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
830                     __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
831             return;
832         }
833         r.input_buffer = &inputBuffer;
834     }
835 
836     r.partial_result = result.partialResult;
837 
838     processCaptureResult(states, &r);
839 }
840 
841 void returnOutputBuffers(
842         bool useHalBufManager,
843         sp<NotificationListener> listener,
844         const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
845         nsecs_t timestamp, bool requested, nsecs_t requestTimeNs,
846         SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing,
847         const SurfaceMap& outputSurfaces,
848         const CaptureResultExtras &inResultExtras,
849         ERROR_BUF_STRATEGY errorBufStrategy) {
850 
851     for (size_t i = 0; i < numBuffers; i++)
852     {
853         Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
854         int streamId = stream->getId();
855 
856         // Call notify(ERROR_BUFFER) if necessary.
857         if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR &&
858                 errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
859             if (listener != nullptr) {
860                 CaptureResultExtras extras = inResultExtras;
861                 extras.errorStreamId = streamId;
862                 listener->notifyError(
863                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
864                         extras);
865             }
866         }
867 
868         if (outputBuffers[i].buffer == nullptr) {
869             if (!useHalBufManager) {
870                 // With the HAL buffer management API, the HAL sometimes has to return buffers
871                 // that do not yet have an output buffer handle filled in. However, this is
872                 // illegal if the HAL buffer management API is not in use.
873                 ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
874             } else {
875                 if (requested) {
876                     sessionStatsBuilder.incCounter(streamId, /*dropped*/true, 0);
877                 }
878             }
879             continue;
880         }
881 
882         const auto& it = outputSurfaces.find(streamId);
883         status_t res = OK;
884 
885         // Do not return the buffer if the buffer status is error, and the error
886         // buffer strategy is CACHE.
887         if (outputBuffers[i].status != CAMERA_BUFFER_STATUS_ERROR ||
888                 errorBufStrategy != ERROR_BUF_CACHE) {
889             if (it != outputSurfaces.end()) {
890                 res = stream->returnBuffer(
891                         outputBuffers[i], timestamp, timestampIncreasing, it->second,
892                         inResultExtras.frameNumber);
893             } else {
894                 res = stream->returnBuffer(
895                         outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
896                         inResultExtras.frameNumber);
897             }
898         }
899         // Note: stream may be deallocated at this point, if this buffer was
900         // the last reference to it.
901         bool dropped = false;
902         if (res == NO_INIT || res == DEAD_OBJECT) {
903             ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
904             sessionStatsBuilder.stopCounter(streamId);
905         } else if (res != OK) {
906             ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
907             dropped = true;
908         } else {
909             if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR || timestamp == 0) {
910                 dropped = true;
911             }
912         }
913         if (requested) {
914             nsecs_t bufferTimeNs = systemTime();
915             int32_t captureLatencyMs = ns2ms(bufferTimeNs - requestTimeNs);
916             sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
917         }
918 
919         // Long processing consumers can cause returnBuffer timeout for shared stream
920         // If that happens, cancel the buffer and send a buffer error to client
921         if (it != outputSurfaces.end() && res == TIMED_OUT &&
922                 outputBuffers[i].status == CAMERA_BUFFER_STATUS_OK) {
923             // cancel the buffer
924             camera_stream_buffer_t sb = outputBuffers[i];
925             sb.status = CAMERA_BUFFER_STATUS_ERROR;
926             stream->returnBuffer(sb, /*timestamp*/0,
927                     timestampIncreasing, std::vector<size_t> (),
928                     inResultExtras.frameNumber);
929 
930             if (listener != nullptr) {
931                 CaptureResultExtras extras = inResultExtras;
932                 extras.errorStreamId = streamId;
933                 listener->notifyError(
934                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
935                         extras);
936             }
937         }
938     }
939 }
940 
941 void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
942         sp<NotificationListener> listener, InFlightRequest& request,
943         SessionStatsBuilder& sessionStatsBuilder) {
944     bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
945     returnOutputBuffers(useHalBufManager, listener,
946             request.pendingOutputBuffers.array(),
947             request.pendingOutputBuffers.size(),
948             request.shutterTimestamp, /*requested*/true,
949             request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
950             request.outputSurfaces, request.resultExtras,
951             request.errorBufStrategy);
952 
953     // Remove error buffers that are not cached.
954     for (auto iter = request.pendingOutputBuffers.begin();
955             iter != request.pendingOutputBuffers.end(); ) {
956         if (request.errorBufStrategy != ERROR_BUF_CACHE ||
957                 iter->status != CAMERA_BUFFER_STATUS_ERROR) {
958             iter = request.pendingOutputBuffers.erase(iter);
959         } else {
960             iter++;
961         }
962     }
963 }
964 
965 void notifyShutter(CaptureOutputStates& states, const camera_shutter_msg_t &msg) {
966     ATRACE_CALL();
967     ssize_t idx;
968 
969     // Set timestamp for the request in the in-flight tracking
970     // and get the request ID to send upstream
971     {
972         std::lock_guard<std::mutex> l(states.inflightLock);
973         InFlightRequestMap& inflightMap = states.inflightMap;
974         idx = inflightMap.indexOfKey(msg.frame_number);
975         if (idx >= 0) {
976             InFlightRequest &r = inflightMap.editValueAt(idx);
977 
978             // Verify ordering of shutter notifications
979             {
980                 std::lock_guard<std::mutex> l(states.outputLock);
981                 // TODO: need to track errors for tighter bounds on expected frame number.
982                 if (r.hasInputBuffer) {
983                     if (msg.frame_number < states.nextReprocShutterFrameNum) {
984                         SET_ERR("Reprocess shutter notification out-of-order. Expected "
985                                 "notification for frame %d, got frame %d",
986                                 states.nextReprocShutterFrameNum, msg.frame_number);
987                         return;
988                     }
989                     states.nextReprocShutterFrameNum = msg.frame_number + 1;
990                 } else if (r.zslCapture && r.stillCapture) {
991                     if (msg.frame_number < states.nextZslShutterFrameNum) {
992                         SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
993                                 "notification for frame %d, got frame %d",
994                                 states.nextZslShutterFrameNum, msg.frame_number);
995                         return;
996                     }
997                     states.nextZslShutterFrameNum = msg.frame_number + 1;
998                 } else {
999                     if (msg.frame_number < states.nextShutterFrameNum) {
1000                         SET_ERR("Shutter notification out-of-order. Expected "
1001                                 "notification for frame %d, got frame %d",
1002                                 states.nextShutterFrameNum, msg.frame_number);
1003                         return;
1004                     }
1005                     states.nextShutterFrameNum = msg.frame_number + 1;
1006                 }
1007             }
1008 
1009             r.shutterTimestamp = msg.timestamp;
1010             if (r.hasCallback) {
1011                 ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
1012                     states.cameraId.string(), __FUNCTION__,
1013                     msg.frame_number, r.resultExtras.requestId, msg.timestamp);
1014                 // Call listener, if any
1015                 if (states.listener != nullptr) {
1016                     r.resultExtras.lastCompletedRegularFrameNumber =
1017                             states.lastCompletedRegularFrameNumber;
1018                     r.resultExtras.lastCompletedReprocessFrameNumber =
1019                             states.lastCompletedReprocessFrameNumber;
1020                     r.resultExtras.lastCompletedZslFrameNumber =
1021                             states.lastCompletedZslFrameNumber;
1022                     states.listener->notifyShutter(r.resultExtras, msg.timestamp);
1023                 }
1024                 // send pending result and buffers
1025                 sendCaptureResult(states,
1026                     r.pendingMetadata, r.resultExtras,
1027                     r.collectedPartialResult, msg.frame_number,
1028                     r.hasInputBuffer, r.zslCapture && r.stillCapture,
1029                     r.rotateAndCropAuto, r.cameraIdsWithZoom, r.physicalMetadatas);
1030             }
1031             returnAndRemovePendingOutputBuffers(
1032                     states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);
1033 
1034             removeInFlightRequestIfReadyLocked(states, idx);
1035         }
1036     }
1037     if (idx < 0) {
1038         SET_ERR("Shutter notification for non-existent frame number %d",
1039                 msg.frame_number);
1040     }
1041 }
1042 
1043 void notifyError(CaptureOutputStates& states, const camera_error_msg_t &msg) {
1044     ATRACE_CALL();
1045     // Map camera HAL error codes to ICameraDeviceCallback error codes
1046     // Index into this with the HAL error code
1047     static const int32_t halErrorMap[CAMERA_MSG_NUM_ERRORS] = {
1048         // 0 = Unused error code
1049         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
1050         // 1 = CAMERA_MSG_ERROR_DEVICE
1051         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
1052         // 2 = CAMERA_MSG_ERROR_REQUEST
1053         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
1054         // 3 = CAMERA_MSG_ERROR_RESULT
1055         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
1056         // 4 = CAMERA_MSG_ERROR_BUFFER
1057         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
1058     };
1059 
1060     int32_t errorCode =
1061             ((msg.error_code >= 0) &&
1062                     (msg.error_code < CAMERA_MSG_NUM_ERRORS)) ?
1063             halErrorMap[msg.error_code] :
1064             hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;
1065 
1066     int streamId = 0;
1067     String16 physicalCameraId;
1068     if (msg.error_stream != nullptr) {
1069         Camera3Stream *stream =
1070                 Camera3Stream::cast(msg.error_stream);
1071         streamId = stream->getId();
1072         physicalCameraId = String16(stream->physicalCameraId());
1073     }
1074     ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
1075             states.cameraId.string(), __FUNCTION__, msg.frame_number,
1076             streamId, msg.error_code);
1077 
1078     CaptureResultExtras resultExtras;
1079     switch (errorCode) {
1080         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
1081             // SET_ERR calls into listener to notify application
1082             SET_ERR("Camera HAL reported serious device error");
1083             break;
1084         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
1085         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
1086             {
1087                 std::lock_guard<std::mutex> l(states.inflightLock);
1088                 ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
1089                 if (idx >= 0) {
1090                     InFlightRequest &r = states.inflightMap.editValueAt(idx);
1091                     r.requestStatus = msg.error_code;
1092                     resultExtras = r.resultExtras;
1093                     bool physicalDeviceResultError = false;
1094                     if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
1095                             errorCode) {
1096                         if (physicalCameraId.size() > 0) {
1097                             String8 cameraId(physicalCameraId);
1098                             bool validPhysicalCameraId =
1099                                     erasePhysicalCameraIdSet(r.physicalCameraIds, cameraId);
1100                             if (!validPhysicalCameraId) {
1101                                 ALOGE("%s: Reported result failure for physical camera device: %s "
1102                                         " which is not part of the respective request!",
1103                                         __FUNCTION__, cameraId.string());
1104                                 break;
1105                             }
1106                             resultExtras.errorPhysicalCameraId = physicalCameraId;
1107                             physicalDeviceResultError = true;
1108                         }
1109                     }
1110 
1111                     if (!physicalDeviceResultError) {
1112                         r.skipResultMetadata = true;
1113                         if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT
1114                                 == errorCode) {
1115                             r.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
1116                         } else {
1117                             // errorCode is ERROR_CAMERA_REQUEST
1118                             r.errorBufStrategy = ERROR_BUF_RETURN;
1119                         }
1120 
1121                         // Check whether the buffers returned. If they returned,
1122                         // remove inflight request.
1123                         removeInFlightRequestIfReadyLocked(states, idx);
1124                     }
1125                 } else {
1126                     resultExtras.frameNumber = msg.frame_number;
1127                     ALOGE("Camera %s: %s: cannot find in-flight request on "
1128                             "frame %" PRId64 " error", states.cameraId.string(), __FUNCTION__,
1129                             resultExtras.frameNumber);
1130                 }
1131             }
1132             resultExtras.errorStreamId = streamId;
1133             if (states.listener != nullptr) {
1134                 states.listener->notifyError(errorCode, resultExtras);
1135             } else {
1136                 ALOGE("Camera %s: %s: no listener available",
1137                         states.cameraId.string(), __FUNCTION__);
1138             }
1139             break;
1140         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
1141             // Do not depend on HAL ERROR_CAMERA_BUFFER to send buffer error
1142             // callback to the app. Rather, use STATUS_ERROR of image buffers.
1143             break;
1144         default:
1145             // SET_ERR calls notifyError
1146             SET_ERR("Unknown error message from HAL: %d", msg.error_code);
1147             break;
1148     }
1149 }
1150 
1151 void notify(CaptureOutputStates& states, const camera_notify_msg *msg) {
1152     switch (msg->type) {
1153         case CAMERA_MSG_ERROR: {
1154             notifyError(states, msg->message.error);
1155             break;
1156         }
1157         case CAMERA_MSG_SHUTTER: {
1158             notifyShutter(states, msg->message.shutter);
1159             break;
1160         }
1161         default:
1162             SET_ERR("Unknown notify message from HAL: %d",
1163                     msg->type);
1164     }
1165 }
1166 
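// Translate a HIDL V3_2 NotifyMsg into the legacy camera_notify_msg layout and
// forward it to the handler above.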
1167 void notify(CaptureOutputStates& states,
1168         const hardware::camera::device::V3_2::NotifyMsg& msg) {
1169     using android::hardware::camera::device::V3_2::MsgType;
1170     using android::hardware::camera::device::V3_2::ErrorCode;
1171 
1172     ATRACE_CALL();
1173     camera_notify_msg m;
1174     switch (msg.type) {
1175         case MsgType::ERROR:
1176             m.type = CAMERA_MSG_ERROR;
1177             m.message.error.frame_number = msg.msg.error.frameNumber;
1178             if (msg.msg.error.errorStreamId >= 0) {
1179                 sp<Camera3StreamInterface> stream =
1180                         states.outputStreams.get(msg.msg.error.errorStreamId);
1181                 if (stream == nullptr) {
1182                     ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
1183                             m.message.error.frame_number, msg.msg.error.errorStreamId);
1184                     return;
1185                 }
1186                 m.message.error.error_stream = stream->asHalStream();
1187             } else {
1188                 m.message.error.error_stream = nullptr;
1189             }
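            // Map the HIDL error code onto the corresponding legacy error code.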
1190             switch (msg.msg.error.errorCode) {
1191                 case ErrorCode::ERROR_DEVICE:
1192                     m.message.error.error_code = CAMERA_MSG_ERROR_DEVICE;
1193                     break;
1194                 case ErrorCode::ERROR_REQUEST:
1195                     m.message.error.error_code = CAMERA_MSG_ERROR_REQUEST;
1196                     break;
1197                 case ErrorCode::ERROR_RESULT:
1198                     m.message.error.error_code = CAMERA_MSG_ERROR_RESULT;
1199                     break;
1200                 case ErrorCode::ERROR_BUFFER:
1201                     m.message.error.error_code = CAMERA_MSG_ERROR_BUFFER;
1202                     break;
1203             }
1204             break;
1205         case MsgType::SHUTTER:
1206             m.type = CAMERA_MSG_SHUTTER;
1207             m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
1208             m.message.shutter.timestamp = msg.msg.shutter.timestamp;
1209             break;
1210     }
1211     notify(states, &m);
1212 }
1213 
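// Handle a buffer request from the HAL (only valid when the HAL buffer manager is
// in use): validate the request list, acquire buffers from the corresponding output
// streams, and deliver the results through the supplied callback.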
1214 void requestStreamBuffers(RequestBufferStates& states,
1215         const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
1216         hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb) {
1217     using android::hardware::camera::device::V3_2::BufferStatus;
1218     using android::hardware::camera::device::V3_2::StreamBuffer;
1219     using android::hardware::camera::device::V3_5::BufferRequestStatus;
1220     using android::hardware::camera::device::V3_5::StreamBufferRet;
1221     using android::hardware::camera::device::V3_5::StreamBufferRequestError;
1222 
1223     std::lock_guard<std::mutex> lock(states.reqBufferLock);
1224 
1225     hardware::hidl_vec<StreamBufferRet> bufRets;
1226     if (!states.useHalBufManager) {
1227         ALOGE("%s: Camera %s does not support HAL buffer management",
1228                 __FUNCTION__, states.cameraId.string());
1229         _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
1230         return;
1231     }
1232 
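    // Track stream ids seen so far so duplicate entries can be rejected below.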
1233     SortedVector<int32_t> streamIds;
1234     ssize_t sz = streamIds.setCapacity(bufReqs.size());
1235     if (sz < 0 || static_cast<size_t>(sz) != bufReqs.size()) {
1236         ALOGE("%s: failed to allocate memory for %zu buffer requests",
1237                 __FUNCTION__, bufReqs.size());
1238         _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
1239         return;
1240     }
1241 
1242     if (bufReqs.size() > states.outputStreams.size()) {
1243         ALOGE("%s: too many buffer requests (%zu > # of output streams %zu)",
1244                 __FUNCTION__, bufReqs.size(), states.outputStreams.size());
1245         _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
1246         return;
1247     }
1248 
1249     // Check for repeated streamId
1250     for (const auto& bufReq : bufReqs) {
1251         if (streamIds.indexOf(bufReq.streamId) != NAME_NOT_FOUND) {
1252             ALOGE("%s: Stream %d appears multiple times in buffer requests",
1253                     __FUNCTION__, bufReq.streamId);
1254             _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
1255             return;
1256         }
1257         streamIds.add(bufReq.streamId);
1258     }
1259 
1260     if (!states.reqBufferIntf.startRequestBuffer()) {
1261         ALOGE("%s: buffer requests are disallowed while the camera service is configuring",
1262                 __FUNCTION__);
1263         _hidl_cb(BufferRequestStatus::FAILED_CONFIGURING, bufRets);
1264         return;
1265     }
1266 
1267     bufRets.resize(bufReqs.size());
1268 
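    // Fulfill each stream's buffer request independently; track whether all or only
    // some succeed so the final status can be OK, FAILED_PARTIAL, or FAILED_UNKNOWN.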
1269     bool allReqsSucceeds = true;
1270     bool oneReqSucceeds = false;
1271     for (size_t i = 0; i < bufReqs.size(); i++) {
1272         const auto& bufReq = bufReqs[i];
1273         auto& bufRet = bufRets[i];
1274         int32_t streamId = bufReq.streamId;
1275         sp<Camera3OutputStreamInterface> outputStream = states.outputStreams.get(streamId);
1276         if (outputStream == nullptr) {
1277             ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
1278             hardware::hidl_vec<StreamBufferRet> emptyBufRets;
1279             _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, emptyBufRets);
1280             states.reqBufferIntf.endRequestBuffer();
1281             return;
1282         }
1283 
1284         bufRet.streamId = streamId;
1285         if (outputStream->isAbandoned()) {
1286             bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
1287             allReqsSucceeds = false;
1288             continue;
1289         }
1290 
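        // Reject requests that would push the number of outstanding buffers past the
        // stream's max_buffers limit.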
1291         size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
1292         uint32_t numBuffersRequested = bufReq.numBuffersRequested;
1293         size_t totalHandout = handOutBufferCount + numBuffersRequested;
1294         uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
1295         if (totalHandout > maxBuffers) {
1296             // Not able to hand out enough buffers. Exit early for this stream.
1297             ALOGE("%s: too many buffers requested for stream %d: at HAL: %zu + requesting: %u"
1298                     " > max: %u", __FUNCTION__, streamId, handOutBufferCount,
1299                     numBuffersRequested, maxBuffers);
1300             bufRet.val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
1301             allReqsSucceeds = false;
1302             continue;
1303         }
1304 
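        // Acquire the requested buffers one by one, wrapping each in a HIDL
        // StreamBuffer and recording it as in-flight; on any failure the partially
        // acquired set is unwound further below.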
1305         hardware::hidl_vec<StreamBuffer> tmpRetBuffers(numBuffersRequested);
1306         bool currentReqSucceeds = true;
1307         std::vector<camera_stream_buffer_t> streamBuffers(numBuffersRequested);
1308         size_t numAllocatedBuffers = 0;
1309         size_t numPushedInflightBuffers = 0;
1310         for (size_t b = 0; b < numBuffersRequested; b++) {
1311             camera_stream_buffer_t& sb = streamBuffers[b];
1312             // Since this method can run concurrently with the request thread,
1313             // we need to refresh the wait duration every time we call getBuffer.
1314             nsecs_t waitDuration = states.reqBufferIntf.getWaitDuration();
1315             status_t res = outputStream->getBuffer(&sb, waitDuration);
1316             if (res != OK) {
1317                 if (res == NO_INIT || res == DEAD_OBJECT) {
1318                     ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
1319                             __FUNCTION__, streamId, strerror(-res), res);
1320                     bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
1321                     states.sessionStatsBuilder.stopCounter(streamId);
1322                 } else {
1323                     ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
1324                             __FUNCTION__, streamId, strerror(-res), res);
1325                     if (res == TIMED_OUT || res == NO_MEMORY) {
1326                         bufRet.val.error(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
1327                     } else {
1328                         bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
1329                     }
1330                 }
1331                 currentReqSucceeds = false;
1332                 break;
1333             }
1334             numAllocatedBuffers++;
1335 
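            // Only send the native handle for buffers the HAL has not seen before;
            // previously imported buffers are referenced by their cached bufferId.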
1336             buffer_handle_t *buffer = sb.buffer;
1337             auto pair = states.bufferRecordsIntf.getBufferId(*buffer, streamId);
1338             bool isNewBuffer = pair.first;
1339             uint64_t bufferId = pair.second;
1340             StreamBuffer& hBuf = tmpRetBuffers[b];
1341 
1342             hBuf.streamId = streamId;
1343             hBuf.bufferId = bufferId;
1344             hBuf.buffer = (isNewBuffer) ? *buffer : nullptr;
1345             hBuf.status = BufferStatus::OK;
1346             hBuf.releaseFence = nullptr;
1347 
1348             native_handle_t *acquireFence = nullptr;
1349             if (sb.acquire_fence != -1) {
1350                 acquireFence = native_handle_create(1, 0);
1351                 acquireFence->data[0] = sb.acquire_fence;
1352             }
1353             hBuf.acquireFence.setTo(acquireFence, /*shouldOwn*/true);
1354             hBuf.releaseFence = nullptr;
1355 
1356             res = states.bufferRecordsIntf.pushInflightRequestBuffer(bufferId, buffer, streamId);
1357             if (res != OK) {
1358                 ALOGE("%s: Can't register request buffer for stream %d: %s (%d)",
1359                         __FUNCTION__, streamId, strerror(-res), res);
1360                 bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
1361                 currentReqSucceeds = false;
1362                 break;
1363             }
1364             numPushedInflightBuffers++;
1365         }
1366         if (currentReqSucceeds) {
1367             bufRet.val.buffers(std::move(tmpRetBuffers));
1368             oneReqSucceeds = true;
1369         } else {
1370             allReqsSucceeds = false;
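            // Unwind: drop the in-flight records created above and return every
            // buffer acquired for this request to its stream with an error status.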
1371             for (size_t b = 0; b < numPushedInflightBuffers; b++) {
1372                 StreamBuffer& hBuf = tmpRetBuffers[b];
1373                 buffer_handle_t* buffer;
1374                 status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
1375                         hBuf.bufferId, &buffer);
1376                 if (res != OK) {
1377                     SET_ERR("popInflightRequestBuffer failed for stream %d: %s (%d)",
1378                             streamId, strerror(-res), res);
1379                 }
1380             }
1381             for (size_t b = 0; b < numAllocatedBuffers; b++) {
1382                 camera_stream_buffer_t& sb = streamBuffers[b];
1383                 sb.acquire_fence = -1;
1384                 sb.status = CAMERA_BUFFER_STATUS_ERROR;
1385             }
1386             returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
1387                     streamBuffers.data(), numAllocatedBuffers, /*timestamp*/0, /*requested*/false,
1388                     /*requestTimeNs*/0, states.sessionStatsBuilder);
1389         }
1390     }
1391 
1392     _hidl_cb(allReqsSucceeds ? BufferRequestStatus::OK :
1393             oneReqSucceeds ? BufferRequestStatus::FAILED_PARTIAL :
1394                              BufferRequestStatus::FAILED_UNKNOWN,
1395             bufRets);
1396     states.reqBufferIntf.endRequestBuffer();
1397 }
1398 
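// Accept buffers the HAL hands back through returnStreamBuffers (HAL buffer manager
// mode): pop the matching in-flight record and return each buffer to its stream with
// an error status.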
1399 void returnStreamBuffers(ReturnBufferStates& states,
1400         const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
1401     if (!states.useHalBufManager) {
1402         ALOGE("%s: Camera %s does not support HAL buffer management",
1403                 __FUNCTION__, states.cameraId.string());
1404         return;
1405     }
1406 
1407     for (const auto& buf : buffers) {
1408         if (buf.bufferId == BUFFER_ID_NO_BUFFER) {
1409             ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
1410             continue;
1411         }
1412 
1413         buffer_handle_t* buffer;
1414         status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(buf.bufferId, &buffer);
1415 
1416         if (res != OK) {
1417             ALOGE("%s: cannot find in-flight buffer %" PRIu64 " for stream %d",
1418                     __FUNCTION__, buf.bufferId, buf.streamId);
1419             continue;
1420         }
1421 
1422         camera_stream_buffer_t streamBuffer;
1423         streamBuffer.buffer = buffer;
1424         streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
1425         streamBuffer.acquire_fence = -1;
1426         streamBuffer.release_fence = -1;
1427 
1428         if (buf.releaseFence == nullptr) {
1429             streamBuffer.release_fence = -1;
1430         } else if (buf.releaseFence->numFds == 1) {
1431             streamBuffer.release_fence = dup(buf.releaseFence->data[0]);
1432         } else {
1433             ALOGE("%s: Invalid release fence, fd count is %d, not 1",
1434                     __FUNCTION__, buf.releaseFence->numFds);
1435             continue;
1436         }
1437 
1438         sp<Camera3StreamInterface> stream = states.outputStreams.get(buf.streamId);
1439         if (stream == nullptr) {
1440             ALOGE("%s: Output stream id %d not found!", __FUNCTION__, buf.streamId);
1441             continue;
1442         }
1443         streamBuffer.stream = stream->asHalStream();
1444         returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
1445                 &streamBuffer, /*size*/1, /*timestamp*/ 0, /*requested*/false,
1446                 /*requestTimeNs*/0, states.sessionStatsBuilder);
1447     }
1448 }
1449 
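// Flush everything that is still outstanding: first return the buffers cached in the
// in-flight map, then return any buffers the HAL still holds (tracked either per
// frame or by the HAL buffer manager), all marked with an error status.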
1450 void flushInflightRequests(FlushInflightReqStates& states) {
1451     ATRACE_CALL();
1452     { // First return buffers cached in inFlightMap
1453         std::lock_guard<std::mutex> l(states.inflightLock);
1454         for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
1455             const InFlightRequest &request = states.inflightMap.valueAt(idx);
1456             returnOutputBuffers(
1457                 states.useHalBufManager, states.listener,
1458                 request.pendingOutputBuffers.array(),
1459                 request.pendingOutputBuffers.size(), /*timestamp*/0, /*requested*/true,
1460                 request.requestTimeNs, states.sessionStatsBuilder, /*timestampIncreasing*/true,
1461                 request.outputSurfaces, request.resultExtras, request.errorBufStrategy);
1462             ALOGW("%s: Frame %d | Timestamp: %" PRId64 ", metadata"
1463                     " arrived: %s, buffers left: %d.", __FUNCTION__,
1464                     states.inflightMap.keyAt(idx), request.shutterTimestamp,
1465                     request.haveResultMetadata ? "true" : "false",
1466                     request.numBuffersLeft);
1467         }
1468 
1469         states.inflightMap.clear();
1470         states.inflightIntf.onInflightMapFlushedLocked();
1471     }
1472 
1473     // Then return all inflight buffers not returned by HAL
1474     std::vector<std::pair<int32_t, int32_t>> inflightKeys;
1475     states.flushBufferIntf.getInflightBufferKeys(&inflightKeys);
1476 
1477     // Inflight buffers for HAL buffer manager
1478     std::vector<uint64_t> inflightRequestBufferKeys;
1479     states.flushBufferIntf.getInflightRequestBufferKeys(&inflightRequestBufferKeys);
1480 
1481     // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
1482     // frameNumber will be -1 for buffers from HAL buffer manager
1483     std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
1484     inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());
1485 
1486     for (auto& pair : inflightKeys) {
1487         int32_t frameNumber = pair.first;
1488         int32_t streamId = pair.second;
1489         buffer_handle_t* buffer;
1490         status_t res = states.bufferRecordsIntf.popInflightBuffer(frameNumber, streamId, &buffer);
1491         if (res != OK) {
1492             ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
1493                     __FUNCTION__, frameNumber, streamId);
1494             continue;
1495         }
1496         inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
1497     }
1498 
1499     for (auto& bufferId : inflightRequestBufferKeys) {
1500         int32_t streamId = -1;
1501         buffer_handle_t* buffer = nullptr;
1502         status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
1503                 bufferId, &buffer, &streamId);
1504         if (res != OK) {
1505             ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
1506             continue;
1507         }
1508         inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
1509     }
1510 
1511     std::vector<sp<Camera3StreamInterface>> streams = states.flushBufferIntf.getAllStreams();
1512 
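    // Hand each outstanding buffer back to its owning stream with an error status.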
1513     for (auto& tuple : inflightBuffers) {
1514         status_t res = OK;
1515         int32_t streamId = std::get<0>(tuple);
1516         int32_t frameNumber = std::get<1>(tuple);
1517         buffer_handle_t* buffer = std::get<2>(tuple);
1518 
1519         camera_stream_buffer_t streamBuffer;
1520         streamBuffer.buffer = buffer;
1521         streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
1522         streamBuffer.acquire_fence = -1;
1523         streamBuffer.release_fence = -1;
1524 
1525         for (auto& stream : streams) {
1526             if (streamId == stream->getId()) {
1527                 // Return buffer to deleted stream
1528                 camera_stream* halStream = stream->asHalStream();
1529                 streamBuffer.stream = halStream;
1530                 switch (halStream->stream_type) {
1531                     case CAMERA_STREAM_OUTPUT:
1532                         res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
1533                                 /*timestampIncreasing*/true,
1534                                 std::vector<size_t> (), frameNumber);
1535                         if (res != OK) {
1536                             ALOGE("%s: Can't return output buffer for frame %d to"
1537                                   " stream %d: %s (%d)",  __FUNCTION__,
1538                                   frameNumber, streamId, strerror(-res), res);
1539                         }
1540                         break;
1541                     case CAMERA_STREAM_INPUT:
1542                         res = stream->returnInputBuffer(streamBuffer);
1543                         if (res != OK) {
1544                             ALOGE("%s: Can't return input buffer for frame %d to"
1545                                   " stream %d: %s (%d)",  __FUNCTION__,
1546                                   frameNumber, streamId, strerror(-res), res);
1547                         }
1548                         break;
1549                     default: // Bi-directional stream is deprecated
1550                         ALOGE("%s: stream %d has unknown stream type %d",
1551                                 __FUNCTION__, streamId, halStream->stream_type);
1552                         break;
1553                 }
1554                 break;
1555             }
1556         }
1557     }
1558 }
1559 
1560 } // namespace camera3
1561 } // namespace android
1562