• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2019 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "Camera3-HeicCompositeStream"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 #define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
20 //#define LOG_NDEBUG 0
21 
22 #include <linux/memfd.h>
23 #include <pthread.h>
24 #include <sys/syscall.h>
25 
26 #include <aidl/android/hardware/camera/device/CameraBlob.h>
27 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
28 #include <camera/StringUtils.h>
29 #include <com_android_graphics_libgui_flags.h>
30 #include <com_android_internal_camera_flags.h>
31 #include <gui/Surface.h>
32 #include <libyuv.h>
33 #include <utils/Log.h>
34 #include <utils/Trace.h>
35 #include <ultrahdr/jpegr.h>
36 #include <ultrahdr/ultrahdrcommon.h>
37 
38 #include <media/MediaCodecBuffer.h>
39 #include <media/stagefright/MediaCodecConstants.h>
40 #include <media/stagefright/MetaData.h>
41 #include <media/stagefright/foundation/ABuffer.h>
42 #include <media/stagefright/foundation/MediaDefs.h>
43 #include <mediadrm/ICrypto.h>
44 #include <memory>
45 
46 #include "HeicCompositeStream.h"
47 #include "HeicEncoderInfoManager.h"
48 #include "common/CameraDeviceBase.h"
49 #include "system/camera_metadata.h"
50 #include "utils/ExifUtils.h"
51 #include "utils/SessionConfigurationUtils.h"
52 #include "utils/Utils.h"
53 
54 using aidl::android::hardware::camera::device::CameraBlob;
55 using aidl::android::hardware::camera::device::CameraBlobId;
56 
57 namespace flags = com::android::internal::camera::flags;
58 
59 namespace android {
60 namespace camera3 {
61 
// Composite stream that produces a HEIC (or Ultra HDR HEIC) capture out of two
// internal camera streams: an optional JPEG APP-segment stream (EXIF etc.) and
// a main image stream encoded by a HEIC/HEVC codec. All counters, stream ids
// and grid parameters start in their "not configured" state here; the real
// configuration happens in createInternalStreams()/configureStream().
HeicCompositeStream::HeicCompositeStream(sp<CameraDeviceBase> device,
                                         wp<hardware::camera2::ICameraDeviceCallbacks> cb)
    : CompositeStream(device, cb),
      mUseHeic(false),
      mNumOutputTiles(1),
      mNumGainmapOutputTiles(1),
      mOutputWidth(0),
      mOutputHeight(0),
      mGainmapOutputWidth(0),
      mGainmapOutputHeight(0),
      mMaxHeicBufferSize(0),
      // Grid dimensions default to the encoder manager's standard tile size
      // until the codec reports the actual tiling geometry.
      mGridWidth(HeicEncoderInfoManager::kGridWidth),
      mGridHeight(HeicEncoderInfoManager::kGridHeight),
      mGainmapGridWidth(HeicEncoderInfoManager::kGridWidth),
      mGainmapGridHeight(HeicEncoderInfoManager::kGridHeight),
      mGridRows(1),
      mGridCols(1),
      mGainmapGridRows(1),
      mGainmapGridCols(1),
      mUseGrid(false),
      mGainmapUseGrid(false),
      // -1 marks "stream/surface not created yet" throughout this class.
      mAppSegmentStreamId(-1),
      mAppSegmentSurfaceId(-1),
      mMainImageStreamId(-1),
      mMainImageSurfaceId(-1),
      mYuvBufferAcquired(false),
      mStreamSurfaceListener(new StreamSurfaceListener()),
      mDequeuedOutputBufferCnt(0),
      mCodecOutputCounter(0),
      mCodecGainmapOutputCounter(0),
      mQuality(-1),
      mGridTimestampUs(0),
      mStatusId(StatusTracker::NO_STATUS_ID) {
    mStaticInfo = device->info();
    camera_metadata_entry halHeicSupport = mStaticInfo.find(ANDROID_HEIC_INFO_SUPPORTED);
    if (halHeicSupport.count == 1 &&
            halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_TRUE) {
        // The camera device supports the HEIC stream combination,
        // use the standard stream combination.
        mAppSegmentSupported = true;
    }
}
104 
HeicCompositeStream::~HeicCompositeStream() {
    // Call deinitCodec in case stream hasn't been deleted yet to avoid any
    // memory/resource leak.
    deinitCodec();

    // Drop any queued input timestamps / codec output descriptors.
    mInputAppSegmentBuffers.clear();
    mCodecOutputBuffers.clear();
    mGainmapCodecOutputBuffers.clear();

    // Reset the app segment stream state and release its consumer/surface.
    mAppSegmentStreamId = -1;
    mAppSegmentSurfaceId = -1;
    mAppSegmentConsumer.clear();
    mAppSegmentSurface.clear();

    // Reset the main image stream state and release its consumer/surface.
    mMainImageStreamId = -1;
    mMainImageSurfaceId = -1;
    mMainImageConsumer.clear();
    mMainImageSurface.clear();
}
124 
isHeicCompositeStreamInfo(const OutputStreamInfo & streamInfo,bool isCompositeHeicDisabled,bool isCompositeHeicUltraHDRDisabled)125 bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo,
126                                                     bool isCompositeHeicDisabled,
127                                                     bool isCompositeHeicUltraHDRDisabled) {
128     return (((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF) &&
129               !isCompositeHeicDisabled) ||
130              (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace) &&
131               !isCompositeHeicUltraHDRDisabled)) &&
132             (streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
133 }
134 
// Returns whether the given output Surface is configured as a HEIC composite
// output by querying its native-window format and default dataspace: BLOB
// format combined with the HEIF dataspace (unless regular HEIC is disabled) or
// the Ultra HDR dataspace (unless Ultra HDR HEIC is disabled). Query failures
// are logged and treated as "not a HEIC stream".
bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface>& surface,
                                                bool isCompositeHeicDisabled,
                                                bool isCompositeHeicUltraHDRDisabled) {
    ANativeWindow* anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        std::string msg = fmt::sprintf("Failed to query Surface format: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        std::string msg = fmt::sprintf("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                err);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return false;
    }

    return ((format == HAL_PIXEL_FORMAT_BLOB) &&
            ((dataspace == HAL_DATASPACE_HEIF && !isCompositeHeicDisabled) ||
             (dataspace == static_cast<int>(kUltraHDRDataSpace) &&
              !isCompositeHeicUltraHDRDisabled)));
}
161 
// Creates the internal camera streams backing the composite HEIC output:
//  1. Detects Ultra HDR output (gainmap) from the consumer's dataspace.
//  2. Initializes the HEIC/HEVC codec(s) for the requested size.
//  3. Optionally creates a CPU-consumer JPEG APP-segment stream (when the HAL
//     advertises HEIC support).
//  4. Creates the main image stream, either fed directly by the codec's input
//     surface or via a CPU consumer when framework tiling / gainmap encoding
//     requires YUV access.
// Registers this object as a listener on the created streams and returns the
// main image stream id via |id|. Returns OK on success, or the first error.
status_t HeicCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {

    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    // An Ultra HDR dataspace on the consumer switches this stream into gainmap
    // mode; the internal main image stream then carries BT.2020 HLG content.
    ANativeWindow* anw = consumers[0].mSurface.get();
    int dataspace;
    status_t res;
    if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }
    if ((dataspace == static_cast<int>(kUltraHDRDataSpace)) && flags::camera_heif_gainmap()) {
        mHDRGainmapEnabled = true;
        mInternalDataSpace = static_cast<android_dataspace_t>(HAL_DATASPACE_BT2020_HLG);
    }

    res = initializeCodec(width, height, device);
    if (res != OK) {
        ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // The APP segment stream delivers JPEG APP segments (e.g. EXIF) through a
    // CPU consumer; only created when the HAL supports the HEIC combination.
    if (mAppSegmentSupported) {
        std::tie(mAppSegmentConsumer, mAppSegmentSurface) =
                CpuConsumer::create(kMaxAcquiredAppSegment);
        mAppSegmentConsumer->setFrameAvailableListener(this);
        mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
    }
    sp<IGraphicBufferProducer> producer = mAppSegmentSurface.get() != nullptr
                                                  ? mAppSegmentSurface->getIGraphicBufferProducer()
                                                  : nullptr;

    if (mAppSegmentSupported) {
        std::vector<int> sourceSurfaceId;
        res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
                kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
                sensorPixelModesUsed, &sourceSurfaceId, camera3::CAMERA3_STREAM_SET_ID_INVALID,
                /*isShared*/false, /*isMultiResolution*/false,
                /*consumerUsage*/0, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
                ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
                OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
                OutputConfiguration::MIRROR_MODE_AUTO,
                colorSpace,
                useReadoutTimestamp);
        if (res == OK) {
            mAppSegmentSurfaceId = sourceSurfaceId[0];
        } else {
            ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Without framework tiling or gainmap encoding, the camera can write
    // directly into the codec's input surface. Otherwise route the main image
    // through a CPU consumer so the framework can tile/process the YUV data.
    if (!mUseGrid && !mHDRGainmapEnabled) {
        res = mCodec->createInputSurface(&producer);
        if (res != OK) {
            ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    } else {
        sp<Surface> surface;
        std::tie(mMainImageConsumer, surface) = CpuConsumer::create(1);
        producer = surface->getIGraphicBufferProducer();
        mMainImageConsumer->setFrameAvailableListener(this);
        mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
    }
    mMainImageSurface = new Surface(producer);

    res = mCodec->start();
    if (res != OK) {
        ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    if (mHDRGainmapEnabled) {
        res = mGainmapCodec->start();
        if (res != OK) {
            ALOGE("%s: Failed to start gainmap codec: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    //Use YUV_420 format if framework tiling is needed.
    int srcStreamFmt = mHDRGainmapEnabled ?
        static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010) : mUseGrid ?
        HAL_PIXEL_FORMAT_YCbCr_420_888 : HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, mInternalDataSpace,
            rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
            camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/false, /*isMultiResolution*/false,
            /*consumerUsage*/0, mHDRGainmapEnabled ?
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10 :
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            OutputConfiguration::MIRROR_MODE_AUTO,
            colorSpace,
            useReadoutTimestamp);
    if (res == OK) {
        mMainImageSurfaceId = (*surfaceIds)[0];
        mMainImageStreamId = *id;
    } else {
        ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Listen for buffer release / error notifications on both HAL streams.
    mOutputSurface = consumers[0].mSurface;
    res = registerCompositeStreamListener(mMainImageStreamId);
    if (res != OK) {
        ALOGE("%s: Failed to register HAL main image stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    if (mAppSegmentSupported) {
        res = registerCompositeStreamListener(mAppSegmentStreamId);
        if (res != OK) {
            ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    initCopyRowFunction(width);
    return res;
}
303 
// Tears down the composite stream: stops the processing thread, releases the
// codec(s), deletes the internal APP segment HAL stream (the main image stream
// is deleted by the caller via the returned stream id), disconnects the output
// surface, unregisters from the status tracker, and drops stale pending
// frames. Returns the last error encountered (best-effort cleanup continues
// past failures).
status_t HeicCompositeStream::deleteInternalStreams() {
    requestExit();
    auto res = join();
    if (res != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
    }

    deinitCodec();

    if (mAppSegmentStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            res = device->deleteStream(mAppSegmentStreamId);
        }

        mAppSegmentStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }

    sp<StatusTracker> statusTracker = mStatusTracker.promote();
    if (statusTracker != nullptr && mStatusId != StatusTracker::NO_STATUS_ID) {
        statusTracker->removeComponent(mStatusId);
        mStatusId = StatusTracker::NO_STATUS_ID;
    }

    if (mPendingInputFrames.size() > 0) {
        ALOGW("%s: mPendingInputFrames has %zu stale entries",
                __FUNCTION__, mPendingInputFrames.size());
        mPendingInputFrames.clear();
    }

    return res;
}
345 
onBufferReleased(const BufferInfo & bufferInfo)346 void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
347     Mutex::Autolock l(mMutex);
348 
349     if (bufferInfo.mError) return;
350 
351     if (bufferInfo.mStreamId == mMainImageStreamId) {
352         mMainImageFrameNumbers.push(bufferInfo.mFrameNumber);
353         mCodecOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
354         ALOGV("%s: [%" PRId64 "]: Adding main image frame number (%zu frame numbers in total)",
355                 __FUNCTION__, bufferInfo.mFrameNumber, mMainImageFrameNumbers.size());
356         if (mHDRGainmapEnabled) {
357             mCodecGainmapOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
358         }
359     } else if (bufferInfo.mStreamId == mAppSegmentStreamId) {
360         mAppSegmentFrameNumbers.push(bufferInfo.mFrameNumber);
361         ALOGV("%s: [%" PRId64 "]: Adding app segment frame number (%zu frame numbers in total)",
362                 __FUNCTION__, bufferInfo.mFrameNumber, mAppSegmentFrameNumbers.size());
363     }
364 }
365 
366 // We need to get the settings early to handle the case where the codec output
367 // arrives earlier than result metadata.
onBufferRequestForFrameNumber(uint64_t frameNumber,int streamId,const CameraMetadata & settings)368 void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
369         const CameraMetadata& settings) {
370     ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);
371 
372     Mutex::Autolock l(mMutex);
373     if (mErrorState || (streamId != getStreamId())) {
374         return;
375     }
376 
377     mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
378 
379     camera_metadata_ro_entry entry;
380 
381     int32_t orientation = 0;
382     entry = settings.find(ANDROID_JPEG_ORIENTATION);
383     if (entry.count == 1) {
384         orientation = entry.data.i32[0];
385     }
386 
387     int32_t quality = kDefaultJpegQuality;
388     entry = settings.find(ANDROID_JPEG_QUALITY);
389     if (entry.count == 1) {
390         quality = entry.data.i32[0];
391     }
392 
393     mSettingsByFrameNumber[frameNumber] = {orientation, quality};
394 }
395 
// CpuConsumer callback for both internal streams. The buffer's dataspace
// identifies which stream it came from: JPEG_APP_SEGMENTS buffers are queued
// for EXIF/APP-segment processing, buffers matching the internal dataspace are
// queued as YUV input for framework tiling / gainmap encoding. The processing
// thread is woken in either case.
void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
    if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
        ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputAppSegmentBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else if (item.mDataSpace == mInternalDataSpace) {
        ALOGV("%s: YUV_420 buffer with ts: %" PRIu64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        // YUV buffers are only expected when the framework tiles the image or
        // encodes a gainmap; otherwise the codec consumes the input directly.
        if (!mUseGrid && !mHDRGainmapEnabled) {
            ALOGE("%s: YUV_420 internal stream is only supported for HEVC tiling",
                    __FUNCTION__);
            return;
        }
        if (!mErrorState) {
            mInputYuvBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else {
        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
    }
}
424 
getCompositeStreamInfo(const OutputStreamInfo & streamInfo,const CameraMetadata & ch,std::vector<OutputStreamInfo> * compositeOutput)425 status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
426             const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
427     bool gainmapEnabled = false;
428     if (compositeOutput == nullptr) {
429         return BAD_VALUE;
430     }
431 
432     compositeOutput->clear();
433 
434     bool useGrid, useHeic;
435     bool isSizeSupported = isSizeSupportedByHeifEncoder(
436             streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
437     if (!isSizeSupported) {
438         // Size is not supported by either encoder.
439         return OK;
440     }
441 
442     if (streamInfo.dataSpace == static_cast<android_dataspace_t>(kUltraHDRDataSpace)) {
443         gainmapEnabled = true;
444     }
445 
446     compositeOutput->clear();
447     compositeOutput->push_back({});
448 
449     // YUV/IMPLEMENTATION_DEFINED stream info
450     (*compositeOutput)[0].width = streamInfo.width;
451     (*compositeOutput)[0].height = streamInfo.height;
452     (*compositeOutput)[0].format = gainmapEnabled ?
453         static_cast<android_pixel_format_t>(HAL_PIXEL_FORMAT_YCBCR_P010) : useGrid ?
454         HAL_PIXEL_FORMAT_YCbCr_420_888 : HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
455     (*compositeOutput)[0].dataSpace = gainmapEnabled ?
456         static_cast<android_dataspace_t>(HAL_DATASPACE_BT2020_HLG) : kHeifDataSpace;
457     (*compositeOutput)[0].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
458             useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
459 
460 
461     camera_metadata_ro_entry halHeicSupport = ch.find(ANDROID_HEIC_INFO_SUPPORTED);
462     if (halHeicSupport.count == 1 &&
463             halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_TRUE) {
464 
465         compositeOutput->push_back({});
466         // JPEG APPS segments Blob stream info
467         (*compositeOutput)[1].width = calcAppSegmentMaxSize(ch);
468         (*compositeOutput)[1].height = 1;
469         (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
470         (*compositeOutput)[1].dataSpace = kAppSegmentDataSpace;
471         (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
472     }
473 
474     return NO_ERROR;
475 }
476 
// Queries the HEIC encoder info manager for whether width x height can be
// encoded, and how (HEIC vs. HEVC-with-grid). Outputs are written through the
// provided pointers; hevcName/stall may be null if the caller doesn't need
// them.
// NOTE(review): the manager is a function-local static, so it is constructed
// exactly once with the allowSWCodec value of the FIRST call; later calls with
// a different allowSWCodec do not re-create it — confirm this is intended.
bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
        bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName, bool allowSWCodec) {
    static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance(allowSWCodec);
    return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
}
482 
isInMemoryTempFileSupported()483 bool HeicCompositeStream::isInMemoryTempFileSupported() {
484     int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
485     if (memfd == -1) {
486         if (errno != ENOSYS) {
487             ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
488         }
489         return false;
490     }
491     close(memfd);
492     return true;
493 }
494 
onHeicOutputFrameAvailable(const CodecOutputBufferInfo & outputBufferInfo,bool isGainmap)495 void HeicCompositeStream::onHeicOutputFrameAvailable(
496         const CodecOutputBufferInfo& outputBufferInfo, bool isGainmap) {
497     Mutex::Autolock l(mMutex);
498 
499     ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
500             __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
501             outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);
502 
503     if (!mErrorState) {
504         if ((outputBufferInfo.size > 0) &&
505                 ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
506             isGainmap ? mGainmapCodecOutputBuffers.push_back(outputBufferInfo) :
507                 mCodecOutputBuffers.push_back(outputBufferInfo);
508             mInputReadyCondition.signal();
509         } else {
510             ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
511                 outputBufferInfo.size, outputBufferInfo.flags);
512             isGainmap ? mGainmapCodec->releaseOutputBuffer(outputBufferInfo.index) :
513                 mCodec->releaseOutputBuffer(outputBufferInfo.index);
514         }
515     } else {
516         isGainmap ? mGainmapCodec->releaseOutputBuffer(outputBufferInfo.index) :
517             mCodec->releaseOutputBuffer(outputBufferInfo.index);
518     }
519 }
520 
onHeicInputFrameAvailable(int32_t index,bool isGainmap)521 void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index, bool isGainmap) {
522     Mutex::Autolock l(mMutex);
523 
524     if (!mUseGrid && !mHDRGainmapEnabled) {
525         ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
526         return;
527     }
528 
529     isGainmap ? mGainmapCodecInputBuffers.push_back(index) : mCodecInputBuffers.push_back(index);
530     mInputReadyCondition.signal();
531 }
532 
onHeicGainmapFormatChanged(sp<AMessage> & newFormat)533 void HeicCompositeStream::onHeicGainmapFormatChanged(sp<AMessage>& newFormat) {
534     if (newFormat == nullptr) {
535         ALOGE("%s: newFormat must not be null!", __FUNCTION__);
536         return;
537     }
538 
539     Mutex::Autolock l(mMutex);
540 
541     AString mime;
542     AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
543     newFormat->findString(KEY_MIME, &mime);
544     if (mime != mimeHeic) {
545         // For HEVC codec, below keys need to be filled out or overwritten so that the
546         // muxer can handle them as HEIC output image.
547         newFormat->setString(KEY_MIME, mimeHeic);
548         newFormat->setInt32(KEY_WIDTH, mGainmapOutputWidth);
549         newFormat->setInt32(KEY_HEIGHT, mGainmapOutputHeight);
550     }
551 
552     if (mGainmapUseGrid) {
553         int32_t gridRows, gridCols, tileWidth, tileHeight;
554         if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
555                 newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols) &&
556                 newFormat->findInt32(KEY_TILE_WIDTH, &tileWidth) &&
557                 newFormat->findInt32(KEY_TILE_HEIGHT, &tileHeight)) {
558             mGainmapGridWidth = tileWidth;
559             mGainmapGridHeight = tileHeight;
560             mGainmapGridRows = gridRows;
561             mGainmapGridCols = gridCols;
562         } else {
563             newFormat->setInt32(KEY_TILE_WIDTH, mGainmapGridWidth);
564             newFormat->setInt32(KEY_TILE_HEIGHT, mGainmapGridHeight);
565             newFormat->setInt32(KEY_GRID_ROWS, mGainmapGridRows);
566             newFormat->setInt32(KEY_GRID_COLUMNS, mGainmapGridCols);
567         }
568         int32_t left, top, right, bottom;
569         if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
570             newFormat->setRect("crop", 0, 0, mGainmapOutputWidth - 1, mGainmapOutputHeight - 1);
571         }
572     }
573     newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
574 
575     int32_t gridRows, gridCols;
576     if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
577             newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
578         mNumGainmapOutputTiles = gridRows * gridCols;
579     } else {
580         mNumGainmapOutputTiles = 1;
581     }
582 
583     mGainmapFormat = newFormat;
584 
585     ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
586     mInputReadyCondition.signal();
587 }
588 
589 
// Handles an output-format change from the main image codec (or dispatches to
// the gainmap variant). Normalizes the format so the muxer treats the track as
// a HEIC image: forces the HEIC mime type and output dimensions when an HEVC
// codec reports its native mime, records or fills in grid/tile geometry,
// resets the crop rectangle, derives the expected tile count, and caches the
// final format before waking the processing thread.
void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat, bool isGainmap) {
    if (newFormat == nullptr) {
        ALOGE("%s: newFormat must not be null!", __FUNCTION__);
        return;
    }

    if (isGainmap) {
        return onHeicGainmapFormatChanged(newFormat);
    }
    Mutex::Autolock l(mMutex);

    AString mime;
    AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
    newFormat->findString(KEY_MIME, &mime);
    if (mime != mimeHeic) {
        // For HEVC codec, below keys need to be filled out or overwritten so that the
        // muxer can handle them as HEIC output image.
        newFormat->setString(KEY_MIME, mimeHeic);
        newFormat->setInt32(KEY_WIDTH, mOutputWidth);
        newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
    }

    if (mUseGrid || mUseHeic) {
        int32_t gridRows, gridCols, tileWidth, tileHeight;
        if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
                newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols) &&
                newFormat->findInt32(KEY_TILE_WIDTH, &tileWidth) &&
                newFormat->findInt32(KEY_TILE_HEIGHT, &tileHeight)) {
            // Codec reported its own tiling geometry — adopt it.
            mGridWidth = tileWidth;
            mGridHeight = tileHeight;
            mGridRows = gridRows;
            mGridCols = gridCols;
        } else {
            // Codec didn't report tiling — inject the framework's geometry.
            newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
            newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
            newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
            newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
        }
        int32_t left, top, right, bottom;
        if (newFormat->findRect("crop", &left, &top, &right, &bottom)) {
            newFormat->setRect("crop", 0, 0, mOutputWidth - 1, mOutputHeight - 1);
        }
    }
    newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);

    int32_t gridRows, gridCols;
    if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
            newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
        mNumOutputTiles = gridRows * gridCols;
    } else {
        mNumOutputTiles = 1;
    }

    mFormat = newFormat;

    ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
    mInputReadyCondition.signal();
}
648 
// Codec error callback: latch the error state so pending and future work is
// dropped/released instead of processed.
void HeicCompositeStream::onHeicCodecError() {
    Mutex::Autolock l(mMutex);
    mErrorState = true;
}
653 
// Configures the consumer-facing output surface (BLOB format, buffer count,
// and a mMaxHeicBufferSize x 1 dimension so one buffer can hold a full HEIC
// file), registers with the status tracker, and starts the processing thread.
// Idempotent: returns immediately if the thread is already running.
status_t HeicCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mMainImageStreamId);
        return res;
    }

    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mMainImageStreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
    // buffer count.
    if ((res = native_window_set_buffer_count(
                    anwConsumer, kMaxOutputSurfaceProducerCount + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
        return res;
    }

    // The output is a BLOB: a single row of mMaxHeicBufferSize bytes holds the
    // entire encoded HEIC file.
    if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
        ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
                __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
        return res;
    }

    sp<camera3::StatusTracker> statusTracker = mStatusTracker.promote();
    if (statusTracker != nullptr) {
        std::string name = std::string("HeicStream ") + std::to_string(getStreamId());
        mStatusId = statusTracker->addComponent(name);
    }

    run("HeicCompositeStreamProc");

    return NO_ERROR;
}
712 
insertGbp(SurfaceMap * outSurfaceMap,Vector<int32_t> * outputStreamIds,int32_t * currentStreamId)713 status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
714         Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
715     if (mAppSegmentSupported) {
716         if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
717             outputStreamIds->push_back(mAppSegmentStreamId);
718         }
719         (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
720     }
721 
722     if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
723         outputStreamIds->push_back(mMainImageStreamId);
724     }
725     (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
726 
727     if (currentStreamId != nullptr) {
728         *currentStreamId = mMainImageStreamId;
729     }
730 
731     return NO_ERROR;
732 }
733 
insertCompositeStreamIds(std::vector<int32_t> * compositeStreamIds)734 status_t HeicCompositeStream::insertCompositeStreamIds(
735         std::vector<int32_t>* compositeStreamIds /*out*/) {
736     if (compositeStreamIds == nullptr) {
737         return BAD_VALUE;
738     }
739 
740     if (mAppSegmentSupported) {
741         compositeStreamIds->push_back(mAppSegmentStreamId);
742     }
743     compositeStreamIds->push_back(mMainImageStreamId);
744 
745     return OK;
746 }
747 
onShutter(const CaptureResultExtras & resultExtras,nsecs_t timestamp)748 void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
749     Mutex::Autolock l(mMutex);
750     if (mErrorState) {
751         return;
752     }
753 
754     if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
755         ALOGV("%s: [%" PRId64 "]: timestamp %" PRId64 ", requestId %d", __FUNCTION__,
756                 resultExtras.frameNumber, timestamp, resultExtras.requestId);
757         mSettingsByFrameNumber[resultExtras.frameNumber].shutterNotified = true;
758         mSettingsByFrameNumber[resultExtras.frameNumber].timestamp = timestamp;
759         mSettingsByFrameNumber[resultExtras.frameNumber].requestId = resultExtras.requestId;
760         mInputReadyCondition.signal();
761     }
762 }
763 
// Pairs up all queued inputs with their pending frames. Runs in stages:
//   1. Move shutter-notified settings into mPendingInputFrames.
//   2. Lock and attach queued APP segment buffers.
//   3. Lock and attach queued main-image YUV buffers (at most one at a time).
//   4. Attach encoded main/gainmap codec output buffers by FIFO frame number.
//   5. Attach capture results (needed for Exif generation).
//   6. Propagate buffer-error and Exif-error notifications.
//   7. Hand free codec input buffers to the oldest frame still needing tiles.
// Must be called with mMutex held.
void HeicCompositeStream::compilePendingInputLocked() {
    // Stage 1: promote settings whose shutter has fired into pending frames.
    auto i = mSettingsByFrameNumber.begin();
    while (i != mSettingsByFrameNumber.end()) {
        if (i->second.shutterNotified) {
            mPendingInputFrames[i->first].orientation = i->second.orientation;
            mPendingInputFrames[i->first].quality = i->second.quality;
            mPendingInputFrames[i->first].timestamp = i->second.timestamp;
            mPendingInputFrames[i->first].requestId = i->second.requestId;
            ALOGV("%s: [%" PRId64 "]: timestamp is %" PRId64, __FUNCTION__,
                    i->first, i->second.timestamp);
            i = mSettingsByFrameNumber.erase(i);

            // Set encoder quality if no inflight encoding
            if (mPendingInputFrames.size() == 1) {
                sp<StatusTracker> statusTracker = mStatusTracker.promote();
                if (statusTracker != nullptr) {
                    statusTracker->markComponentActive(mStatusId);
                    ALOGV("%s: Mark component as active", __FUNCTION__);
                }

                int32_t newQuality = mPendingInputFrames.begin()->second.quality;
                updateCodecQualityLocked(newQuality);
            }
        } else {
            i++;
        }
    }

    // Stage 2: drain queued APP segment buffer timestamps, locking the
    // matching consumer buffer for each and attaching it to its frame.
    while (!mInputAppSegmentBuffers.empty() && mAppSegmentFrameNumbers.size() > 0) {
        CpuConsumer::LockedBuffer imgBuffer;
        auto it = mInputAppSegmentBuffers.begin();
        auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
            // Either the lock failed or the locked buffer is not the one we
            // expected; mark the expected frame as failed.
            if (res != OK) {
                ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
            } else {
                ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
                        " received buffer with time stamp: %" PRId64, __FUNCTION__,
                        *it, imgBuffer.timestamp);
                mAppSegmentConsumer->unlockBuffer(imgBuffer);
            }
            mPendingInputFrames[*it].error = true;
            mInputAppSegmentBuffers.erase(it);
            continue;
        }

        // NOTE(review): on this path imgBuffer stays locked — verify whether
        // a locked consumer buffer can leak here (no unlockBuffer call).
        if (mPendingInputFrames.find(mAppSegmentFrameNumbers.front()) == mPendingInputFrames.end()) {
            ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
                    mAppSegmentFrameNumbers.front());
            mInputAppSegmentBuffers.erase(it);
            mAppSegmentFrameNumbers.pop();
            continue;
        }

        int64_t frameNumber = mAppSegmentFrameNumbers.front();
        // If mPendingInputFrames doesn't contain the expected frame number, the captured
        // input app segment frame must have been dropped via a buffer error.  Simply
        // return the buffer to the buffer queue.
        if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
                (mPendingInputFrames[frameNumber].error)) {
            mAppSegmentConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[frameNumber].appSegmentBuffer = imgBuffer;
        }
        mInputAppSegmentBuffers.erase(it);
        mAppSegmentFrameNumbers.pop();
    }

    // Stage 3: same pairing for main-image YUV buffers. Only one YUV buffer
    // is held at a time (mYuvBufferAcquired gates further locking).
    while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired && mMainImageFrameNumbers.size() > 0) {
        CpuConsumer::LockedBuffer imgBuffer;
        auto it = mInputYuvBuffers.begin();
        auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputYuvBuffers.erase(it);
            continue;
        } else if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
            mPendingInputFrames[*it].error = true;
            mInputYuvBuffers.erase(it);
            continue;
        }

        // NOTE(review): as in the APP segment loop above, imgBuffer is not
        // unlocked on this path — confirm this cannot leak a locked buffer.
        if (mPendingInputFrames.find(mMainImageFrameNumbers.front()) == mPendingInputFrames.end()) {
            ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
                    mMainImageFrameNumbers.front());
            mInputYuvBuffers.erase(it);
            mMainImageFrameNumbers.pop();
            continue;
        }

        int64_t frameNumber = mMainImageFrameNumbers.front();
        // If mPendingInputFrames doesn't contain the expected frame number, the captured
        // input main image must have been dropped via a buffer error. Simply
        // return the buffer to the buffer queue.
        if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
                (mPendingInputFrames[frameNumber].error)) {
            mMainImageConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[frameNumber].yuvBuffer = imgBuffer;
            mYuvBufferAcquired = true;
        }
        mInputYuvBuffers.erase(it);
        mMainImageFrameNumbers.pop();
    }

    // Stage 4a: route encoded main-image output buffers to their frames.
    while (!mCodecOutputBuffers.empty()) {
        auto it = mCodecOutputBuffers.begin();
        // Assume encoder input to output is FIFO, use a queue to look up
        // frameNumber when handling codec outputs.
        int64_t bufferFrameNumber = -1;
        if (mCodecOutputBufferFrameNumbers.empty()) {
            ALOGV("%s: Failed to find buffer frameNumber for codec output buffer!", __FUNCTION__);
            break;
        } else {
            // Direct mapping between camera frame number and codec timestamp (in us).
            bufferFrameNumber = mCodecOutputBufferFrameNumbers.front();
            mCodecOutputCounter++;
            // Once all tiles of a frame have been seen, advance to the next
            // queued frame number.
            if (mCodecOutputCounter == mNumOutputTiles) {
                mCodecOutputBufferFrameNumbers.pop();
                mCodecOutputCounter = 0;
            }

            mPendingInputFrames[bufferFrameNumber].codecOutputBuffers.push_back(*it);
            ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (frameNumber %" PRId64 ")",
                    __FUNCTION__, bufferFrameNumber, it->timeUs);
        }
        mCodecOutputBuffers.erase(it);
    }

    // Stage 4b: identical routing for the gainmap encoder's output buffers.
    while (!mGainmapCodecOutputBuffers.empty()) {
        auto it = mGainmapCodecOutputBuffers.begin();
        // Assume encoder input to output is FIFO, use a queue to look up
        // frameNumber when handling codec outputs.
        int64_t bufferFrameNumber = -1;
        if (mCodecGainmapOutputBufferFrameNumbers.empty()) {
            ALOGV("%s: Failed to find buffer frameNumber for gainmap codec output buffer!",
                    __FUNCTION__);
            break;
        } else {
            // Direct mapping between camera frame number and codec timestamp (in us).
            bufferFrameNumber = mCodecGainmapOutputBufferFrameNumbers.front();
            mCodecGainmapOutputCounter++;
            if (mCodecGainmapOutputCounter == mNumGainmapOutputTiles) {
                mCodecGainmapOutputBufferFrameNumbers.pop();
                mCodecGainmapOutputCounter = 0;
            }

            mPendingInputFrames[bufferFrameNumber].gainmapCodecOutputBuffers.push_back(*it);
            ALOGV("%s: [%" PRId64 "]: Pushing gainmap codecOutputBuffers (frameNumber %" PRId64 ")",
                    __FUNCTION__, bufferFrameNumber, it->timeUs);
        }
        mGainmapCodecOutputBuffers.erase(it);
    }

    // Stage 5: attach capture results (keyed by shutter timestamp) to their
    // frames; the result metadata is needed to generate Exif data.
    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative frame number indicates that something went wrong during the capture result
        // collection process.
        int64_t frameNumber = std::get<0>(it->second);
        if (it->first >= 0 &&
                mPendingInputFrames.find(frameNumber) != mPendingInputFrames.end()) {
            if (mPendingInputFrames[frameNumber].timestamp == it->first) {
                mPendingInputFrames[frameNumber].result =
                        std::make_unique<CameraMetadata>(std::get<1>(it->second));
                // Without an app segment stream there will never be a captured
                // APP segment buffer; flag exifError so an empty Exif block is
                // synthesized instead.
                if (!mAppSegmentSupported) {
                    mPendingInputFrames[frameNumber].exifError = true;
                }
            } else {
                ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
                        "shutter and capture result! before: %" PRId64 ", after: %" PRId64,
                        __FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
                        it->first);
            }
        }
        mCaptureResults.erase(it);
    }

    // mErrorFrameNumbers stores frame number of dropped buffers.
    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
            mPendingInputFrames[*it].error = true;
        } else {
            //Error callback is guaranteed to arrive after shutter notify, which
            //results in mPendingInputFrames being populated.
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
        }
        it = mErrorFrameNumbers.erase(it);
    }

    // mExifErrorFrameNumbers stores the frame number of dropped APP_SEGMENT buffers
    it = mExifErrorFrameNumbers.begin();
    while (it != mExifErrorFrameNumbers.end()) {
        if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
            mPendingInputFrames[*it].exifError = true;
        }
        it = mExifErrorFrameNumbers.erase(it);
    }

    // Distribute codec input buffers to be filled out from YUV output
    // (only the first frame still missing tiles is served; note the
    // unconditional break after one frame).
    for (auto it = mPendingInputFrames.begin();
            it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
        InputFrame& inputFrame(it->second);
        if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
            // Available input tiles that are required for the current input
            // image.
            size_t newInputTiles = std::min(mCodecInputBuffers.size(),
                    mGridRows * mGridCols - inputFrame.codecInputCounter);
            for (size_t i = 0; i < newInputTiles; i++) {
                CodecInputBufferInfo inputInfo =
                        { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
                inputFrame.codecInputBuffers.push_back(inputInfo);

                mCodecInputBuffers.erase(mCodecInputBuffers.begin());
                inputFrame.codecInputCounter++;
            }
            break;
        }
    }

    // Distribute codec input buffers to be filled out from YUV output
    // (same single-frame policy for the gainmap encoder's input buffers).
    for (auto it = mPendingInputFrames.begin();
            it != mPendingInputFrames.end() && mGainmapCodecInputBuffers.size() > 0; it++) {
        InputFrame& inputFrame(it->second);
        if (inputFrame.gainmapCodecInputCounter < mGainmapGridRows * mGainmapGridCols) {
            // Available input tiles that are required for the current input
            // image.
            size_t newInputTiles = std::min(mGainmapCodecInputBuffers.size(),
                    mGainmapGridRows * mGainmapGridCols - inputFrame.gainmapCodecInputCounter);
            for (size_t i = 0; i < newInputTiles; i++) {
                CodecInputBufferInfo inputInfo = { mGainmapCodecInputBuffers[0],
                    mGridTimestampUs++, inputFrame.gainmapCodecInputCounter };
                inputFrame.gainmapCodecInputBuffers.push_back(inputInfo);

                mGainmapCodecInputBuffers.erase(mGainmapCodecInputBuffers.begin());
                inputFrame.gainmapCodecInputCounter++;
            }
            break;
        }
    }
}
1017 
getNextReadyInputLocked(int64_t * frameNumber)1018 bool HeicCompositeStream::getNextReadyInputLocked(int64_t *frameNumber /*out*/) {
1019     if (frameNumber == nullptr) {
1020         return false;
1021     }
1022 
1023     bool newInputAvailable = false;
1024     for (auto& it : mPendingInputFrames) {
1025         // New input is considered to be available only if:
1026         // 1. input buffers are ready, or
1027         // 2. App segment and muxer is created, or
1028         // 3. A codec output tile is ready, and an output buffer is available.
1029         // This makes sure that muxer gets created only when an output tile is
1030         // generated, because right now we only handle 1 HEIC output buffer at a
1031         // time (max dequeued buffer count is 1).
1032         bool appSegmentReady =
1033                 (it.second.appSegmentBuffer.data != nullptr || it.second.exifError) &&
1034                 !it.second.appSegmentWritten && it.second.result != nullptr &&
1035                 it.second.muxer != nullptr;
1036         bool codecOutputReady = !it.second.codecOutputBuffers.empty() ||
1037                 !it.second.gainmapCodecOutputBuffers.empty();
1038         bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
1039                 (!it.second.codecInputBuffers.empty());
1040         bool hasOutputBuffer = it.second.muxer != nullptr ||
1041                 (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
1042         if ((!it.second.error) &&
1043                 (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
1044             *frameNumber = it.first;
1045             if (it.second.format == nullptr && mFormat != nullptr) {
1046                 it.second.format = mFormat->dup();
1047             }
1048             if (it.second.gainmapFormat == nullptr && mGainmapFormat != nullptr){
1049                 it.second.gainmapFormat = mGainmapFormat->dup();
1050                 it.second.gainmapFormat->setInt32("gainmap", 1);
1051             }
1052             newInputAvailable = true;
1053             break;
1054         }
1055     }
1056 
1057     return newInputAvailable;
1058 }
1059 
getNextFailingInputLocked()1060 int64_t HeicCompositeStream::getNextFailingInputLocked() {
1061     int64_t res = -1;
1062 
1063     for (const auto& it : mPendingInputFrames) {
1064         if (it.second.error) {
1065             res = it.first;
1066             break;
1067         }
1068     }
1069 
1070     return res;
1071 }
1072 
// Advances a single pending frame through the HEIC pipeline as far as its
// currently-available inputs allow: feeds YUV tiles (and gainmap tiles) to
// the encoder, lazily creates/starts the muxer, writes gainmap metadata,
// JPEG APP segments and encoded bitstream samples, and finalizes the frame
// once every output tile has been muxed. Called with mMutex held.
//
// Returns OK on success or the first failing sub-step's error code.
status_t HeicCompositeStream::processInputFrame(int64_t frameNumber,
        InputFrame &inputFrame) {
    ATRACE_CALL();
    status_t res = OK;

    // APP segments can only be written once the muxer exists and the capture
    // result (needed for Exif) has arrived.
    bool appSegmentReady =
            (inputFrame.appSegmentBuffer.data != nullptr || inputFrame.exifError) &&
            !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
            inputFrame.muxer != nullptr;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0 ||
            inputFrame.gainmapCodecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();
    bool gainmapCodecInputReady = inputFrame.gainmapImage.get() != nullptr &&
            !inputFrame.gainmapCodecInputBuffers.empty();
    // Either the muxer already owns a dequeued output buffer, or one can
    // still be dequeued from the output surface.
    bool hasOutputBuffer = inputFrame.muxer != nullptr ||
            (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
    bool hasGainmapMetadata = !inputFrame.isoGainmapMetadata.empty();

    ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
            " dequeuedOutputBuffer %d, timestamp %" PRId64, __FUNCTION__, frameNumber,
            appSegmentReady, codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt,
            inputFrame.timestamp);

    // Handle inputs for Hevc tiling
    if (codecInputReady) {
        // In HDR gainmap mode the SDR base image must be tone-mapped from the
        // HDR input before any tile can be fed to the encoder.
        if (mHDRGainmapEnabled && (inputFrame.baseBuffer.get() == nullptr)) {
            auto res = generateBaseImageAndGainmap(inputFrame);
            if (res != OK) {
                ALOGE("%s: Error generating SDR base image and HDR gainmap: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        }

        res = processCodecInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    if (gainmapCodecInputReady) {
        res = processCodecGainmapInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process gainmap codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Nothing left to do this round unless output can actually be written.
    if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
        return OK;
    }

    // Initialize and start muxer if not yet done so. In this case,
    // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
    // to be false, and the function must have returned early.
    if (inputFrame.muxer == nullptr) {
        res = startMuxerForInputFrame(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write the HDR gainmap metadata
    if (hasGainmapMetadata) {
        // 'tmap' marker prefixes the ISO 21496-1 gainmap metadata sample.
        uint8_t kGainmapMetaMarker[] = {'t', 'm', 'a', 'p', '\0', '\0'};
        sp<ABuffer> aBuffer =
                new ABuffer(inputFrame.isoGainmapMetadata.size() + sizeof(kGainmapMetaMarker));
        memcpy(aBuffer->data(), kGainmapMetaMarker, sizeof(kGainmapMetaMarker));
        memcpy(aBuffer->data() + sizeof(kGainmapMetaMarker), inputFrame.isoGainmapMetadata.data(),
               inputFrame.isoGainmapMetadata.size());

        aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecColorFormat);
        aBuffer->meta()->setInt32("color-primaries", kCodecColorPrimaries);
        aBuffer->meta()->setInt32("color-transfer", kCodecColorTransfer);
        aBuffer->meta()->setInt32("color-matrix", kCodecColorMatrix);
        aBuffer->meta()->setInt32("color-range", kCodecColorRange);
        auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
                                                     inputFrame.timestamp,
                                                     MediaCodec::BUFFER_FLAG_MUXER_DATA);
        if (res != OK) {
            ALOGE("%s: Failed to write HDR gainmap metadata to muxer: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
        // Clear so the metadata is written at most once per frame.
        inputFrame.isoGainmapMetadata.clear();
    }

    // Write JPEG APP segments data to the muxer.
    if (appSegmentReady) {
        res = processAppSegment(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec bitstream buffers to muxer.
    while (!inputFrame.codecOutputBuffers.empty()) {
        res = processOneCodecOutputFrame(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec gainmap bitstream buffers to muxer.
    while (!inputFrame.gainmapCodecOutputBuffers.empty()) {
        res = processOneCodecGainmapOutputFrame(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec gainmap output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Frame is complete once all tiles are muxed AND the APP segment (or its
    // synthesized replacement) has been written.
    if ((inputFrame.pendingOutputTiles == 0) && (inputFrame.gainmapPendingOutputTiles == 0)) {
        if (inputFrame.appSegmentWritten) {
            res = processCompletedInputFrame(frameNumber, inputFrame);
            if (res != OK) {
                ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        }
    }

    return res;
}
1209 
// Prepares the output side for one frame: dequeues a BLOB output buffer,
// creates a memfd-backed temporary file, creates a HEIF MediaMuxer writing
// into that fd, adds the main image track (and the gainmap track when a
// gainmap format exists), and starts the muxer.
//
// NOTE(review): on the failure paths below, the dequeued output buffer, the
// incremented mDequeuedOutputBufferCnt and the memfd are not rolled back in
// this function — presumably reclaimed by the frame-release/error path;
// verify against the release logic.
status_t HeicCompositeStream::startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;

    // ANativeWindow exposes C-style function pointers; the window is passed
    // explicitly as the first argument.
    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }
    mDequeuedOutputBufferCnt++;

    // Combine current thread id, stream id and timestamp to uniquely identify image.
    std::ostringstream tempOutputFile;
    tempOutputFile << "HEIF-" << pthread_self() << "-"
            << getStreamId() << "-" << frameNumber;
    // memfd_create gives an anonymous in-memory file the muxer can write to;
    // invoked via syscall() since there is no libc wrapper on older bionic.
    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = MediaMuxer::create(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Main image track; all of this frame's output tiles land on it.
    ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    inputFrame.trackIndex = trackId;
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    // Optional gainmap track, present only when a gainmap format was attached.
    if (inputFrame.gainmapFormat.get() != nullptr) {
        trackId = inputFrame.muxer->addTrack(inputFrame.gainmapFormat);
        if (trackId < 0) {
            ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
            return NO_INIT;
        }
        inputFrame.gainmapTrackIndex = trackId;
        inputFrame.gainmapPendingOutputTiles = mNumGainmapOutputTiles;
    }

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
            frameNumber);
    return OK;
}
1275 
processAppSegment(int64_t frameNumber,InputFrame & inputFrame)1276 status_t HeicCompositeStream::processAppSegment(int64_t frameNumber, InputFrame &inputFrame) {
1277     size_t app1Size = 0;
1278     size_t appSegmentSize = 0;
1279     if (!inputFrame.exifError) {
1280         appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
1281                 inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
1282                 &app1Size);
1283         if (appSegmentSize == 0) {
1284             ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
1285             return NO_INIT;
1286         }
1287     }
1288 
1289     std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
1290     auto exifRes = inputFrame.exifError ?
1291             exifUtils->initializeEmpty() :
1292             exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
1293     if (!exifRes) {
1294         ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
1295         return BAD_VALUE;
1296     }
1297     exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
1298             mOutputWidth, mOutputHeight);
1299     if (!exifRes) {
1300         ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
1301         return BAD_VALUE;
1302     }
1303     exifRes = exifUtils->setOrientation(inputFrame.orientation);
1304     if (!exifRes) {
1305         ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
1306         return BAD_VALUE;
1307     }
1308     exifRes = exifUtils->generateApp1();
1309     if (!exifRes) {
1310         ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
1311         return BAD_VALUE;
1312     }
1313 
1314     unsigned int newApp1Length = exifUtils->getApp1Length();
1315     const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();
1316 
1317     //Assemble the APP1 marker buffer required by MediaCodec
1318     uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
1319     kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
1320     kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
1321     size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
1322             appSegmentSize - app1Size + newApp1Length;
1323     uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
1324     memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
1325     memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
1326     if (appSegmentSize - app1Size > 0) {
1327         memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
1328                 inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
1329     }
1330 
1331     sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
1332     auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
1333             inputFrame.timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
1334     delete[] appSegmentBuffer;
1335 
1336     if (res != OK) {
1337         ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
1338                 __FUNCTION__, strerror(-res), res);
1339         return res;
1340     }
1341 
1342     ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
1343           __FUNCTION__, frameNumber, appSegmentSize, inputFrame.appSegmentBuffer.width,
1344           inputFrame.appSegmentBuffer.height, app1Size);
1345 
1346     inputFrame.appSegmentWritten = true;
1347     // Release the buffer now so any pending input app segments can be processed
1348     if (!inputFrame.exifError) {
1349         mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
1350         inputFrame.appSegmentBuffer.data = nullptr;
1351         inputFrame.exifError = false;
1352     }
1353 
1354     return OK;
1355 }
1356 
/**
 * Tonemap the HDR input down to an SDR base image and derive the HDR
 * gainmap plus its ISO 21496-1 metadata using libultrahdr.
 *
 * On success, the following inputFrame fields are populated:
 *  - baseBuffer / baseImage: the tonemapped SDR image, also wrapped as a
 *    CpuConsumer::LockedBuffer so the tile-copy path can consume it like
 *    the original camera buffer
 *  - gainmap / gainmapImage + gainmapChroma: single-channel gainmap
 *    expanded to semi-planar YUV with neutral (128) chroma
 *  - isoGainmapMetadata: ISO 21496-1 encoded metadata with a leading
 *    version byte (0)
 *
 * @param inputFrame in/out frame state; yuvBuffer must hold the HDR input.
 * @return OK on success, BAD_VALUE if any libultrahdr step fails.
 */
status_t HeicCompositeStream::generateBaseImageAndGainmap(InputFrame &inputFrame) {
    ultrahdr::JpegR jpegR(nullptr /*gles ctx*/, kGainmapScale);
    // NOTE(review): the last ctor argument is labeled stride but passes a
    // constant 8 — presumably an alignment value rather than a row stride;
    // confirm against the uhdr_raw_image_ext_t constructor signature.
    inputFrame.baseBuffer = std::make_unique<ultrahdr::uhdr_raw_image_ext_t>(
            kUltraHdrOutputFmt, kUltraHdrOutputGamut, kUltraHdrInputTransfer, kUltraHdrOutputRange,
            inputFrame.yuvBuffer.width, inputFrame.yuvBuffer.height, 8/*stride*/);

    // Describe the HDR input (Y plane + interleaved UV plane) for libultrahdr.
    uhdr_raw_image_t hdr_intent;
    hdr_intent.fmt = kUltraHdrInputFmt;
    hdr_intent.cg = kUltraHdrInputGamut;
    hdr_intent.ct = kUltraHdrInputTransfer;
    hdr_intent.range = kUltraHdrInputRange;
    hdr_intent.w = inputFrame.yuvBuffer.width;
    hdr_intent.h = inputFrame.yuvBuffer.height;
    hdr_intent.planes[UHDR_PLANE_Y] = inputFrame.yuvBuffer.data;
    hdr_intent.planes[UHDR_PLANE_UV] = inputFrame.yuvBuffer.dataCb;
    hdr_intent.planes[UHDR_PLANE_V] = nullptr;
    //libUltraHDR expects the stride in pixels
    // (LockedBuffer strides are in bytes; /2 assumes 2 bytes per pixel,
    // consistent with the P010 input noted in initializeCodec)
    hdr_intent.stride[UHDR_PLANE_Y] = inputFrame.yuvBuffer.stride / 2;
    hdr_intent.stride[UHDR_PLANE_UV] = inputFrame.yuvBuffer.chromaStride / 2;
    hdr_intent.stride[UHDR_PLANE_V] = 0;
    auto res = jpegR.toneMap(&hdr_intent, inputFrame.baseBuffer.get());
    if (res.error_code == UHDR_CODEC_OK) {
        ALOGV("%s: Base image tonemapped successfully", __FUNCTION__);
    } else {
        ALOGE("%s: Failed during HDR to SDR tonemap: %d", __FUNCTION__, res.error_code);
        return BAD_VALUE;
    }

    // Wrap the tonemapped output as a LockedBuffer: copy the metadata from
    // the source buffer, then redirect plane pointers/strides to baseBuffer.
    inputFrame.baseImage = std::make_unique<CpuConsumer::LockedBuffer>();
    *inputFrame.baseImage = inputFrame.yuvBuffer;
    inputFrame.baseImage->data = reinterpret_cast<uint8_t*>(
            inputFrame.baseBuffer->planes[UHDR_PLANE_Y]);
    inputFrame.baseImage->dataCb = reinterpret_cast<uint8_t*>(
            inputFrame.baseBuffer->planes[UHDR_PLANE_U]);
    inputFrame.baseImage->dataCr = reinterpret_cast<uint8_t*>(
            inputFrame.baseBuffer->planes[UHDR_PLANE_V]);
    inputFrame.baseImage->chromaStep = 1;
    inputFrame.baseImage->stride = inputFrame.baseBuffer->stride[UHDR_PLANE_Y];
    inputFrame.baseImage->chromaStride = inputFrame.baseBuffer->stride[UHDR_PLANE_UV];
    inputFrame.baseImage->dataSpace = HAL_DATASPACE_V0_JFIF;

    // Derive the gainmap from the SDR base / HDR intent pair.
    ultrahdr::uhdr_gainmap_metadata_ext_t metadata;
    res = jpegR.generateGainMap(inputFrame.baseBuffer.get(), &hdr_intent, &metadata,
            inputFrame.gainmap, false /*sdr_is_601*/, true /*use_luminance*/);
    if (res.error_code == UHDR_CODEC_OK) {
        ALOGV("%s: HDR gainmap generated successfully!", __FUNCTION__);
    } else {
        ALOGE("%s: Failed HDR gainmap: %d", __FUNCTION__, res.error_code);
        return BAD_VALUE;
    }
    // We can only generate a single channel gainmap at the moment. However only
    // multi channel HEVC encoding (like YUV420) is required. Set the extra U/V
    // planes to 128 to avoid encoding any actual color data.
    inputFrame.gainmapChroma = std::make_unique<uint8_t[]>(
            inputFrame.gainmap->w * inputFrame.gainmap->h / 2);
    memset(inputFrame.gainmapChroma.get(), 128, inputFrame.gainmap->w * inputFrame.gainmap->h / 2);

    // Convert the float gainmap metadata into the fractional representation
    // required by the ISO encoding step below.
    ultrahdr::uhdr_gainmap_metadata_frac iso_secondary_metadata;
    res = ultrahdr::uhdr_gainmap_metadata_frac::gainmapMetadataFloatToFraction(
                &metadata, &iso_secondary_metadata);
    if (res.error_code == UHDR_CODEC_OK) {
        ALOGV("%s: HDR gainmap converted to fractions successfully!", __FUNCTION__);
    } else {
        ALOGE("%s: Failed to convert HDR gainmap to fractions: %d", __FUNCTION__,
                res.error_code);
        return BAD_VALUE;
    }

    res = ultrahdr::uhdr_gainmap_metadata_frac::encodeGainmapMetadata(&iso_secondary_metadata,
                                                               inputFrame.isoGainmapMetadata);
    if (res.error_code == UHDR_CODEC_OK) {
        ALOGV("%s: HDR gainmap encoded to ISO format successfully!", __FUNCTION__);
    } else {
        ALOGE("%s: Failed to encode HDR gainmap to ISO format: %d", __FUNCTION__,
                res.error_code);
        return BAD_VALUE;
    }
    // 6.6.2.4.2 of ISO/IEC 23008-12:2024 expects the ISO 21496-1 gainmap to be
    // preceded by an u8 version equal to 0
    inputFrame.isoGainmapMetadata.insert(inputFrame.isoGainmapMetadata.begin(), 0);

    // Expose the gainmap as a semi-planar LockedBuffer (chromaStep 2), with
    // Cb/Cr pointing into the neutral chroma buffer allocated above.
    inputFrame.gainmapImage = std::make_unique<CpuConsumer::LockedBuffer>();
    *inputFrame.gainmapImage = inputFrame.yuvBuffer;
    inputFrame.gainmapImage->data = reinterpret_cast<uint8_t*>(
            inputFrame.gainmap->planes[UHDR_PLANE_Y]);
    inputFrame.gainmapImage->dataCb = inputFrame.gainmapChroma.get();
    inputFrame.gainmapImage->dataCr = inputFrame.gainmapChroma.get() + 1;
    inputFrame.gainmapImage->chromaStep = 2;
    inputFrame.gainmapImage->stride = inputFrame.gainmap->stride[UHDR_PLANE_Y];
    inputFrame.gainmapImage->chromaStride = inputFrame.gainmap->w;
    inputFrame.gainmapImage->dataSpace = HAL_DATASPACE_V0_JFIF;

    return OK;
}
1451 
processCodecInputFrame(InputFrame & inputFrame)1452 status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
1453     for (auto& inputBuffer : inputFrame.codecInputBuffers) {
1454         sp<MediaCodecBuffer> buffer;
1455         auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
1456         if (res != OK) {
1457             ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
1458                     strerror(-res), res);
1459             return res;
1460         }
1461 
1462         // Copy one tile from source to destination.
1463         size_t tileX = inputBuffer.tileIndex % mGridCols;
1464         size_t tileY = inputBuffer.tileIndex / mGridCols;
1465         size_t top = mGridHeight * tileY;
1466         size_t left = mGridWidth * tileX;
1467         size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
1468                 mOutputWidth - tileX * mGridWidth : mGridWidth;
1469         size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
1470                 mOutputHeight - tileY * mGridHeight : mGridHeight;
1471         ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu,"
1472                 " timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
1473                 inputBuffer.timeUs);
1474 
1475         auto yuvInput = (inputFrame.baseImage.get() != nullptr) ?
1476             *inputFrame.baseImage.get() : inputFrame.yuvBuffer;
1477         res = copyOneYuvTile(buffer, yuvInput, top, left, width, height);
1478         if (res != OK) {
1479             ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
1480                     strerror(-res), res);
1481             return res;
1482         }
1483 
1484         res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
1485                 inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
1486         if (res != OK) {
1487             ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
1488                     __FUNCTION__, strerror(-res), res);
1489             return res;
1490         }
1491     }
1492 
1493     inputFrame.codecInputBuffers.clear();
1494     return OK;
1495 }
1496 
processCodecGainmapInputFrame(InputFrame & inputFrame)1497 status_t HeicCompositeStream::processCodecGainmapInputFrame(InputFrame &inputFrame) {
1498     for (auto& inputBuffer : inputFrame.gainmapCodecInputBuffers) {
1499         sp<MediaCodecBuffer> buffer;
1500         auto res = mGainmapCodec->getInputBuffer(inputBuffer.index, &buffer);
1501         if (res != OK) {
1502             ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
1503                     strerror(-res), res);
1504             return res;
1505         }
1506 
1507         // Copy one tile from source to destination.
1508         size_t tileX = inputBuffer.tileIndex % mGainmapGridCols;
1509         size_t tileY = inputBuffer.tileIndex / mGainmapGridCols;
1510         size_t top = mGainmapGridHeight * tileY;
1511         size_t left = mGainmapGridWidth * tileX;
1512         size_t width = (tileX == static_cast<size_t>(mGainmapGridCols) - 1) ?
1513                 mGainmapOutputWidth - tileX * mGainmapGridWidth : mGainmapGridWidth;
1514         size_t height = (tileY == static_cast<size_t>(mGainmapGridRows) - 1) ?
1515                 mGainmapOutputHeight - tileY * mGainmapGridHeight : mGainmapGridHeight;
1516         ALOGV("%s: gainmap inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, "
1517                 "height %zu, timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
1518                 inputBuffer.timeUs);
1519 
1520         auto yuvInput = *inputFrame.gainmapImage;
1521         res = copyOneYuvTile(buffer, yuvInput, top, left, width, height);
1522         if (res != OK) {
1523             ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
1524                     strerror(-res), res);
1525             return res;
1526         }
1527 
1528         res = mGainmapCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
1529                 inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
1530         if (res != OK) {
1531             ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
1532                     __FUNCTION__, strerror(-res), res);
1533             return res;
1534         }
1535     }
1536 
1537     inputFrame.gainmapCodecInputBuffers.clear();
1538     return OK;
1539 }
1540 
processOneCodecOutputFrame(int64_t frameNumber,InputFrame & inputFrame)1541 status_t HeicCompositeStream::processOneCodecOutputFrame(int64_t frameNumber,
1542         InputFrame &inputFrame) {
1543     auto it = inputFrame.codecOutputBuffers.begin();
1544     sp<MediaCodecBuffer> buffer;
1545     status_t res = mCodec->getOutputBuffer(it->index, &buffer);
1546     if (res != OK) {
1547         ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
1548                 __FUNCTION__, it->index, strerror(-res), res);
1549         return res;
1550     }
1551     if (buffer == nullptr) {
1552         ALOGE("%s: Invalid Heic codec output buffer at index %d",
1553                 __FUNCTION__, it->index);
1554         return BAD_VALUE;
1555     }
1556 
1557     sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
1558     if (mHDRGainmapEnabled) {
1559         aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecColorFormat);
1560         aBuffer->meta()->setInt32("color-primaries", kCodecColorPrimaries);
1561         aBuffer->meta()->setInt32("color-transfer", kCodecColorTransfer);
1562         aBuffer->meta()->setInt32("color-matrix", kCodecColorMatrix);
1563         aBuffer->meta()->setInt32("color-range", kCodecColorRange);
1564     }
1565     res = inputFrame.muxer->writeSampleData(
1566             aBuffer, inputFrame.trackIndex, inputFrame.timestamp, 0 /*flags*/);
1567     if (res != OK) {
1568         ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
1569                 __FUNCTION__, it->index, strerror(-res), res);
1570         return res;
1571     }
1572 
1573     mCodec->releaseOutputBuffer(it->index);
1574     if (inputFrame.pendingOutputTiles == 0) {
1575         ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
1576     } else {
1577         inputFrame.pendingOutputTiles--;
1578     }
1579 
1580     inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
1581 
1582     ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
1583         __FUNCTION__, frameNumber, it->index);
1584     return OK;
1585 }
1586 
processOneCodecGainmapOutputFrame(int64_t frameNumber,InputFrame & inputFrame)1587 status_t HeicCompositeStream::processOneCodecGainmapOutputFrame(int64_t frameNumber,
1588         InputFrame &inputFrame) {
1589     auto it = inputFrame.gainmapCodecOutputBuffers.begin();
1590     sp<MediaCodecBuffer> buffer;
1591     status_t res = mGainmapCodec->getOutputBuffer(it->index, &buffer);
1592     if (res != OK) {
1593         ALOGE("%s: Error getting Heic gainmap codec output buffer at index %d: %s (%d)",
1594                 __FUNCTION__, it->index, strerror(-res), res);
1595         return res;
1596     }
1597     if (buffer == nullptr) {
1598         ALOGE("%s: Invalid Heic gainmap codec output buffer at index %d",
1599                 __FUNCTION__, it->index);
1600         return BAD_VALUE;
1601     }
1602 
1603     uint8_t kGainmapMarker[] = {'g', 'm', 'a', 'p', '\0', '\0'};
1604     sp<ABuffer> aBuffer = new ABuffer(buffer->size() + sizeof(kGainmapMarker));
1605     memcpy(aBuffer->data(), kGainmapMarker, sizeof(kGainmapMarker));
1606     memcpy(aBuffer->data() + sizeof(kGainmapMarker), buffer->data(), buffer->size());
1607     aBuffer->meta()->setInt32(KEY_COLOR_FORMAT, kCodecGainmapColorFormat);
1608     aBuffer->meta()->setInt32("color-primaries", kCodecGainmapColorPrimaries);
1609     aBuffer->meta()->setInt32("color-transfer", kCodecGainmapColorTransfer);
1610     aBuffer->meta()->setInt32("color-matrix", kCodecGainmapColorMatrix);
1611     aBuffer->meta()->setInt32("color-range", kCodecGainmapColorRange);
1612     res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.gainmapTrackIndex,
1613                                             inputFrame.timestamp,
1614                                             MediaCodec::BUFFER_FLAG_MUXER_DATA);
1615     if (res != OK) {
1616         ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
1617                 __FUNCTION__, it->index, strerror(-res), res);
1618         return res;
1619     }
1620 
1621     mGainmapCodec->releaseOutputBuffer(it->index);
1622     if (inputFrame.gainmapPendingOutputTiles == 0) {
1623         ALOGW("%s: Codec generated more gainmap tiles than expected!", __FUNCTION__);
1624     } else {
1625         inputFrame.gainmapPendingOutputTiles--;
1626     }
1627 
1628     inputFrame.gainmapCodecOutputBuffers.erase(inputFrame.gainmapCodecOutputBuffers.begin());
1629 
1630     ALOGV("%s: [%" PRId64 "]: Gainmap output buffer index %d",
1631         __FUNCTION__, frameNumber, it->index);
1632     return OK;
1633 }
1634 
/**
 * Finalize one fully-encoded capture: stop the muxer, copy the resulting
 * HEIC file from the temporary fd into the client's output graphic buffer,
 * write the trailing CameraBlob transport header, and queue the buffer to
 * the output surface.
 *
 * @param frameNumber frame number of this capture (logging/tracing only).
 * @param inputFrame frame state; fileFd is consumed and anb is handed off
 *        to the output surface on success. On early-error returns, fileFd
 *        and anb are later reclaimed by releaseInputFrameLocked().
 * @return OK on success; BAD_VALUE if the muxed file doesn't fit in the
 *         output buffer or can't be fully read back; otherwise the
 *         underlying lock/queue error code.
 */
status_t HeicCompositeStream::processCompletedInputFrame(int64_t frameNumber,
        InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    inputFrame.muxer->stop();

    // Copy the content of the file to memory.
    sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
    void* dstBuffer;
    // Scoped locker — presumably unlocks the buffer on destruction (all
    // return paths below rely on that); confirm against GraphicBufferLocker.
    GraphicBufferLocker gbLocker(gb);
    auto res = gbLocker.lockAsync(&dstBuffer, inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Measure the muxed file; it must fit ahead of the CameraBlob footer
    // that lives in the last sizeof(CameraBlob) bytes of the buffer.
    off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
    if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
        ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer sizer %zu",
                __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
        return BAD_VALUE;
    }

    // Rewind and copy the whole file into the graphic buffer.
    lseek(inputFrame.fileFd, 0, SEEK_SET);
    ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
    if (bytesRead < fSize) {
        ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
        return BAD_VALUE;
    }

    // The temporary file is fully consumed at this point.
    close(inputFrame.fileFd);
    inputFrame.fileFd = -1;

    // Fill in HEIC header
    // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
    CameraBlob blobHeader = {
        .blobId = static_cast<CameraBlobId>(0x00FE),
        .blobSizeBytes = static_cast<int32_t>(fSize)
    };
    memcpy(header, &blobHeader, sizeof(CameraBlob));

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
               __FUNCTION__, getStreamId(), strerror(-res), res);
        return res;
    }

    res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
    if (res != OK) {
        ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    // Buffer ownership has transferred to the consumer side.
    inputFrame.anb = nullptr;
    mDequeuedOutputBufferCnt--;

    ALOGV("%s: [%" PRId64 "]", __FUNCTION__, frameNumber);
    ATRACE_ASYNC_END("HEIC capture", frameNumber);
    return OK;
}
1697 
1698 
releaseInputFrameLocked(int64_t frameNumber,InputFrame * inputFrame)1699 void HeicCompositeStream::releaseInputFrameLocked(int64_t frameNumber,
1700         InputFrame *inputFrame /*out*/) {
1701     if (inputFrame == nullptr) {
1702         return;
1703     }
1704 
1705     if (inputFrame->appSegmentBuffer.data != nullptr) {
1706         mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
1707         inputFrame->appSegmentBuffer.data = nullptr;
1708     }
1709 
1710     while (!inputFrame->codecOutputBuffers.empty()) {
1711         auto it = inputFrame->codecOutputBuffers.begin();
1712         ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
1713         mCodec->releaseOutputBuffer(it->index);
1714         inputFrame->codecOutputBuffers.erase(it);
1715     }
1716 
1717     while (!inputFrame->gainmapCodecOutputBuffers.empty()) {
1718         auto it = inputFrame->gainmapCodecOutputBuffers.begin();
1719         ALOGV("%s: release gainmap output buffer index %d", __FUNCTION__, it->index);
1720         mGainmapCodec->releaseOutputBuffer(it->index);
1721         inputFrame->gainmapCodecOutputBuffers.erase(it);
1722     }
1723 
1724     if (inputFrame->yuvBuffer.data != nullptr) {
1725         mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
1726         inputFrame->yuvBuffer.data = nullptr;
1727         mYuvBufferAcquired = false;
1728     }
1729 
1730     while (!inputFrame->codecInputBuffers.empty()) {
1731         auto it = inputFrame->codecInputBuffers.begin();
1732         inputFrame->codecInputBuffers.erase(it);
1733     }
1734 
1735     while (!inputFrame->gainmapCodecInputBuffers.empty()) {
1736         auto it = inputFrame->gainmapCodecInputBuffers.begin();
1737         inputFrame->gainmapCodecInputBuffers.erase(it);
1738     }
1739 
1740     if (inputFrame->error || mErrorState) {
1741         ALOGV("%s: notifyError called for frameNumber %" PRId64, __FUNCTION__, frameNumber);
1742         notifyError(frameNumber, inputFrame->requestId);
1743     }
1744 
1745     if (inputFrame->fileFd >= 0) {
1746         close(inputFrame->fileFd);
1747         inputFrame->fileFd = -1;
1748     }
1749 
1750     if (inputFrame->anb != nullptr) {
1751         sp<ANativeWindow> outputANW = mOutputSurface;
1752         outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
1753         inputFrame->anb = nullptr;
1754 
1755         mDequeuedOutputBufferCnt--;
1756     }
1757 }
1758 
releaseInputFramesLocked()1759 void HeicCompositeStream::releaseInputFramesLocked() {
1760     auto it = mPendingInputFrames.begin();
1761     bool inputFrameDone = false;
1762     while (it != mPendingInputFrames.end()) {
1763         auto& inputFrame = it->second;
1764         if (inputFrame.error ||
1765                 (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0 &&
1766                  inputFrame.gainmapPendingOutputTiles == 0)) {
1767             releaseInputFrameLocked(it->first, &inputFrame);
1768             it = mPendingInputFrames.erase(it);
1769             inputFrameDone = true;
1770         } else {
1771             it++;
1772         }
1773     }
1774 
1775     // Update codec quality based on first upcoming input frame.
1776     // Note that when encoding is in surface mode, currently there is  no
1777     // way for camera service to synchronize quality setting on a per-frame
1778     // basis: we don't get notification when codec is ready to consume a new
1779     // input frame. So we update codec quality on a best-effort basis.
1780     if (inputFrameDone) {
1781         auto firstPendingFrame = mPendingInputFrames.begin();
1782         if (firstPendingFrame != mPendingInputFrames.end()) {
1783             updateCodecQualityLocked(firstPendingFrame->second.quality);
1784         } else {
1785             if (mSettingsByFrameNumber.size() == 0) {
1786                 markTrackerIdle();
1787             }
1788         }
1789     }
1790 }
1791 
/**
 * Create and configure the HEVC encoder used for the HDR gainmap track.
 * No-op (returns OK) when HDR gainmap support is disabled.
 *
 * The gainmap is encoded at the main image resolution divided by
 * kGainmapScale. Relies on state set up by initializeCodec()
 * (mCodecLooper, mOutputWidth/Height, mMaxHeicBufferSize), which calls
 * this as its final step.
 *
 * @return OK on success; BAD_VALUE for unsupported sizes, NO_INIT/NO_MEMORY
 *         for allocation/startup failures, or the codec error code.
 */
status_t HeicCompositeStream::initializeGainmapCodec() {
    ALOGV("%s", __FUNCTION__);

    if (!mHDRGainmapEnabled) {
        return OK;
    }
    uint32_t width = mOutputWidth / kGainmapScale;
    uint32_t height = mOutputHeight / kGainmapScale;
    bool useGrid = false;
    bool useHeic = false;
    AString hevcName;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &useHeic, &useGrid, nullptr, &hevcName);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesn't support size %u x %u!",
                __FUNCTION__, width, height);
        return BAD_VALUE;
    }

    // Create HEVC codec.
    // The gainmap track always uses the HEVC component; the useHeic result
    // from the query above is not consulted here.
    mGainmapCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
    if (mGainmapCodec == nullptr) {
        ALOGE("%s: Failed to create gainmap codec", __FUNCTION__);
        return NO_INIT;
    }

    // Create Looper and handler for Codec callback.
    mGainmapCodecCallbackHandler = new CodecCallbackHandler(this, true /*isGainmap*/);
    if (mGainmapCodecCallbackHandler == nullptr) {
        ALOGE("%s: Failed to create gainmap codec callback handler", __FUNCTION__);
        return NO_MEMORY;
    }
    mGainmapCallbackLooper = new ALooper;
    mGainmapCallbackLooper->setName("Camera3-HeicComposite-MediaCodecGainmapCallbackLooper");
    auto res = mGainmapCallbackLooper->start(
            false,   // runOnCallingThread
            false,    // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start gainmap media callback looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }
    mGainmapCallbackLooper->registerHandler(mGainmapCodecCallbackHandler);

    // Switch the codec to asynchronous (callback) operation.
    mGainmapAsyncNotify = new AMessage(kWhatCallbackNotify, mGainmapCodecCallbackHandler);
    res = mGainmapCodec->setCallback(mGainmapAsyncNotify);
    if (res != OK) {
        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Create output format and configure the Codec.
    sp<AMessage> outputFormat = new AMessage();
    outputFormat->setString(KEY_MIME, MIMETYPE_VIDEO_HEVC);
    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
    // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);

    // With tiling, each grid cell is encoded as a separate frame of
    // gridWidth x gridHeight; row/column counts use ceiling division so
    // partial edge tiles are covered.
    int32_t gridWidth, gridHeight, gridRows, gridCols;
    if (useGrid){
        gridWidth = HeicEncoderInfoManager::kGridWidth;
        gridHeight = HeicEncoderInfoManager::kGridHeight;
        gridRows = (height + gridHeight - 1)/gridHeight;
        gridCols = (width + gridWidth - 1)/gridWidth;
    } else {
        gridWidth = width;
        gridHeight = height;
        gridRows = 1;
        gridCols = 1;
    }

    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
    outputFormat->setInt32(KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);
    outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
    // This only serves as a hint to encoder when encoding is not real-time.
    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);

    res = mGainmapCodec->configure(outputFormat, nullptr /*nativeWindow*/,
            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
    if (res != OK) {
        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Cache grid geometry for the gainmap tile copy/queue paths.
    mGainmapGridWidth = gridWidth;
    mGainmapGridHeight = gridHeight;
    mGainmapGridRows = gridRows;
    mGainmapGridCols = gridCols;
    mGainmapUseGrid = useGrid;
    mGainmapOutputWidth = width;
    mGainmapOutputHeight = height;
    // Grow the output blob budget by the gainmap's worst-case YUV420 size
    // (3/2 bytes per pixel, dimensions rounded up to the grid).
    mMaxHeicBufferSize +=
        ALIGN(mGainmapOutputWidth, HeicEncoderInfoManager::kGridWidth) *
        ALIGN(mGainmapOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2;

    return OK;
}
1895 
initializeCodec(uint32_t width,uint32_t height,const sp<CameraDeviceBase> & cameraDevice)1896 status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
1897         const sp<CameraDeviceBase>& cameraDevice) {
1898     ALOGV("%s", __FUNCTION__);
1899 
1900     bool useGrid = false;
1901     AString hevcName;
1902     bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
1903             &mUseHeic, &useGrid, nullptr, &hevcName);
1904     if (!isSizeSupported) {
1905         ALOGE("%s: Encoder doesnt' support size %u x %u!",
1906                 __FUNCTION__, width, height);
1907         return BAD_VALUE;
1908     }
1909     if (mHDRGainmapEnabled) {
1910         // HDR Gainmap tonemapping and generation can only be done in SW
1911         // using P010 as input. HEIC codecs expect private/impl.defined
1912         // which is opaque.
1913         mUseHeic = false;
1914     }
1915 
1916     // Create Looper for MediaCodec.
1917     auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
1918     mCodecLooper = new ALooper;
1919     mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
1920     status_t res = mCodecLooper->start(
1921             false,   // runOnCallingThread
1922             false,    // canCallJava
1923             PRIORITY_AUDIO);
1924     if (res != OK) {
1925         ALOGE("%s: Failed to start codec looper: %s (%d)",
1926                 __FUNCTION__, strerror(-res), res);
1927         return NO_INIT;
1928     }
1929 
1930     // Create HEIC/HEVC codec.
1931     if (mUseHeic) {
1932         mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
1933     } else {
1934         mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
1935     }
1936     if (mCodec == nullptr) {
1937         ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
1938         return NO_INIT;
1939     }
1940 
1941     // Create Looper and handler for Codec callback.
1942     mCodecCallbackHandler = new CodecCallbackHandler(this);
1943     if (mCodecCallbackHandler == nullptr) {
1944         ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
1945         return NO_MEMORY;
1946     }
1947     mCallbackLooper = new ALooper;
1948     mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
1949     res = mCallbackLooper->start(
1950             false,   // runOnCallingThread
1951             false,    // canCallJava
1952             PRIORITY_AUDIO);
1953     if (res != OK) {
1954         ALOGE("%s: Failed to start media callback looper: %s (%d)",
1955                 __FUNCTION__, strerror(-res), res);
1956         return NO_INIT;
1957     }
1958     mCallbackLooper->registerHandler(mCodecCallbackHandler);
1959 
1960     mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
1961     res = mCodec->setCallback(mAsyncNotify);
1962     if (res != OK) {
1963         ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
1964                 strerror(-res), res);
1965         return res;
1966     }
1967 
1968     // Create output format and configure the Codec.
1969     sp<AMessage> outputFormat = new AMessage();
1970     outputFormat->setString(KEY_MIME, desiredMime);
1971     outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
1972     outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
1973     // Ask codec to skip timestamp check and encode all frames.
1974     outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);
1975 
1976     int32_t gridWidth, gridHeight, gridRows, gridCols;
1977     if (useGrid || mUseHeic) {
1978         gridWidth = HeicEncoderInfoManager::kGridWidth;
1979         gridHeight = HeicEncoderInfoManager::kGridHeight;
1980         gridRows = (height + gridHeight - 1)/gridHeight;
1981         gridCols = (width + gridWidth - 1)/gridWidth;
1982 
1983         if (mUseHeic) {
1984             outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
1985             outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
1986             outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
1987             outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
1988         }
1989 
1990     } else {
1991         gridWidth = width;
1992         gridHeight = height;
1993         gridRows = 1;
1994         gridCols = 1;
1995     }
1996 
1997     outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
1998     outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
1999     outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
2000     outputFormat->setInt32(KEY_COLOR_FORMAT,
2001             useGrid || mHDRGainmapEnabled ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
2002     outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
2003     // This only serves as a hint to encoder when encoding is not real-time.
2004     outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
2005 
2006     res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
2007             nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
2008     if (res != OK) {
2009         ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
2010                 strerror(-res), res);
2011         return res;
2012     }
2013 
2014     mGridWidth = gridWidth;
2015     mGridHeight = gridHeight;
2016     mGridRows = gridRows;
2017     mGridCols = gridCols;
2018     mUseGrid = useGrid;
2019     mOutputWidth = width;
2020     mOutputHeight = height;
2021     mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
2022     mMaxHeicBufferSize =
2023         ALIGN(mOutputWidth, HeicEncoderInfoManager::kGridWidth) *
2024         ALIGN(mOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2 + mAppSegmentMaxSize;
2025 
2026     return initializeGainmapCodec();
2027 }
2028 
deinitGainmapCodec()2029 void HeicCompositeStream::deinitGainmapCodec() {
2030     ALOGV("%s", __FUNCTION__);
2031     if (mGainmapCodec != nullptr) {
2032         mGainmapCodec->stop();
2033         mGainmapCodec->release();
2034         mGainmapCodec.clear();
2035     }
2036 
2037     if (mGainmapCallbackLooper != nullptr) {
2038         mGainmapCallbackLooper->stop();
2039         mGainmapCallbackLooper.clear();
2040     }
2041 
2042     mGainmapAsyncNotify.clear();
2043     mGainmapFormat.clear();
2044 }
2045 
deinitCodec()2046 void HeicCompositeStream::deinitCodec() {
2047     ALOGV("%s", __FUNCTION__);
2048     if (mCodec != nullptr) {
2049         mCodec->stop();
2050         mCodec->release();
2051         mCodec.clear();
2052     }
2053 
2054     deinitGainmapCodec();
2055 
2056     if (mCodecLooper != nullptr) {
2057         mCodecLooper->stop();
2058         mCodecLooper.clear();
2059     }
2060 
2061     if (mCallbackLooper != nullptr) {
2062         mCallbackLooper->stop();
2063         mCallbackLooper.clear();
2064     }
2065 
2066     mAsyncNotify.clear();
2067     mFormat.clear();
2068 }
2069 
2070 // Return the size of the complete list of app segment, 0 indicates failure
findAppSegmentsSize(const uint8_t * appSegmentBuffer,size_t maxSize,size_t * app1SegmentSize)2071 size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
2072         size_t maxSize, size_t *app1SegmentSize) {
2073     if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
2074         ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
2075                 __FUNCTION__, appSegmentBuffer, app1SegmentSize);
2076         return 0;
2077     }
2078 
2079     size_t expectedSize = 0;
2080     // First check for EXIF transport header at the end of the buffer
2081     const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(CameraBlob));
2082     const CameraBlob *blob = (const CameraBlob*)(header);
2083     if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
2084         ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, eToI(blob->blobId));
2085         return 0;
2086     }
2087 
2088     expectedSize = blob->blobSizeBytes;
2089     if (expectedSize == 0 || expectedSize > maxSize - sizeof(CameraBlob)) {
2090         ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
2091         return 0;
2092     }
2093 
2094     uint32_t totalSize = 0;
2095 
2096     // Verify APP1 marker (mandatory)
2097     uint8_t app1Marker[] = {0xFF, 0xE1};
2098     if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
2099         ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
2100                 appSegmentBuffer[0], appSegmentBuffer[1]);
2101         return 0;
2102     }
2103     totalSize += sizeof(app1Marker);
2104 
2105     uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
2106             appSegmentBuffer[totalSize+1];
2107     totalSize += app1Size;
2108 
2109     ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
2110             __FUNCTION__, expectedSize, app1Size);
2111     while (totalSize < expectedSize) {
2112         if (appSegmentBuffer[totalSize] != 0xFF ||
2113                 appSegmentBuffer[totalSize+1] <= 0xE1 ||
2114                 appSegmentBuffer[totalSize+1] > 0xEF) {
2115             // Invalid APPn marker
2116             ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
2117                     appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
2118             return 0;
2119         }
2120         totalSize += 2;
2121 
2122         uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
2123                 appSegmentBuffer[totalSize+1];
2124         totalSize += appnSize;
2125     }
2126 
2127     if (totalSize != expectedSize) {
2128         ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
2129                 __FUNCTION__, totalSize, expectedSize);
2130         return 0;
2131     }
2132 
2133     *app1SegmentSize = app1Size + sizeof(app1Marker);
2134     return expectedSize;
2135 }
2136 
/**
 * Copy one tile (width x height pixels at offset (left, top)) from a locked
 * camera YUV buffer into a codec input buffer, honoring the codec's plane
 * layout as described by the MediaImage2 metadata attached to the buffer.
 *
 * The codec buffer must carry "image-data" metadata describing an 8-bit,
 * 3-plane YUV image; otherwise BAD_VALUE is returned. Three copy strategies
 * are used depending on how the source and destination chroma layouts line
 * up: fast row copies for matching semiplanar layouts, fast row copies for
 * matching planar layouts, and a per-pixel fallback for everything else.
 *
 * NOTE(review): the /2 arithmetic assumes 4:2:0 chroma subsampling and even
 * top/left/width/height (grid-aligned tiles) — confirm with callers.
 */
status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
        const CpuConsumer::LockedBuffer& yuvBuffer,
        size_t top, size_t left, size_t width, size_t height) {
    ATRACE_CALL();

    // Get stride information for codecBuffer
    sp<ABuffer> imageData;
    if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
        ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
        return BAD_VALUE;
    }
    if (imageData->size() != sizeof(MediaImage2)) {
        ALOGE("%s: Invalid codec input image size %zu, expected %zu",
                __FUNCTION__, imageData->size(), sizeof(MediaImage2));
        return BAD_VALUE;
    }
    // Only 8-bit, 3-plane YUV codec input layouts are supported below.
    MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
    if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
            imageInfo->mBitDepth != 8 ||
            imageInfo->mBitDepthAllocated != 8 ||
            imageInfo->mNumPlanes != 3) {
        ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
                "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
                imageInfo->mType, imageInfo->mBitDepth,
                imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
        return BAD_VALUE;
    }

    ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
            __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
    ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
            __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
            imageInfo->mPlane[MediaImage2::V].mOffset,
            imageInfo->mPlane[MediaImage2::U].mRowInc,
            imageInfo->mPlane[MediaImage2::V].mRowInc,
            imageInfo->mPlane[MediaImage2::U].mColInc,
            imageInfo->mPlane[MediaImage2::V].mColInc);

    // Y plane: straight row-by-row copy using the CPU-optimized row copier
    // selected by initCopyRowFunction().
    for (auto row = top; row < top+height; row++) {
        uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
                imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
        mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
    }

    // U is Cb, V is Cr
    // Classify the codec's chroma layout from the plane descriptors:
    // - semiplanar: interleaved U/V (offsets differ by 1, colInc 2)
    // - planar: separate U and V planes (offsets at least half a plane apart,
    //   colInc 1)
    bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
            imageInfo->mPlane[MediaImage2::U].mOffset;
    uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
            imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
            imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
    bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
            (imageInfo->mPlane[MediaImage2::U].mRowInc ==
            imageInfo->mPlane[MediaImage2::V].mRowInc) &&
            (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
            (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
    bool isCodecUvPlannar =
            ((codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
            ((!codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
            imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
            imageInfo->mPlane[MediaImage2::V].mColInc == 1;
    bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;

    if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
            (codecUPlaneFirst == cameraUPlaneFirst)) {
        // UV semiplannar on both sides with matching U/V order: copy the
        // interleaved chroma rows in one pass.
        // The chroma plane could be either Cb first, or Cr first. Take the
        // smaller address.
        uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
        MediaImage2::PlaneIndex dstPlane = codecUPlaneFirst ? MediaImage2::U : MediaImage2::V;
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                    imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
            // Interleaved chroma: 'width' bytes cover width/2 U/V pairs.
            mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
        }
    } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
        // Planar on both sides: copy each chroma plane separately.
        // U plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
                    imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }

        // V plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
                    imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }
    } else {
        // Convert between semiplannar and plannar, or when UV orders are
        // different. Per-pixel fallback: slow but layout-agnostic, driven
        // entirely by the per-plane row/col increments.
        uint8_t *dst = codecBuffer->data();
        for (auto row = top/2; row < (top+height)/2; row++) {
            for (auto col = left/2; col < (left+width)/2; col++) {
                // U/Cb
                int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
                        imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
                int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCb[srcIndex];

                // V/Cr
                dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
                        imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
                srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
            }
        }
    }
    return OK;
}
2252 
// Select the row-copy routine used by copyOneYuvTile() for this CPU.
// Each #if block below may overwrite the previous choice, so the ladder is
// ordered from generic (CopyRow_C) toward increasingly preferred variants;
// the last compiled-in feature that TestCpuFlag() reports wins.
// The *_Any_* variants handle widths that are not SIMD-width aligned.
void HeicCompositeStream::initCopyRowFunction([[maybe_unused]] int32_t width)
{
    using namespace libyuv;

    // Portable C fallback; always valid.
    mFnCopyRow = CopyRow_C;
#if defined(HAS_COPYROW_SSE2)
    if (TestCpuFlag(kCpuHasSSE2)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
    }
#endif
#if defined(HAS_COPYROW_AVX)
    if (TestCpuFlag(kCpuHasAVX)) {
        mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
    }
#endif
#if defined(HAS_COPYROW_ERMS)
    if (TestCpuFlag(kCpuHasERMS)) {
        // Enhanced REP MOVSB/STOSB: preferred over SSE2/AVX when available.
        mFnCopyRow = CopyRow_ERMS;
    }
#endif
#if defined(HAS_COPYROW_NEON)
    if (TestCpuFlag(kCpuHasNEON)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
    }
#endif
#if defined(HAS_COPYROW_MIPS)
    if (TestCpuFlag(kCpuHasMIPS)) {
        mFnCopyRow = CopyRow_MIPS;
    }
#endif
}
2284 
calcAppSegmentMaxSize(const CameraMetadata & info)2285 size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
2286     camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
2287     size_t maxAppsSegment = 1;
2288     if (entry.count > 0) {
2289         maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
2290                 entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
2291     }
2292     return maxAppsSegment * (2 + 0xFFFF) + sizeof(CameraBlob);
2293 }
2294 
updateCodecQualityLocked(int32_t quality)2295 void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
2296     if (quality != mQuality) {
2297         sp<AMessage> qualityParams = new AMessage;
2298         qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, quality);
2299         status_t res = mCodec->setParameters(qualityParams);
2300         if (res != OK) {
2301             ALOGE("%s: Failed to set codec quality: %s (%d)",
2302                     __FUNCTION__, strerror(-res), res);
2303         } else {
2304             mQuality = quality;
2305         }
2306     }
2307 }
2308 
// Worker loop for the HEIC processing thread (Thread::threadLoop contract:
// return true to be invoked again, false to exit the thread).
// Waits under mMutex for a fully-ready input frame, then processes it with
// the lock dropped (encoding can block), and finally releases any finished
// or failed frames.
bool HeicCompositeStream::threadLoop() {
    int64_t frameNumber = -1;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);
        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked();
            return false;
        }


        while (!newInputAvailable) {
            // Fold newly-arrived buffers/results into mPendingInputFrames,
            // then check whether any frame has everything it needs.
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&frameNumber);

            if (!newInputAvailable) {
                // No ready frame — dispose of one failing frame per pass, if any.
                auto failingFrameNumber = getNextFailingInputLocked();
                if (failingFrameNumber >= 0) {
                    releaseInputFrameLocked(failingFrameNumber,
                            &mPendingInputFrames[failingFrameNumber]);

                    // It's okay to remove the entry from mPendingInputFrames
                    // because:
                    // 1. Only one internal stream (main input) is critical in
                    // backing the output stream.
                    // 2. If captureResult/appSegment arrives after the entry is
                    // removed, they are simply skipped.
                    mPendingInputFrames.erase(failingFrameNumber);
                    if (mPendingInputFrames.size() == 0) {
                        if (mSettingsByFrameNumber.size() == 0) {
                            // Nothing in flight at all — tell the status
                            // tracker this component is idle.
                            markTrackerIdle();
                        }
                    }
                    return true;
                }

                // Nothing ready and nothing failing: block until signaled.
                // A timeout simply re-enters threadLoop() to poll again.
                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    // NOTE(review): processInputFrame runs without mMutex held (it can block
    // on the encoder); mPendingInputFrames is indexed here unlocked —
    // presumably safe because only this thread erases entries, but confirm.
    auto res = processInputFrame(frameNumber, mPendingInputFrames[frameNumber]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ", frameNumber: %"
                PRId64 ": %s (%d)", __FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
                frameNumber, strerror(-res), res);
        // Mark the frame failed so releaseInputFramesLocked() cleans it up.
        mPendingInputFrames[frameNumber].error = true;
    }

    releaseInputFramesLocked();

    return true;
}
2374 
flagAnExifErrorFrameNumber(int64_t frameNumber)2375 void HeicCompositeStream::flagAnExifErrorFrameNumber(int64_t frameNumber) {
2376     Mutex::Autolock l(mMutex);
2377     mExifErrorFrameNumbers.emplace(frameNumber);
2378     mInputReadyCondition.signal();
2379 }
2380 
onStreamBufferError(const CaptureResultExtras & resultExtras)2381 bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
2382     bool res = false;
2383     int64_t frameNumber = resultExtras.frameNumber;
2384 
2385     // Buffer errors concerning internal composite streams should not be directly visible to
2386     // camera clients. They must only receive a single buffer error with the public composite
2387     // stream id.
2388     if (resultExtras.errorStreamId == mAppSegmentStreamId) {
2389         ALOGV("%s: APP_SEGMENT frameNumber: %" PRId64, __FUNCTION__, frameNumber);
2390         flagAnExifErrorFrameNumber(frameNumber);
2391         res = true;
2392     } else if (resultExtras.errorStreamId == mMainImageStreamId) {
2393         ALOGV("%s: YUV frameNumber: %" PRId64, __FUNCTION__, frameNumber);
2394         flagAnErrorFrameNumber(frameNumber);
2395         res = true;
2396     }
2397 
2398     return res;
2399 }
2400 
onResultError(const CaptureResultExtras & resultExtras)2401 void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
2402     // For result error, since the APPS_SEGMENT buffer already contains EXIF,
2403     // simply skip using the capture result metadata to override EXIF.
2404     Mutex::Autolock l(mMutex);
2405 
2406     int64_t timestamp = -1;
2407     for (const auto& fn : mSettingsByFrameNumber) {
2408         if (fn.first == resultExtras.frameNumber) {
2409             timestamp = fn.second.timestamp;
2410             break;
2411         }
2412     }
2413     if (timestamp == -1) {
2414         for (const auto& inputFrame : mPendingInputFrames) {
2415             if (inputFrame.first == resultExtras.frameNumber) {
2416                 timestamp = inputFrame.second.timestamp;
2417                 break;
2418             }
2419         }
2420     }
2421 
2422     if (timestamp == -1) {
2423         ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
2424         return;
2425     }
2426 
2427     mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
2428     ALOGV("%s: timestamp %" PRId64 ", frameNumber %" PRId64, __FUNCTION__,
2429             timestamp, resultExtras.frameNumber);
2430     mInputReadyCondition.signal();
2431 }
2432 
onRequestError(const CaptureResultExtras & resultExtras)2433 void HeicCompositeStream::onRequestError(const CaptureResultExtras& resultExtras) {
2434     auto frameNumber = resultExtras.frameNumber;
2435     ALOGV("%s: frameNumber: %" PRId64, __FUNCTION__, frameNumber);
2436     Mutex::Autolock l(mMutex);
2437     auto numRequests = mSettingsByFrameNumber.erase(frameNumber);
2438     if (numRequests == 0) {
2439         // Pending request has been populated into mPendingInputFrames
2440         mErrorFrameNumbers.emplace(frameNumber);
2441         mInputReadyCondition.signal();
2442     } else {
2443         // REQUEST_ERROR was received without onShutter.
2444     }
2445 }
2446 
markTrackerIdle()2447 void HeicCompositeStream::markTrackerIdle() {
2448     sp<StatusTracker> statusTracker = mStatusTracker.promote();
2449     if (statusTracker != nullptr) {
2450         statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
2451         ALOGV("%s: Mark component as idle", __FUNCTION__);
2452     }
2453 }
2454 
// Dispatch handler for asynchronous MediaCodec callback messages. Forwards
// each codec event to the parent HeicCompositeStream; mIsGainmap tags which
// encoder the event came from (presumably the gainmap encoder when true —
// it is passed through to the parent's handlers).
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
    sp<HeicCompositeStream> parent = mParent.promote();
    // Parent stream already destroyed; drop the message.
    if (parent == nullptr) return;

    switch (msg->what()) {
        case kWhatCallbackNotify: {
             int32_t cbID;
             if (!msg->findInt32("callbackID", &cbID)) {
                 ALOGE("kWhatCallbackNotify: callbackID is expected.");
                 break;
             }

             ALOGV("kWhatCallbackNotify: cbID = %d", cbID);

             switch (cbID) {
                 // An input buffer is free for the next YUV tile.
                 case MediaCodec::CB_INPUT_AVAILABLE: {
                     int32_t index;
                     if (!msg->findInt32("index", &index)) {
                         ALOGE("CB_INPUT_AVAILABLE: index is expected.");
                         break;
                     }
                     parent->onHeicInputFrameAvailable(index, mIsGainmap);
                     break;
                 }

                 // An encoded output buffer is ready; repackage the message
                 // fields into a CodecOutputBufferInfo for the parent.
                 case MediaCodec::CB_OUTPUT_AVAILABLE: {
                     int32_t index;
                     size_t offset;
                     size_t size;
                     int64_t timeUs;
                     int32_t flags;

                     if (!msg->findInt32("index", &index)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
                         break;
                     }
                     if (!msg->findSize("offset", &offset)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
                         break;
                     }
                     if (!msg->findSize("size", &size)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
                         break;
                     }
                     if (!msg->findInt64("timeUs", &timeUs)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
                         break;
                     }
                     if (!msg->findInt32("flags", &flags)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
                         break;
                     }

                     CodecOutputBufferInfo bufferInfo = {
                         index,
                         (int32_t)offset,
                         (int32_t)size,
                         timeUs,
                         (uint32_t)flags};

                     parent->onHeicOutputFrameAvailable(bufferInfo, mIsGainmap);
                     break;
                 }

                 case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
                     sp<AMessage> format;
                     if (!msg->findMessage("format", &format)) {
                         ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                         break;
                     }
                     // Here format is MediaCodec's internal copy of output format.
                     // Make a copy since onHeicFormatChanged() might modify it.
                     sp<AMessage> formatCopy;
                     if (format != nullptr) {
                         formatCopy = format->dup();
                     }
                     parent->onHeicFormatChanged(formatCopy, mIsGainmap);
                     break;
                 }

                 // Fatal codec error: log it and let the parent transition to
                 // its error state. "detail" is optional and may be empty.
                 case MediaCodec::CB_ERROR: {
                     status_t err;
                     int32_t actionCode;
                     AString detail;
                     if (!msg->findInt32("err", &err)) {
                         ALOGE("CB_ERROR: err is expected.");
                         break;
                     }
                     if (!msg->findInt32("action", &actionCode)) {
                         ALOGE("CB_ERROR: action is expected.");
                         break;
                     }
                     msg->findString("detail", &detail);
                     ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                             err, actionCode, detail.c_str());

                     parent->onHeicCodecError();
                     break;
                 }

                 case MediaCodec::CB_METRICS_FLUSHED:
                 case MediaCodec::CB_REQUIRED_RESOURCES_CHANGED:
                 {
                    // Nothing to do. Informational. Safe to ignore.
                    break;
                 }

                 case MediaCodec::CB_CRYPTO_ERROR:
                 // unexpected as we are not using crypto
                 case MediaCodec::CB_LARGE_FRAME_OUTPUT_AVAILABLE:
                 // unexpected as we are not using large frames
                 default: {
                     ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
                     break;
                 }
             }
             break;
        }

        default:
            ALOGE("shouldn't be here");
            break;
    }
}
2579 
2580 }; // namespace camera3
2581 }; // namespace android
2582