/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17 #define LOG_TAG "Camera3-HeicCompositeStream"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 #define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
20 //#define LOG_NDEBUG 0
21
22 #include <linux/memfd.h>
23 #include <pthread.h>
24 #include <sys/syscall.h>
25
26 #include <android/hardware/camera/device/3.5/types.h>
27 #include <libyuv.h>
28 #include <gui/Surface.h>
29 #include <utils/Log.h>
30 #include <utils/Trace.h>
31
32 #include <mediadrm/ICrypto.h>
33 #include <media/MediaCodecBuffer.h>
34 #include <media/stagefright/foundation/ABuffer.h>
35 #include <media/stagefright/foundation/MediaDefs.h>
36 #include <media/stagefright/MediaCodecConstants.h>
37
38 #include "common/CameraDeviceBase.h"
39 #include "utils/ExifUtils.h"
40 #include "utils/SessionConfigurationUtils.h"
41 #include "HeicEncoderInfoManager.h"
42 #include "HeicCompositeStream.h"
43
44 using android::hardware::camera::device::V3_5::CameraBlob;
45 using android::hardware::camera::device::V3_5::CameraBlobId;
46
47 namespace android {
48 namespace camera3 {
49
HeicCompositeStream(sp<CameraDeviceBase> device,wp<hardware::camera2::ICameraDeviceCallbacks> cb)50 HeicCompositeStream::HeicCompositeStream(sp<CameraDeviceBase> device,
51 wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
52 CompositeStream(device, cb),
53 mUseHeic(false),
54 mNumOutputTiles(1),
55 mOutputWidth(0),
56 mOutputHeight(0),
57 mMaxHeicBufferSize(0),
58 mGridWidth(HeicEncoderInfoManager::kGridWidth),
59 mGridHeight(HeicEncoderInfoManager::kGridHeight),
60 mGridRows(1),
61 mGridCols(1),
62 mUseGrid(false),
63 mAppSegmentStreamId(-1),
64 mAppSegmentSurfaceId(-1),
65 mMainImageStreamId(-1),
66 mMainImageSurfaceId(-1),
67 mYuvBufferAcquired(false),
68 mProducerListener(new ProducerListener()),
69 mDequeuedOutputBufferCnt(0),
70 mCodecOutputCounter(0),
71 mQuality(-1),
72 mGridTimestampUs(0),
73 mStatusId(StatusTracker::NO_STATUS_ID) {
74 }
75
~HeicCompositeStream()76 HeicCompositeStream::~HeicCompositeStream() {
77 // Call deinitCodec in case stream hasn't been deleted yet to avoid any
78 // memory/resource leak.
79 deinitCodec();
80
81 mInputAppSegmentBuffers.clear();
82 mCodecOutputBuffers.clear();
83
84 mAppSegmentStreamId = -1;
85 mAppSegmentSurfaceId = -1;
86 mAppSegmentConsumer.clear();
87 mAppSegmentSurface.clear();
88
89 mMainImageStreamId = -1;
90 mMainImageSurfaceId = -1;
91 mMainImageConsumer.clear();
92 mMainImageSurface.clear();
93 }
94
isHeicCompositeStream(const sp<Surface> & surface)95 bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
96 ANativeWindow *anw = surface.get();
97 status_t err;
98 int format;
99 if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
100 String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
101 err);
102 ALOGE("%s: %s", __FUNCTION__, msg.string());
103 return false;
104 }
105
106 int dataspace;
107 if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
108 String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
109 err);
110 ALOGE("%s: %s", __FUNCTION__, msg.string());
111 return false;
112 }
113
114 return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
115 }
116
createInternalStreams(const std::vector<sp<Surface>> & consumers,bool,uint32_t width,uint32_t height,int format,camera_stream_rotation_t rotation,int * id,const String8 & physicalCameraId,const std::unordered_set<int32_t> & sensorPixelModesUsed,std::vector<int> * surfaceIds,int,bool)117 status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
118 bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
119 camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
120 const std::unordered_set<int32_t> &sensorPixelModesUsed,
121 std::vector<int> *surfaceIds,
122 int /*streamSetId*/, bool /*isShared*/) {
123
124 sp<CameraDeviceBase> device = mDevice.promote();
125 if (!device.get()) {
126 ALOGE("%s: Invalid camera device!", __FUNCTION__);
127 return NO_INIT;
128 }
129
130 status_t res = initializeCodec(width, height, device);
131 if (res != OK) {
132 ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
133 __FUNCTION__, strerror(-res), res);
134 return NO_INIT;
135 }
136
137 sp<IGraphicBufferProducer> producer;
138 sp<IGraphicBufferConsumer> consumer;
139 BufferQueue::createBufferQueue(&producer, &consumer);
140 mAppSegmentConsumer = new CpuConsumer(consumer, kMaxAcquiredAppSegment);
141 mAppSegmentConsumer->setFrameAvailableListener(this);
142 mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
143 mAppSegmentSurface = new Surface(producer);
144
145 mStaticInfo = device->info();
146
147 res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
148 kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId,
149 sensorPixelModesUsed,surfaceIds);
150 if (res == OK) {
151 mAppSegmentSurfaceId = (*surfaceIds)[0];
152 } else {
153 ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
154 strerror(-res), res);
155 return res;
156 }
157
158 if (!mUseGrid) {
159 res = mCodec->createInputSurface(&producer);
160 if (res != OK) {
161 ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
162 __FUNCTION__, strerror(-res), res);
163 return res;
164 }
165 } else {
166 BufferQueue::createBufferQueue(&producer, &consumer);
167 mMainImageConsumer = new CpuConsumer(consumer, 1);
168 mMainImageConsumer->setFrameAvailableListener(this);
169 mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
170 }
171 mMainImageSurface = new Surface(producer);
172
173 res = mCodec->start();
174 if (res != OK) {
175 ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
176 strerror(-res), res);
177 return res;
178 }
179
180 std::vector<int> sourceSurfaceId;
181 //Use YUV_888 format if framework tiling is needed.
182 int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
183 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
184 res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
185 rotation, id, physicalCameraId, sensorPixelModesUsed, &sourceSurfaceId);
186 if (res == OK) {
187 mMainImageSurfaceId = sourceSurfaceId[0];
188 mMainImageStreamId = *id;
189 } else {
190 ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
191 strerror(-res), res);
192 return res;
193 }
194
195 mOutputSurface = consumers[0];
196 res = registerCompositeStreamListener(mMainImageStreamId);
197 if (res != OK) {
198 ALOGE("%s: Failed to register HAL main image stream: %s (%d)", __FUNCTION__,
199 strerror(-res), res);
200 return res;
201 }
202
203 res = registerCompositeStreamListener(mAppSegmentStreamId);
204 if (res != OK) {
205 ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
206 strerror(-res), res);
207 return res;
208 }
209
210 initCopyRowFunction(width);
211 return res;
212 }
213
deleteInternalStreams()214 status_t HeicCompositeStream::deleteInternalStreams() {
215 requestExit();
216 auto res = join();
217 if (res != OK) {
218 ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
219 strerror(-res), res);
220 }
221
222 deinitCodec();
223
224 if (mAppSegmentStreamId >= 0) {
225 // Camera devices may not be valid after switching to offline mode.
226 // In this case, all offline streams including internal composite streams
227 // are managed and released by the offline session.
228 sp<CameraDeviceBase> device = mDevice.promote();
229 if (device.get() != nullptr) {
230 res = device->deleteStream(mAppSegmentStreamId);
231 }
232
233 mAppSegmentStreamId = -1;
234 }
235
236 if (mOutputSurface != nullptr) {
237 mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
238 mOutputSurface.clear();
239 }
240
241 sp<StatusTracker> statusTracker = mStatusTracker.promote();
242 if (statusTracker != nullptr && mStatusId != StatusTracker::NO_STATUS_ID) {
243 statusTracker->removeComponent(mStatusId);
244 mStatusId = StatusTracker::NO_STATUS_ID;
245 }
246
247 if (mPendingInputFrames.size() > 0) {
248 ALOGW("%s: mPendingInputFrames has %zu stale entries",
249 __FUNCTION__, mPendingInputFrames.size());
250 mPendingInputFrames.clear();
251 }
252
253 return res;
254 }
255
onBufferReleased(const BufferInfo & bufferInfo)256 void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
257 Mutex::Autolock l(mMutex);
258
259 if (bufferInfo.mError) return;
260
261 if (bufferInfo.mStreamId == mMainImageStreamId) {
262 mMainImageFrameNumbers.push(bufferInfo.mFrameNumber);
263 mCodecOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
264 ALOGV("%s: [%" PRId64 "]: Adding main image frame number (%zu frame numbers in total)",
265 __FUNCTION__, bufferInfo.mFrameNumber, mMainImageFrameNumbers.size());
266 } else if (bufferInfo.mStreamId == mAppSegmentStreamId) {
267 mAppSegmentFrameNumbers.push(bufferInfo.mFrameNumber);
268 ALOGV("%s: [%" PRId64 "]: Adding app segment frame number (%zu frame numbers in total)",
269 __FUNCTION__, bufferInfo.mFrameNumber, mAppSegmentFrameNumbers.size());
270 }
271 }
272
273 // We need to get the settings early to handle the case where the codec output
274 // arrives earlier than result metadata.
onBufferRequestForFrameNumber(uint64_t frameNumber,int streamId,const CameraMetadata & settings)275 void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
276 const CameraMetadata& settings) {
277 ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);
278
279 Mutex::Autolock l(mMutex);
280 if (mErrorState || (streamId != getStreamId())) {
281 return;
282 }
283
284 mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
285
286 camera_metadata_ro_entry entry;
287
288 int32_t orientation = 0;
289 entry = settings.find(ANDROID_JPEG_ORIENTATION);
290 if (entry.count == 1) {
291 orientation = entry.data.i32[0];
292 }
293
294 int32_t quality = kDefaultJpegQuality;
295 entry = settings.find(ANDROID_JPEG_QUALITY);
296 if (entry.count == 1) {
297 quality = entry.data.i32[0];
298 }
299
300 mSettingsByFrameNumber[frameNumber] = {orientation, quality};
301 }
302
onFrameAvailable(const BufferItem & item)303 void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
304 if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
305 ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
306 __func__, ns2ms(item.mTimestamp));
307
308 Mutex::Autolock l(mMutex);
309 if (!mErrorState) {
310 mInputAppSegmentBuffers.push_back(item.mTimestamp);
311 mInputReadyCondition.signal();
312 }
313 } else if (item.mDataSpace == kHeifDataSpace) {
314 ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
315 __func__, ns2ms(item.mTimestamp));
316
317 Mutex::Autolock l(mMutex);
318 if (!mUseGrid) {
319 ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
320 __FUNCTION__);
321 return;
322 }
323 if (!mErrorState) {
324 mInputYuvBuffers.push_back(item.mTimestamp);
325 mInputReadyCondition.signal();
326 }
327 } else {
328 ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
329 }
330 }
331
getCompositeStreamInfo(const OutputStreamInfo & streamInfo,const CameraMetadata & ch,std::vector<OutputStreamInfo> * compositeOutput)332 status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
333 const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
334 if (compositeOutput == nullptr) {
335 return BAD_VALUE;
336 }
337
338 compositeOutput->clear();
339
340 bool useGrid, useHeic;
341 bool isSizeSupported = isSizeSupportedByHeifEncoder(
342 streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
343 if (!isSizeSupported) {
344 // Size is not supported by either encoder.
345 return OK;
346 }
347
348 compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
349
350 // JPEG APPS segments Blob stream info
351 (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
352 (*compositeOutput)[0].height = 1;
353 (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
354 (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
355 (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
356
357 // YUV/IMPLEMENTATION_DEFINED stream info
358 (*compositeOutput)[1].width = streamInfo.width;
359 (*compositeOutput)[1].height = streamInfo.height;
360 (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
361 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
362 (*compositeOutput)[1].dataSpace = kHeifDataSpace;
363 (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
364 useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
365
366 return NO_ERROR;
367 }
368
isSizeSupportedByHeifEncoder(int32_t width,int32_t height,bool * useHeic,bool * useGrid,int64_t * stall,AString * hevcName)369 bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
370 bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) {
371 static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
372 return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall, hevcName);
373 }
374
isInMemoryTempFileSupported()375 bool HeicCompositeStream::isInMemoryTempFileSupported() {
376 int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
377 if (memfd == -1) {
378 if (errno != ENOSYS) {
379 ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
380 }
381 return false;
382 }
383 close(memfd);
384 return true;
385 }
386
onHeicOutputFrameAvailable(const CodecOutputBufferInfo & outputBufferInfo)387 void HeicCompositeStream::onHeicOutputFrameAvailable(
388 const CodecOutputBufferInfo& outputBufferInfo) {
389 Mutex::Autolock l(mMutex);
390
391 ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
392 __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
393 outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);
394
395 if (!mErrorState) {
396 if ((outputBufferInfo.size > 0) &&
397 ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
398 mCodecOutputBuffers.push_back(outputBufferInfo);
399 mInputReadyCondition.signal();
400 } else {
401 ALOGV("%s: Releasing output buffer: size %d flags: 0x%x ", __FUNCTION__,
402 outputBufferInfo.size, outputBufferInfo.flags);
403 mCodec->releaseOutputBuffer(outputBufferInfo.index);
404 }
405 } else {
406 mCodec->releaseOutputBuffer(outputBufferInfo.index);
407 }
408 }
409
onHeicInputFrameAvailable(int32_t index)410 void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
411 Mutex::Autolock l(mMutex);
412
413 if (!mUseGrid) {
414 ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
415 return;
416 }
417
418 mCodecInputBuffers.push_back(index);
419 mInputReadyCondition.signal();
420 }
421
onHeicFormatChanged(sp<AMessage> & newFormat)422 void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
423 if (newFormat == nullptr) {
424 ALOGE("%s: newFormat must not be null!", __FUNCTION__);
425 return;
426 }
427
428 Mutex::Autolock l(mMutex);
429
430 AString mime;
431 AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
432 newFormat->findString(KEY_MIME, &mime);
433 if (mime != mimeHeic) {
434 // For HEVC codec, below keys need to be filled out or overwritten so that the
435 // muxer can handle them as HEIC output image.
436 newFormat->setString(KEY_MIME, mimeHeic);
437 newFormat->setInt32(KEY_WIDTH, mOutputWidth);
438 newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
439 if (mUseGrid) {
440 newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
441 newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
442 newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
443 newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
444 }
445 }
446 newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
447
448 int32_t gridRows, gridCols;
449 if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
450 newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
451 mNumOutputTiles = gridRows * gridCols;
452 } else {
453 mNumOutputTiles = 1;
454 }
455
456 mFormat = newFormat;
457
458 ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
459 mInputReadyCondition.signal();
460 }
461
onHeicCodecError()462 void HeicCompositeStream::onHeicCodecError() {
463 Mutex::Autolock l(mMutex);
464 mErrorState = true;
465 }
466
configureStream()467 status_t HeicCompositeStream::configureStream() {
468 if (isRunning()) {
469 // Processing thread is already running, nothing more to do.
470 return NO_ERROR;
471 }
472
473 if (mOutputSurface.get() == nullptr) {
474 ALOGE("%s: No valid output surface set!", __FUNCTION__);
475 return NO_INIT;
476 }
477
478 auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
479 if (res != OK) {
480 ALOGE("%s: Unable to connect to native window for stream %d",
481 __FUNCTION__, mMainImageStreamId);
482 return res;
483 }
484
485 if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
486 != OK) {
487 ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
488 mMainImageStreamId);
489 return res;
490 }
491
492 ANativeWindow *anwConsumer = mOutputSurface.get();
493 int maxConsumerBuffers;
494 if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
495 &maxConsumerBuffers)) != OK) {
496 ALOGE("%s: Unable to query consumer undequeued"
497 " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
498 return res;
499 }
500
501 // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
502 // buffer count.
503 if ((res = native_window_set_buffer_count(
504 anwConsumer, kMaxOutputSurfaceProducerCount + maxConsumerBuffers)) != OK) {
505 ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
506 return res;
507 }
508
509 if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
510 ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
511 __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
512 return res;
513 }
514
515 sp<camera3::StatusTracker> statusTracker = mStatusTracker.promote();
516 if (statusTracker != nullptr) {
517 std::string name = std::string("HeicStream ") + std::to_string(getStreamId());
518 mStatusId = statusTracker->addComponent(name);
519 }
520
521 run("HeicCompositeStreamProc");
522
523 return NO_ERROR;
524 }
525
insertGbp(SurfaceMap * outSurfaceMap,Vector<int32_t> * outputStreamIds,int32_t * currentStreamId)526 status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
527 Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
528 if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
529 outputStreamIds->push_back(mAppSegmentStreamId);
530 }
531 (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
532
533 if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
534 outputStreamIds->push_back(mMainImageStreamId);
535 }
536 (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
537
538 if (currentStreamId != nullptr) {
539 *currentStreamId = mMainImageStreamId;
540 }
541
542 return NO_ERROR;
543 }
544
insertCompositeStreamIds(std::vector<int32_t> * compositeStreamIds)545 status_t HeicCompositeStream::insertCompositeStreamIds(
546 std::vector<int32_t>* compositeStreamIds /*out*/) {
547 if (compositeStreamIds == nullptr) {
548 return BAD_VALUE;
549 }
550
551 compositeStreamIds->push_back(mAppSegmentStreamId);
552 compositeStreamIds->push_back(mMainImageStreamId);
553
554 return OK;
555 }
556
onShutter(const CaptureResultExtras & resultExtras,nsecs_t timestamp)557 void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
558 Mutex::Autolock l(mMutex);
559 if (mErrorState) {
560 return;
561 }
562
563 if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
564 ALOGV("%s: [%" PRId64 "]: timestamp %" PRId64 ", requestId %d", __FUNCTION__,
565 resultExtras.frameNumber, timestamp, resultExtras.requestId);
566 mSettingsByFrameNumber[resultExtras.frameNumber].shutterNotified = true;
567 mSettingsByFrameNumber[resultExtras.frameNumber].timestamp = timestamp;
568 mSettingsByFrameNumber[resultExtras.frameNumber].requestId = resultExtras.requestId;
569 mInputReadyCondition.signal();
570 }
571 }
572
compilePendingInputLocked()573 void HeicCompositeStream::compilePendingInputLocked() {
574 auto i = mSettingsByFrameNumber.begin();
575 while (i != mSettingsByFrameNumber.end()) {
576 if (i->second.shutterNotified) {
577 mPendingInputFrames[i->first].orientation = i->second.orientation;
578 mPendingInputFrames[i->first].quality = i->second.quality;
579 mPendingInputFrames[i->first].timestamp = i->second.timestamp;
580 mPendingInputFrames[i->first].requestId = i->second.requestId;
581 ALOGV("%s: [%" PRId64 "]: timestamp is %" PRId64, __FUNCTION__,
582 i->first, i->second.timestamp);
583 i = mSettingsByFrameNumber.erase(i);
584
585 // Set encoder quality if no inflight encoding
586 if (mPendingInputFrames.size() == 1) {
587 sp<StatusTracker> statusTracker = mStatusTracker.promote();
588 if (statusTracker != nullptr) {
589 statusTracker->markComponentActive(mStatusId);
590 ALOGV("%s: Mark component as active", __FUNCTION__);
591 }
592
593 int32_t newQuality = mPendingInputFrames.begin()->second.quality;
594 updateCodecQualityLocked(newQuality);
595 }
596 } else {
597 i++;
598 }
599 }
600
601 while (!mInputAppSegmentBuffers.empty() && mAppSegmentFrameNumbers.size() > 0) {
602 CpuConsumer::LockedBuffer imgBuffer;
603 auto it = mInputAppSegmentBuffers.begin();
604 auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
605 if (res == NOT_ENOUGH_DATA) {
606 // Can not lock any more buffers.
607 break;
608 } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
609 if (res != OK) {
610 ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
611 strerror(-res), res);
612 } else {
613 ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
614 " received buffer with time stamp: %" PRId64, __FUNCTION__,
615 *it, imgBuffer.timestamp);
616 mAppSegmentConsumer->unlockBuffer(imgBuffer);
617 }
618 mPendingInputFrames[*it].error = true;
619 mInputAppSegmentBuffers.erase(it);
620 continue;
621 }
622
623 if (mPendingInputFrames.find(mAppSegmentFrameNumbers.front()) == mPendingInputFrames.end()) {
624 ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
625 mAppSegmentFrameNumbers.front());
626 mInputAppSegmentBuffers.erase(it);
627 mAppSegmentFrameNumbers.pop();
628 continue;
629 }
630
631 int64_t frameNumber = mAppSegmentFrameNumbers.front();
632 // If mPendingInputFrames doesn't contain the expected frame number, the captured
633 // input app segment frame must have been dropped via a buffer error. Simply
634 // return the buffer to the buffer queue.
635 if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
636 (mPendingInputFrames[frameNumber].error)) {
637 mAppSegmentConsumer->unlockBuffer(imgBuffer);
638 } else {
639 mPendingInputFrames[frameNumber].appSegmentBuffer = imgBuffer;
640 }
641 mInputAppSegmentBuffers.erase(it);
642 mAppSegmentFrameNumbers.pop();
643 }
644
645 while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired && mMainImageFrameNumbers.size() > 0) {
646 CpuConsumer::LockedBuffer imgBuffer;
647 auto it = mInputYuvBuffers.begin();
648 auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
649 if (res == NOT_ENOUGH_DATA) {
650 // Can not lock any more buffers.
651 break;
652 } else if (res != OK) {
653 ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
654 strerror(-res), res);
655 mPendingInputFrames[*it].error = true;
656 mInputYuvBuffers.erase(it);
657 continue;
658 } else if (*it != imgBuffer.timestamp) {
659 ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
660 "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
661 mPendingInputFrames[*it].error = true;
662 mInputYuvBuffers.erase(it);
663 continue;
664 }
665
666 if (mPendingInputFrames.find(mMainImageFrameNumbers.front()) == mPendingInputFrames.end()) {
667 ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
668 mMainImageFrameNumbers.front());
669 mInputYuvBuffers.erase(it);
670 mMainImageFrameNumbers.pop();
671 continue;
672 }
673
674 int64_t frameNumber = mMainImageFrameNumbers.front();
675 // If mPendingInputFrames doesn't contain the expected frame number, the captured
676 // input main image must have been dropped via a buffer error. Simply
677 // return the buffer to the buffer queue.
678 if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
679 (mPendingInputFrames[frameNumber].error)) {
680 mMainImageConsumer->unlockBuffer(imgBuffer);
681 } else {
682 mPendingInputFrames[frameNumber].yuvBuffer = imgBuffer;
683 mYuvBufferAcquired = true;
684 }
685 mInputYuvBuffers.erase(it);
686 mMainImageFrameNumbers.pop();
687 }
688
689 while (!mCodecOutputBuffers.empty()) {
690 auto it = mCodecOutputBuffers.begin();
691 // Assume encoder input to output is FIFO, use a queue to look up
692 // frameNumber when handling codec outputs.
693 int64_t bufferFrameNumber = -1;
694 if (mCodecOutputBufferFrameNumbers.empty()) {
695 ALOGV("%s: Failed to find buffer frameNumber for codec output buffer!", __FUNCTION__);
696 break;
697 } else {
698 // Direct mapping between camera frame number and codec timestamp (in us).
699 bufferFrameNumber = mCodecOutputBufferFrameNumbers.front();
700 mCodecOutputCounter++;
701 if (mCodecOutputCounter == mNumOutputTiles) {
702 mCodecOutputBufferFrameNumbers.pop();
703 mCodecOutputCounter = 0;
704 }
705
706 mPendingInputFrames[bufferFrameNumber].codecOutputBuffers.push_back(*it);
707 ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (frameNumber %" PRId64 ")",
708 __FUNCTION__, bufferFrameNumber, it->timeUs);
709 }
710 mCodecOutputBuffers.erase(it);
711 }
712
713 while (!mCaptureResults.empty()) {
714 auto it = mCaptureResults.begin();
715 // Negative frame number indicates that something went wrong during the capture result
716 // collection process.
717 int64_t frameNumber = std::get<0>(it->second);
718 if (it->first >= 0 &&
719 mPendingInputFrames.find(frameNumber) != mPendingInputFrames.end()) {
720 if (mPendingInputFrames[frameNumber].timestamp == it->first) {
721 mPendingInputFrames[frameNumber].result =
722 std::make_unique<CameraMetadata>(std::get<1>(it->second));
723 } else {
724 ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
725 "shutter and capture result! before: %" PRId64 ", after: %" PRId64,
726 __FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
727 it->first);
728 }
729 }
730 mCaptureResults.erase(it);
731 }
732
733 // mErrorFrameNumbers stores frame number of dropped buffers.
734 auto it = mErrorFrameNumbers.begin();
735 while (it != mErrorFrameNumbers.end()) {
736 if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
737 mPendingInputFrames[*it].error = true;
738 } else {
739 //Error callback is guaranteed to arrive after shutter notify, which
740 //results in mPendingInputFrames being populated.
741 ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
742 *it);
743 }
744 it = mErrorFrameNumbers.erase(it);
745 }
746
747 // mExifErrorFrameNumbers stores the frame number of dropped APP_SEGMENT buffers
748 it = mExifErrorFrameNumbers.begin();
749 while (it != mExifErrorFrameNumbers.end()) {
750 if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
751 mPendingInputFrames[*it].exifError = true;
752 }
753 it = mExifErrorFrameNumbers.erase(it);
754 }
755
756 // Distribute codec input buffers to be filled out from YUV output
757 for (auto it = mPendingInputFrames.begin();
758 it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
759 InputFrame& inputFrame(it->second);
760 if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
761 // Available input tiles that are required for the current input
762 // image.
763 size_t newInputTiles = std::min(mCodecInputBuffers.size(),
764 mGridRows * mGridCols - inputFrame.codecInputCounter);
765 for (size_t i = 0; i < newInputTiles; i++) {
766 CodecInputBufferInfo inputInfo =
767 { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
768 inputFrame.codecInputBuffers.push_back(inputInfo);
769
770 mCodecInputBuffers.erase(mCodecInputBuffers.begin());
771 inputFrame.codecInputCounter++;
772 }
773 break;
774 }
775 }
776 }
777
getNextReadyInputLocked(int64_t * frameNumber)778 bool HeicCompositeStream::getNextReadyInputLocked(int64_t *frameNumber /*out*/) {
779 if (frameNumber == nullptr) {
780 return false;
781 }
782
783 bool newInputAvailable = false;
784 for (auto& it : mPendingInputFrames) {
785 // New input is considered to be available only if:
786 // 1. input buffers are ready, or
787 // 2. App segment and muxer is created, or
788 // 3. A codec output tile is ready, and an output buffer is available.
789 // This makes sure that muxer gets created only when an output tile is
790 // generated, because right now we only handle 1 HEIC output buffer at a
791 // time (max dequeued buffer count is 1).
792 bool appSegmentReady =
793 (it.second.appSegmentBuffer.data != nullptr || it.second.exifError) &&
794 !it.second.appSegmentWritten && it.second.result != nullptr &&
795 it.second.muxer != nullptr;
796 bool codecOutputReady = !it.second.codecOutputBuffers.empty();
797 bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
798 (!it.second.codecInputBuffers.empty());
799 bool hasOutputBuffer = it.second.muxer != nullptr ||
800 (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
801 if ((!it.second.error) &&
802 (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
803 *frameNumber = it.first;
804 if (it.second.format == nullptr && mFormat != nullptr) {
805 it.second.format = mFormat->dup();
806 }
807 newInputAvailable = true;
808 break;
809 }
810 }
811
812 return newInputAvailable;
813 }
814
getNextFailingInputLocked()815 int64_t HeicCompositeStream::getNextFailingInputLocked() {
816 int64_t res = -1;
817
818 for (const auto& it : mPendingInputFrames) {
819 if (it.second.error) {
820 res = it.first;
821 break;
822 }
823 }
824
825 return res;
826 }
827
// Advance one pending input frame through the HEIC pipeline as far as its
// currently-available inputs allow:
//   1. Feed available YUV tiles into the HEVC/HEIC encoder.
//   2. Lazily create and start the muxer once encoded output and an output
//      buffer are both available.
//   3. Write the JPEG APP segments (EXIF) to the muxer.
//   4. Write any pending encoded tile buffers to the muxer.
//   5. Finalize the frame once all tiles and the APP segments are written.
// Called from threadLoop(); returns the first error encountered, OK otherwise.
status_t HeicCompositeStream::processInputFrame(int64_t frameNumber,
        InputFrame &inputFrame) {
    ATRACE_CALL();
    status_t res = OK;

    // Readiness flags; these mirror the conditions used when this frame was
    // selected as the next ready input.
    bool appSegmentReady =
            (inputFrame.appSegmentBuffer.data != nullptr || inputFrame.exifError) &&
            !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
            inputFrame.muxer != nullptr;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();
    // Muxing may proceed either on the already-dequeued output buffer (muxer
    // exists) or while the output surface still has producer buffers left.
    bool hasOutputBuffer = inputFrame.muxer != nullptr ||
            (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);

    ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
            " dequeuedOutputBuffer %d, timestamp %" PRId64, __FUNCTION__, frameNumber,
            appSegmentReady, codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt,
            inputFrame.timestamp);

    // Handle inputs for Hevc tiling
    if (codecInputReady) {
        res = processCodecInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Nothing to mux this round; tiles (if any) have already been queued above.
    if (!(codecOutputReady && hasOutputBuffer) && !appSegmentReady) {
        return OK;
    }

    // Initialize and start muxer if not yet done so. In this case,
    // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
    // to be false, and the function must have returned early.
    if (inputFrame.muxer == nullptr) {
        res = startMuxerForInputFrame(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write JPEG APP segments data to the muxer.
    if (appSegmentReady) {
        res = processAppSegment(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec bitstream buffers to muxer.
    while (!inputFrame.codecOutputBuffers.empty()) {
        res = processOneCodecOutputFrame(frameNumber, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // All tiles muxed and APP segments written: finalize the output file and
    // queue it to the client-facing surface.
    if (inputFrame.pendingOutputTiles == 0) {
        if (inputFrame.appSegmentWritten) {
            res = processCompletedInputFrame(frameNumber, inputFrame);
            if (res != OK) {
                ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
                        strerror(-res), res);
                return res;
            }
        }
    }

    return res;
}
907
// Allocate the output-side resources for |inputFrame| and start its muxer:
// dequeue a buffer from the client-facing output surface, create a memfd to
// serve as the muxer's backing file, then add and start the HEIF track.
// On any failure the partially-initialized state left in |inputFrame|
// (dequeued buffer, file fd) is reclaimed later by releaseInputFrameLocked().
status_t HeicCompositeStream::startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;

    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }
    // Tracked so frame selection can throttle new output work when the
    // producer-side buffer budget (kMaxOutputSurfaceProducerCount) is used up.
    mDequeuedOutputBufferCnt++;

    // Combine current thread id, stream id and timestamp to uniquely identify image.
    std::ostringstream tempOutputFile;
    tempOutputFile << "HEIF-" << pthread_self() << "-"
            << getStreamId() << "-" << frameNumber;
    // memfd gives the muxer a seekable fd without touching the filesystem.
    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // The track format (mFormat->dup()) was attached to the frame when it
    // became ready.
    ssize_t trackId = inputFrame.muxer->addTrack(inputFrame.format);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    inputFrame.trackIndex = trackId;
    // Every tile must be muxed before the frame can be finalized.
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
            frameNumber);
    return OK;
}
963
processAppSegment(int64_t frameNumber,InputFrame & inputFrame)964 status_t HeicCompositeStream::processAppSegment(int64_t frameNumber, InputFrame &inputFrame) {
965 size_t app1Size = 0;
966 size_t appSegmentSize = 0;
967 if (!inputFrame.exifError) {
968 appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
969 inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
970 &app1Size);
971 if (appSegmentSize == 0) {
972 ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
973 return NO_INIT;
974 }
975 }
976
977 std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
978 auto exifRes = inputFrame.exifError ?
979 exifUtils->initializeEmpty() :
980 exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
981 if (!exifRes) {
982 ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
983 return BAD_VALUE;
984 }
985 exifRes = exifUtils->setFromMetadata(*inputFrame.result, mStaticInfo,
986 mOutputWidth, mOutputHeight);
987 if (!exifRes) {
988 ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
989 return BAD_VALUE;
990 }
991 exifRes = exifUtils->setOrientation(inputFrame.orientation);
992 if (!exifRes) {
993 ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
994 return BAD_VALUE;
995 }
996 exifRes = exifUtils->generateApp1();
997 if (!exifRes) {
998 ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
999 return BAD_VALUE;
1000 }
1001
1002 unsigned int newApp1Length = exifUtils->getApp1Length();
1003 const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();
1004
1005 //Assemble the APP1 marker buffer required by MediaCodec
1006 uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
1007 kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
1008 kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
1009 size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
1010 appSegmentSize - app1Size + newApp1Length;
1011 uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
1012 memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
1013 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
1014 if (appSegmentSize - app1Size > 0) {
1015 memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
1016 inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
1017 }
1018
1019 sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
1020 auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
1021 inputFrame.timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
1022 delete[] appSegmentBuffer;
1023
1024 if (res != OK) {
1025 ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
1026 __FUNCTION__, strerror(-res), res);
1027 return res;
1028 }
1029
1030 ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
1031 __FUNCTION__, frameNumber, appSegmentSize, inputFrame.appSegmentBuffer.width,
1032 inputFrame.appSegmentBuffer.height, app1Size);
1033
1034 inputFrame.appSegmentWritten = true;
1035 // Release the buffer now so any pending input app segments can be processed
1036 mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
1037 inputFrame.appSegmentBuffer.data = nullptr;
1038 inputFrame.exifError = false;
1039
1040 return OK;
1041 }
1042
processCodecInputFrame(InputFrame & inputFrame)1043 status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
1044 for (auto& inputBuffer : inputFrame.codecInputBuffers) {
1045 sp<MediaCodecBuffer> buffer;
1046 auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
1047 if (res != OK) {
1048 ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
1049 strerror(-res), res);
1050 return res;
1051 }
1052
1053 // Copy one tile from source to destination.
1054 size_t tileX = inputBuffer.tileIndex % mGridCols;
1055 size_t tileY = inputBuffer.tileIndex / mGridCols;
1056 size_t top = mGridHeight * tileY;
1057 size_t left = mGridWidth * tileX;
1058 size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
1059 mOutputWidth - tileX * mGridWidth : mGridWidth;
1060 size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
1061 mOutputHeight - tileY * mGridHeight : mGridHeight;
1062 ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu,"
1063 " timeUs %" PRId64, __FUNCTION__, tileX, tileY, top, left, width, height,
1064 inputBuffer.timeUs);
1065
1066 res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
1067 if (res != OK) {
1068 ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
1069 strerror(-res), res);
1070 return res;
1071 }
1072
1073 res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
1074 inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
1075 if (res != OK) {
1076 ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
1077 __FUNCTION__, strerror(-res), res);
1078 return res;
1079 }
1080 }
1081
1082 inputFrame.codecInputBuffers.clear();
1083 return OK;
1084 }
1085
processOneCodecOutputFrame(int64_t frameNumber,InputFrame & inputFrame)1086 status_t HeicCompositeStream::processOneCodecOutputFrame(int64_t frameNumber,
1087 InputFrame &inputFrame) {
1088 auto it = inputFrame.codecOutputBuffers.begin();
1089 sp<MediaCodecBuffer> buffer;
1090 status_t res = mCodec->getOutputBuffer(it->index, &buffer);
1091 if (res != OK) {
1092 ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
1093 __FUNCTION__, it->index, strerror(-res), res);
1094 return res;
1095 }
1096 if (buffer == nullptr) {
1097 ALOGE("%s: Invalid Heic codec output buffer at index %d",
1098 __FUNCTION__, it->index);
1099 return BAD_VALUE;
1100 }
1101
1102 sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
1103 res = inputFrame.muxer->writeSampleData(
1104 aBuffer, inputFrame.trackIndex, inputFrame.timestamp, 0 /*flags*/);
1105 if (res != OK) {
1106 ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
1107 __FUNCTION__, it->index, strerror(-res), res);
1108 return res;
1109 }
1110
1111 mCodec->releaseOutputBuffer(it->index);
1112 if (inputFrame.pendingOutputTiles == 0) {
1113 ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
1114 } else {
1115 inputFrame.pendingOutputTiles--;
1116 }
1117
1118 inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
1119
1120 ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
1121 __FUNCTION__, frameNumber, it->index);
1122 return OK;
1123 }
1124
processCompletedInputFrame(int64_t frameNumber,InputFrame & inputFrame)1125 status_t HeicCompositeStream::processCompletedInputFrame(int64_t frameNumber,
1126 InputFrame &inputFrame) {
1127 sp<ANativeWindow> outputANW = mOutputSurface;
1128 inputFrame.muxer->stop();
1129
1130 // Copy the content of the file to memory.
1131 sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
1132 void* dstBuffer;
1133 auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
1134 if (res != OK) {
1135 ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
1136 strerror(-res), res);
1137 return res;
1138 }
1139
1140 off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
1141 if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
1142 ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer sizer %zu",
1143 __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
1144 return BAD_VALUE;
1145 }
1146
1147 lseek(inputFrame.fileFd, 0, SEEK_SET);
1148 ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
1149 if (bytesRead < fSize) {
1150 ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
1151 return BAD_VALUE;
1152 }
1153
1154 close(inputFrame.fileFd);
1155 inputFrame.fileFd = -1;
1156
1157 // Fill in HEIC header
1158 uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
1159 struct CameraBlob *blobHeader = (struct CameraBlob *)header;
1160 // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
1161 blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
1162 blobHeader->blobSize = fSize;
1163
1164 res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
1165 if (res != OK) {
1166 ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
1167 __FUNCTION__, getStreamId(), strerror(-res), res);
1168 return res;
1169 }
1170
1171 res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
1172 if (res != OK) {
1173 ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
1174 strerror(-res), res);
1175 return res;
1176 }
1177 inputFrame.anb = nullptr;
1178 mDequeuedOutputBufferCnt--;
1179
1180 ALOGV("%s: [%" PRId64 "]", __FUNCTION__, frameNumber);
1181 ATRACE_ASYNC_END("HEIC capture", frameNumber);
1182 return OK;
1183 }
1184
1185
releaseInputFrameLocked(int64_t frameNumber,InputFrame * inputFrame)1186 void HeicCompositeStream::releaseInputFrameLocked(int64_t frameNumber,
1187 InputFrame *inputFrame /*out*/) {
1188 if (inputFrame == nullptr) {
1189 return;
1190 }
1191
1192 if (inputFrame->appSegmentBuffer.data != nullptr) {
1193 mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
1194 inputFrame->appSegmentBuffer.data = nullptr;
1195 }
1196
1197 while (!inputFrame->codecOutputBuffers.empty()) {
1198 auto it = inputFrame->codecOutputBuffers.begin();
1199 ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
1200 mCodec->releaseOutputBuffer(it->index);
1201 inputFrame->codecOutputBuffers.erase(it);
1202 }
1203
1204 if (inputFrame->yuvBuffer.data != nullptr) {
1205 mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
1206 inputFrame->yuvBuffer.data = nullptr;
1207 mYuvBufferAcquired = false;
1208 }
1209
1210 while (!inputFrame->codecInputBuffers.empty()) {
1211 auto it = inputFrame->codecInputBuffers.begin();
1212 inputFrame->codecInputBuffers.erase(it);
1213 }
1214
1215 if (inputFrame->error || mErrorState) {
1216 ALOGV("%s: notifyError called for frameNumber %" PRId64, __FUNCTION__, frameNumber);
1217 notifyError(frameNumber, inputFrame->requestId);
1218 }
1219
1220 if (inputFrame->fileFd >= 0) {
1221 close(inputFrame->fileFd);
1222 inputFrame->fileFd = -1;
1223 }
1224
1225 if (inputFrame->anb != nullptr) {
1226 sp<ANativeWindow> outputANW = mOutputSurface;
1227 outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
1228 inputFrame->anb = nullptr;
1229
1230 mDequeuedOutputBufferCnt--;
1231 }
1232 }
1233
releaseInputFramesLocked()1234 void HeicCompositeStream::releaseInputFramesLocked() {
1235 auto it = mPendingInputFrames.begin();
1236 bool inputFrameDone = false;
1237 while (it != mPendingInputFrames.end()) {
1238 auto& inputFrame = it->second;
1239 if (inputFrame.error ||
1240 (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
1241 releaseInputFrameLocked(it->first, &inputFrame);
1242 it = mPendingInputFrames.erase(it);
1243 inputFrameDone = true;
1244 } else {
1245 it++;
1246 }
1247 }
1248
1249 // Update codec quality based on first upcoming input frame.
1250 // Note that when encoding is in surface mode, currently there is no
1251 // way for camera service to synchronize quality setting on a per-frame
1252 // basis: we don't get notification when codec is ready to consume a new
1253 // input frame. So we update codec quality on a best-effort basis.
1254 if (inputFrameDone) {
1255 auto firstPendingFrame = mPendingInputFrames.begin();
1256 if (firstPendingFrame != mPendingInputFrames.end()) {
1257 updateCodecQualityLocked(firstPendingFrame->second.quality);
1258 } else {
1259 markTrackerIdle();
1260 }
1261 }
1262 }
1263
initializeCodec(uint32_t width,uint32_t height,const sp<CameraDeviceBase> & cameraDevice)1264 status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
1265 const sp<CameraDeviceBase>& cameraDevice) {
1266 ALOGV("%s", __FUNCTION__);
1267
1268 bool useGrid = false;
1269 AString hevcName;
1270 bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
1271 &mUseHeic, &useGrid, nullptr, &hevcName);
1272 if (!isSizeSupported) {
1273 ALOGE("%s: Encoder doesnt' support size %u x %u!",
1274 __FUNCTION__, width, height);
1275 return BAD_VALUE;
1276 }
1277
1278 // Create Looper for MediaCodec.
1279 auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
1280 mCodecLooper = new ALooper;
1281 mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
1282 status_t res = mCodecLooper->start(
1283 false, // runOnCallingThread
1284 false, // canCallJava
1285 PRIORITY_AUDIO);
1286 if (res != OK) {
1287 ALOGE("%s: Failed to start codec looper: %s (%d)",
1288 __FUNCTION__, strerror(-res), res);
1289 return NO_INIT;
1290 }
1291
1292 // Create HEIC/HEVC codec.
1293 if (mUseHeic) {
1294 mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
1295 } else {
1296 mCodec = MediaCodec::CreateByComponentName(mCodecLooper, hevcName);
1297 }
1298 if (mCodec == nullptr) {
1299 ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
1300 return NO_INIT;
1301 }
1302
1303 // Create Looper and handler for Codec callback.
1304 mCodecCallbackHandler = new CodecCallbackHandler(this);
1305 if (mCodecCallbackHandler == nullptr) {
1306 ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
1307 return NO_MEMORY;
1308 }
1309 mCallbackLooper = new ALooper;
1310 mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
1311 res = mCallbackLooper->start(
1312 false, // runOnCallingThread
1313 false, // canCallJava
1314 PRIORITY_AUDIO);
1315 if (res != OK) {
1316 ALOGE("%s: Failed to start media callback looper: %s (%d)",
1317 __FUNCTION__, strerror(-res), res);
1318 return NO_INIT;
1319 }
1320 mCallbackLooper->registerHandler(mCodecCallbackHandler);
1321
1322 mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
1323 res = mCodec->setCallback(mAsyncNotify);
1324 if (res != OK) {
1325 ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
1326 strerror(-res), res);
1327 return res;
1328 }
1329
1330 // Create output format and configure the Codec.
1331 sp<AMessage> outputFormat = new AMessage();
1332 outputFormat->setString(KEY_MIME, desiredMime);
1333 outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
1334 outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
1335 // Ask codec to skip timestamp check and encode all frames.
1336 outputFormat->setInt64(KEY_MAX_PTS_GAP_TO_ENCODER, kNoFrameDropMaxPtsGap);
1337
1338 int32_t gridWidth, gridHeight, gridRows, gridCols;
1339 if (useGrid || mUseHeic) {
1340 gridWidth = HeicEncoderInfoManager::kGridWidth;
1341 gridHeight = HeicEncoderInfoManager::kGridHeight;
1342 gridRows = (height + gridHeight - 1)/gridHeight;
1343 gridCols = (width + gridWidth - 1)/gridWidth;
1344
1345 if (mUseHeic) {
1346 outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
1347 outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
1348 outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
1349 outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
1350 }
1351
1352 } else {
1353 gridWidth = width;
1354 gridHeight = height;
1355 gridRows = 1;
1356 gridCols = 1;
1357 }
1358
1359 outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
1360 outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
1361 outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
1362 outputFormat->setInt32(KEY_COLOR_FORMAT,
1363 useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
1364 outputFormat->setInt32(KEY_FRAME_RATE, useGrid ? gridRows * gridCols : kNoGridOpRate);
1365 // This only serves as a hint to encoder when encoding is not real-time.
1366 outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
1367
1368 res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
1369 nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
1370 if (res != OK) {
1371 ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
1372 strerror(-res), res);
1373 return res;
1374 }
1375
1376 mGridWidth = gridWidth;
1377 mGridHeight = gridHeight;
1378 mGridRows = gridRows;
1379 mGridCols = gridCols;
1380 mUseGrid = useGrid;
1381 mOutputWidth = width;
1382 mOutputHeight = height;
1383 mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
1384 mMaxHeicBufferSize =
1385 ALIGN(mOutputWidth, HeicEncoderInfoManager::kGridWidth) *
1386 ALIGN(mOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2 + mAppSegmentMaxSize;
1387
1388 return OK;
1389 }
1390
deinitCodec()1391 void HeicCompositeStream::deinitCodec() {
1392 ALOGV("%s", __FUNCTION__);
1393 if (mCodec != nullptr) {
1394 mCodec->stop();
1395 mCodec->release();
1396 mCodec.clear();
1397 }
1398
1399 if (mCodecLooper != nullptr) {
1400 mCodecLooper->stop();
1401 mCodecLooper.clear();
1402 }
1403
1404 if (mCallbackLooper != nullptr) {
1405 mCallbackLooper->stop();
1406 mCallbackLooper.clear();
1407 }
1408
1409 mAsyncNotify.clear();
1410 mFormat.clear();
1411 }
1412
// Return the total size of the complete list of JPEG APP segments contained
// in appSegmentBuffer, or 0 on failure. On success *app1SegmentSize is set
// to the size of the leading APP1 segment (2-byte marker + payload).
//
// Expected buffer layout: APP1 [APPn ...] <padding> CameraBlob-trailer,
// where the trailer at the very end of the buffer advertises the total
// segment size.
size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
        size_t maxSize, size_t *app1SegmentSize) {
    if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
        ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
                __FUNCTION__, appSegmentBuffer, app1SegmentSize);
        return 0;
    }

    size_t expectedSize = 0;
    // First check for EXIF transport header at the end of the buffer
    const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
    const struct CameraBlob *blob = (const struct CameraBlob*)(header);
    if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
        ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
        return 0;
    }

    expectedSize = blob->blobSize;
    if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
        ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
        return 0;
    }

    uint32_t totalSize = 0;

    // Verify APP1 marker (mandatory)
    uint8_t app1Marker[] = {0xFF, 0xE1};
    if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
        ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
                appSegmentBuffer[0], appSegmentBuffer[1]);
        return 0;
    }
    totalSize += sizeof(app1Marker);

    // JPEG convention: the big-endian segment length includes the two length
    // bytes themselves but not the 2-byte marker.
    uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
            appSegmentBuffer[totalSize+1];
    totalSize += app1Size;

    ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
            __FUNCTION__, expectedSize, app1Size);
    // Walk the remaining APPn segments (markers 0xFFE2..0xFFEF only) until
    // the advertised total size is consumed.
    while (totalSize < expectedSize) {
        if (appSegmentBuffer[totalSize] != 0xFF ||
                appSegmentBuffer[totalSize+1] <= 0xE1 ||
                appSegmentBuffer[totalSize+1] > 0xEF) {
            // Invalid APPn marker
            ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
                    appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
            return 0;
        }
        totalSize += 2;

        uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
                appSegmentBuffer[totalSize+1];
        totalSize += appnSize;
    }

    // A segment list whose lengths walk past the advertised total is
    // rejected here rather than inside the loop.
    if (totalSize != expectedSize) {
        ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
                __FUNCTION__, totalSize, expectedSize);
        return 0;
    }

    *app1SegmentSize = app1Size + sizeof(app1Marker);
    return expectedSize;
}
1479
// Copy one grid tile (top/left/width/height, in luma pixels) from the
// camera-provided YUV buffer into a codec input buffer, honoring the plane
// layout the codec describes via its MediaImage2 "image-data" metadata.
// Three chroma cases are handled: matching semi-planar layouts, matching
// planar layouts, and a generic pixel-by-pixel fallback for everything else.
status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
        const CpuConsumer::LockedBuffer& yuvBuffer,
        size_t top, size_t left, size_t width, size_t height) {
    ATRACE_CALL();

    // Get stride information for codecBuffer
    sp<ABuffer> imageData;
    if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
        ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
        return BAD_VALUE;
    }
    if (imageData->size() != sizeof(MediaImage2)) {
        ALOGE("%s: Invalid codec input image size %zu, expected %zu",
                __FUNCTION__, imageData->size(), sizeof(MediaImage2));
        return BAD_VALUE;
    }
    MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
    // The copy routines below only support 8-bit, 3-plane YUV.
    if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
            imageInfo->mBitDepth != 8 ||
            imageInfo->mBitDepthAllocated != 8 ||
            imageInfo->mNumPlanes != 3) {
        ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
                "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
                imageInfo->mType, imageInfo->mBitDepth,
                imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
        return BAD_VALUE;
    }

    ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
            __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
    ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
            __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
            imageInfo->mPlane[MediaImage2::V].mOffset,
            imageInfo->mPlane[MediaImage2::U].mRowInc,
            imageInfo->mPlane[MediaImage2::V].mRowInc,
            imageInfo->mPlane[MediaImage2::U].mColInc,
            imageInfo->mPlane[MediaImage2::V].mColInc);

    // Y: full-resolution plane, copied row by row with the optimized
    // row-copy routine selected by initCopyRowFunction().
    for (auto row = top; row < top+height; row++) {
        uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
                imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
        mFnCopyRow(yuvBuffer.data+row*yuvBuffer.stride+left, dst, width);
    }

    // U is Cb, V is Cr
    bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
            imageInfo->mPlane[MediaImage2::U].mOffset;
    uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
            imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
            imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
    // Semi-planar: U and V interleaved (offsets differ by 1, 2-byte column
    // steps, equal row strides).
    bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
            (imageInfo->mPlane[MediaImage2::U].mRowInc ==
            imageInfo->mPlane[MediaImage2::V].mRowInc) &&
            (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
            (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
    // Planar: U and V occupy disjoint planes (offset gap covers at least a
    // half-height plane) with 1-byte column steps.
    bool isCodecUvPlannar =
            ((codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
            ((!codecUPlaneFirst && codecUvOffsetDiff >=
                    imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
            imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
            imageInfo->mPlane[MediaImage2::V].mColInc == 1;
    bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;

    if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
            (codecUPlaneFirst == cameraUPlaneFirst)) {
        // UV semiplannar
        // The chrome plane could be either Cb first, or Cr first. Take the
        // smaller address.
        uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
        MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                    imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
            mFnCopyRow(src+row*yuvBuffer.chromaStride+left, dst, width);
        }
    } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
        // U plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
                    imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }

        // V plane
        for (auto row = top/2; row < (top+height)/2; row++) {
            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
                    imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
            mFnCopyRow(yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, dst, width/2);
        }
    } else {
        // Convert between semiplannar and plannar, or when UV orders are
        // different.
        uint8_t *dst = codecBuffer->data();
        for (auto row = top/2; row < (top+height)/2; row++) {
            for (auto col = left/2; col < (left+width)/2; col++) {
                // U/Cb
                int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
                        imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
                int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCb[srcIndex];

                // V/Cr
                dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
                        imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
                        imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
                srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
                dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
            }
        }
    }
    return OK;
}
1595
// Select the libyuv row-copy routine used by copyOneYuvTile(). The #if
// blocks below intentionally override earlier assignments, so the last
// compiled-in variant whose CPU flag is present wins; CopyRow_C is the
// portable fallback. The exact (non-Any) variant is chosen only when
// |width| satisfies the routine's alignment requirement.
void HeicCompositeStream::initCopyRowFunction(int32_t width)
{
    using namespace libyuv;

    mFnCopyRow = CopyRow_C;
#if defined(HAS_COPYROW_SSE2)
    if (TestCpuFlag(kCpuHasSSE2)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_SSE2 : CopyRow_Any_SSE2;
    }
#endif
#if defined(HAS_COPYROW_AVX)
    if (TestCpuFlag(kCpuHasAVX)) {
        mFnCopyRow = IS_ALIGNED(width, 64) ? CopyRow_AVX : CopyRow_Any_AVX;
    }
#endif
#if defined(HAS_COPYROW_ERMS)
    if (TestCpuFlag(kCpuHasERMS)) {
        mFnCopyRow = CopyRow_ERMS;
    }
#endif
#if defined(HAS_COPYROW_NEON)
    if (TestCpuFlag(kCpuHasNEON)) {
        mFnCopyRow = IS_ALIGNED(width, 32) ? CopyRow_NEON : CopyRow_Any_NEON;
    }
#endif
#if defined(HAS_COPYROW_MIPS)
    if (TestCpuFlag(kCpuHasMIPS)) {
        mFnCopyRow = CopyRow_MIPS;
    }
#endif
}
1627
calcAppSegmentMaxSize(const CameraMetadata & info)1628 size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
1629 camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
1630 size_t maxAppsSegment = 1;
1631 if (entry.count > 0) {
1632 maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
1633 entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
1634 }
1635 return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
1636 }
1637
updateCodecQualityLocked(int32_t quality)1638 void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
1639 if (quality != mQuality) {
1640 sp<AMessage> qualityParams = new AMessage;
1641 qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, quality);
1642 status_t res = mCodec->setParameters(qualityParams);
1643 if (res != OK) {
1644 ALOGE("%s: Failed to set codec quality: %s (%d)",
1645 __FUNCTION__, strerror(-res), res);
1646 } else {
1647 mQuality = quality;
1648 }
1649 }
1650 }
1651
// Worker thread body: wait (under mMutex) for the next frame whose inputs
// are ready, process it with the lock released, then reap finished or
// failed frames. Returns false only to terminate the thread (stream error
// state, or a hard condition-variable failure).
bool HeicCompositeStream::threadLoop() {
    int64_t frameNumber = -1;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);
        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked();
            return false;
        }


        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&frameNumber);

            if (!newInputAvailable) {
                // No ready frame: drop the oldest failed frame, if any, and
                // loop again from the caller.
                auto failingFrameNumber = getNextFailingInputLocked();
                if (failingFrameNumber >= 0) {
                    releaseInputFrameLocked(failingFrameNumber,
                            &mPendingInputFrames[failingFrameNumber]);

                    // It's okay to remove the entry from mPendingInputFrames
                    // because:
                    // 1. Only one internal stream (main input) is critical in
                    // backing the output stream.
                    // 2. If captureResult/appSegment arrives after the entry is
                    // removed, they are simply skipped.
                    mPendingInputFrames.erase(failingFrameNumber);
                    if (mPendingInputFrames.size() == 0) {
                        markTrackerIdle();
                    }
                    return true;
                }

                // Nothing ready and nothing failing: block for new input with
                // a timeout so the loop re-evaluates periodically.
                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    // Note: processInputFrame() runs with mMutex released (the Autolock scope
    // above has closed); the lock is re-acquired below before mutating shared
    // state.
    auto res = processInputFrame(frameNumber, mPendingInputFrames[frameNumber]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ", frameNumber: %"
                PRId64 ": %s (%d)", __FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
                frameNumber, strerror(-res), res);
        mPendingInputFrames[frameNumber].error = true;
    }

    // Reap completed/failed frames regardless of this frame's outcome.
    releaseInputFramesLocked();

    return true;
}
1715
flagAnExifErrorFrameNumber(int64_t frameNumber)1716 void HeicCompositeStream::flagAnExifErrorFrameNumber(int64_t frameNumber) {
1717 Mutex::Autolock l(mMutex);
1718 mExifErrorFrameNumbers.emplace(frameNumber);
1719 mInputReadyCondition.signal();
1720 }
1721
onStreamBufferError(const CaptureResultExtras & resultExtras)1722 bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
1723 bool res = false;
1724 int64_t frameNumber = resultExtras.frameNumber;
1725
1726 // Buffer errors concerning internal composite streams should not be directly visible to
1727 // camera clients. They must only receive a single buffer error with the public composite
1728 // stream id.
1729 if (resultExtras.errorStreamId == mAppSegmentStreamId) {
1730 ALOGV("%s: APP_SEGMENT frameNumber: %" PRId64, __FUNCTION__, frameNumber);
1731 flagAnExifErrorFrameNumber(frameNumber);
1732 res = true;
1733 } else if (resultExtras.errorStreamId == mMainImageStreamId) {
1734 ALOGV("%s: YUV frameNumber: %" PRId64, __FUNCTION__, frameNumber);
1735 flagAnErrorFrameNumber(frameNumber);
1736 res = true;
1737 }
1738
1739 return res;
1740 }
1741
onResultError(const CaptureResultExtras & resultExtras)1742 void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
1743 // For result error, since the APPS_SEGMENT buffer already contains EXIF,
1744 // simply skip using the capture result metadata to override EXIF.
1745 Mutex::Autolock l(mMutex);
1746
1747 int64_t timestamp = -1;
1748 for (const auto& fn : mSettingsByFrameNumber) {
1749 if (fn.first == resultExtras.frameNumber) {
1750 timestamp = fn.second.timestamp;
1751 break;
1752 }
1753 }
1754 if (timestamp == -1) {
1755 for (const auto& inputFrame : mPendingInputFrames) {
1756 if (inputFrame.first == resultExtras.frameNumber) {
1757 timestamp = inputFrame.second.timestamp;
1758 break;
1759 }
1760 }
1761 }
1762
1763 if (timestamp == -1) {
1764 ALOGE("%s: Failed to find shutter timestamp for result error!", __FUNCTION__);
1765 return;
1766 }
1767
1768 mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
1769 ALOGV("%s: timestamp %" PRId64 ", frameNumber %" PRId64, __FUNCTION__,
1770 timestamp, resultExtras.frameNumber);
1771 mInputReadyCondition.signal();
1772 }
1773
onRequestError(const CaptureResultExtras & resultExtras)1774 void HeicCompositeStream::onRequestError(const CaptureResultExtras& resultExtras) {
1775 auto frameNumber = resultExtras.frameNumber;
1776 ALOGV("%s: frameNumber: %" PRId64, __FUNCTION__, frameNumber);
1777 Mutex::Autolock l(mMutex);
1778 auto numRequests = mSettingsByFrameNumber.erase(frameNumber);
1779 if (numRequests == 0) {
1780 // Pending request has been populated into mPendingInputFrames
1781 mErrorFrameNumbers.emplace(frameNumber);
1782 mInputReadyCondition.signal();
1783 } else {
1784 // REQUEST_ERROR was received without onShutter.
1785 }
1786 }
1787
markTrackerIdle()1788 void HeicCompositeStream::markTrackerIdle() {
1789 sp<StatusTracker> statusTracker = mStatusTracker.promote();
1790 if (statusTracker != nullptr) {
1791 statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
1792 ALOGV("%s: Mark component as idle", __FUNCTION__);
1793 }
1794 }
1795
// Dispatches asynchronous MediaCodec callback messages (delivered on the
// codec callback looper) to the owning HeicCompositeStream. Each malformed
// message is logged and dropped; no state is mutated here beyond forwarding.
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
    sp<HeicCompositeStream> parent = mParent.promote();
    // The parent stream may already be gone; silently drop late callbacks.
    if (parent == nullptr) return;

    switch (msg->what()) {
        case kWhatCallbackNotify: {
            int32_t cbID;
            if (!msg->findInt32("callbackID", &cbID)) {
                ALOGE("kWhatCallbackNotify: callbackID is expected.");
                break;
            }

            ALOGV("kWhatCallbackNotify: cbID = %d", cbID);

            switch (cbID) {
                // A codec input buffer slot became available for queueing.
                case MediaCodec::CB_INPUT_AVAILABLE: {
                    int32_t index;
                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    parent->onHeicInputFrameAvailable(index);
                    break;
                }

                // An encoded output buffer is ready; repackage the message
                // fields into a CodecOutputBufferInfo for the parent.
                case MediaCodec::CB_OUTPUT_AVAILABLE: {
                    int32_t index;
                    size_t offset;
                    size_t size;
                    int64_t timeUs;
                    int32_t flags;

                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    if (!msg->findSize("offset", &offset)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
                        break;
                    }
                    if (!msg->findSize("size", &size)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
                        break;
                    }
                    if (!msg->findInt64("timeUs", &timeUs)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
                        break;
                    }
                    if (!msg->findInt32("flags", &flags)) {
                        ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
                        break;
                    }

                    CodecOutputBufferInfo bufferInfo = {
                        index,
                        (int32_t)offset,
                        (int32_t)size,
                        timeUs,
                        (uint32_t)flags};

                    parent->onHeicOutputFrameAvailable(bufferInfo);
                    break;
                }

                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
                    sp<AMessage> format;
                    if (!msg->findMessage("format", &format)) {
                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                        break;
                    }
                    // Here format is MediaCodec's internal copy of output format.
                    // Make a copy since onHeicFormatChanged() might modify it.
                    sp<AMessage> formatCopy;
                    if (format != nullptr) {
                        formatCopy = format->dup();
                    }
                    parent->onHeicFormatChanged(formatCopy);
                    break;
                }

                // Fatal codec error: log everything we can extract ("detail"
                // is optional) and let the parent tear down/propagate.
                case MediaCodec::CB_ERROR: {
                    status_t err;
                    int32_t actionCode;
                    AString detail;
                    if (!msg->findInt32("err", &err)) {
                        ALOGE("CB_ERROR: err is expected.");
                        break;
                    }
                    if (!msg->findInt32("action", &actionCode)) {
                        ALOGE("CB_ERROR: action is expected.");
                        break;
                    }
                    msg->findString("detail", &detail);
                    ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                            err, actionCode, detail.c_str());

                    parent->onHeicCodecError();
                    break;
                }

                default: {
                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
                    break;
                }
            }
            break;
        }

        default:
            ALOGE("shouldn't be here");
            break;
    }
}
1909
1910 }; // namespace camera3
1911 }; // namespace android
1912