• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2022 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "Camera3-JpegRCompositeStream"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 // #define LOG_NDEBUG 0
20 
21 #include <aidl/android/hardware/camera/device/CameraBlob.h>
22 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
23 
24 #include "common/CameraProviderManager.h"
25 #include "utils/SessionConfigurationUtils.h"
26 
27 #include <com_android_graphics_libgui_flags.h>
28 #include <gui/CpuConsumer.h>
29 #include <gui/Surface.h>
30 #include <hardware/gralloc.h>
31 #include <system/graphics-base-v1.0.h>
32 #include <system/graphics-base-v1.1.h>
33 #include <ultrahdr/jpegr.h>
34 #include <utils/ExifUtils.h>
35 #include <utils/Log.h>
36 #include <utils/Trace.h>
37 
38 #include "JpegRCompositeStream.h"
39 
40 namespace android {
41 namespace camera3 {
42 
43 using aidl::android::hardware::camera::device::CameraBlob;
44 using aidl::android::hardware::camera::device::CameraBlobId;
45 
// Constructs a JPEG/R composite stream on top of 'device'. Caches the camera's
// static characteristics and pre-computes the maximum jpeg buffer sizes that
// are later used to size the compressed JPEG/R output buffer in
// 'processInputFrame()'.
JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mBlobStreamId(-1),
        mBlobSurfaceId(-1),
        mP010StreamId(-1),
        mP010SurfaceId(-1),
        mBlobWidth(0),
        mBlobHeight(0),
        mP010BufferAcquired(false),
        mBlobBufferAcquired(false),
        mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
        mOutputStreamUseCase(0),
        mFirstRequestLatency(-1),
        mStreamSurfaceListener(new StreamSurfaceListener()),
        mMaxJpegBufferSize(-1),
        mUHRMaxJpegBufferSize(-1),
        mStaticInfo(device->info()) {
    // ANDROID_JPEG_MAX_SIZE is optional in the static metadata; when absent
    // mMaxJpegBufferSize stays -1 and processInputFrame() falls back to sizing
    // the output from the P010 dimensions.
    auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
    if (entry.count > 0) {
        mMaxJpegBufferSize = entry.data.i32[0];
    } else {
        ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
    }

    // Max jpeg resolutions for both the ultra-high-resolution and the default
    // sensor pixel modes; used to decide which buffer size cap applies.
    mUHRMaxJpegSize =
            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
                    /*ultraHighResolution*/true);
    mDefaultMaxJpegSize =
            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
                    /*isUltraHighResolution*/false);

    mUHRMaxJpegBufferSize =
        SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
                mMaxJpegBufferSize);
}
82 
~JpegRCompositeStream()83 JpegRCompositeStream::~JpegRCompositeStream() {
84     mBlobConsumer.clear(),
85     mBlobSurface.clear(),
86     mBlobStreamId = -1;
87     mBlobSurfaceId = -1;
88     mP010Consumer.clear();
89     mP010Surface.clear();
90     mP010Consumer = nullptr;
91     mP010Surface = nullptr;
92 }
93 
// Drains the producer-side queues into 'mPendingInputFrames', keyed by sensor
// timestamp: locks at most one jpeg and one P010 buffer, attaches capture
// results and frame numbers, and flags frames with reported buffer errors.
// Must be called with 'mMutex' held.
void JpegRCompositeStream::compilePendingInputLocked() {
    CpuConsumer::LockedBuffer imgBuffer;

    // Acquire at most one jpeg buffer at a time; 'mBlobBufferAcquired' gates
    // the loop until the previously locked buffer is released in
    // 'releaseInputFrameLocked()'.
    while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
        auto it = mInputJpegBuffers.begin();
        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputJpegBuffers.erase(it);
            continue;
        }

        // Buffers are expected to arrive in queue order; on mismatch the
        // locked buffer's own timestamp is trusted below.
        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        // If this frame already failed, return the buffer right away instead
        // of holding on to it.
        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mBlobConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
            mBlobBufferAcquired = true;
        }
        mInputJpegBuffers.erase(it);
    }

    // Same single-buffer acquisition scheme for the P010 input.
    while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
        auto it = mInputP010Buffers.begin();
        auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputP010Buffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mP010Consumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
            mP010BufferAcquired = true;
        }
        mInputP010Buffers.erase(it);
    }

    // Attach capture results (keyed by timestamp) to their pending frames.
    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            auto frameNumber = std::get<0>(it->second);
            mPendingInputFrames[it->first].frameNumber = frameNumber;
            mPendingInputFrames[it->first].result = std::get<1>(it->second);
            mSessionStatsBuilder.incResultCounter(false /*dropped*/);
        }
        mCaptureResults.erase(it);
    }

    // Propagate frame numbers (and, when present, the request submission time
    // used for latency stats) to the pending frames keyed by timestamp.
    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        auto frameNumber = it->first;
        mPendingInputFrames[it->second].frameNumber = frameNumber;
        auto requestTimeIt = mRequestTimeMap.find(frameNumber);
        if (requestTimeIt != mRequestTimeMap.end()) {
            mPendingInputFrames[it->second].requestTimeNs = requestTimeIt->second;
            mRequestTimeMap.erase(requestTimeIt);
        }
        mFrameNumberMap.erase(it);
    }

    // Match reported error frame numbers against pending frames. Entries with
    // no match yet are kept for a later pass, since the frame number may not
    // have been propagated above yet.
    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/,
                    0 /*captureLatencyMs*/);
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }
}
202 
getNextReadyInputLocked(int64_t * currentTs)203 bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
204     if (currentTs == nullptr) {
205         return false;
206     }
207 
208     bool newInputAvailable = false;
209     for (const auto& it : mPendingInputFrames) {
210         if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
211                 (it.second.requestTimeNs != -1) &&
212                 ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
213                 (it.first < *currentTs)) {
214             *currentTs = it.first;
215             newInputAvailable = true;
216         }
217     }
218 
219     return newInputAvailable;
220 }
221 
getNextFailingInputLocked(int64_t * currentTs)222 int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
223     int64_t ret = -1;
224     if (currentTs == nullptr) {
225         return ret;
226     }
227 
228     for (const auto& it : mPendingInputFrames) {
229         if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
230             *currentTs = it.first;
231             ret = it.second.frameNumber;
232         }
233     }
234 
235     return ret;
236 }
237 
// Encodes one assembled input frame into a JPEG/R blob and queues it to the
// client-facing output surface:
//   1. sizes the output blob buffer,
//   2. dequeues + locks an output buffer,
//   3. encodes (either combining the internally captured jpeg with the P010
//      buffer, or encoding the P010 buffer alone with generated EXIF),
//   4. appends the CameraBlob transport header at the end of the buffer,
//   5. updates latency stats and queues the buffer.
// Called without 'mMutex' held (see threadLoop()).
status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
    status_t res;
    sp<ANativeWindow> outputANW = mOutputSurface;
    ANativeWindowBuffer *anb;
    int fenceFd;
    void *dstBuffer;

    // Pick the blob capacity: UHR cap when the input jpeg exceeds the default
    // max resolution, otherwise the regular cap; fall back to P010 pixel count
    // when ANDROID_JPEG_MAX_SIZE was absent from the static metadata.
    size_t maxJpegRBufferSize = 0;
    if (mMaxJpegBufferSize > 0) {
        // If this is an ultra high resolution sensor and the input frames size
        // is > default res jpeg.
        if (mUHRMaxJpegSize.width != 0 &&
                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
            maxJpegRBufferSize = mUHRMaxJpegBufferSize;
        } else {
            maxJpegRBufferSize = mMaxJpegBufferSize;
        }
    } else {
        maxJpegRBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height;
    }

    // Requested jpeg quality from the capture result; defaults to max.
    uint8_t jpegQuality = 100;
    auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
    if (entry.count > 0) {
        jpegQuality = entry.data.u8[0];
    }

    // Blob buffers are addressed as a width x 1 byte array.
    if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegRBufferSize, 1))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %zux%u for stream %d", __FUNCTION__, maxJpegRBufferSize, 1U, mP010StreamId);
        return res;
    }

    res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }

    // GraphicBufferLocker unlocks on scope exit; lockAsync waits on 'fenceFd'.
    sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
    GraphicBufferLocker gbLocker(gb);
    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return res;
    }

    if ((gb->getWidth() < maxJpegRBufferSize) || (gb->getHeight() != 1)) {
        ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
                maxJpegRBufferSize, 1, gb->getWidth(), gb->getHeight());
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return BAD_VALUE;
    }

    size_t actualJpegRSize = 0;
    ultrahdr::jpegr_uncompressed_struct p010;
    ultrahdr::jpegr_compressed_struct jpegR;
    ultrahdr::JpegR jpegREncoder;

    p010.height = inputFrame.p010Buffer.height;
    p010.width = inputFrame.p010Buffer.width;
    p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
    p010.data = inputFrame.p010Buffer.data;
    p010.chroma_data = inputFrame.p010Buffer.dataCb;
    // Strides are expected to be in pixels not bytes
    p010.luma_stride = inputFrame.p010Buffer.stride / 2;
    p010.chroma_stride = inputFrame.p010Buffer.chromaStride / 2;

    jpegR.data = dstBuffer;
    jpegR.maxLength = maxJpegRBufferSize;

    // HDR10/HDR10+ use the PQ transfer function; everything else (including
    // the Dolby Vision profiles mapped in deriveDynamicRangeAndDataspace())
    // defaults to HLG.
    ultrahdr::ultrahdr_transfer_function transferFunction;
    switch (mP010DynamicRange) {
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
            transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ;
            break;
        default:
            transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
    }

    if (mSupportInternalJpeg) {
        // Combine the internally captured SDR jpeg with the P010 HDR input.
        ultrahdr::jpegr_compressed_struct jpeg;

        jpeg.data = inputFrame.jpegBuffer.data;
        // For blob buffers 'width' carries the byte capacity; findJpegSize
        // locates the actual compressed size within it.
        jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
                inputFrame.jpegBuffer.width);
        if (jpeg.length == 0) {
            ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!",
                    __FUNCTION__);
            jpeg.length = inputFrame.jpegBuffer.width;
        }

        if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
            jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_P3;
        } else {
            jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;
        }

        res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
    } else {
        // No internal jpeg stream: encode the P010 buffer directly and attach
        // EXIF generated from the capture result metadata.
        const uint8_t* exifBuffer = nullptr;
        size_t exifBufferSize = 0;
        std::unique_ptr<ExifUtils> utils(ExifUtils::create());
        utils->initializeEmpty();
        utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width,
                inputFrame.p010Buffer.height);
        if (utils->generateApp1()) {
            exifBuffer = utils->getApp1Buffer();
            exifBufferSize = utils->getApp1Length();
        } else {
            ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
        }

        ultrahdr::jpegr_exif_struct exif;
        exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer));
        exif.length = exifBufferSize;

        res = jpegREncoder.encodeJPEGR(&p010, transferFunction, &jpegR, jpegQuality, &exif);
    }

    // NOTE(review): on encode failure (and on the timestamp-set failure below)
    // the dequeued buffer is not returned via cancelBuffer, unlike the earlier
    // error paths — confirm whether this is intentional or a buffer leak.
    if (res != OK) {
        ALOGE("%s: Error trying to encode JPEG/R: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    actualJpegRSize = jpegR.length;

    // Make sure the payload plus the trailing CameraBlob header fits.
    size_t finalJpegRSize = actualJpegRSize + sizeof(CameraBlob);
    if (finalJpegRSize > maxJpegRBufferSize) {
        ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return NO_MEMORY;
    }

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
                getStreamId(), strerror(-res), res);
        return res;
    }

    ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegRSize);
    // The CameraBlob transport header is written at the very end of the
    // buffer; consumers use it to recover the actual payload size.
    uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
        (gb->getWidth() - sizeof(CameraBlob));
    CameraBlob blobHeader = {
        .blobId = CameraBlobId::JPEG,
        .blobSizeBytes = static_cast<int32_t>(actualJpegRSize)
    };
    memcpy(header, &blobHeader, sizeof(CameraBlob));

    // Record capture latency; the first request's latency is kept separately
    // for session statistics.
    if (inputFrame.requestTimeNs != -1) {
        auto captureLatency = ns2ms(systemTime() - inputFrame.requestTimeNs);
        mSessionStatsBuilder.incCounter(mP010StreamId, false /*dropped*/, captureLatency);
        if (mFirstRequestLatency == -1) {
            mFirstRequestLatency = captureLatency;
        }
    }
    outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);

    return res;
}
405 
releaseInputFrameLocked(InputFrame * inputFrame)406 void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
407     if (inputFrame == nullptr) {
408         return;
409     }
410 
411     if (inputFrame->p010Buffer.data != nullptr) {
412         mP010Consumer->unlockBuffer(inputFrame->p010Buffer);
413         inputFrame->p010Buffer.data = nullptr;
414         mP010BufferAcquired = false;
415     }
416 
417     if (inputFrame->jpegBuffer.data != nullptr) {
418         mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
419         inputFrame->jpegBuffer.data = nullptr;
420         mBlobBufferAcquired = false;
421     }
422 
423     if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
424         //TODO: Figure out correct requestId
425         notifyError(inputFrame->frameNumber, -1 /*requestId*/);
426         inputFrame->errorNotified = true;
427         mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/, 0 /*captureLatencyMs*/);
428     }
429 }
430 
releaseInputFramesLocked(int64_t currentTs)431 void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
432     auto it = mPendingInputFrames.begin();
433     while (it != mPendingInputFrames.end()) {
434         if (it->first <= currentTs) {
435             releaseInputFrameLocked(&it->second);
436             it = mPendingInputFrames.erase(it);
437         } else {
438             it++;
439         }
440     }
441 }
442 
// Main processing loop: waits (under 'mMutex') until a fully assembled input
// frame is ready, then encodes it outside the lock via processInputFrame(),
// and finally releases all frames up to the processed timestamp.
// Returns false to terminate the thread (error state or wait failure), true
// to be called again.
bool JpegRCompositeStream::threadLoop() {
    int64_t currentTs = INT64_MAX;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);

        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked(currentTs);
            return false;
        }

        while (!newInputAvailable) {
            compilePendingInputLocked();
            // currentTs is both the upper bound going in (INT64_MAX) and the
            // selected timestamp coming out.
            newInputAvailable = getNextReadyInputLocked(&currentTs);
            if (!newInputAvailable) {
                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
                if (failingFrameNumber >= 0) {
                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
                    // possible for two internal stream buffers to fail. In such scenario the
                    // composite stream should notify the client about a stream buffer error only
                    // once and this information is kept within 'errorNotified'.
                    // Any present failed input frames will be removed on a subsequent call to
                    // 'releaseInputFramesLocked()'.
                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
                    currentTs = INT64_MAX;
                }

                // Wait for new input; a timeout simply re-enters threadLoop().
                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    // Encode outside the lock; re-acquire only to record the outcome.
    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        // NOTE(review): currentTs is a signed int64_t; "%" PRId64 would be the
        // accurate conversion here instead of PRIu64.
        ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", __FUNCTION__,
                currentTs, strerror(-res), res);
        mPendingInputFrames[currentTs].error = true;
    }

    releaseInputFramesLocked(currentTs);

    return true;
}
498 
isJpegRCompositeStream(const sp<Surface> & surface)499 bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) {
500     if (CameraProviderManager::kFrameworkJpegRDisabled) {
501         return false;
502     }
503     ANativeWindow *anw = surface.get();
504     status_t err;
505     int format;
506     if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
507         ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err),
508                 err);
509         return false;
510     }
511 
512     int dataspace;
513     if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
514         ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err),
515                 err);
516         return false;
517     }
518 
519     if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) {
520         return true;
521     }
522 
523     return false;
524 }
525 
isJpegRCompositeStreamInfo(const OutputStreamInfo & streamInfo)526 bool JpegRCompositeStream::isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
527     if ((streamInfo.format == HAL_PIXEL_FORMAT_BLOB) &&
528             (streamInfo.dataSpace == static_cast<int>(kJpegRDataSpace))) {
529         return true;
530     }
531 
532     return false;
533 }
534 
deriveDynamicRangeAndDataspace(int64_t dynamicProfile,int64_t * dynamicRange,int64_t * dataSpace)535 void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
536         int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
537     if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
538         return;
539     }
540 
541     switch (dynamicProfile) {
542         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
543         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
544             *dynamicRange = dynamicProfile;
545             *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ;
546             break;
547         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
548         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
549         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
550         case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
551             *dynamicRange = dynamicProfile;
552             *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
553             break;
554         default:
555             *dynamicRange = kP010DefaultDynamicRange;
556             *dataSpace = kP010DefaultDataSpace;
557     }
558 
559 }
560 
// Creates the internal camera streams backing this composite: always a P010
// stream, plus an internal BLOB (jpeg) stream when the device supports
// capturing the chosen HDR profile concurrently with STANDARD. Registers this
// object as listener for both streams and records the client-facing output
// surface and stream parameters.
status_t JpegRCompositeStream::createInternalStreams(const std::vector<SurfaceHolder>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
        int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    // Resolve the P010 dynamic range/dataspace from the requested profile and
    // determine whether an internal SDR jpeg stream can be captured alongside.
    deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
    mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
            mStaticInfo, mP010DynamicRange,
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);

    // P010 consumer: CPU-readable, single locked buffer at a time (matches the
    // acquisition scheme in compilePendingInputLocked()).
    std::tie(mP010Consumer, mP010Surface) =
            CpuConsumer::create(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
    mP010Consumer->setFrameAvailableListener(this);
    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));

    auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
            static_cast<android_dataspace>(mP010DataSpace), rotation,
            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
            camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
            GRALLOC_USAGE_SW_READ_OFTEN, mP010DynamicRange, streamUseCase,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT, OutputConfiguration::MIRROR_MODE_AUTO,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED, useReadoutTimestamp);
    if (ret == OK) {
        mP010StreamId = *id;
        mP010SurfaceId = (*surfaceIds)[0];
        // The composite's output goes to the client-provided surface.
        mOutputSurface = consumers[0].mSurface;
    } else {
        return ret;
    }

    if (mSupportInternalJpeg) {
        // Internal SDR jpeg stream used as the base image of the JPEG/R encode.
        std::tie(mBlobConsumer, mBlobSurface) =
                CpuConsumer::create(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
        mBlobConsumer->setFrameAvailableListener(this);
        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));

        std::vector<int> blobSurfaceId;
        ret = device->createStream(mBlobSurface, width, height, format,
                kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
                &blobSurfaceId,
                /*streamSetI*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
                /*isShared*/  false,
                /*isMultiResolution*/ false,
                /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
                /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
                streamUseCase,
                /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
                /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
                /*colorSpace*/ colorSpace, useReadoutTimestamp);
        if (ret == OK) {
            mBlobSurfaceId = blobSurfaceId[0];
        } else {
            return ret;
        }

        ret = registerCompositeStreamListener(mBlobStreamId);
        if (ret != OK) {
            ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
            return ret;
        }
    }

    ret = registerCompositeStreamListener(getStreamId());
    if (ret != OK) {
        ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
        return ret;
    }

    // Remember output parameters for later encode/configuration steps.
    mOutputColorSpace = colorSpace;
    mOutputStreamUseCase = streamUseCase;
    mBlobWidth = width;
    mBlobHeight = height;

    return ret;
}
644 
// Connects and configures the client-facing output surface (BLOB format,
// CPU read/write usage, buffer count sized from producer+consumer minimums)
// and starts the processing thread. Idempotent once the thread is running.
status_t JpegRCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mP010StreamId);
        return res;
    }

    // The composite writes compressed JPEG/R payloads, hence BLOB format.
    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mP010StreamId);
        return res;
    }

    // CPU read/write: the encoder writes the payload via a locked CPU pointer.
    if ((res = native_window_set_usage(mOutputSurface.get(),
            GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN)) != OK) {
        ALOGE("%s: Unable to configure stream buffer usage for stream %d", __FUNCTION__,
                mP010StreamId);
        return res;
    }

    int maxProducerBuffers;
    ANativeWindow *anw = mP010Surface.get();
    if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    // Size the output queue so that neither side starves the other.
    if ((res = native_window_set_buffer_count(
                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    mSessionStatsBuilder.addStream(mP010StreamId);

    // Start the Thread subclass loop (threadLoop()).
    run("JpegRCompositeStreamProc");

    return NO_ERROR;
}
706 
// Stops the processing thread, deletes the internal blob stream (when it was
// created and the device is still reachable) and disconnects the output
// surface. Returns the last error encountered, or OK.
status_t JpegRCompositeStream::deleteInternalStreams() {
    // The 'CameraDeviceClient' parent will delete the P010 stream
    requestExit();

    auto ret = join();
    if (ret != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-ret), ret);
    }

    if (mBlobStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            ret = device->deleteStream(mBlobStreamId);
        }

        mBlobStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }

    return ret;
}
736 
onFrameAvailable(const BufferItem & item)737 void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) {
738     if (item.mDataSpace == kJpegDataSpace) {
739         ALOGV("%s: Jpeg buffer with ts: %" PRIu64 " ms. arrived!",
740                 __func__, ns2ms(item.mTimestamp));
741 
742         Mutex::Autolock l(mMutex);
743         if (!mErrorState) {
744             mInputJpegBuffers.push_back(item.mTimestamp);
745             mInputReadyCondition.signal();
746         }
747     } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) {
748         ALOGV("%s: P010 buffer with ts: %" PRIu64 " ms. arrived!", __func__,
749                 ns2ms(item.mTimestamp));
750 
751         Mutex::Autolock l(mMutex);
752         if (!mErrorState) {
753             mInputP010Buffers.push_back(item.mTimestamp);
754             mInputReadyCondition.signal();
755         }
756     } else {
757         ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
758     }
759 }
760 
insertGbp(SurfaceMap * outSurfaceMap,Vector<int32_t> * outputStreamIds,int32_t * currentStreamId)761 status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
762         Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
763     if (outputStreamIds == nullptr) {
764         return BAD_VALUE;
765     }
766 
767     if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) {
768         outputStreamIds->push_back(mP010StreamId);
769     }
770     (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId);
771 
772     if (mSupportInternalJpeg) {
773         if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
774             outputStreamIds->push_back(mBlobStreamId);
775         }
776         (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
777     }
778 
779     if (currentStreamId != nullptr) {
780         *currentStreamId = mP010StreamId;
781     }
782 
783     return NO_ERROR;
784 }
785 
insertCompositeStreamIds(std::vector<int32_t> * compositeStreamIds)786 status_t JpegRCompositeStream::insertCompositeStreamIds(
787         std::vector<int32_t>* compositeStreamIds /*out*/) {
788     if (compositeStreamIds == nullptr) {
789         return BAD_VALUE;
790     }
791 
792     compositeStreamIds->push_back(mP010StreamId);
793     if (mSupportInternalJpeg) {
794         compositeStreamIds->push_back(mBlobStreamId);
795     }
796 
797     return OK;
798 }
799 
onResultError(const CaptureResultExtras & resultExtras)800 void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
801     // Processing can continue even in case of result errors.
802     // At the moment Jpeg/R composite stream processing relies mainly on static camera
803     // characteristics data. The actual result data can be used for the jpeg quality but
804     // in case it is absent we can default to maximum.
805     eraseResult(resultExtras.frameNumber);
806     mSessionStatsBuilder.incResultCounter(true /*dropped*/);
807 }
808 
onStreamBufferError(const CaptureResultExtras & resultExtras)809 bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
810     bool ret = false;
811     // Buffer errors concerning internal composite streams should not be directly visible to
812     // camera clients. They must only receive a single buffer error with the public composite
813     // stream id.
814     if ((resultExtras.errorStreamId == mP010StreamId) ||
815             (resultExtras.errorStreamId == mBlobStreamId)) {
816         flagAnErrorFrameNumber(resultExtras.frameNumber);
817         ret = true;
818     }
819 
820     return ret;
821 }
822 
getCompositeStreamInfo(const OutputStreamInfo & streamInfo,const CameraMetadata & staticInfo,std::vector<OutputStreamInfo> * compositeOutput)823 status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
824             const CameraMetadata& staticInfo,
825             std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
826     if (compositeOutput == nullptr) {
827         return BAD_VALUE;
828     }
829 
830     int64_t dynamicRange, dataSpace;
831     deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace);
832 
833     compositeOutput->clear();
834     compositeOutput->push_back({});
835     (*compositeOutput)[0].width = streamInfo.width;
836     (*compositeOutput)[0].height = streamInfo.height;
837     (*compositeOutput)[0].format = kP010PixelFormat;
838     (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace);
839     (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
840     (*compositeOutput)[0].dynamicRangeProfile = dynamicRange;
841     (*compositeOutput)[0].colorSpace =
842         ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
843 
844     if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
845                 staticInfo, dynamicRange,
846                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
847         compositeOutput->push_back({});
848         (*compositeOutput)[1].width = streamInfo.width;
849         (*compositeOutput)[1].height = streamInfo.height;
850         (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
851         (*compositeOutput)[1].dataSpace = kJpegDataSpace;
852         (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
853         (*compositeOutput)[1].dynamicRangeProfile =
854             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
855         (*compositeOutput)[1].colorSpace = streamInfo.colorSpace;
856     }
857 
858     return NO_ERROR;
859 }
860 
getStreamStats(hardware::CameraStreamStats * streamStats)861 void JpegRCompositeStream::getStreamStats(hardware::CameraStreamStats* streamStats) {
862     if ((streamStats == nullptr) || (mFirstRequestLatency != -1)) {
863         return;
864     }
865 
866     bool deviceError;
867     std::map<int, StreamStats> stats;
868     std::pair<int32_t, int32_t> mostRequestedFps;
869     mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
870             &deviceError, &mostRequestedFps, &stats);
871     if (stats.find(mP010StreamId) != stats.end()) {
872         streamStats->mWidth = mBlobWidth;
873         streamStats->mHeight = mBlobHeight;
874         streamStats->mFormat = HAL_PIXEL_FORMAT_BLOB;
875         streamStats->mDataSpace = static_cast<int>(kJpegRDataSpace);
876         streamStats->mDynamicRangeProfile = mP010DynamicRange;
877         streamStats->mColorSpace = mOutputColorSpace;
878         streamStats->mStreamUseCase = mOutputStreamUseCase;
879         streamStats->mStartLatencyMs = mFirstRequestLatency;
880         streamStats->mHistogramType = hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
881         streamStats->mHistogramBins.assign(stats[mP010StreamId].mCaptureLatencyBins.begin(),
882                 stats[mP010StreamId].mCaptureLatencyBins.end());
883         streamStats->mHistogramCounts.assign(stats[mP010StreamId].mCaptureLatencyHistogram.begin(),
884                 stats[mP010StreamId].mCaptureLatencyHistogram.end());
885     }
886 }
887 
888 }; // namespace camera3
889 }; // namespace android
890