/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "hardware/gralloc.h"
#include "system/graphics-base-v1.0.h"
#include "system/graphics-base-v1.1.h"
#define LOG_TAG "Camera3-JpegRCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <aidl/android/hardware/camera/device/CameraBlob.h>
#include <aidl/android/hardware/camera/device/CameraBlobId.h>

#include "common/CameraProviderManager.h"
#include <gui/Surface.h>
#include <ultrahdr/jpegr.h>
#include <utils/ExifUtils.h>
#include <utils/Log.h>
#include "utils/SessionConfigurationUtils.h"
#include <utils/Trace.h>

#include "JpegRCompositeStream.h"

namespace android {
namespace camera3 {

using aidl::android::hardware::camera::device::CameraBlob;
using aidl::android::hardware::camera::device::CameraBlobId;

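// JpegRCompositeStream combines two internal inputs into a single JPEG/R
// (UltraHDR) BLOB output: a P010 10-bit stream that always supplies the HDR
// input, and an optional JPEG stream that supplies the SDR base image when
// the device supports concurrent SDR+HDR capture. Buffers from both inputs
// are matched by sensor timestamp and encoded on a dedicated processing
// thread.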
JpegRCompositeStream::JpegRCompositeStream(sp<CameraDeviceBase> device,
        wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
        CompositeStream(device, cb),
        mBlobStreamId(-1),
        mBlobSurfaceId(-1),
        mP010StreamId(-1),
        mP010SurfaceId(-1),
        mBlobWidth(0),
        mBlobHeight(0),
        mP010BufferAcquired(false),
        mBlobBufferAcquired(false),
        mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
        mOutputStreamUseCase(0),
        mFirstRequestLatency(-1),
        mProducerListener(new ProducerListener()),
        mMaxJpegBufferSize(-1),
        mUHRMaxJpegBufferSize(-1),
        mStaticInfo(device->info()) {
    auto entry = mStaticInfo.find(ANDROID_JPEG_MAX_SIZE);
    if (entry.count > 0) {
        mMaxJpegBufferSize = entry.data.i32[0];
    } else {
        ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
    }

    mUHRMaxJpegSize =
            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
                    /*isUltraHighResolution*/true);
    mDefaultMaxJpegSize =
            SessionConfigurationUtils::getMaxJpegResolution(mStaticInfo,
                    /*isUltraHighResolution*/false);

    mUHRMaxJpegBufferSize =
        SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
                mMaxJpegBufferSize);
}

JpegRCompositeStream::~JpegRCompositeStream() {
    mBlobConsumer.clear();
    mBlobSurface.clear();
    mBlobStreamId = -1;
    mBlobSurfaceId = -1;
    mP010Consumer.clear();
    mP010Surface.clear();
    mP010Consumer = nullptr;
    mP010Surface = nullptr;
}

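// Drain newly arrived jpeg/P010 buffers, capture results and frame numbers
// into 'mPendingInputFrames', keyed by sensor timestamp. At most one buffer
// per consumer is kept locked at any time ('mBlobBufferAcquired' /
// 'mP010BufferAcquired') since the CpuConsumers are configured with a single
// locked buffer slot.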
void JpegRCompositeStream::compilePendingInputLocked() {
    CpuConsumer::LockedBuffer imgBuffer;

    while (mSupportInternalJpeg && !mInputJpegBuffers.empty() && !mBlobBufferAcquired) {
        auto it = mInputJpegBuffers.begin();
        auto res = mBlobConsumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error locking blob image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputJpegBuffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting jpeg buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mBlobConsumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].jpegBuffer = imgBuffer;
            mBlobBufferAcquired = true;
        }
        mInputJpegBuffers.erase(it);
    }

    while (!mInputP010Buffers.empty() && !mP010BufferAcquired) {
        auto it = mInputP010Buffers.begin();
        auto res = mP010Consumer->lockNextBuffer(&imgBuffer);
        if (res == NOT_ENOUGH_DATA) {
            // Can not lock any more buffers.
            break;
        } else if (res != OK) {
            ALOGE("%s: Error receiving P010 image buffer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            mPendingInputFrames[*it].error = true;
            mInputP010Buffers.erase(it);
            continue;
        }

        if (*it != imgBuffer.timestamp) {
            ALOGW("%s: Expecting P010 buffer with time stamp: %" PRId64 " received buffer with "
                    "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
        }

        if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
                (mPendingInputFrames[imgBuffer.timestamp].error)) {
            mP010Consumer->unlockBuffer(imgBuffer);
        } else {
            mPendingInputFrames[imgBuffer.timestamp].p010Buffer = imgBuffer;
            mP010BufferAcquired = true;
        }
        mInputP010Buffers.erase(it);
    }

    while (!mCaptureResults.empty()) {
        auto it = mCaptureResults.begin();
        // Negative timestamp indicates that something went wrong during the capture result
        // collection process.
        if (it->first >= 0) {
            auto frameNumber = std::get<0>(it->second);
            mPendingInputFrames[it->first].frameNumber = frameNumber;
            mPendingInputFrames[it->first].result = std::get<1>(it->second);
            mSessionStatsBuilder.incResultCounter(false /*dropped*/);
        }
        mCaptureResults.erase(it);
    }

    while (!mFrameNumberMap.empty()) {
        auto it = mFrameNumberMap.begin();
        auto frameNumber = it->first;
        mPendingInputFrames[it->second].frameNumber = frameNumber;
        auto requestTimeIt = mRequestTimeMap.find(frameNumber);
        if (requestTimeIt != mRequestTimeMap.end()) {
            mPendingInputFrames[it->second].requestTimeNs = requestTimeIt->second;
            mRequestTimeMap.erase(requestTimeIt);
        }
        mFrameNumberMap.erase(it);
    }

    auto it = mErrorFrameNumbers.begin();
    while (it != mErrorFrameNumbers.end()) {
        bool frameFound = false;
        for (auto &inputFrame : mPendingInputFrames) {
            if (inputFrame.second.frameNumber == *it) {
                inputFrame.second.error = true;
                frameFound = true;
                break;
            }
        }

        if (frameFound) {
            mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/,
                    0 /*captureLatencyMs*/);
            it = mErrorFrameNumbers.erase(it);
        } else {
            ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
                    *it);
            it++;
        }
    }
}

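// Scan the pending frames for the oldest entry that is ready for encoding:
// no error, P010 pixels present, request time known, and a jpeg buffer
// present whenever internal jpeg capture is supported. 'currentTs' starts at
// INT64_MAX in the caller, so the comparison settles on the smallest ready
// timestamp.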
bool JpegRCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*inout*/) {
    if (currentTs == nullptr) {
        return false;
    }

    bool newInputAvailable = false;
    for (const auto& it : mPendingInputFrames) {
        if ((!it.second.error) && (it.second.p010Buffer.data != nullptr) &&
                (it.second.requestTimeNs != -1) &&
                ((it.second.jpegBuffer.data != nullptr) || !mSupportInternalJpeg) &&
                (it.first < *currentTs)) {
            *currentTs = it.first;
            newInputAvailable = true;
        }
    }

    return newInputAvailable;
}

int64_t JpegRCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*inout*/) {
    int64_t ret = -1;
    if (currentTs == nullptr) {
        return ret;
    }

    for (const auto& it : mPendingInputFrames) {
        if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
            *currentTs = it.first;
            ret = it.second.frameNumber;
        }
    }

    return ret;
}

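// Encode a single matched input set into one JPEG/R blob and queue it to the
// client-facing output surface. The output is a BLOB-format buffer: one row
// of bytes sized from the static jpeg limits (or a P010-derived fallback),
// terminated by a CameraBlob transport header.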
status_t JpegRCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &inputFrame) {
    status_t res;
    sp<ANativeWindow> outputANW = mOutputSurface;
    ANativeWindowBuffer *anb;
    int fenceFd;
    void *dstBuffer;

    size_t maxJpegRBufferSize = 0;
    if (mMaxJpegBufferSize > 0) {
        // If this is an ultra high resolution sensor and the input frame size
        // exceeds the default resolution jpeg, use the larger buffer limit.
        if (mUHRMaxJpegSize.width != 0 &&
                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
            maxJpegRBufferSize = mUHRMaxJpegBufferSize;
        } else {
            maxJpegRBufferSize = mMaxJpegBufferSize;
        }
    } else {
        maxJpegRBufferSize = inputFrame.p010Buffer.width * inputFrame.p010Buffer.height;
    }

    uint8_t jpegQuality = 100;
    auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
    if (entry.count > 0) {
        jpegQuality = entry.data.u8[0];
    }

    if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), maxJpegRBufferSize, 1))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer dimensions"
                " %zux%u for stream %d", __FUNCTION__, maxJpegRBufferSize, 1U, mP010StreamId);
        return res;
    }

    res = outputANW->dequeueBuffer(mOutputSurface.get(), &anb, &fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }

    sp<GraphicBuffer> gb = GraphicBuffer::from(anb);
    GraphicBufferLocker gbLocker(gb);
    res = gbLocker.lockAsync(&dstBuffer, fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return res;
    }

    if ((gb->getWidth() < maxJpegRBufferSize) || (gb->getHeight() != 1)) {
        ALOGE("%s: Blob buffer size mismatch, expected %zux%u received %dx%d", __FUNCTION__,
                maxJpegRBufferSize, 1, gb->getWidth(), gb->getHeight());
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return BAD_VALUE;
    }

    size_t actualJpegRSize = 0;
    ultrahdr::jpegr_uncompressed_struct p010;
    ultrahdr::jpegr_compressed_struct jpegR;
    ultrahdr::JpegR jpegREncoder;

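    // Describe the P010 input for libultrahdr. The encoder expects strides in
    // pixels while the locked buffer reports them in bytes; P010 stores each
    // sample in 16 bits, hence the division by 2 below.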
    p010.height = inputFrame.p010Buffer.height;
    p010.width = inputFrame.p010Buffer.width;
    p010.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
    p010.data = inputFrame.p010Buffer.data;
    p010.chroma_data = inputFrame.p010Buffer.dataCb;
    // Strides are expected to be in pixels not bytes
    p010.luma_stride = inputFrame.p010Buffer.stride / 2;
    p010.chroma_stride = inputFrame.p010Buffer.chromaStride / 2;

    jpegR.data = dstBuffer;
    jpegR.maxLength = maxJpegRBufferSize;

    ultrahdr::ultrahdr_transfer_function transferFunction;
    switch (mP010DynamicRange) {
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
            transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_PQ;
            break;
        default:
            transferFunction = ultrahdr::ultrahdr_transfer_function::ULTRAHDR_TF_HLG;
    }

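    // Two encoder entry points: when an SDR jpeg was captured alongside the
    // P010 buffer, hand it to the encoder so it can be reused as the
    // compressed base image (its EXIF travels with it); otherwise libultrahdr
    // encodes the base image from the P010 input directly, with EXIF
    // generated here from the capture result.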
    if (mSupportInternalJpeg) {
        ultrahdr::jpegr_compressed_struct jpeg;

        jpeg.data = inputFrame.jpegBuffer.data;
        jpeg.length = android::camera2::JpegProcessor::findJpegSize(inputFrame.jpegBuffer.data,
                inputFrame.jpegBuffer.width);
        if (jpeg.length == 0) {
            ALOGW("%s: Failed to find input jpeg size, default to using entire buffer!",
                    __FUNCTION__);
            jpeg.length = inputFrame.jpegBuffer.width;
        }

        if (mOutputColorSpace == ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3) {
            jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_P3;
        } else {
            jpeg.colorGamut = ultrahdr::ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT709;
        }

        res = jpegREncoder.encodeJPEGR(&p010, &jpeg, transferFunction, &jpegR);
    } else {
        const uint8_t* exifBuffer = nullptr;
        size_t exifBufferSize = 0;
        std::unique_ptr<ExifUtils> utils(ExifUtils::create());
        utils->initializeEmpty();
        utils->setFromMetadata(inputFrame.result, mStaticInfo, inputFrame.p010Buffer.width,
                inputFrame.p010Buffer.height);
        if (utils->generateApp1()) {
            exifBuffer = utils->getApp1Buffer();
            exifBufferSize = utils->getApp1Length();
        } else {
            ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
        }

        ultrahdr::jpegr_exif_struct exif;
        exif.data = reinterpret_cast<void*>(const_cast<uint8_t*>(exifBuffer));
        exif.length = exifBufferSize;

        res = jpegREncoder.encodeJPEGR(&p010, transferFunction, &jpegR, jpegQuality, &exif);
    }

    if (res != OK) {
        ALOGE("%s: Error trying to encode JPEG/R: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    actualJpegRSize = jpegR.length;

    size_t finalJpegRSize = actualJpegRSize + sizeof(CameraBlob);
    if (finalJpegRSize > maxJpegRBufferSize) {
        ALOGE("%s: Final jpeg buffer not large enough for the jpeg blob header", __FUNCTION__);
        outputANW->cancelBuffer(mOutputSurface.get(), anb, /*fence*/ -1);
        return NO_MEMORY;
    }

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), ts);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", __FUNCTION__,
                getStreamId(), strerror(-res), res);
        return res;
    }

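    // BLOB buffers carry their payload size in a CameraBlob trailer placed at
    // the very end of the allocation:
    //
    //   [ JPEG/R bitstream | padding | CameraBlob { JPEG, actualJpegRSize } ]
    //     ^ dstBuffer                  ^ dstBuffer + gb->getWidth() - sizeof(CameraBlob)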
    ALOGV("%s: Final jpeg size: %zu", __func__, finalJpegRSize);
    uint8_t* header = static_cast<uint8_t *> (dstBuffer) +
        (gb->getWidth() - sizeof(CameraBlob));
    CameraBlob blobHeader = {
        .blobId = CameraBlobId::JPEG,
        .blobSizeBytes = static_cast<int32_t>(actualJpegRSize)
    };
    memcpy(header, &blobHeader, sizeof(CameraBlob));

    if (inputFrame.requestTimeNs != -1) {
        auto captureLatency = ns2ms(systemTime() - inputFrame.requestTimeNs);
        mSessionStatsBuilder.incCounter(mP010StreamId, false /*dropped*/, captureLatency);
        if (mFirstRequestLatency == -1) {
            mFirstRequestLatency = captureLatency;
        }
    }
    outputANW->queueBuffer(mOutputSurface.get(), anb, /*fence*/ -1);

    return res;
}

void JpegRCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
    if (inputFrame == nullptr) {
        return;
    }

    if (inputFrame->p010Buffer.data != nullptr) {
        mP010Consumer->unlockBuffer(inputFrame->p010Buffer);
        inputFrame->p010Buffer.data = nullptr;
        mP010BufferAcquired = false;
    }

    if (inputFrame->jpegBuffer.data != nullptr) {
        mBlobConsumer->unlockBuffer(inputFrame->jpegBuffer);
        inputFrame->jpegBuffer.data = nullptr;
        mBlobBufferAcquired = false;
    }

    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
        //TODO: Figure out correct requestId
        notifyError(inputFrame->frameNumber, -1 /*requestId*/);
        inputFrame->errorNotified = true;
        mSessionStatsBuilder.incCounter(mP010StreamId, true /*dropped*/, 0 /*captureLatencyMs*/);
    }
}

void JpegRCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
    auto it = mPendingInputFrames.begin();
    while (it != mPendingInputFrames.end()) {
        if (it->first <= currentTs) {
            releaseInputFrameLocked(&it->second);
            it = mPendingInputFrames.erase(it);
        } else {
            it++;
        }
    }
}

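// Main processing loop: gather inputs under the lock, wait until a complete
// input set is ready (or a failing frame can be released), then encode
// outside the lock and release every frame at or before the processed
// timestamp.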
bool JpegRCompositeStream::threadLoop() {
    int64_t currentTs = INT64_MAX;
    bool newInputAvailable = false;

    {
        Mutex::Autolock l(mMutex);

        if (mErrorState) {
            // In case we landed in error state, return any pending buffers and
            // halt all further processing.
            compilePendingInputLocked();
            releaseInputFramesLocked(currentTs);
            return false;
        }

        while (!newInputAvailable) {
            compilePendingInputLocked();
            newInputAvailable = getNextReadyInputLocked(&currentTs);
            if (!newInputAvailable) {
                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
                if (failingFrameNumber >= 0) {
                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
                    // possible for two internal stream buffers to fail. In such a scenario the
                    // composite stream should notify the client about a stream buffer error only
                    // once and this information is kept within 'errorNotified'.
                    // Any present failed input frames will be removed on a subsequent call to
                    // 'releaseInputFramesLocked()'.
                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
                    currentTs = INT64_MAX;
                }

                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
                if (ret == TIMED_OUT) {
                    return true;
                } else if (ret != OK) {
                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
                            strerror(-ret), ret);
                    return false;
                }
            }
        }
    }

    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
    Mutex::Autolock l(mMutex);
    if (res != OK) {
        ALOGE("%s: Failed processing frame with timestamp: %" PRId64 ": %s (%d)", __FUNCTION__,
                currentTs, strerror(-res), res);
        mPendingInputFrames[currentTs].error = true;
    }

    releaseInputFramesLocked(currentTs);

    return true;
}

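// A surface maps to a JPEG/R composite stream when it is BLOB-formatted and
// advertises the JPEG/R dataspace, unless framework-side JPEG/R support is
// disabled entirely.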
bool JpegRCompositeStream::isJpegRCompositeStream(const sp<Surface> &surface) {
    if (CameraProviderManager::kFrameworkJpegRDisabled) {
        return false;
    }
    ANativeWindow *anw = surface.get();
    status_t err;
    int format;
    if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
        ALOGE("%s: Failed to query Surface format: %s (%d)", __FUNCTION__, strerror(-err),
                err);
        return false;
    }

    int dataspace;
    if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
        ALOGE("%s: Failed to query Surface dataspace: %s (%d)", __FUNCTION__, strerror(-err),
                err);
        return false;
    }

    if ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == static_cast<int>(kJpegRDataSpace))) {
        return true;
    }

    return false;
}

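// PQ-based HDR profiles keep their BT.2020 PQ dataspace, the 10-bit Dolby
// Vision profiles map to BT.2020 HLG, and anything else falls back to the
// P010 defaults.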
void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
        int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
    if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
        return;
    }

    switch (dynamicProfile) {
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
            *dynamicRange = dynamicProfile;
            *dataSpace = HAL_DATASPACE_BT2020_ITU_PQ;
            break;
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
        case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
            *dynamicRange = dynamicProfile;
            *dataSpace = HAL_DATASPACE_BT2020_ITU_HLG;
            break;
        default:
            *dynamicRange = kP010DefaultDynamicRange;
            *dataSpace = kP010DefaultDataSpace;
    }
}

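// Create the internal streams backing the composite: a P010 stream is always
// configured; the additional jpeg BLOB stream is only added when the device
// can capture standard dynamic range concurrently with the requested HDR
// profile.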
status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
        bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
        int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) {
    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
        ALOGE("%s: Invalid camera device!", __FUNCTION__);
        return NO_INIT;
    }

    deriveDynamicRangeAndDataspace(dynamicProfile, &mP010DynamicRange, &mP010DataSpace);
    mSupportInternalJpeg = CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
            mStaticInfo, mP010DynamicRange,
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);

    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    mP010Consumer = new CpuConsumer(consumer, /*maxLockedBuffers*/1, /*controlledByApp*/ true);
    mP010Consumer->setFrameAvailableListener(this);
    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
    mP010Surface = new Surface(producer);

    auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
            static_cast<android_dataspace>(mP010DataSpace), rotation,
            id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
            camera3::CAMERA3_STREAM_SET_ID_INVALID, false /*isShared*/, false /*isMultiResolution*/,
            GRALLOC_USAGE_SW_READ_OFTEN, mP010DynamicRange, streamUseCase,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT, OutputConfiguration::MIRROR_MODE_AUTO,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED, useReadoutTimestamp);
    if (ret == OK) {
        mP010StreamId = *id;
        mP010SurfaceId = (*surfaceIds)[0];
        mOutputSurface = consumers[0];
    } else {
        return ret;
    }

    if (mSupportInternalJpeg) {
        BufferQueue::createBufferQueue(&producer, &consumer);
        mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
        mBlobConsumer->setFrameAvailableListener(this);
        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
        mBlobSurface = new Surface(producer);
        std::vector<int> blobSurfaceId;
        ret = device->createStream(mBlobSurface, width, height, format,
                kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
                &blobSurfaceId,
                /*streamSetId*/ camera3::CAMERA3_STREAM_SET_ID_INVALID,
                /*isShared*/ false,
                /*isMultiResolution*/ false,
                /*consumerUsage*/ GRALLOC_USAGE_SW_READ_OFTEN,
                /*dynamicProfile*/ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
                streamUseCase,
                /*timestampBase*/ OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
                /*mirrorMode*/ OutputConfiguration::MIRROR_MODE_AUTO,
                /*colorSpace*/ colorSpace, useReadoutTimestamp);
        if (ret == OK) {
            mBlobSurfaceId = blobSurfaceId[0];
        } else {
            return ret;
        }

        ret = registerCompositeStreamListener(mBlobStreamId);
        if (ret != OK) {
            ALOGE("%s: Failed to register jpeg stream listener!", __FUNCTION__);
            return ret;
        }
    }

    ret = registerCompositeStreamListener(getStreamId());
    if (ret != OK) {
        ALOGE("%s: Failed to register P010 stream listener!", __FUNCTION__);
        return ret;
    }

    mOutputColorSpace = colorSpace;
    mOutputStreamUseCase = streamUseCase;
    mBlobWidth = width;
    mBlobHeight = height;

    return ret;
}

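// Prepare the client-facing output surface for BLOB buffers and size its
// queue so that neither side starves: the producer's and the consumer's
// minimum undequeued buffer counts are added together. The processing thread
// is started last.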
status_t JpegRCompositeStream::configureStream() {
    if (isRunning()) {
        // Processing thread is already running, nothing more to do.
        return NO_ERROR;
    }

    if (mOutputSurface.get() == nullptr) {
        ALOGE("%s: No valid output surface set!", __FUNCTION__);
        return NO_INIT;
    }

    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
    if (res != OK) {
        ALOGE("%s: Unable to connect to native window for stream %d",
                __FUNCTION__, mP010StreamId);
        return res;
    }

    if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
            != OK) {
        ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
                mP010StreamId);
        return res;
    }

    if ((res = native_window_set_usage(mOutputSurface.get(),
            GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN)) != OK) {
        ALOGE("%s: Unable to configure stream buffer usage for stream %d", __FUNCTION__,
                mP010StreamId);
        return res;
    }

    int maxProducerBuffers;
    ANativeWindow *anw = mP010Surface.get();
    if ((res = anw->query(anw, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxProducerBuffers)) != OK) {
        ALOGE("%s: Unable to query producer undequeued"
                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    ANativeWindow *anwConsumer = mOutputSurface.get();
    int maxConsumerBuffers;
    if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
                    &maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to query consumer undequeued"
                " buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    if ((res = native_window_set_buffer_count(
                    anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
        ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mP010StreamId);
        return res;
    }

    mSessionStatsBuilder.addStream(mP010StreamId);

    run("JpegRCompositeStreamProc");

    return NO_ERROR;
}

status_t JpegRCompositeStream::deleteInternalStreams() {
    // The 'CameraDeviceClient' parent will delete the P010 stream
    requestExit();

    auto ret = join();
    if (ret != OK) {
        ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
                strerror(-ret), ret);
    }

    if (mBlobStreamId >= 0) {
        // Camera devices may not be valid after switching to offline mode.
        // In this case, all offline streams including internal composite streams
        // are managed and released by the offline session.
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device.get() != nullptr) {
            ret = device->deleteStream(mBlobStreamId);
        }

        mBlobStreamId = -1;
    }

    if (mOutputSurface != nullptr) {
        mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
        mOutputSurface.clear();
    }

    return ret;
}

void JpegRCompositeStream::onFrameAvailable(const BufferItem& item) {
    if (item.mDataSpace == kJpegDataSpace) {
        ALOGV("%s: Jpeg buffer with ts: %" PRId64 " ms. arrived!",
                __func__, ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputJpegBuffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else if (item.mDataSpace == static_cast<android_dataspace_t>(mP010DataSpace)) {
        ALOGV("%s: P010 buffer with ts: %" PRId64 " ms. arrived!", __func__,
                ns2ms(item.mTimestamp));

        Mutex::Autolock l(mMutex);
        if (!mErrorState) {
            mInputP010Buffers.push_back(item.mTimestamp);
            mInputReadyCondition.signal();
        }
    } else {
        ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
    }
}

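// Route a capture request to the internal streams: the P010 stream (and the
// jpeg stream, when present) are appended to the request's surface map and
// output list, and the P010 stream id is reported as the current stream id.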
status_t JpegRCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
        Vector<int32_t> * /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
    if (outputStreamIds == nullptr) {
        return BAD_VALUE;
    }

    if (outSurfaceMap->find(mP010StreamId) == outSurfaceMap->end()) {
        outputStreamIds->push_back(mP010StreamId);
    }
    (*outSurfaceMap)[mP010StreamId].push_back(mP010SurfaceId);

    if (mSupportInternalJpeg) {
        if (outSurfaceMap->find(mBlobStreamId) == outSurfaceMap->end()) {
            outputStreamIds->push_back(mBlobStreamId);
        }
        (*outSurfaceMap)[mBlobStreamId].push_back(mBlobSurfaceId);
    }

    if (currentStreamId != nullptr) {
        *currentStreamId = mP010StreamId;
    }

    return NO_ERROR;
}

status_t JpegRCompositeStream::insertCompositeStreamIds(
        std::vector<int32_t>* compositeStreamIds /*out*/) {
    if (compositeStreamIds == nullptr) {
        return BAD_VALUE;
    }

    compositeStreamIds->push_back(mP010StreamId);
    if (mSupportInternalJpeg) {
        compositeStreamIds->push_back(mBlobStreamId);
    }

    return OK;
}

void JpegRCompositeStream::onResultError(const CaptureResultExtras& resultExtras) {
    // Processing can continue even in case of result errors.
    // At the moment Jpeg/R composite stream processing relies mainly on static camera
    // characteristics data. The actual result data can be used for the jpeg quality but
    // in case it is absent we can default to maximum.
    eraseResult(resultExtras.frameNumber);
    mSessionStatsBuilder.incResultCounter(true /*dropped*/);
}

bool JpegRCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
    bool ret = false;
    // Buffer errors concerning internal composite streams should not be directly visible to
    // camera clients. They must only receive a single buffer error with the public composite
    // stream id.
    if ((resultExtras.errorStreamId == mP010StreamId) ||
            (resultExtras.errorStreamId == mBlobStreamId)) {
        flagAnErrorFrameNumber(resultExtras.frameNumber);
        ret = true;
    }

    return ret;
}

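// Static helper used during stream configuration: expands one client JPEG/R
// stream into the equivalent internal stream(s) so their resources can be
// accounted for, always the P010 stream, plus the jpeg BLOB stream when
// concurrent dynamic range capture is supported.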
status_t JpegRCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
            const CameraMetadata& staticInfo,
            std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
    if (compositeOutput == nullptr) {
        return BAD_VALUE;
    }

    int64_t dynamicRange, dataSpace;
    deriveDynamicRangeAndDataspace(streamInfo.dynamicRangeProfile, &dynamicRange, &dataSpace);

    compositeOutput->clear();
    compositeOutput->push_back({});
    (*compositeOutput)[0].width = streamInfo.width;
    (*compositeOutput)[0].height = streamInfo.height;
    (*compositeOutput)[0].format = kP010PixelFormat;
    (*compositeOutput)[0].dataSpace = static_cast<android_dataspace_t>(dataSpace);
    (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
    (*compositeOutput)[0].dynamicRangeProfile = dynamicRange;
    (*compositeOutput)[0].colorSpace =
        ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;

    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
                streamInfo.dynamicRangeProfile,
                ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
        compositeOutput->push_back({});
        (*compositeOutput)[1].width = streamInfo.width;
        (*compositeOutput)[1].height = streamInfo.height;
        (*compositeOutput)[1].format = HAL_PIXEL_FORMAT_BLOB;
        (*compositeOutput)[1].dataSpace = kJpegDataSpace;
        (*compositeOutput)[1].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
        (*compositeOutput)[1].dynamicRangeProfile =
            ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
        (*compositeOutput)[1].colorSpace = streamInfo.colorSpace;
    }

    return NO_ERROR;
}

void JpegRCompositeStream::getStreamStats(hardware::CameraStreamStats* streamStats) {
    if ((streamStats == nullptr) || (mFirstRequestLatency == -1)) {
        return;
    }

    bool deviceError;
    std::map<int, StreamStats> stats;
    mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
            &deviceError, &stats);
    if (stats.find(mP010StreamId) != stats.end()) {
        streamStats->mWidth = mBlobWidth;
        streamStats->mHeight = mBlobHeight;
        streamStats->mFormat = HAL_PIXEL_FORMAT_BLOB;
        streamStats->mDataSpace = static_cast<int>(kJpegRDataSpace);
        streamStats->mDynamicRangeProfile = mP010DynamicRange;
        streamStats->mColorSpace = mOutputColorSpace;
        streamStats->mStreamUseCase = mOutputStreamUseCase;
        streamStats->mStartLatencyMs = mFirstRequestLatency;
        streamStats->mHistogramType = hardware::CameraStreamStats::HISTOGRAM_TYPE_CAPTURE_LATENCY;
        streamStats->mHistogramBins.assign(stats[mP010StreamId].mCaptureLatencyBins.begin(),
                stats[mP010StreamId].mCaptureLatencyBins.end());
        streamStats->mHistogramCounts.assign(stats[mP010StreamId].mCaptureLatencyHistogram.begin(),
                stats[mP010StreamId].mCaptureLatencyHistogram.end());
    }
}

}; // namespace camera3
}; // namespace android