/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-DepthPhotoProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
//

#include "DepthPhotoProcessor.h"

#include <algorithm>
#include <dynamic_depth/camera.h>
#include <dynamic_depth/cameras.h>
#include <dynamic_depth/container.h>
#include <dynamic_depth/device.h>
#include <dynamic_depth/dimension.h>
#include <dynamic_depth/dynamic_depth.h>
#include <dynamic_depth/point.h>
#include <dynamic_depth/pose.h>
#include <dynamic_depth/profile.h>
#include <dynamic_depth/profiles.h>
#include <jpeglib.h>
#include <libexif/exif-data.h>
#include <libexif/exif-system.h>
#include <math.h>
#include <sstream>
#include <utils/Errors.h>
#include <utils/ExifUtils.h>
#include <utils/Log.h>
#include <xmpmeta/xmp_data.h>
#include <xmpmeta/xmp_writer.h>

using dynamic_depth::Camera;
using dynamic_depth::Cameras;
using dynamic_depth::CameraParams;
using dynamic_depth::Container;
using dynamic_depth::DepthFormat;
using dynamic_depth::DepthMap;
using dynamic_depth::DepthMapParams;
using dynamic_depth::DepthUnits;
using dynamic_depth::Device;
using dynamic_depth::DeviceParams;
using dynamic_depth::Dimension;
using dynamic_depth::Image;
using dynamic_depth::ImagingModel;
using dynamic_depth::ImagingModelParams;
using dynamic_depth::Item;
using dynamic_depth::Pose;
using dynamic_depth::Profile;
using dynamic_depth::Profiles;

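// Specialize std::default_delete so that a std::unique_ptr<jpeg_compress_struct>
// releases the libjpeg compressor via jpeg_destroy_compress().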
template<>
struct std::default_delete<jpeg_compress_struct> {
    inline void operator()(jpeg_compress_struct* cinfo) const {
        jpeg_destroy_compress(cinfo);
    }
};

namespace android {
namespace camera3 {

// Depth samples with low confidence can skew the
// near/far values and impact the range inverse coding.
static const float CONFIDENCE_THRESHOLD = .15f;

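// Parses the EXIF metadata of the given JPEG buffer and returns its orientation.
// Returns ORIENTATION_UNDEFINED for a null or empty buffer and defaults to
// 0 degrees when the orientation entry is missing or unrecognized.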
ExifOrientation getExifOrientation(const unsigned char *jpegBuffer, size_t jpegBufferSize) {
    if ((jpegBuffer == nullptr) || (jpegBufferSize == 0)) {
        return ExifOrientation::ORIENTATION_UNDEFINED;
    }

    auto exifData = exif_data_new();
    exif_data_load_data(exifData, jpegBuffer, jpegBufferSize);
    ExifEntry *orientation = exif_content_get_entry(exifData->ifd[EXIF_IFD_0],
            EXIF_TAG_ORIENTATION);
    if ((orientation == nullptr) || (orientation->size != sizeof(ExifShort))) {
        ALOGV("%s: Orientation EXIF entry invalid!", __FUNCTION__);
        exif_data_unref(exifData);
        return ExifOrientation::ORIENTATION_0_DEGREES;
    }

    auto orientationValue = exif_get_short(orientation->data, exif_data_get_byte_order(exifData));
    ExifOrientation ret;
    switch (orientationValue) {
        case ExifOrientation::ORIENTATION_0_DEGREES:
        case ExifOrientation::ORIENTATION_90_DEGREES:
        case ExifOrientation::ORIENTATION_180_DEGREES:
        case ExifOrientation::ORIENTATION_270_DEGREES:
            ret = static_cast<ExifOrientation> (orientationValue);
            break;
        default:
            ALOGE("%s: Unexpected EXIF orientation value: %d, defaulting to 0 degrees",
                    __FUNCTION__, orientationValue);
            ret = ExifOrientation::ORIENTATION_0_DEGREES;
    }

    exif_data_unref(exifData);

    return ret;
}

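// Encodes a width x height 8-bit grayscale buffer 'in' as a baseline JPEG into 'out'
// (at most 'maxOutSize' bytes), optionally embedding the given EXIF orientation,
// and reports the compressed size in 'actualSize'.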
status_t encodeGrayscaleJpeg(size_t width, size_t height, uint8_t *in, void *out,
        const size_t maxOutSize, uint8_t jpegQuality, ExifOrientation exifOrientation,
        size_t &actualSize) {
    status_t ret;
    // libjpeg is a C library so we use C-style "inheritance" by
    // putting libjpeg's jpeg_destination_mgr first in our custom
    // struct. This allows us to cast jpeg_destination_mgr* to
    // CustomJpegDestMgr* when we get it passed to us in a callback.
    struct CustomJpegDestMgr : public jpeg_destination_mgr {
        JOCTET *mBuffer;
        size_t mBufferSize;
        size_t mEncodedSize;
        bool mSuccess;
    } dmgr;

    std::unique_ptr<jpeg_compress_struct> cinfo = std::make_unique<jpeg_compress_struct>();
    jpeg_error_mgr jerr;

    // Initialize error handling with standard callbacks, but
    // then override output_message (to print to ALOG) and
    // error_exit to set a flag and print a message instead
    // of killing the whole process.
    cinfo->err = jpeg_std_error(&jerr);

    cinfo->err->output_message = [](j_common_ptr cinfo) {
        char buffer[JMSG_LENGTH_MAX];

        /* Create the message */
        (*cinfo->err->format_message)(cinfo, buffer);
        ALOGE("libjpeg error: %s", buffer);
    };

    cinfo->err->error_exit = [](j_common_ptr cinfo) {
        (*cinfo->err->output_message)(cinfo);
        if(cinfo->client_data) {
            auto & dmgr = *static_cast<CustomJpegDestMgr*>(cinfo->client_data);
            dmgr.mSuccess = false;
        }
    };

    // Now that we initialized some callbacks, let's create our compressor
    jpeg_create_compress(cinfo.get());
    dmgr.mBuffer = static_cast<JOCTET*>(out);
    dmgr.mBufferSize = maxOutSize;
    dmgr.mEncodedSize = 0;
    dmgr.mSuccess = true;
    cinfo->client_data = static_cast<void*>(&dmgr);

    // These lambdas become C-style function pointers and as per C++11 spec
    // may not capture anything.
    dmgr.init_destination = [](j_compress_ptr cinfo) {
        auto & dmgr = static_cast<CustomJpegDestMgr&>(*cinfo->dest);
        dmgr.next_output_byte = dmgr.mBuffer;
        dmgr.free_in_buffer = dmgr.mBufferSize;
        ALOGV("%s:%d jpeg start: %p [%zu]",
              __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize);
    };

    dmgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) {
        ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__);
        return 0;
    };

    dmgr.term_destination = [](j_compress_ptr cinfo) {
        auto & dmgr = static_cast<CustomJpegDestMgr&>(*cinfo->dest);
        dmgr.mEncodedSize = dmgr.mBufferSize - dmgr.free_in_buffer;
        ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize);
    };
    cinfo->dest = static_cast<struct jpeg_destination_mgr*>(&dmgr);
    cinfo->image_width = width;
    cinfo->image_height = height;
    cinfo->input_components = 1;
    cinfo->in_color_space = JCS_GRAYSCALE;

    // Initialize defaults and then override what we want
    jpeg_set_defaults(cinfo.get());

    jpeg_set_quality(cinfo.get(), jpegQuality, 1);
    jpeg_set_colorspace(cinfo.get(), JCS_GRAYSCALE);
    cinfo->raw_data_in = 0;
    cinfo->dct_method = JDCT_IFAST;

    cinfo->comp_info[0].h_samp_factor = 1;
    cinfo->comp_info[1].h_samp_factor = 1;
    cinfo->comp_info[2].h_samp_factor = 1;
    cinfo->comp_info[0].v_samp_factor = 1;
    cinfo->comp_info[1].v_samp_factor = 1;
    cinfo->comp_info[2].v_samp_factor = 1;

    jpeg_start_compress(cinfo.get(), TRUE);

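    // Embed the requested orientation in an EXIF APP1 marker before writing any scanlines.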
    if (exifOrientation != ExifOrientation::ORIENTATION_UNDEFINED) {
        std::unique_ptr<ExifUtils> utils(ExifUtils::create());
        utils->initializeEmpty();
        utils->setImageWidth(width);
        utils->setImageHeight(height);
        utils->setOrientationValue(exifOrientation);

        if (utils->generateApp1()) {
            const uint8_t* exifBuffer = utils->getApp1Buffer();
            size_t exifBufferSize = utils->getApp1Length();
            jpeg_write_marker(cinfo.get(), JPEG_APP0 + 1, static_cast<const JOCTET*>(exifBuffer),
                    exifBufferSize);
        } else {
            ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
        }
    }

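    // Feed the grayscale input one row at a time; each scanline is exactly 'width' bytes.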
    for (size_t i = 0; i < cinfo->image_height; i++) {
        auto currentRow  = static_cast<JSAMPROW>(in + i*width);
        jpeg_write_scanlines(cinfo.get(), &currentRow, /*num_lines*/1);
    }

    jpeg_finish_compress(cinfo.get());

    actualSize = dmgr.mEncodedSize;
    if (dmgr.mSuccess) {
        ret = NO_ERROR;
    } else {
        ret = UNKNOWN_ERROR;
    }

    return ret;
}

inline void unpackDepth16(uint16_t value, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    // Android densely packed depth map. The units for the range are in
    // millimeters and need to be scaled to meters.
    // The confidence value is encoded in the 3 most significant bits.
    // The confidence data needs to be additionally normalized with
    // values 1.0f, 0.0f representing maximum and minimum confidence
    // respectively.
    auto point = static_cast<float>(value & 0x1FFF) / 1000.f;
    points->push_back(point);

    auto conf = (value >> 13) & 0x7;
    float normConfidence = (conf == 0) ? 1.f : (static_cast<float>(conf) - 1) / 7.f;
    confidence->push_back(normConfidence);
    if (normConfidence < CONFIDENCE_THRESHOLD) {
        return;
    }

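    // Only samples with sufficient confidence contribute to the near/far range
    // used later for the range inverse encoding.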
    if (*near > point) {
        *near = point;
    }
    if (*far < point) {
        *far = point;
    }
}

// Trivial case, read forward from the top left corner.
void rotate0AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    for (size_t i = 0; i < inputFrame.mDepthMapHeight; i++) {
        for (size_t j = 0; j < inputFrame.mDepthMapWidth; j++) {
            unpackDepth16(inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j], points,
                    confidence, near, far);
        }
    }
}

// A 90 degree CW rotation can be applied by starting to read from the bottom left corner,
// transposing rows and columns.
void rotate90AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    for (size_t i = 0; i < inputFrame.mDepthMapWidth; i++) {
        for (ssize_t j = inputFrame.mDepthMapHeight-1; j >= 0; j--) {
            unpackDepth16(inputFrame.mDepthMapBuffer[j*inputFrame.mDepthMapStride + i], points,
                    confidence, near, far);
        }
    }
}

// A 180 degree CW rotation can be applied by reading backwards, starting from the
// bottom right corner.
void rotate180AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    for (ssize_t i = inputFrame.mDepthMapHeight-1; i >= 0; i--) {
        for (ssize_t j = inputFrame.mDepthMapWidth-1; j >= 0; j--) {
            unpackDepth16(inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j], points,
                    confidence, near, far);
        }
    }
}

// A 270 degree CW rotation can be applied by starting to read from the top right corner,
// transposing rows and columns.
void rotate270AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    for (ssize_t i = inputFrame.mDepthMapWidth-1; i >= 0; i--) {
        for (size_t j = 0; j < inputFrame.mDepthMapHeight; j++) {
            unpackDepth16(inputFrame.mDepthMapBuffer[j*inputFrame.mDepthMapStride + i], points,
                    confidence, near, far);
        }
    }
}

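// Unpacks the depth map while applying the rotation implied by the depth photo
// orientation. Returns true when the rotation (90 or 270 degrees CW) swaps the
// width and height of the unpacked map.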
bool rotateAndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    switch (inputFrame.mOrientation) {
        case DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES:
            rotate0AndUnpack(inputFrame, points, confidence, near, far);
            return false;
        case DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES:
            rotate90AndUnpack(inputFrame, points, confidence, near, far);
            return true;
        case DepthPhotoOrientation::DEPTH_ORIENTATION_180_DEGREES:
            rotate180AndUnpack(inputFrame, points, confidence, near, far);
            return false;
        case DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES:
            rotate270AndUnpack(inputFrame, points, confidence, near, far);
            return true;
        default:
            ALOGE("%s: Unsupported depth photo rotation: %d, default to 0", __FUNCTION__,
                    inputFrame.mOrientation);
            rotate0AndUnpack(inputFrame, points, confidence, near, far);
    }

    return false;
}

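// Compresses the depth and confidence maps into grayscale JPEGs and wraps them in a
// dynamic_depth::DepthMap. Returns nullptr on failure. 'switchDimensions' is set to
// true when the applied rotation swaps the depth map width and height.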
std::unique_ptr<dynamic_depth::DepthMap> processDepthMapFrame(DepthPhotoInputFrame inputFrame,
        ExifOrientation exifOrientation, std::vector<std::unique_ptr<Item>> *items /*out*/,
        bool *switchDimensions /*out*/) {
    if ((items == nullptr) || (switchDimensions == nullptr)) {
        return nullptr;
    }

    std::vector<float> points, confidence;

    size_t pointCount = inputFrame.mDepthMapWidth * inputFrame.mDepthMapHeight;
    points.reserve(pointCount);
    confidence.reserve(pointCount);
    float near = UINT16_MAX;
    float far = .0f;
    *switchDimensions = false;
    // Physical rotation of the depth and confidence maps may be needed in case
    // the EXIF orientation is set to 0 degrees and the depth photo orientation
    // (of the source color image) has a different value.
    if (exifOrientation == ExifOrientation::ORIENTATION_0_DEGREES) {
        *switchDimensions = rotateAndUnpack(inputFrame, &points, &confidence, &near, &far);
    } else {
        rotate0AndUnpack(inputFrame, &points, &confidence, &near, &far);
    }

    size_t width = inputFrame.mDepthMapWidth;
    size_t height = inputFrame.mDepthMapHeight;
    if (*switchDimensions) {
        width = inputFrame.mDepthMapHeight;
        height = inputFrame.mDepthMapWidth;
    }

    if (near == far) {
        ALOGE("%s: Near and far range values must not match!", __FUNCTION__);
        return nullptr;
    }

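    // Quantize the depth to 8 bits using the range inverse mapping
    // q = 255 * (far * (d - near)) / (d * (far - near)), matching the
    // DepthFormat::kRangeInverse declaration below. Low confidence samples were
    // excluded from the near/far computation, so clamp them into [near, far] first.
    // Confidence is scaled linearly to [0, 255].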
    std::vector<uint8_t> pointsQuantized, confidenceQuantized;
    pointsQuantized.reserve(pointCount); confidenceQuantized.reserve(pointCount);
    auto pointIt = points.begin();
    auto confidenceIt = confidence.begin();
    while ((pointIt != points.end()) && (confidenceIt != confidence.end())) {
        auto point = *pointIt;
        if ((*confidenceIt) < CONFIDENCE_THRESHOLD) {
            point = std::clamp(point, near, far);
        }
        pointsQuantized.push_back(floorf(((far * (point - near)) /
                (point * (far - near))) * 255.0f));
        confidenceQuantized.push_back(floorf(*confidenceIt * 255.0f));
        confidenceIt++; pointIt++;
    }

    DepthMapParams depthParams(DepthFormat::kRangeInverse, near, far, DepthUnits::kMeters,
            "android/depthmap");
    depthParams.confidence_uri = "android/confidencemap";
    depthParams.mime = "image/jpeg";
    depthParams.depth_image_data.resize(inputFrame.mMaxJpegSize);
    depthParams.confidence_data.resize(inputFrame.mMaxJpegSize);
    size_t actualJpegSize;
    auto ret = encodeGrayscaleJpeg(width, height, pointsQuantized.data(),
            depthParams.depth_image_data.data(), inputFrame.mMaxJpegSize,
            inputFrame.mJpegQuality, exifOrientation, actualJpegSize);
    if (ret != NO_ERROR) {
        ALOGE("%s: Depth map compression failed!", __FUNCTION__);
        return nullptr;
    }
    depthParams.depth_image_data.resize(actualJpegSize);

    ret = encodeGrayscaleJpeg(width, height, confidenceQuantized.data(),
            depthParams.confidence_data.data(), inputFrame.mMaxJpegSize,
            inputFrame.mJpegQuality, exifOrientation, actualJpegSize);
    if (ret != NO_ERROR) {
        ALOGE("%s: Confidence map compression failed!", __FUNCTION__);
        return nullptr;
    }
    depthParams.confidence_data.resize(actualJpegSize);

    return DepthMap::FromData(depthParams, items);
}

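// Combines the primary color JPEG and the depth map of 'inputFrame' into a single
// Dynamic Depth photo written to 'depthPhotoBuffer'. Returns 0 on success or an
// error code (BAD_VALUE, NO_MEMORY) otherwise.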
int processDepthPhotoFrame(DepthPhotoInputFrame inputFrame, size_t depthPhotoBufferSize,
        void* depthPhotoBuffer /*out*/, size_t* depthPhotoActualSize /*out*/) {
    if ((inputFrame.mMainJpegBuffer == nullptr) || (inputFrame.mDepthMapBuffer == nullptr) ||
            (depthPhotoBuffer == nullptr) || (depthPhotoActualSize == nullptr)) {
        return BAD_VALUE;
    }

    std::vector<std::unique_ptr<Item>> items;
    std::vector<std::unique_ptr<Camera>> cameraList;
    auto image = Image::FromDataForPrimaryImage("image/jpeg", &items);
    std::unique_ptr<CameraParams> cameraParams(new CameraParams(std::move(image)));
    if (cameraParams == nullptr) {
        ALOGE("%s: Failed to initialize camera parameters", __FUNCTION__);
        return BAD_VALUE;
    }

    ExifOrientation exifOrientation = getExifOrientation(
            reinterpret_cast<const unsigned char*> (inputFrame.mMainJpegBuffer),
            inputFrame.mMainJpegSize);
    bool switchDimensions;
    cameraParams->depth_map = processDepthMapFrame(inputFrame, exifOrientation, &items,
            &switchDimensions);
    if (cameraParams->depth_map == nullptr) {
        ALOGE("%s: Depth map processing failed!", __FUNCTION__);
        return BAD_VALUE;
    }

    // It is not possible to generate an imaging model without intrinsic calibration.
    if (inputFrame.mIsIntrinsicCalibrationValid) {
        // The camera intrinsic calibration layout is as follows:
        // [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
        const dynamic_depth::Point<double> focalLength(inputFrame.mIntrinsicCalibration[0],
                inputFrame.mIntrinsicCalibration[1]);
        size_t width = inputFrame.mMainJpegWidth;
        size_t height = inputFrame.mMainJpegHeight;
        if (switchDimensions) {
            width = inputFrame.mMainJpegHeight;
            height = inputFrame.mMainJpegWidth;
        }
        const Dimension imageSize(width, height);
        ImagingModelParams imagingParams(focalLength, imageSize);
        imagingParams.principal_point.x = inputFrame.mIntrinsicCalibration[2];
        imagingParams.principal_point.y = inputFrame.mIntrinsicCalibration[3];
        imagingParams.skew = inputFrame.mIntrinsicCalibration[4];

        // The camera lens distortion contains the following lens correction coefficients:
        // [kappa_1, kappa_2, kappa_3, kappa_4, kappa_5]
        if (inputFrame.mIsLensDistortionValid) {
            // According to the specification, the lens distortion coefficients should be ordered
            // as [1, kappa_4, kappa_1, kappa_5, kappa_2, 0, kappa_3, 0].
            float distortionData[] = {1.f, inputFrame.mLensDistortion[3],
                    inputFrame.mLensDistortion[0], inputFrame.mLensDistortion[4],
                    inputFrame.mLensDistortion[1], 0.f, inputFrame.mLensDistortion[2], 0.f};
            auto distortionDataLength = sizeof(distortionData) / sizeof(distortionData[0]);
            imagingParams.distortion.reserve(distortionDataLength);
            imagingParams.distortion.insert(imagingParams.distortion.end(), distortionData,
                    distortionData + distortionDataLength);
        }

        cameraParams->imaging_model = ImagingModel::FromData(imagingParams);
    }

    if (inputFrame.mIsLogical) {
        cameraParams->trait = dynamic_depth::CameraTrait::LOGICAL;
    } else {
        cameraParams->trait = dynamic_depth::CameraTrait::PHYSICAL;
    }

    cameraList.emplace_back(Camera::FromData(std::move(cameraParams)));

    auto deviceParams = std::make_unique<DeviceParams> (Cameras::FromCameraArray(&cameraList));
    deviceParams->container = Container::FromItems(&items);
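    // Advertise a "DepthPhoto" profile referencing the first (and only) camera in the list.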
    std::vector<std::unique_ptr<Profile>> profileList;
    profileList.emplace_back(Profile::FromData("DepthPhoto", {0}));
    deviceParams->profiles = Profiles::FromProfileArray(&profileList);
    std::unique_ptr<Device> device = Device::FromData(std::move(deviceParams));
    if (device == nullptr) {
        ALOGE("%s: Failed to initialize camera device", __FUNCTION__);
        return BAD_VALUE;
    }

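    // Combine the primary JPEG, the Dynamic Depth XMP metadata and the container items
    // (depth and confidence maps) into the final depth photo output.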
    std::istringstream inputJpegStream(
            std::string(inputFrame.mMainJpegBuffer, inputFrame.mMainJpegSize));
    std::ostringstream outputJpegStream;
    if (!WriteImageAndMetadataAndContainer(&inputJpegStream, device.get(), &outputJpegStream)) {
        ALOGE("%s: Failed writing depth output", __FUNCTION__);
        return BAD_VALUE;
    }

    *depthPhotoActualSize = static_cast<size_t> (outputJpegStream.tellp());
    if (*depthPhotoActualSize > depthPhotoBufferSize) {
        ALOGE("%s: Depth photo output buffer not sufficient, needed %zu actual %zu", __FUNCTION__,
                *depthPhotoActualSize, depthPhotoBufferSize);
        return NO_MEMORY;
    }

    memcpy(depthPhotoBuffer, outputJpegStream.str().c_str(), *depthPhotoActualSize);

    return 0;
}

}; // namespace camera3
}; // namespace android