/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-DepthPhotoProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
//

#include "DepthPhotoProcessor.h"

#include <dynamic_depth/camera.h>
#include <dynamic_depth/cameras.h>
#include <dynamic_depth/container.h>
#include <dynamic_depth/device.h>
#include <dynamic_depth/dimension.h>
#include <dynamic_depth/dynamic_depth.h>
#include <dynamic_depth/point.h>
#include <dynamic_depth/pose.h>
#include <dynamic_depth/profile.h>
#include <dynamic_depth/profiles.h>
#include <jpeglib.h>
#include <libexif/exif-data.h>
#include <libexif/exif-system.h>
#include <math.h>
#include <sstream>
#include <utils/Errors.h>
#include <utils/ExifUtils.h>
#include <utils/Log.h>
#include <xmpmeta/xmp_data.h>
#include <xmpmeta/xmp_writer.h>

#ifndef __unused
#define __unused __attribute__((__unused__))
#endif

using dynamic_depth::Camera;
using dynamic_depth::Cameras;
using dynamic_depth::CameraParams;
using dynamic_depth::Container;
using dynamic_depth::DepthFormat;
using dynamic_depth::DepthMap;
using dynamic_depth::DepthMapParams;
using dynamic_depth::DepthUnits;
using dynamic_depth::Device;
using dynamic_depth::DeviceParams;
using dynamic_depth::Dimension;
using dynamic_depth::Image;
using dynamic_depth::ImagingModel;
using dynamic_depth::ImagingModelParams;
using dynamic_depth::Item;
using dynamic_depth::Pose;
using dynamic_depth::Profile;
using dynamic_depth::Profiles;

template<>
struct std::default_delete<jpeg_compress_struct> {
    inline void operator()(jpeg_compress_struct* cinfo) const {
        jpeg_destroy_compress(cinfo);
    }
};

namespace android {
namespace camera3 {

// Depth samples with low confidence can skew the
// near/far values and impact the range inverse coding.
static const float CONFIDENCE_THRESHOLD = .15f;
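// Note: samples whose normalized confidence falls below this threshold are
// excluded from the near/far range estimation in unpackDepth16() and are
// instead clamped to [near, far] before the range inverse quantization in
// processDepthMapFrame().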

ExifOrientation getExifOrientation(const unsigned char *jpegBuffer, size_t jpegBufferSize) {
    if ((jpegBuffer == nullptr) || (jpegBufferSize == 0)) {
        return ExifOrientation::ORIENTATION_UNDEFINED;
    }

    auto exifData = exif_data_new();
    exif_data_load_data(exifData, jpegBuffer, jpegBufferSize);
    ExifEntry *orientation = exif_content_get_entry(exifData->ifd[EXIF_IFD_0],
            EXIF_TAG_ORIENTATION);
    if ((orientation == nullptr) || (orientation->size != sizeof(ExifShort))) {
        ALOGV("%s: Orientation EXIF entry invalid!", __FUNCTION__);
        exif_data_unref(exifData);
        return ExifOrientation::ORIENTATION_0_DEGREES;
    }

    auto orientationValue = exif_get_short(orientation->data, exif_data_get_byte_order(exifData));
    ExifOrientation ret;
    switch (orientationValue) {
        case ExifOrientation::ORIENTATION_0_DEGREES:
        case ExifOrientation::ORIENTATION_90_DEGREES:
        case ExifOrientation::ORIENTATION_180_DEGREES:
        case ExifOrientation::ORIENTATION_270_DEGREES:
            ret = static_cast<ExifOrientation> (orientationValue);
            break;
        default:
            ALOGE("%s: Unexpected EXIF orientation value: %d, defaulting to 0 degrees",
                    __FUNCTION__, orientationValue);
            ret = ExifOrientation::ORIENTATION_0_DEGREES;
    }

    exif_data_unref(exifData);

    return ret;
}

status_t encodeGrayscaleJpeg(size_t width, size_t height, uint8_t *in, void *out,
        const size_t maxOutSize, uint8_t jpegQuality, ExifOrientation exifOrientation,
        size_t &actualSize) {
    status_t ret;
    // libjpeg is a C library so we use C-style "inheritance" by
    // putting libjpeg's jpeg_destination_mgr first in our custom
    // struct. This allows us to cast jpeg_destination_mgr* to
    // CustomJpegDestMgr* when we get it passed to us in a callback.
    struct CustomJpegDestMgr : public jpeg_destination_mgr {
        JOCTET *mBuffer;
        size_t mBufferSize;
        size_t mEncodedSize;
        bool mSuccess;
    } dmgr;

    std::unique_ptr<jpeg_compress_struct> cinfo = std::make_unique<jpeg_compress_struct>();
    jpeg_error_mgr jerr;

    // Initialize error handling with standard callbacks, but
    // then override output_message (to print to ALOG) and
    // error_exit to set a flag and print a message instead
    // of killing the whole process.
    cinfo->err = jpeg_std_error(&jerr);

    cinfo->err->output_message = [](j_common_ptr cinfo) {
        char buffer[JMSG_LENGTH_MAX];

        /* Create the message */
        (*cinfo->err->format_message)(cinfo, buffer);
        ALOGE("libjpeg error: %s", buffer);
    };

    cinfo->err->error_exit = [](j_common_ptr cinfo) {
        (*cinfo->err->output_message)(cinfo);
        if (cinfo->client_data) {
            auto & dmgr = *static_cast<CustomJpegDestMgr*>(cinfo->client_data);
            dmgr.mSuccess = false;
        }
    };

    // Now that we initialized some callbacks, let's create our compressor
    jpeg_create_compress(cinfo.get());
    dmgr.mBuffer = static_cast<JOCTET*>(out);
    dmgr.mBufferSize = maxOutSize;
    dmgr.mEncodedSize = 0;
    dmgr.mSuccess = true;
    cinfo->client_data = static_cast<void*>(&dmgr);

    // These lambdas become C-style function pointers and as per C++11 spec
    // may not capture anything.
    dmgr.init_destination = [](j_compress_ptr cinfo) {
        auto & dmgr = static_cast<CustomJpegDestMgr&>(*cinfo->dest);
        dmgr.next_output_byte = dmgr.mBuffer;
        dmgr.free_in_buffer = dmgr.mBufferSize;
        ALOGV("%s:%d jpeg start: %p [%zu]",
                __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize);
    };

    dmgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) {
        ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__);
        return 0;
    };

    dmgr.term_destination = [](j_compress_ptr cinfo) {
        auto & dmgr = static_cast<CustomJpegDestMgr&>(*cinfo->dest);
        dmgr.mEncodedSize = dmgr.mBufferSize - dmgr.free_in_buffer;
        ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize);
    };
    cinfo->dest = static_cast<struct jpeg_destination_mgr*>(&dmgr);
    cinfo->image_width = width;
    cinfo->image_height = height;
    cinfo->input_components = 1;
    cinfo->in_color_space = JCS_GRAYSCALE;

    // Initialize defaults and then override what we want
    jpeg_set_defaults(cinfo.get());

    jpeg_set_quality(cinfo.get(), jpegQuality, 1);
    jpeg_set_colorspace(cinfo.get(), JCS_GRAYSCALE);
    cinfo->raw_data_in = 0;
    cinfo->dct_method = JDCT_IFAST;

    cinfo->comp_info[0].h_samp_factor = 1;
    cinfo->comp_info[1].h_samp_factor = 1;
    cinfo->comp_info[2].h_samp_factor = 1;
    cinfo->comp_info[0].v_samp_factor = 1;
    cinfo->comp_info[1].v_samp_factor = 1;
    cinfo->comp_info[2].v_samp_factor = 1;
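    // Note: with JCS_GRAYSCALE only component 0 is actually encoded; the unit
    // sampling factors above request no subsampling, and the entries for
    // components 1 and 2 remain unused for a single-component image.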

    jpeg_start_compress(cinfo.get(), TRUE);

    if (exifOrientation != ExifOrientation::ORIENTATION_UNDEFINED) {
        std::unique_ptr<ExifUtils> utils(ExifUtils::create());
        utils->initializeEmpty();
        utils->setImageWidth(width);
        utils->setImageHeight(height);
        utils->setOrientationValue(exifOrientation);

        if (utils->generateApp1()) {
            const uint8_t* exifBuffer = utils->getApp1Buffer();
            size_t exifBufferSize = utils->getApp1Length();
            jpeg_write_marker(cinfo.get(), JPEG_APP0 + 1, static_cast<const JOCTET*>(exifBuffer),
                    exifBufferSize);
        } else {
            ALOGE("%s: Unable to generate App1 buffer", __FUNCTION__);
        }
    }

    for (size_t i = 0; i < cinfo->image_height; i++) {
        auto currentRow = static_cast<JSAMPROW>(in + i*width);
        jpeg_write_scanlines(cinfo.get(), &currentRow, /*num_lines*/1);
    }

    jpeg_finish_compress(cinfo.get());

    actualSize = dmgr.mEncodedSize;
    if (dmgr.mSuccess) {
        ret = NO_ERROR;
    } else {
        ret = UNKNOWN_ERROR;
    }

    return ret;
}

inline void unpackDepth16(uint16_t value, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    // Android densely packed depth map. The units for the range are in
    // millimeters and need to be scaled to meters.
    // The confidence value is encoded in the 3 most significant bits.
    // The confidence data needs to be additionally normalized with
    // values 1.0f, 0.0f representing maximum and minimum confidence
    // respectively.
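    // For example, a raw DEPTH16 sample of 0x27D0 carries a range of
    // 0x07D0 = 2000 mm -> 2.0 m in its lower 13 bits and confidence bits 001
    // in its upper 3 bits, which the normalization below maps to 0.0f.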
    auto point = static_cast<float>(value & 0x1FFF) / 1000.f;
    points->push_back(point);

    auto conf = (value >> 13) & 0x7;
    float normConfidence = (conf == 0) ? 1.f : (static_cast<float>(conf) - 1) / 7.f;
    confidence->push_back(normConfidence);
    if (normConfidence < CONFIDENCE_THRESHOLD) {
        return;
    }

    if (*near > point) {
        *near = point;
    }
    if (*far < point) {
        *far = point;
    }
}

// Trivial case, read forward from top,left corner.
void rotate0AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    for (size_t i = 0; i < inputFrame.mDepthMapHeight; i++) {
        for (size_t j = 0; j < inputFrame.mDepthMapWidth; j++) {
            unpackDepth16(inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j], points,
                    confidence, near, far);
        }
    }
}

// 90 degrees CW rotation can be applied by starting to read from bottom, left corner
// transposing rows and columns.
void rotate90AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    for (size_t i = 0; i < inputFrame.mDepthMapWidth; i++) {
        for (ssize_t j = inputFrame.mDepthMapHeight-1; j >= 0; j--) {
            unpackDepth16(inputFrame.mDepthMapBuffer[j*inputFrame.mDepthMapStride + i], points,
                    confidence, near, far);
        }
    }
}

// 180 degrees CW rotation can be applied by starting to read backwards from bottom, right corner.
void rotate180AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    for (ssize_t i = inputFrame.mDepthMapHeight-1; i >= 0; i--) {
        for (ssize_t j = inputFrame.mDepthMapWidth-1; j >= 0; j--) {
            unpackDepth16(inputFrame.mDepthMapBuffer[i*inputFrame.mDepthMapStride + j], points,
                    confidence, near, far);
        }
    }
}

// 270 degrees CW rotation can be applied by starting to read from top, right corner
// transposing rows and columns.
void rotate270AndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    for (ssize_t i = inputFrame.mDepthMapWidth-1; i >= 0; i--) {
        for (size_t j = 0; j < inputFrame.mDepthMapHeight; j++) {
            unpackDepth16(inputFrame.mDepthMapBuffer[j*inputFrame.mDepthMapStride + i], points,
                    confidence, near, far);
        }
    }
}

bool rotateAndUnpack(DepthPhotoInputFrame inputFrame, std::vector<float> *points /*out*/,
        std::vector<float> *confidence /*out*/, float *near /*out*/, float *far /*out*/) {
    switch (inputFrame.mOrientation) {
        case DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES:
            rotate0AndUnpack(inputFrame, points, confidence, near, far);
            return false;
        case DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES:
            rotate90AndUnpack(inputFrame, points, confidence, near, far);
            return true;
        case DepthPhotoOrientation::DEPTH_ORIENTATION_180_DEGREES:
            rotate180AndUnpack(inputFrame, points, confidence, near, far);
            return false;
        case DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES:
            rotate270AndUnpack(inputFrame, points, confidence, near, far);
            return true;
        default:
            ALOGE("%s: Unsupported depth photo rotation: %d, default to 0", __FUNCTION__,
                    inputFrame.mOrientation);
            rotate0AndUnpack(inputFrame, points, confidence, near, far);
    }

    return false;
}

std::unique_ptr<dynamic_depth::DepthMap> processDepthMapFrame(DepthPhotoInputFrame inputFrame,
        ExifOrientation exifOrientation, std::vector<std::unique_ptr<Item>> *items /*out*/,
        bool *switchDimensions /*out*/) {
    if ((items == nullptr) || (switchDimensions == nullptr)) {
        return nullptr;
    }

    std::vector<float> points, confidence;

    size_t pointCount = inputFrame.mDepthMapWidth * inputFrame.mDepthMapHeight;
    points.reserve(pointCount);
    confidence.reserve(pointCount);
    float near = UINT16_MAX;
    float far = .0f;
    *switchDimensions = false;
    // Physical rotation of depth and confidence maps may be needed in case
    // the EXIF orientation is set to 0 degrees and the depth photo orientation
    // (source color image) has some different value.
    if (exifOrientation == ExifOrientation::ORIENTATION_0_DEGREES) {
        *switchDimensions = rotateAndUnpack(inputFrame, &points, &confidence, &near, &far);
    } else {
        rotate0AndUnpack(inputFrame, &points, &confidence, &near, &far);
    }

    size_t width = inputFrame.mDepthMapWidth;
    size_t height = inputFrame.mDepthMapHeight;
    if (*switchDimensions) {
        width = inputFrame.mDepthMapHeight;
        height = inputFrame.mDepthMapWidth;
    }

    if (near == far) {
        ALOGE("%s: Near and far range values must not match!", __FUNCTION__);
        return nullptr;
    }

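    // Quantize the samples to 8 bits using the range inverse encoding
    // q = far * (d - near) / (d * (far - near)), scaled to [0, 255]. A consumer
    // can recover the metric depth via d = (far * near) / (far - (q / 255) * (far - near)).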
    std::vector<uint8_t> pointsQuantized, confidenceQuantized;
    pointsQuantized.reserve(pointCount); confidenceQuantized.reserve(pointCount);
    auto pointIt = points.begin();
    auto confidenceIt = confidence.begin();
    while ((pointIt != points.end()) && (confidenceIt != confidence.end())) {
        auto point = *pointIt;
        if ((*confidenceIt) < CONFIDENCE_THRESHOLD) {
            point = std::clamp(point, near, far);
        }
        pointsQuantized.push_back(floorf(((far * (point - near)) /
                (point * (far - near))) * 255.0f));
        confidenceQuantized.push_back(floorf(*confidenceIt * 255.0f));
        confidenceIt++; pointIt++;
    }

    DepthMapParams depthParams(DepthFormat::kRangeInverse, near, far, DepthUnits::kMeters,
            "android/depthmap");
    depthParams.confidence_uri = "android/confidencemap";
    depthParams.mime = "image/jpeg";
    depthParams.depth_image_data.resize(inputFrame.mMaxJpegSize);
    depthParams.confidence_data.resize(inputFrame.mMaxJpegSize);
    size_t actualJpegSize;
    auto ret = encodeGrayscaleJpeg(width, height, pointsQuantized.data(),
            depthParams.depth_image_data.data(), inputFrame.mMaxJpegSize,
            inputFrame.mJpegQuality, exifOrientation, actualJpegSize);
    if (ret != NO_ERROR) {
        ALOGE("%s: Depth map compression failed!", __FUNCTION__);
        return nullptr;
    }
    depthParams.depth_image_data.resize(actualJpegSize);

    ret = encodeGrayscaleJpeg(width, height, confidenceQuantized.data(),
            depthParams.confidence_data.data(), inputFrame.mMaxJpegSize,
            inputFrame.mJpegQuality, exifOrientation, actualJpegSize);
    if (ret != NO_ERROR) {
        ALOGE("%s: Confidence map compression failed!", __FUNCTION__);
        return nullptr;
    }
    depthParams.confidence_data.resize(actualJpegSize);

    return DepthMap::FromData(depthParams, items);
}

int processDepthPhotoFrame(DepthPhotoInputFrame inputFrame, size_t depthPhotoBufferSize,
        void* depthPhotoBuffer /*out*/, size_t* depthPhotoActualSize /*out*/) {
    if ((inputFrame.mMainJpegBuffer == nullptr) || (inputFrame.mDepthMapBuffer == nullptr) ||
            (depthPhotoBuffer == nullptr) || (depthPhotoActualSize == nullptr)) {
        return BAD_VALUE;
    }

    std::vector<std::unique_ptr<Item>> items;
    std::vector<std::unique_ptr<Camera>> cameraList;
    auto image = Image::FromDataForPrimaryImage("image/jpeg", &items);
    std::unique_ptr<CameraParams> cameraParams(new CameraParams(std::move(image)));
    if (cameraParams == nullptr) {
        ALOGE("%s: Failed to initialize camera parameters", __FUNCTION__);
        return BAD_VALUE;
    }

    ExifOrientation exifOrientation = getExifOrientation(
            reinterpret_cast<const unsigned char*> (inputFrame.mMainJpegBuffer),
            inputFrame.mMainJpegSize);
    bool switchDimensions;
    cameraParams->depth_map = processDepthMapFrame(inputFrame, exifOrientation, &items,
            &switchDimensions);
    if (cameraParams->depth_map == nullptr) {
        ALOGE("%s: Depth map processing failed!", __FUNCTION__);
        return BAD_VALUE;
    }

    // It is not possible to generate an imaging model without intrinsic calibration.
    if (inputFrame.mIsIntrinsicCalibrationValid) {
        // The camera intrinsic calibration layout is as follows:
        // [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
        const dynamic_depth::Point<double> focalLength(inputFrame.mIntrinsicCalibration[0],
                inputFrame.mIntrinsicCalibration[1]);
        size_t width = inputFrame.mMainJpegWidth;
        size_t height = inputFrame.mMainJpegHeight;
        if (switchDimensions) {
            width = inputFrame.mMainJpegHeight;
            height = inputFrame.mMainJpegWidth;
        }
        const Dimension imageSize(width, height);
        ImagingModelParams imagingParams(focalLength, imageSize);
        imagingParams.principal_point.x = inputFrame.mIntrinsicCalibration[2];
        imagingParams.principal_point.y = inputFrame.mIntrinsicCalibration[3];
        imagingParams.skew = inputFrame.mIntrinsicCalibration[4];

        // The camera lens distortion contains the following lens correction coefficients.
        // [kappa_1, kappa_2, kappa_3, kappa_4, kappa_5]
        if (inputFrame.mIsLensDistortionValid) {
            // According to specification the lens distortion coefficients should be ordered
            // as [1, kappa_4, kappa_1, kappa_5, kappa_2, 0, kappa_3, 0]
            float distortionData[] = {1.f, inputFrame.mLensDistortion[3],
                    inputFrame.mLensDistortion[0], inputFrame.mLensDistortion[4],
                    inputFrame.mLensDistortion[1], 0.f, inputFrame.mLensDistortion[2], 0.f};
            auto distortionDataLength = sizeof(distortionData) / sizeof(distortionData[0]);
            imagingParams.distortion.reserve(distortionDataLength);
            imagingParams.distortion.insert(imagingParams.distortion.end(), distortionData,
                    distortionData + distortionDataLength);
        }

        cameraParams->imaging_model = ImagingModel::FromData(imagingParams);
    }

    if (inputFrame.mIsLogical) {
        cameraParams->trait = dynamic_depth::CameraTrait::LOGICAL;
    } else {
        cameraParams->trait = dynamic_depth::CameraTrait::PHYSICAL;
    }

    cameraList.emplace_back(Camera::FromData(std::move(cameraParams)));

    auto deviceParams = std::make_unique<DeviceParams> (Cameras::FromCameraArray(&cameraList));
    deviceParams->container = Container::FromItems(&items);
    std::vector<std::unique_ptr<Profile>> profileList;
    profileList.emplace_back(Profile::FromData("DepthPhoto", {0}));
    deviceParams->profiles = Profiles::FromProfileArray(&profileList);
    std::unique_ptr<Device> device = Device::FromData(std::move(deviceParams));
    if (device == nullptr) {
        ALOGE("%s: Failed to initialize camera device", __FUNCTION__);
        return BAD_VALUE;
    }

    std::istringstream inputJpegStream(
            std::string(inputFrame.mMainJpegBuffer, inputFrame.mMainJpegSize));
    std::ostringstream outputJpegStream;
    if (!WriteImageAndMetadataAndContainer(&inputJpegStream, device.get(), &outputJpegStream)) {
        ALOGE("%s: Failed writing depth output", __FUNCTION__);
        return BAD_VALUE;
    }

    *depthPhotoActualSize = static_cast<size_t> (outputJpegStream.tellp());
    if (*depthPhotoActualSize > depthPhotoBufferSize) {
        ALOGE("%s: Depth photo output buffer not sufficient, needed %zu actual %zu", __FUNCTION__,
                *depthPhotoActualSize, depthPhotoBufferSize);
        return NO_MEMORY;
    }

    memcpy(depthPhotoBuffer, outputJpegStream.str().c_str(), *depthPhotoActualSize);

    return 0;
}

}; // namespace camera3
}; // namespace android