1 /*
2 * Copyright 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 #undef ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION // TODO:remove this and fix code
17
18 //#define LOG_NDEBUG 0
19 #define LOG_TAG "DngCreator_JNI"
20 #include <inttypes.h>
21 #include <string.h>
22 #include <algorithm>
23 #include <array>
24 #include <memory>
25 #include <vector>
26 #include <cmath>
27
28 #include <android-base/properties.h>
29 #include <utils/Log.h>
30 #include <utils/Errors.h>
31 #include <utils/StrongPointer.h>
32 #include <utils/RefBase.h>
33 #include <utils/Vector.h>
34 #include <utils/String8.h>
35 #include <system/camera_metadata.h>
36 #include <camera/CameraMetadata.h>
37 #include <img_utils/DngUtils.h>
38 #include <img_utils/TagDefinitions.h>
39 #include <img_utils/TiffIfd.h>
40 #include <img_utils/TiffWriter.h>
41 #include <img_utils/Output.h>
42 #include <img_utils/Input.h>
43 #include <img_utils/StripSource.h>
44
45 #include "core_jni_helpers.h"
46
47 #include "android_runtime/AndroidRuntime.h"
48 #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
49
50 #include <jni.h>
51 #include <nativehelper/JNIHelp.h>
52 #include <nativehelper/ScopedUtfChars.h>
53
54 using namespace android;
55 using namespace img_utils;
56 using android::base::GetProperty;
57
// Error-bailout helpers shared by the metadata-to-TIFF conversion routines below.
// Each throws a Java IllegalArgumentException (formatted with the human-readable
// tag name from the TiffWriter) and then returns from the *enclosing* function
// with a kind-appropriate failure value.

// Throw and return false if `expr` did not evaluate to OK.
#define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return false; \
    }


// Throw and return nullptr (for sp<>/pointer-returning functions) if `expr` != OK.
#define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }


// Throw and return -1 (for status/index-returning functions) if `expr` != OK.
#define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return -1; \
    }

// Throw and return nullptr if the camera_metadata entry `entry` has no values.
#define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
    if ((entry).count == 0) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }

// Throw and return false if the camera_metadata entry `entry` has no values.
#define BAIL_IF_EMPTY_RET_BOOL(entry, jnienv, tagId, writer) \
    if ((entry).count == 0) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Missing metadata fields for tag %s (%x)", \
                (writer)->getTagName(tagId), (tagId)); \
        return false; \
    }

// Throw and return BAD_VALUE if the camera_metadata entry `entry` has no values.
#define BAIL_IF_EMPTY_RET_STATUS(entry, jnienv, tagId, writer) \
    if ((entry).count == 0) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Missing metadata fields for tag %s (%x)", \
                (writer)->getTagName(tagId), (tagId)); \
        return BAD_VALUE; \
    }

// Throw and return nullptr if the arbitrary condition `expr` is true.
#define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if (expr) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }
110
111
// Name of the Java field on android.hardware.camera2.DngCreator that stores the
// pointer to the NativeContext instance below.
#define ANDROID_DNGCREATOR_CTX_JNI_ID "mNativeContext"

// Cached JNI IDs, resolved once at class-registration time.
static struct {
    jfieldID mNativeContext;    // DngCreator.mNativeContext (long holding NativeContext*)
} gDngCreatorClassInfo;

static struct {
    jmethodID mWriteMethod;     // java.io.OutputStream#write(byte[], int, int)
} gOutputStreamClassInfo;

static struct {
    jmethodID mReadMethod;      // java.io.InputStream#read(byte[], int, int)
    jmethodID mSkipMethod;      // java.io.InputStream#skip(long)
} gInputStreamClassInfo;

static struct {
    jmethodID mGetMethod;       // java.nio.ByteBuffer#get(byte[], int, int)
} gInputByteBufferClassInfo;

// Pixel-format constants for the RAW16 main image and the RGB888 thumbnail,
// plus the fixed indices of the TIFF IFDs written into the DNG.
enum {
    BITS_PER_SAMPLE = 16,
    BYTES_PER_SAMPLE = 2,
    BYTES_PER_RGB_PIXEL = 3,
    BITS_PER_RGB_SAMPLE = 8,
    BYTES_PER_RGB_SAMPLE = 1,
    SAMPLES_PER_RGB_PIXEL = 3,
    SAMPLES_PER_RAW_PIXEL = 1,
    TIFF_IFD_0 = 0,         // main (raw) image IFD
    TIFF_IFD_SUB1 = 1,      // sub-IFD (thumbnail)
    TIFF_IFD_GPSINFO = 2,   // GPS info IFD
};
143
144
145 /**
146 * POD container class for GPS tag data.
147 */
class GpsData {
public:
    enum {
        GPS_VALUE_LENGTH = 6,   // 3 RATIONALs (numerator/denominator pairs)
        GPS_REF_LENGTH = 2,     // single ASCII char + NUL terminator
        GPS_DATE_LENGTH = 11,   // "YYYY:MM:DD" + NUL terminator
    };

    // Latitude/longitude/timestamp as degrees-minutes-seconds rationals,
    // stored as {num, denom} x 3 in the order the GPS TIFF tags expect.
    uint32_t mLatitude[GPS_VALUE_LENGTH];
    uint32_t mLongitude[GPS_VALUE_LENGTH];
    uint32_t mTimestamp[GPS_VALUE_LENGTH];
    uint8_t mLatitudeRef[GPS_REF_LENGTH];   // "N"/"S"
    uint8_t mLongitudeRef[GPS_REF_LENGTH];  // "E"/"W"
    uint8_t mDate[GPS_DATE_LENGTH];
};
163
164 // ----------------------------------------------------------------------------
165
166 /**
167 * Container class for the persistent native context.
168 */
169
class NativeContext : public LightRefBase<NativeContext> {
public:
    enum {
        DATETIME_COUNT = 20,    // length of TIFF DateTime strings, incl. NUL
    };

    // Copies both metadata packs; the context owns its own copies.
    NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
    virtual ~NativeContext();

    // Returns the TIFF writer owned by this context (never null; valid for the
    // lifetime of the context).
    TiffWriter* getWriter();

    std::shared_ptr<const CameraMetadata> getCharacteristics() const;
    std::shared_ptr<const CameraMetadata> getResult() const;

    uint32_t getThumbnailWidth() const;
    uint32_t getThumbnailHeight() const;
    const uint8_t* getThumbnail() const;
    bool hasThumbnail() const;

    // Copies width*height RGB888 pixels out of `buffer`; returns false on
    // allocation failure.
    bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);

    void setOrientation(uint16_t orientation);
    uint16_t getOrientation() const;

    void setDescription(const String8& desc);
    String8 getDescription() const;
    bool hasDescription() const;

    void setGpsData(const GpsData& data);
    GpsData getGpsData() const;
    bool hasGpsData() const;

    void setCaptureTime(const String8& formattedCaptureTime);
    String8 getCaptureTime() const;
    bool hasCaptureTime() const;

private:
    Vector<uint8_t> mCurrentThumbnail;      // RGB888 thumbnail pixel data
    TiffWriter mWriter;
    std::shared_ptr<CameraMetadata> mCharacteristics;
    std::shared_ptr<CameraMetadata> mResult;
    uint32_t mThumbnailWidth;
    uint32_t mThumbnailHeight;
    uint16_t mOrientation;
    // "Set" flags track which optional fields have been populated.
    bool mThumbnailSet;
    bool mGpsSet;
    bool mDescriptionSet;
    bool mCaptureTimeSet;
    String8 mDescription;
    GpsData mGpsData;
    String8 mFormattedCaptureTime;
};
222
// Deep-copies both metadata packs so the context is independent of the caller's
// objects; all optional fields start unset.
NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
        mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
        mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
        mThumbnailHeight(0), mOrientation(TAG_ORIENTATION_UNKNOWN), mThumbnailSet(false),
        mGpsSet(false), mDescriptionSet(false), mCaptureTimeSet(false) {}

NativeContext::~NativeContext() {}

TiffWriter* NativeContext::getWriter() {
    return &mWriter;
}

std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
    return mCharacteristics;
}

std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
    return mResult;
}

uint32_t NativeContext::getThumbnailWidth() const {
    return mThumbnailWidth;
}

uint32_t NativeContext::getThumbnailHeight() const {
    return mThumbnailHeight;
}

// Pointer into the internal thumbnail buffer; only meaningful when
// hasThumbnail() is true, and invalidated by the next setThumbnail() call.
const uint8_t* NativeContext::getThumbnail() const {
    return mCurrentThumbnail.array();
}

bool NativeContext::hasThumbnail() const {
    return mThumbnailSet;
}
258
setThumbnail(const uint8_t * buffer,uint32_t width,uint32_t height)259 bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
260 mThumbnailWidth = width;
261 mThumbnailHeight = height;
262
263 size_t size = BYTES_PER_RGB_PIXEL * width * height;
264 if (mCurrentThumbnail.resize(size) < 0) {
265 ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
266 return false;
267 }
268
269 uint8_t* thumb = mCurrentThumbnail.editArray();
270 memcpy(thumb, buffer, size);
271 mThumbnailSet = true;
272 return true;
273 }
274
void NativeContext::setOrientation(uint16_t orientation) {
    mOrientation = orientation;
}

uint16_t NativeContext::getOrientation() const {
    return mOrientation;
}

void NativeContext::setDescription(const String8& desc) {
    mDescription = desc;
    mDescriptionSet = true;
}

String8 NativeContext::getDescription() const {
    return mDescription;
}

bool NativeContext::hasDescription() const {
    return mDescriptionSet;
}

void NativeContext::setGpsData(const GpsData& data) {
    mGpsData = data;
    mGpsSet = true;
}

GpsData NativeContext::getGpsData() const {
    return mGpsData;
}

bool NativeContext::hasGpsData() const {
    return mGpsSet;
}

// Stores the already-formatted capture time string (formatting is done by the
// caller; this class only retains it for the TIFF DateTime tags).
void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
    mFormattedCaptureTime = formattedCaptureTime;
    mCaptureTimeSet = true;
}

String8 NativeContext::getCaptureTime() const {
    return mFormattedCaptureTime;
}

bool NativeContext::hasCaptureTime() const {
    return mCaptureTimeSet;
}
321
322 // End of NativeContext
323 // ----------------------------------------------------------------------------
324
325 /**
326 * Wrapper class for a Java OutputStream.
327 *
328 * This class is not intended to be used across JNI calls.
329 */
class JniOutputStream : public Output, public LightRefBase<JniOutputStream> {
public:
    // `outStream` is a java.io.OutputStream local/global ref owned by the
    // caller; it must outlive this wrapper.
    JniOutputStream(JNIEnv* env, jobject outStream);

    virtual ~JniOutputStream();

    status_t open();

    // Writes `count` bytes from buf+offset to the Java stream, chunked through
    // an internal byte[] staging array.
    status_t write(const uint8_t* buf, size_t offset, size_t count);

    status_t close();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096    // size of the native<->Java staging array
    };
    jobject mOutputStream;
    JNIEnv* mEnv;       // only valid on the thread that constructed us
    jbyteArray mByteArray;
};
349
// Allocates the staging byte[]; on allocation failure an OutOfMemoryError is
// thrown and mByteArray stays null (callers detect this via the pending
// exception).
JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream),
        mEnv(env) {
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniOutputStream::~JniOutputStream() {
    mEnv->DeleteLocalRef(mByteArray);
}

status_t JniOutputStream::open() {
    // Do nothing
    return OK;
}
366
write(const uint8_t * buf,size_t offset,size_t count)367 status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) {
368 while(count > 0) {
369 size_t len = BYTE_ARRAY_LENGTH;
370 len = (count > len) ? len : count;
371 mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset));
372
373 if (mEnv->ExceptionCheck()) {
374 return BAD_VALUE;
375 }
376
377 mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray,
378 0, len);
379
380 if (mEnv->ExceptionCheck()) {
381 return BAD_VALUE;
382 }
383
384 count -= len;
385 offset += len;
386 }
387 return OK;
388 }
389
// No-op: closing the underlying Java stream is the Java side's responsibility.
status_t JniOutputStream::close() {
    // Do nothing
    return OK;
}
394
395 // End of JniOutputStream
396 // ----------------------------------------------------------------------------
397
398 /**
399 * Wrapper class for a Java InputStream.
400 *
401 * This class is not intended to be used across JNI calls.
402 */
class JniInputStream : public Input, public LightRefBase<JniInputStream> {
public:
    // `inStream` is a java.io.InputStream ref owned by the caller; it must
    // outlive this wrapper.
    JniInputStream(JNIEnv* env, jobject inStream);

    status_t open();

    status_t close();

    // Reads up to `count` bytes into buf+offset (single read() round trip);
    // returns bytes read, or a negative status on EOF/exception.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    // Skips up to `count` bytes; returns bytes skipped or a negative status.
    ssize_t skip(size_t count);

    virtual ~JniInputStream();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096    // size of the native<->Java staging array
    };
    jobject mInStream;
    JNIEnv* mEnv;       // only valid on the thread that constructed us
    jbyteArray mByteArray;

};
425
// Allocates the staging byte[]; on failure an OutOfMemoryError is thrown and
// mByteArray stays null (callers detect this via the pending exception).
JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniInputStream::~JniInputStream() {
    mEnv->DeleteLocalRef(mByteArray);
}
436
read(uint8_t * buf,size_t offset,size_t count)437 ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {
438
439 jint realCount = BYTE_ARRAY_LENGTH;
440 if (count < BYTE_ARRAY_LENGTH) {
441 realCount = count;
442 }
443 jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
444 realCount);
445
446 if (actual < 0) {
447 return NOT_ENOUGH_DATA;
448 }
449
450 if (mEnv->ExceptionCheck()) {
451 return BAD_VALUE;
452 }
453
454 mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
455 if (mEnv->ExceptionCheck()) {
456 return BAD_VALUE;
457 }
458 return actual;
459 }
460
// Delegates to InputStream#skip(long). Returns bytes skipped, NOT_ENOUGH_DATA
// if the stream reported a negative skip, or BAD_VALUE on a pending exception.
ssize_t JniInputStream::skip(size_t count) {
    jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
            static_cast<jlong>(count));

    // Exception is checked before the return value is trusted.
    if (mEnv->ExceptionCheck()) {
        return BAD_VALUE;
    }
    if (actual < 0) {
        return NOT_ENOUGH_DATA;
    }
    return actual;
}

status_t JniInputStream::open() {
    // Do nothing
    return OK;
}

status_t JniInputStream::close() {
    // Do nothing
    return OK;
}
483
484 // End of JniInputStream
485 // ----------------------------------------------------------------------------
486
487 /**
488 * Wrapper class for a non-direct Java ByteBuffer.
489 *
490 * This class is not intended to be used across JNI calls.
491 */
class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
public:
    // `inBuf` is a non-direct java.nio.ByteBuffer ref owned by the caller; it
    // must outlive this wrapper.
    JniInputByteBuffer(JNIEnv* env, jobject inBuf);

    status_t open();

    status_t close();

    // Reads exactly min(count, BYTE_ARRAY_LENGTH) bytes into buf+offset via
    // ByteBuffer#get(byte[], int, int); returns bytes read or BAD_VALUE.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    virtual ~JniInputByteBuffer();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096    // size of the native<->Java staging array
    };
    jobject mInBuf;
    JNIEnv* mEnv;       // only valid on the thread that constructed us
    jbyteArray mByteArray;
};
511
// Allocates the staging byte[]; on failure an OutOfMemoryError is thrown and
// mByteArray stays null (callers detect this via the pending exception).
JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

JniInputByteBuffer::~JniInputByteBuffer() {
    mEnv->DeleteLocalRef(mByteArray);
}

// One ByteBuffer#get(byte[], int, int) round trip; the buffer's position
// advances on the Java side. Always transfers exactly realCount bytes on
// success (get() throws if the buffer has fewer remaining).
ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
    jint realCount = BYTE_ARRAY_LENGTH;
    if (count < BYTE_ARRAY_LENGTH) {
        realCount = count;
    }

    // get() returns the ByteBuffer itself for chaining; drop that local ref
    // immediately so we don't leak refs in long copy loops.
    jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
            mByteArray, 0, realCount);
    mEnv->DeleteLocalRef(chainingBuf);

    if (mEnv->ExceptionCheck()) {
        ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
        return BAD_VALUE;
    }

    mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
    if (mEnv->ExceptionCheck()) {
        ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
        return BAD_VALUE;
    }
    return realCount;
}

status_t JniInputByteBuffer::open() {
    // Do nothing
    return OK;
}

status_t JniInputByteBuffer::close() {
    // Do nothing
    return OK;
}
555
556 // End of JniInputByteBuffer
557 // ----------------------------------------------------------------------------
558
559 /**
560 * StripSource subclass for Input types.
561 *
562 * This class is not intended to be used across JNI calls.
563 */
564
class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
public:
    // Wraps an Input as a TIFF strip source. `offset` bytes are skipped before
    // the first row; rows are mRowStride bytes apart and pixels mPixStride
    // bytes apart. `input` must outlive this object.
    InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
            uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
            uint32_t samplesPerPixel);

    virtual ~InputStripSource();

    virtual status_t writeToStream(Output& stream, uint32_t count);

    virtual uint32_t getIfd() const;
protected:
    uint32_t mIfd;          // TIFF IFD index this strip belongs to
    Input* mInput;          // non-owning
    uint32_t mWidth;
    uint32_t mHeight;
    uint32_t mPixStride;    // bytes between adjacent pixels
    uint32_t mRowStride;    // bytes between adjacent rows
    uint64_t mOffset;       // bytes to skip before the first row
    JNIEnv* mEnv;
    uint32_t mBytesPerSample;
    uint32_t mSamplesPerPixel;
};
588
// Member-for-member initialization; no work is done until writeToStream().
InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
        uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
        uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
        mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
        mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
        mSamplesPerPixel(samplesPerPixel) {}

InputStripSource::~InputStripSource() {}
597
// Streams the whole strip (`count` must equal width*height*bytes*samples) from
// mInput to `stream`, one row at a time. Skips mOffset bytes first. Pixels
// must be contiguous (mPixStride == bytes-per-pixel); per-pixel strides are
// rejected. Throws a Java exception and returns a negative status on any
// short read/skip or stream error.
status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
    uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
    jlong offset = mOffset;

    if (fullSize != count) {
        ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
                fullSize);
        jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
        return BAD_VALUE;
    }

    // Skip offset. Input::skip may skip fewer bytes than requested, so loop
    // until the full offset is consumed.
    while (offset > 0) {
        ssize_t skipped = mInput->skip(offset);
        if (skipped <= 0) {
            if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
                jniThrowExceptionFmt(mEnv, "java/io/IOException",
                        "Early EOF encountered in skip, not enough pixel data for image of size %u",
                        fullSize);
                skipped = NOT_ENOUGH_DATA;
            } else {
                // Only throw if the Input didn't already leave an exception
                // pending.
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException",
                            "Error encountered while skip bytes in input stream.");
                }
            }

            return skipped;
        }
        offset -= skipped;
    }

    // Row staging buffer.
    Vector<uint8_t> row;
    if (row.resize(mRowStride) < 0) {
        jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
        return BAD_VALUE;
    }

    uint8_t* rowBytes = row.editArray();

    for (uint32_t i = 0; i < mHeight; ++i) {
        size_t rowFillAmt = 0;
        size_t rowSize = mRowStride;

        // Fill one full row; read() may return short counts.
        while (rowFillAmt < mRowStride) {
            ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
            if (bytesRead <= 0) {
                if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
                    ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
                            __FUNCTION__, i, bytesRead);
                    jniThrowExceptionFmt(mEnv, "java/io/IOException",
                            "Early EOF encountered, not enough pixel data for image of size %"
                            PRIu32, fullSize);
                    bytesRead = NOT_ENOUGH_DATA;
                } else {
                    if (!mEnv->ExceptionCheck()) {
                        jniThrowException(mEnv, "java/io/IOException",
                                "Error encountered while reading");
                    }
                }
                return bytesRead;
            }
            rowFillAmt += bytesRead;
            rowSize -= bytesRead;
        }

        if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
            ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);

            // Contiguous pixels: write the packed row in one call. Note only
            // the packed width is written, so row padding is dropped here.
            if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
                    mEnv->ExceptionCheck()) {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
                }
                return BAD_VALUE;
            }
        } else {
            ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
            jniThrowException(mEnv, "java/lang/IllegalStateException",
                    "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
            return BAD_VALUE;

            // TODO: Add support for non-contiguous pixels if needed.
        }
    }
    return OK;
}

uint32_t InputStripSource::getIfd() const {
    return mIfd;
}
689
690 // End of InputStripSource
691 // ----------------------------------------------------------------------------
692
693 /**
694 * StripSource subclass for direct buffer types.
695 *
696 * This class is not intended to be used across JNI calls.
697 */
698
699 class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> {
700 public:
701 DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width,
702 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
703 uint32_t bytesPerSample, uint32_t samplesPerPixel);
704
705 virtual ~DirectStripSource();
706
707 virtual status_t writeToStream(Output& stream, uint32_t count);
708
709 virtual uint32_t getIfd() const;
710 protected:
711 uint32_t mIfd;
712 const uint8_t* mPixelBytes;
713 uint32_t mWidth;
714 uint32_t mHeight;
715 uint32_t mPixStride;
716 uint32_t mRowStride;
717 uint16_t mOffset;
718 JNIEnv* mEnv;
719 uint32_t mBytesPerSample;
720 uint32_t mSamplesPerPixel;
721 };
722
// Member-for-member initialization; no work is done until writeToStream().
DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd,
        uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride,
        uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd),
        mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride),
        mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
        mSamplesPerPixel(samplesPerPixel) {}

DirectStripSource::~DirectStripSource() {}
731
// Writes the whole strip (`count` must equal width*height*bytes*samples) from
// the in-memory buffer to `stream`. Fast path: one write when rows and pixels
// are both packed; otherwise one write per row when only pixels are packed.
// Per-pixel strides are rejected with an IllegalStateException.
status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) {
    uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;

    if (fullSize != count) {
        ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
                fullSize);
        jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
        return BAD_VALUE;
    }


    if (mPixStride == mBytesPerSample * mSamplesPerPixel
            && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) {
        // Fully packed: a single contiguous write.
        ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__);

        if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) {
            if (!mEnv->ExceptionCheck()) {
                jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
            }
            return BAD_VALUE;
        }
    } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
        // Packed pixels but padded rows: write each row's packed span,
        // skipping the row padding.
        ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__);

        for (size_t i = 0; i < mHeight; ++i) {
            if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK ||
                    mEnv->ExceptionCheck()) {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
                }
                return BAD_VALUE;
            }
        }
    } else {
        ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__);

        jniThrowException(mEnv, "java/lang/IllegalStateException",
                "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
        return BAD_VALUE;

        // TODO: Add support for non-contiguous pixels if needed.
    }
    return OK;

}

uint32_t DirectStripSource::getIfd() const {
    return mIfd;
}
781
782 // End of DirectStripSource
783 // ----------------------------------------------------------------------------
784
785 // Get the appropriate tag corresponding to default / maximum resolution mode.
getAppropriateModeTag(int32_t tag,bool maximumResolution)786 static int32_t getAppropriateModeTag(int32_t tag, bool maximumResolution) {
787 if (!maximumResolution) {
788 return tag;
789 }
790 switch (tag) {
791 case ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE:
792 return ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION;
793 case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
794 return ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
795 case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
796 return ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
797 default:
798 ALOGE("%s: Tag %d doesn't have sensor info related maximum resolution counterpart",
799 __FUNCTION__, tag);
800 return -1;
801 }
802 }
803
// Returns true iff this sensor advertises the ULTRA_HIGH_RESOLUTION_SENSOR
// capability AND the given image dimensions match either the maximum-resolution
// pre-correction active array size or the maximum-resolution pixel array size.
// May throw (via BAIL_IF_EMPTY_RET_BOOL) and return false if the required
// maximum-resolution entries are missing from the characteristics.
static bool isMaximumResolutionModeImage(const CameraMetadata& characteristics, uint32_t imageWidth,
                                         uint32_t imageHeight, const sp<TiffWriter> writer,
                                         JNIEnv* env) {
    // If this isn't an ultra-high resolution sensor, return false;
    camera_metadata_ro_entry capabilitiesEntry =
            characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
    size_t capsCount = capabilitiesEntry.count;
    const uint8_t* caps = capabilitiesEntry.data.u8;
    if (std::find(caps, caps + capsCount,
                  ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) ==
        caps + capsCount) {
        // not an ultra-high resolution sensor, cannot have a maximum resolution
        // mode image.
        return false;
    }

    // If the image width and height are either the maximum resolution
    // pre-correction active array size or the maximum resolution pixel array
    // size, this image is a maximum resolution RAW_SENSOR image.

    // Check dimensions
    camera_metadata_ro_entry entry = characteristics.find(
            ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION);

    BAIL_IF_EMPTY_RET_BOOL(entry, env, TAG_IMAGEWIDTH, writer);

    // Rect entries are laid out as {left, top, width, height}.
    uint32_t preWidth = static_cast<uint32_t>(entry.data.i32[2]);
    uint32_t preHeight = static_cast<uint32_t>(entry.data.i32[3]);

    camera_metadata_ro_entry pixelArrayEntry =
            characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION);

    BAIL_IF_EMPTY_RET_BOOL(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);

    uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
    uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);

    return (imageWidth == preWidth && imageHeight == preHeight) ||
            (imageWidth == pixWidth && imageHeight == pixHeight);
}
844
845 /**
846 * Calculate the default crop relative to the "active area" of the image sensor (this active area
847 * will always be the pre-correction active area rectangle), and set this.
848 */
/**
 * Calculate the default crop relative to the "active area" of the image sensor (this active area
 * will always be the pre-correction active area rectangle), and set this.
 *
 * Writes TAG_DEFAULTCROPORIGIN/TAG_DEFAULTCROPSIZE into IFD 0, insetting the
 * pre-correction active array by an 8-pixel margin on every side. Throws and
 * returns BAD_VALUE if the array is too small or the metadata is missing.
 */
static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics,
        sp<TiffWriter> writer, bool maximumResolutionMode) {
    camera_metadata_ro_entry entry = characteristics.find(
            getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
                                  maximumResolutionMode));
    BAIL_IF_EMPTY_RET_STATUS(entry, env, TAG_IMAGEWIDTH, writer);
    // Rect entries are laid out as {left, top, width, height}.
    uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
    uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);

    const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation.

    if (width < margin * 2 || height < margin * 2) {
        ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too"
                "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width);
        jniThrowException(env, "java/lang/IllegalStateException",
                "Pre-correction active area is too small.");
        return BAD_VALUE;
    }

    uint32_t defaultCropOrigin[] = {margin, margin};
    uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin,
                                  height - defaultCropOrigin[1] - margin};

    BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin,
            TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer);
    BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize,
            TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer);

    return OK;
}
879
// Validates that the requested DNG image dimensions are positive and match
// either the sensor's pixel array size or its pre-correction active array size
// (in the mode - default vs. maximum resolution - inferred from the
// dimensions). Throws IllegalArgumentException and returns false on mismatch.
static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer,
        const CameraMetadata& characteristics, jint width, jint height) {
    if (width <= 0) {
        jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
                        "Image width %d is invalid", width);
        return false;
    }

    if (height <= 0) {
        jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
                        "Image height %d is invalid", height);
        return false;
    }
    // NOTE(review): isMaximumResolutionModeImage can throw and return false;
    // execution then continues here with default-mode tags while the exception
    // is pending — presumably the subsequent BAIL macros surface it. Confirm.
    bool isMaximumResolutionMode =
            isMaximumResolutionModeImage(characteristics, static_cast<uint32_t>(width),
                                         static_cast<uint32_t>(height), writer, env);

    camera_metadata_ro_entry preCorrectionEntry = characteristics.find(
            getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
                                  isMaximumResolutionMode));
    BAIL_IF_EMPTY_RET_BOOL(preCorrectionEntry, env, TAG_IMAGEWIDTH, writer);

    camera_metadata_ro_entry pixelArrayEntry = characteristics.find(
            getAppropriateModeTag(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, isMaximumResolutionMode));
    BAIL_IF_EMPTY_RET_BOOL(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);

    int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]);
    int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]);
    // Rect entries are laid out as {left, top, width, height}.
    int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]);
    int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]);

    bool matchesPixelArray = (pWidth == width && pHeight == height);
    bool matchesPreCorrectionArray = (cWidth == width && cHeight == height);

    if (!(matchesPixelArray || matchesPreCorrectionArray)) {
        jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
                        "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel "
                        "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)",
                        width, height, pWidth, pHeight, cWidth, cHeight);
        return false;
    }

    return true;
}
924
925 /**
926 * Write CFA pattern for given CFA enum into cfaOut. cfaOut must have length >= 4.
927 * Returns OK on success, or a negative error code if the CFA enum was invalid.
928 */
convertCFA(uint8_t cfaEnum,uint8_t * cfaOut)929 static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
930 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
931 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
932 cfaEnum);
933 switch(cfa) {
934 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
935 cfaOut[0] = 0;
936 cfaOut[1] = 1;
937 cfaOut[2] = 1;
938 cfaOut[3] = 2;
939 break;
940 }
941 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
942 cfaOut[0] = 1;
943 cfaOut[1] = 0;
944 cfaOut[2] = 2;
945 cfaOut[3] = 1;
946 break;
947 }
948 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
949 cfaOut[0] = 1;
950 cfaOut[1] = 2;
951 cfaOut[2] = 0;
952 cfaOut[3] = 1;
953 break;
954 }
955 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
956 cfaOut[0] = 2;
957 cfaOut[1] = 1;
958 cfaOut[2] = 1;
959 cfaOut[3] = 0;
960 break;
961 }
962 // MONO and NIR are degenerate case of RGGB pattern: only Red channel
963 // will be used.
964 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO:
965 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR: {
966 cfaOut[0] = 0;
967 break;
968 }
969 default: {
970 return BAD_VALUE;
971 }
972 }
973 return OK;
974 }
975
976 /**
977 * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to
978 * RGGB for an unknown enum.
979 */
convertCFAEnumToOpcodeLayout(uint8_t cfaEnum)980 static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) {
981 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
982 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
983 cfaEnum);
984 switch(cfa) {
985 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
986 return OpcodeListBuilder::CFA_RGGB;
987 }
988 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
989 return OpcodeListBuilder::CFA_GRBG;
990 }
991 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
992 return OpcodeListBuilder::CFA_GBRG;
993 }
994 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
995 return OpcodeListBuilder::CFA_BGGR;
996 }
997 default: {
998 return OpcodeListBuilder::CFA_RGGB;
999 }
1000 }
1001 }
1002
1003 /**
1004 * For each color plane, find the corresponding noise profile coefficients given in the
1005 * per-channel noise profile. If multiple channels in the CFA correspond to a color in the color
1006 * plane, this method takes the pair of noise profile coefficients with the higher S coefficient.
1007 *
1008 * perChannelNoiseProfile - numChannels * 2 noise profile coefficients.
1009 * cfa - numChannels color channels corresponding to each of the per-channel noise profile
1010 * coefficients.
1011 * numChannels - the number of noise profile coefficient pairs and color channels given in
1012 * the perChannelNoiseProfile and cfa arguments, respectively.
1013 * planeColors - the color planes in the noise profile output.
1014 * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile.
1015 * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients.
1016 *
1017 * returns OK, or a negative error code on failure.
1018 */
generateNoiseProfile(const double * perChannelNoiseProfile,uint8_t * cfa,size_t numChannels,const uint8_t * planeColors,size_t numPlanes,double * noiseProfile)1019 static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa,
1020 size_t numChannels, const uint8_t* planeColors, size_t numPlanes,
1021 /*out*/double* noiseProfile) {
1022
1023 for (size_t p = 0; p < numPlanes; ++p) {
1024 size_t S = p * 2;
1025 size_t O = p * 2 + 1;
1026
1027 noiseProfile[S] = 0;
1028 noiseProfile[O] = 0;
1029 bool uninitialized = true;
1030 for (size_t c = 0; c < numChannels; ++c) {
1031 if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) {
1032 noiseProfile[S] = perChannelNoiseProfile[c * 2];
1033 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1];
1034 uninitialized = false;
1035 }
1036 }
1037 if (uninitialized) {
1038 ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu",
1039 __FUNCTION__, p);
1040 return BAD_VALUE;
1041 }
1042 }
1043 return OK;
1044 }
1045
/**
 * Apply the lens distortion model to the point (x, y) in-place.
 * distortion holds the radial coefficients {k0, k1, k2, k3} followed by the
 * tangential coefficients {p1, p2}; (cx, cy) is the optical center and f the
 * focal length, all in the same pixel coordinate space as (x, y).
 */
static void undistort(/*inout*/double& x, /*inout*/double& y,
        const std::array<float, 6>& distortion,
        const float cx, const float cy, const float f) {
    // Normalize the point relative to the optical center.
    const double xp = (x - cx) / f;
    const double yp = (y - cy) / f;

    const double x2 = xp * xp;
    const double y2 = yp * yp;
    const double r2 = x2 + y2;
    const double xy2 = 2.0 * xp * yp;

    // Radial term k0 + k1*r^2 + k2*r^4 + k3*r^6, evaluated Horner-style.
    const double kr = distortion[0] +
            ((distortion[3] * r2 + distortion[2]) * r2 + distortion[1]) * r2;

    // Tangential terms use p1 = distortion[4] and p2 = distortion[5].
    const double xpp = xp * kr + distortion[4] * xy2 + distortion[5] * (r2 + 2.0 * x2);
    const double ypp = yp * kr + distortion[4] * (r2 + 2.0 * y2) + distortion[5] * xy2;

    // Denormalize back into pixel coordinates.
    x = xpp * f + cx;
    y = ypp * f + cy;
}
1072
unDistortWithinPreCorrArray(double x,double y,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1073 static inline bool unDistortWithinPreCorrArray(
1074 double x, double y,
1075 const std::array<float, 6>& distortion,
1076 const float cx, const float cy, const float f,
1077 const int preCorrW, const int preCorrH, const int xMin, const int yMin) {
1078 undistort(x, y, distortion, cx, cy, f);
1079 // xMin and yMin are inclusive, and xMax and yMax are exclusive.
1080 int xMax = xMin + preCorrW;
1081 int yMax = yMin + preCorrH;
1082 if (x < xMin || y < yMin || x >= xMax || y >= yMax) {
1083 return false;
1084 }
1085 return true;
1086 }
1087
boxWithinPrecorrectionArray(int left,int top,int right,int bottom,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1088 static inline bool boxWithinPrecorrectionArray(
1089 int left, int top, int right, int bottom,
1090 const std::array<float, 6>& distortion,
1091 const float cx, const float cy, const float f,
1092 const int preCorrW, const int preCorrH, const int xMin, const int yMin){
1093 // Top row
1094 if (!unDistortWithinPreCorrArray(left, top,
1095 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1096 return false;
1097 }
1098
1099 if (!unDistortWithinPreCorrArray(cx, top,
1100 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1101 return false;
1102 }
1103
1104 if (!unDistortWithinPreCorrArray(right, top,
1105 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1106 return false;
1107 }
1108
1109 // Middle row
1110 if (!unDistortWithinPreCorrArray(left, cy,
1111 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1112 return false;
1113 }
1114
1115 if (!unDistortWithinPreCorrArray(right, cy,
1116 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1117 return false;
1118 }
1119
1120 // Bottom row
1121 if (!unDistortWithinPreCorrArray(left, bottom,
1122 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1123 return false;
1124 }
1125
1126 if (!unDistortWithinPreCorrArray(cx, bottom,
1127 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1128 return false;
1129 }
1130
1131 if (!unDistortWithinPreCorrArray(right, bottom,
1132 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1133 return false;
1134 }
1135 return true;
1136 }
1137
scaledBoxWithinPrecorrectionArray(double scale,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1138 static inline bool scaledBoxWithinPrecorrectionArray(
1139 double scale/*must be <= 1.0*/,
1140 const std::array<float, 6>& distortion,
1141 const float cx, const float cy, const float f,
1142 const int preCorrW, const int preCorrH,
1143 const int xMin, const int yMin){
1144
1145 double left = cx * (1.0 - scale);
1146 double right = (preCorrW - 1) * scale + cx * (1.0 - scale);
1147 double top = cy * (1.0 - scale);
1148 double bottom = (preCorrH - 1) * scale + cy * (1.0 - scale);
1149
1150 return boxWithinPrecorrectionArray(left, top, right, bottom,
1151 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin);
1152 }
1153
findPostCorrectionScale(double stepSize,double minScale,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin,double * outScale)1154 static status_t findPostCorrectionScale(
1155 double stepSize, double minScale,
1156 const std::array<float, 6>& distortion,
1157 const float cx, const float cy, const float f,
1158 const int preCorrW, const int preCorrH, const int xMin, const int yMin,
1159 /*out*/ double* outScale) {
1160 if (outScale == nullptr) {
1161 ALOGE("%s: outScale must not be null", __FUNCTION__);
1162 return BAD_VALUE;
1163 }
1164
1165 for (double scale = 1.0; scale > minScale; scale -= stepSize) {
1166 if (scaledBoxWithinPrecorrectionArray(
1167 scale, distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1168 *outScale = scale;
1169 return OK;
1170 }
1171 }
1172 ALOGE("%s: cannot find cropping scale for lens distortion: stepSize %f, minScale %f",
1173 __FUNCTION__, stepSize, minScale);
1174 return BAD_VALUE;
1175 }
1176
1177 // Apply a scale factor to distortion coefficients so that the image is zoomed out and all pixels
1178 // are sampled within the precorrection array
normalizeLensDistortion(std::array<float,6> & distortion,float cx,float cy,float f,int preCorrW,int preCorrH,int xMin=0,int yMin=0)1179 static void normalizeLensDistortion(
1180 /*inout*/std::array<float, 6>& distortion,
1181 float cx, float cy, float f, int preCorrW, int preCorrH, int xMin = 0, int yMin = 0) {
1182 ALOGV("%s: distortion [%f, %f, %f, %f, %f, %f], (cx,cy) (%f, %f), f %f, (W,H) (%d, %d)"
1183 ", (xmin, ymin, xmax, ymax) (%d, %d, %d, %d)",
1184 __FUNCTION__, distortion[0], distortion[1], distortion[2],
1185 distortion[3], distortion[4], distortion[5],
1186 cx, cy, f, preCorrW, preCorrH,
1187 xMin, yMin, xMin + preCorrW - 1, yMin + preCorrH - 1);
1188
1189 // Only update distortion coeffients if we can find a good bounding box
1190 double scale = 1.0;
1191 if (OK == findPostCorrectionScale(0.002, 0.5,
1192 distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin,
1193 /*out*/&scale)) {
1194 ALOGV("%s: scaling distortion coefficients by %f", __FUNCTION__, scale);
1195 // The formula:
1196 // xc = xi * (k0 + k1*r^2 + k2*r^4 + k3*r^6) + k4 * (2*xi*yi) + k5 * (r^2 + 2*xi^2)
1197 // To create effective zoom we want to replace xi by xi *m, yi by yi*m and r^2 by r^2*m^2
1198 // Factor the extra m power terms into k0~k6
1199 std::array<float, 6> scalePowers = {1, 3, 5, 7, 2, 2};
1200 for (size_t i = 0; i < 6; i++) {
1201 distortion[i] *= pow(scale, scalePowers[i]);
1202 }
1203 }
1204 return;
1205 }
1206
1207 // ----------------------------------------------------------------------------
1208 extern "C" {
1209
DngCreator_getNativeContext(JNIEnv * env,jobject thiz)1210 static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) {
1211 ALOGV("%s:", __FUNCTION__);
1212 return reinterpret_cast<NativeContext*>(env->GetLongField(thiz,
1213 gDngCreatorClassInfo.mNativeContext));
1214 }
1215
DngCreator_setNativeContext(JNIEnv * env,jobject thiz,sp<NativeContext> context)1216 static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) {
1217 ALOGV("%s:", __FUNCTION__);
1218 NativeContext* current = DngCreator_getNativeContext(env, thiz);
1219
1220 if (context != nullptr) {
1221 context->incStrong((void*) DngCreator_setNativeContext);
1222 }
1223
1224 if (current) {
1225 current->decStrong((void*) DngCreator_setNativeContext);
1226 }
1227
1228 env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext,
1229 reinterpret_cast<jlong>(context.get()));
1230 }
1231
DngCreator_nativeClassInit(JNIEnv * env,jclass clazz)1232 static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) {
1233 ALOGV("%s:", __FUNCTION__);
1234
1235 gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env,
1236 clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J");
1237
1238 jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream");
1239 gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env,
1240 outputStreamClazz, "write", "([BII)V");
1241
1242 jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream");
1243 gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I");
1244 gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J");
1245
1246 jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer");
1247 gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env,
1248 inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;");
1249 }
1250
DngCreator_init(JNIEnv * env,jobject thiz,jobject characteristicsPtr,jobject resultsPtr,jstring formattedCaptureTime)1251 static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr,
1252 jobject resultsPtr, jstring formattedCaptureTime) {
1253 ALOGV("%s:", __FUNCTION__);
1254 CameraMetadata characteristics;
1255 CameraMetadata results;
1256 if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) {
1257 jniThrowException(env, "java/lang/AssertionError",
1258 "No native metadata defined for camera characteristics.");
1259 return;
1260 }
1261 if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) {
1262 jniThrowException(env, "java/lang/AssertionError",
1263 "No native metadata defined for capture results.");
1264 return;
1265 }
1266
1267 sp<NativeContext> nativeContext = new NativeContext(characteristics, results);
1268
1269 ScopedUtfChars captureTime(env, formattedCaptureTime);
1270 if (captureTime.size() + 1 != NativeContext::DATETIME_COUNT) {
1271 jniThrowException(env, "java/lang/IllegalArgumentException",
1272 "Formatted capture time string length is not required 20 characters");
1273 return;
1274 }
1275
1276 nativeContext->setCaptureTime(String8(captureTime.c_str()));
1277
1278 DngCreator_setNativeContext(env, thiz, nativeContext);
1279 }
1280
DngCreator_setup(JNIEnv * env,jobject thiz,uint32_t imageWidth,uint32_t imageHeight)1281 static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth,
1282 uint32_t imageHeight) {
1283
1284 NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz);
1285
1286 if (nativeContext == nullptr) {
1287 jniThrowException(env, "java/lang/AssertionError",
1288 "No native context, must call init before other operations.");
1289 return nullptr;
1290 }
1291
1292 CameraMetadata characteristics = *(nativeContext->getCharacteristics());
1293 CameraMetadata results = *(nativeContext->getResult());
1294
1295 sp<TiffWriter> writer = new TiffWriter();
1296
1297 uint32_t preXMin = 0;
1298 uint32_t preYMin = 0;
1299 uint32_t preWidth = 0;
1300 uint32_t preHeight = 0;
1301 uint8_t colorFilter = 0;
1302 bool isBayer = true;
1303 bool isMaximumResolutionMode =
1304 isMaximumResolutionModeImage(characteristics, imageWidth, imageHeight, writer, env);
1305 {
1306 // Check dimensions
1307 camera_metadata_entry entry = characteristics.find(
1308 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1309 isMaximumResolutionMode));
1310 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1311 preXMin = static_cast<uint32_t>(entry.data.i32[0]);
1312 preYMin = static_cast<uint32_t>(entry.data.i32[1]);
1313 preWidth = static_cast<uint32_t>(entry.data.i32[2]);
1314 preHeight = static_cast<uint32_t>(entry.data.i32[3]);
1315
1316 camera_metadata_entry pixelArrayEntry =
1317 characteristics.find(getAppropriateModeTag(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1318 isMaximumResolutionMode));
1319
1320 BAIL_IF_EMPTY_RET_NULL_SP(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);
1321 uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
1322 uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
1323
1324 if (!((imageWidth == preWidth && imageHeight == preHeight) ||
1325 (imageWidth == pixWidth && imageHeight == pixHeight))) {
1326 jniThrowException(env, "java/lang/AssertionError",
1327 "Height and width of image buffer did not match height and width of"
1328 " either the preCorrectionActiveArraySize or the pixelArraySize.");
1329 return nullptr;
1330 }
1331
1332 camera_metadata_entry colorFilterEntry =
1333 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1334 colorFilter = colorFilterEntry.data.u8[0];
1335 camera_metadata_entry capabilitiesEntry =
1336 characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
1337 size_t capsCount = capabilitiesEntry.count;
1338 uint8_t* caps = capabilitiesEntry.data.u8;
1339 if (std::find(caps, caps+capsCount, ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME)
1340 != caps+capsCount) {
1341 isBayer = false;
1342 } else if (colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO ||
1343 colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) {
1344 jniThrowException(env, "java/lang/AssertionError",
1345 "A camera device with MONO/NIR color filter must have MONOCHROME capability.");
1346 return nullptr;
1347 }
1348 }
1349
1350 writer->addIfd(TIFF_IFD_0);
1351
1352 status_t err = OK;
1353
1354 const uint32_t samplesPerPixel = 1;
1355 const uint32_t bitsPerSample = BITS_PER_SAMPLE;
1356
1357 OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_NONE;
1358 uint8_t cfaPlaneColor[3] = {0, 1, 2};
1359 camera_metadata_entry cfaEntry =
1360 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1361 BAIL_IF_EMPTY_RET_NULL_SP(cfaEntry, env, TAG_CFAPATTERN, writer);
1362 uint8_t cfaEnum = cfaEntry.data.u8[0];
1363
1364 // TODO: Greensplit.
1365 // TODO: Add remaining non-essential tags
1366
1367 // Setup main image tags
1368
1369 {
1370 // Set orientation
1371 uint16_t orientation = TAG_ORIENTATION_NORMAL;
1372 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1373 env, TAG_ORIENTATION, writer);
1374 }
1375
1376 {
1377 // Set subfiletype
1378 uint32_t subfileType = 0; // Main image
1379 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1380 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1381 }
1382
1383 {
1384 // Set bits per sample
1385 uint16_t bits = static_cast<uint16_t>(bitsPerSample);
1386 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
1387 TAG_BITSPERSAMPLE, writer);
1388 }
1389
1390 {
1391 // Set compression
1392 uint16_t compression = 1; // None
1393 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1394 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1395 }
1396
1397 {
1398 // Set dimensions
1399 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
1400 env, TAG_IMAGEWIDTH, writer);
1401 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
1402 env, TAG_IMAGELENGTH, writer);
1403 }
1404
1405 {
1406 // Set photometric interpretation
1407 uint16_t interpretation = isBayer ? 32803 /* CFA */ :
1408 34892; /* Linear Raw */;
1409 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1410 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1411 }
1412
1413 {
1414 uint16_t repeatDim[2] = {2, 2};
1415 if (!isBayer) {
1416 repeatDim[0] = repeatDim[1] = 1;
1417 }
1418 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
1419 TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
1420
1421 // Set blacklevel tags, using dynamic black level if available
1422 camera_metadata_entry entry =
1423 results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
1424 uint32_t blackLevelRational[8] = {0};
1425 if (entry.count != 0) {
1426 BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1427 for (size_t i = 0; i < entry.count; i++) {
1428 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.f[i] * 100);
1429 blackLevelRational[i * 2 + 1] = 100;
1430 }
1431 } else {
1432 // Fall back to static black level which is guaranteed
1433 entry = characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
1434 BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1435 for (size_t i = 0; i < entry.count; i++) {
1436 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]);
1437 blackLevelRational[i * 2 + 1] = 1;
1438 }
1439 }
1440 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, repeatDim[0]*repeatDim[1],
1441 blackLevelRational, TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
1442 }
1443
1444 {
1445 // Set samples per pixel
1446 uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
1447 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
1448 env, TAG_SAMPLESPERPIXEL, writer);
1449 }
1450
1451 {
1452 // Set planar configuration
1453 uint16_t config = 1; // Chunky
1454 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1455 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1456 }
1457
1458 // All CFA pattern tags are not necessary for monochrome cameras.
1459 if (isBayer) {
1460 // Set CFA pattern dimensions
1461 uint16_t repeatDim[2] = {2, 2};
1462 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
1463 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
1464
1465 // Set CFA pattern
1466 const int cfaLength = 4;
1467 uint8_t cfa[cfaLength];
1468 if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
1469 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
1470 "Invalid metadata for tag %d", TAG_CFAPATTERN);
1471 }
1472
1473 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
1474 env, TAG_CFAPATTERN, writer);
1475
1476 opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
1477
1478 // Set CFA plane color
1479 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
1480 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
1481
1482 // Set CFA layout
1483 uint16_t cfaLayout = 1;
1484 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
1485 env, TAG_CFALAYOUT, writer);
1486 }
1487
1488 {
1489 // image description
1490 uint8_t imageDescription = '\0'; // empty
1491 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
1492 TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
1493 }
1494
1495 {
1496 // make
1497 // Use "" to represent unknown make as suggested in TIFF/EP spec.
1498 std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1499 uint32_t count = static_cast<uint32_t>(manufacturer.size()) + 1;
1500
1501 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
1502 reinterpret_cast<const uint8_t*>(manufacturer.c_str()), TIFF_IFD_0), env, TAG_MAKE,
1503 writer);
1504 }
1505
1506 {
1507 // model
1508 // Use "" to represent unknown model as suggested in TIFF/EP spec.
1509 std::string model = GetProperty("ro.product.model", "");
1510 uint32_t count = static_cast<uint32_t>(model.size()) + 1;
1511
1512 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
1513 reinterpret_cast<const uint8_t*>(model.c_str()), TIFF_IFD_0), env, TAG_MODEL,
1514 writer);
1515 }
1516
1517 {
1518 // x resolution
1519 uint32_t xres[] = { 72, 1 }; // default 72 ppi
1520 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1521 env, TAG_XRESOLUTION, writer);
1522
1523 // y resolution
1524 uint32_t yres[] = { 72, 1 }; // default 72 ppi
1525 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1526 env, TAG_YRESOLUTION, writer);
1527
1528 uint16_t unit = 2; // inches
1529 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1530 env, TAG_RESOLUTIONUNIT, writer);
1531 }
1532
1533 {
1534 // software
1535 std::string software = GetProperty("ro.build.fingerprint", "");
1536 uint32_t count = static_cast<uint32_t>(software.size()) + 1;
1537 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
1538 reinterpret_cast<const uint8_t*>(software.c_str()), TIFF_IFD_0), env, TAG_SOFTWARE,
1539 writer);
1540 }
1541
1542 if (nativeContext->hasCaptureTime()) {
1543 // datetime
1544 String8 captureTime = nativeContext->getCaptureTime();
1545
1546 if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
1547 reinterpret_cast<const uint8_t*>(captureTime.c_str()),
1548 TIFF_IFD_0) != OK) {
1549 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1550 "Invalid metadata for tag %x", TAG_DATETIME);
1551 return nullptr;
1552 }
1553
1554 // datetime original
1555 if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
1556 reinterpret_cast<const uint8_t*>(captureTime.c_str()),
1557 TIFF_IFD_0) != OK) {
1558 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1559 "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
1560 return nullptr;
1561 }
1562 }
1563
1564 {
1565 // TIFF/EP standard id
1566 uint8_t standardId[] = { 1, 0, 0, 0 };
1567 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
1568 TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
1569 }
1570
1571 {
1572 // copyright
1573 uint8_t copyright = '\0'; // empty
1574 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, ©right,
1575 TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
1576 }
1577
1578 {
1579 // exposure time
1580 camera_metadata_entry entry =
1581 results.find(ANDROID_SENSOR_EXPOSURE_TIME);
1582 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);
1583
1584 int64_t exposureTime = *(entry.data.i64);
1585
1586 if (exposureTime < 0) {
1587 // Should be unreachable
1588 jniThrowException(env, "java/lang/IllegalArgumentException",
1589 "Negative exposure time in metadata");
1590 return nullptr;
1591 }
1592
1593 // Ensure exposure time doesn't overflow (for exposures > 4s)
1594 uint32_t denominator = 1000000000;
1595 while (exposureTime > UINT32_MAX) {
1596 exposureTime >>= 1;
1597 denominator >>= 1;
1598 if (denominator == 0) {
1599 // Should be unreachable
1600 jniThrowException(env, "java/lang/IllegalArgumentException",
1601 "Exposure time too long");
1602 return nullptr;
1603 }
1604 }
1605
1606 uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
1607 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
1608 TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);
1609
1610 }
1611
1612 {
1613 // ISO speed ratings
1614 camera_metadata_entry entry =
1615 results.find(ANDROID_SENSOR_SENSITIVITY);
1616 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);
1617
1618 int32_t tempIso = *(entry.data.i32);
1619 if (tempIso < 0) {
1620 jniThrowException(env, "java/lang/IllegalArgumentException",
1621 "Negative ISO value");
1622 return nullptr;
1623 }
1624
1625 if (tempIso > UINT16_MAX) {
1626 ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
1627 tempIso = UINT16_MAX;
1628 }
1629
1630 uint16_t iso = static_cast<uint16_t>(tempIso);
1631 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
1632 TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
1633 }
1634
1635 {
1636 // Baseline exposure
1637 camera_metadata_entry entry =
1638 results.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
1639 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BASELINEEXPOSURE, writer);
1640
1641 // post RAW gain should be boostValue / 100
1642 double postRAWGain = static_cast<double> (entry.data.i32[0]) / 100.f;
1643 // Baseline exposure should be in EV units so log2(gain) =
1644 // log10(gain)/log10(2)
1645 double baselineExposure = std::log(postRAWGain) / std::log(2.0f);
1646 int32_t baseExposureSRat[] = { static_cast<int32_t> (baselineExposure * 100),
1647 100 };
1648 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BASELINEEXPOSURE, 1,
1649 baseExposureSRat, TIFF_IFD_0), env, TAG_BASELINEEXPOSURE, writer);
1650 }
1651
1652 {
1653 // focal length
1654 camera_metadata_entry entry =
1655 results.find(ANDROID_LENS_FOCAL_LENGTH);
1656 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);
1657
1658 uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1659 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
1660 TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
1661 }
1662
1663 {
1664 // f number
1665 camera_metadata_entry entry =
1666 results.find(ANDROID_LENS_APERTURE);
1667 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);
1668
1669 uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1670 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
1671 TIFF_IFD_0), env, TAG_FNUMBER, writer);
1672 }
1673
1674 {
1675 // Set DNG version information
1676 uint8_t version[4] = {1, 4, 0, 0};
1677 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
1678 env, TAG_DNGVERSION, writer);
1679
1680 uint8_t backwardVersion[4] = {1, 1, 0, 0};
1681 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
1682 TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
1683 }
1684
1685 {
1686 // Set whitelevel
1687 camera_metadata_entry entry =
1688 characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL);
1689 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
1690 uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
1691 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
1692 env, TAG_WHITELEVEL, writer);
1693 }
1694
1695 {
1696 // Set default scale
1697 uint32_t defaultScale[4] = {1, 1, 1, 1};
1698 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
1699 TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
1700 }
1701
1702 bool singleIlluminant = false;
1703 if (isBayer) {
1704 // Set calibration illuminants
1705 camera_metadata_entry entry1 =
1706 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
1707 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
1708 camera_metadata_entry entry2 =
1709 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
1710 if (entry2.count == 0) {
1711 singleIlluminant = true;
1712 }
1713 uint16_t ref1 = entry1.data.u8[0];
1714
1715 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
1716 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);
1717
1718 if (!singleIlluminant) {
1719 uint16_t ref2 = entry2.data.u8[0];
1720 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
1721 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
1722 }
1723 }
1724
1725 if (isBayer) {
1726 // Set color transforms
1727 camera_metadata_entry entry1 =
1728 characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
1729 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);
1730
1731 int32_t colorTransform1[entry1.count * 2];
1732
1733 size_t ctr = 0;
1734 for(size_t i = 0; i < entry1.count; ++i) {
1735 colorTransform1[ctr++] = entry1.data.r[i].numerator;
1736 colorTransform1[ctr++] = entry1.data.r[i].denominator;
1737 }
1738
1739 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
1740 colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);
1741
1742 if (!singleIlluminant) {
1743 camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2);
1744 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
1745 int32_t colorTransform2[entry2.count * 2];
1746
1747 ctr = 0;
1748 for(size_t i = 0; i < entry2.count; ++i) {
1749 colorTransform2[ctr++] = entry2.data.r[i].numerator;
1750 colorTransform2[ctr++] = entry2.data.r[i].denominator;
1751 }
1752
1753 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
1754 colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
1755 }
1756 }
1757
1758 if (isBayer) {
1759 // Set calibration transforms
1760 camera_metadata_entry entry1 =
1761 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
1762 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);
1763
1764 int32_t calibrationTransform1[entry1.count * 2];
1765
1766 size_t ctr = 0;
1767 for(size_t i = 0; i < entry1.count; ++i) {
1768 calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
1769 calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
1770 }
1771
1772 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
1773 calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);
1774
1775 if (!singleIlluminant) {
1776 camera_metadata_entry entry2 =
1777 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
1778 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
1779 int32_t calibrationTransform2[entry2.count * 2];
1780
1781 ctr = 0;
1782 for(size_t i = 0; i < entry2.count; ++i) {
1783 calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
1784 calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
1785 }
1786
1787 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
1788 calibrationTransform2, TIFF_IFD_0), env, TAG_CAMERACALIBRATION2, writer);
1789 }
1790 }
1791
1792 if (isBayer) {
1793 // Set forward transforms
1794 camera_metadata_entry entry1 =
1795 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
1796 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);
1797
1798 int32_t forwardTransform1[entry1.count * 2];
1799
1800 size_t ctr = 0;
1801 for(size_t i = 0; i < entry1.count; ++i) {
1802 forwardTransform1[ctr++] = entry1.data.r[i].numerator;
1803 forwardTransform1[ctr++] = entry1.data.r[i].denominator;
1804 }
1805
1806 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
1807 forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);
1808
1809 if (!singleIlluminant) {
1810 camera_metadata_entry entry2 =
1811 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2);
1812 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
1813 int32_t forwardTransform2[entry2.count * 2];
1814
1815 ctr = 0;
1816 for(size_t i = 0; i < entry2.count; ++i) {
1817 forwardTransform2[ctr++] = entry2.data.r[i].numerator;
1818 forwardTransform2[ctr++] = entry2.data.r[i].denominator;
1819 }
1820
1821 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
1822 forwardTransform2, TIFF_IFD_0), env, TAG_FORWARDMATRIX2, writer);
1823 }
1824 }
1825
1826 if (isBayer) {
1827 // Set camera neutral
1828 camera_metadata_entry entry =
1829 results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
1830 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
1831 uint32_t cameraNeutral[entry.count * 2];
1832
1833 size_t ctr = 0;
1834 for(size_t i = 0; i < entry.count; ++i) {
1835 cameraNeutral[ctr++] =
1836 static_cast<uint32_t>(entry.data.r[i].numerator);
1837 cameraNeutral[ctr++] =
1838 static_cast<uint32_t>(entry.data.r[i].denominator);
1839 }
1840
1841 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
1842 TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
1843 }
1844
1845
1846 {
1847 // Set dimensions
1848 if (calculateAndSetCrop(env, characteristics, writer, isMaximumResolutionMode) != OK) {
1849 return nullptr;
1850 }
1851 camera_metadata_entry entry = characteristics.find(
1852 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1853 isMaximumResolutionMode));
1854 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
1855 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1856 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1857 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1858 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1859
1860 // If we only have a buffer containing the pre-correction rectangle, ignore the offset
1861 // relative to the pixel array.
1862 if (imageWidth == width && imageHeight == height) {
1863 xmin = 0;
1864 ymin = 0;
1865 }
1866
1867 uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
1868 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
1869 env, TAG_ACTIVEAREA, writer);
1870 }
1871
1872 {
1873 // Setup unique camera model tag
1874 std::string model = GetProperty("ro.product.model", "");
1875 std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1876 std::string brand = GetProperty("ro.product.brand", "");
1877
1878 String8 cameraModel(model.c_str());
1879 cameraModel += "-";
1880 cameraModel += manufacturer.c_str();
1881 cameraModel += "-";
1882 cameraModel += brand.c_str();
1883
1884 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
1885 reinterpret_cast<const uint8_t*>(
1886 cameraModel.c_str()),
1887 TIFF_IFD_0),
1888 env, TAG_UNIQUECAMERAMODEL, writer);
1889 }
1890
1891 {
1892 // Setup sensor noise model
1893 camera_metadata_entry entry =
1894 results.find(ANDROID_SENSOR_NOISE_PROFILE);
1895
1896 const unsigned long numPlaneColors = isBayer ? 3 : 1;
1897 const unsigned long numCfaChannels = isBayer ? 4 : 1;
1898
1899 uint8_t cfaOut[numCfaChannels];
1900 if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
1901 jniThrowException(env, "java/lang/IllegalArgumentException",
1902 "Invalid CFA from camera characteristics");
1903 return nullptr;
1904 }
1905
1906 double noiseProfile[numPlaneColors * 2];
1907
1908 if (entry.count > 0) {
1909 if (entry.count != numCfaChannels * 2) {
1910 ALOGW("%s: Invalid entry count %zu for noise profile returned "
1911 "in characteristics, no noise profile tag written...",
1912 __FUNCTION__, entry.count);
1913 } else {
1914 if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
1915 cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {
1916
1917 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
1918 numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
1919 writer);
1920 } else {
1921 ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
1922 " tag written...", __FUNCTION__);
1923 }
1924 }
1925 } else {
1926 ALOGW("%s: No noise profile found in result metadata. Image quality may be reduced.",
1927 __FUNCTION__);
1928 }
1929 }
1930
1931 {
1932 // Set up opcode List 2
1933 OpcodeListBuilder builder;
1934 status_t err = OK;
1935
1936 // Set up lens shading map
1937 camera_metadata_entry entry1 =
1938 characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
1939
1940 uint32_t lsmWidth = 0;
1941 uint32_t lsmHeight = 0;
1942
1943 if (entry1.count != 0) {
1944 lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
1945 lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
1946 }
1947
1948 camera_metadata_entry entry2 = results.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
1949
1950 camera_metadata_entry entry = characteristics.find(
1951 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1952 isMaximumResolutionMode));
1953 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1954 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1955 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1956 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1957 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1958 if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
1959 // GainMap rectangle is relative to the active area origin.
1960 err = builder.addGainMapsForMetadata(lsmWidth,
1961 lsmHeight,
1962 0,
1963 0,
1964 height,
1965 width,
1966 opcodeCfaLayout,
1967 entry2.data.f);
1968 if (err != OK) {
1969 ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
1970 jniThrowRuntimeException(env, "failed to add lens shading map.");
1971 return nullptr;
1972 }
1973 }
1974
1975 // Hot pixel map is specific to bayer camera per DNG spec.
1976 if (isBayer) {
1977 // Set up bad pixel correction list
1978 // We first check the capture result. If the hot pixel map is not
1979 // available, as a fallback, try the static characteristics.
1980 camera_metadata_entry entry3 = results.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1981 if (entry3.count == 0) {
1982 entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1983 }
1984
1985 if ((entry3.count % 2) != 0) {
1986 ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
1987 __FUNCTION__);
1988 jniThrowRuntimeException(env, "failed to add hotpixel map.");
1989 return nullptr;
1990 }
1991
1992 // Adjust the bad pixel coordinates to be relative to the origin of the active area
1993 // DNG tag
1994 std::vector<uint32_t> v;
1995 for (size_t i = 0; i < entry3.count; i += 2) {
1996 int32_t x = entry3.data.i32[i];
1997 int32_t y = entry3.data.i32[i + 1];
1998 x -= static_cast<int32_t>(xmin);
1999 y -= static_cast<int32_t>(ymin);
2000 if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
2001 static_cast<uint32_t>(y) >= height) {
2002 continue;
2003 }
2004 v.push_back(x);
2005 v.push_back(y);
2006 }
2007 const uint32_t* badPixels = &v[0];
2008 uint32_t badPixelCount = v.size();
2009
2010 if (badPixelCount > 0) {
2011 err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
2012
2013 if (err != OK) {
2014 ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
2015 jniThrowRuntimeException(env, "failed to add hotpixel map.");
2016 return nullptr;
2017 }
2018 }
2019 }
2020
2021 if (builder.getCount() > 0) {
2022 size_t listSize = builder.getSize();
2023 uint8_t opcodeListBuf[listSize];
2024 err = builder.buildOpList(opcodeListBuf);
2025 if (err == OK) {
2026 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize,
2027 opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST2, writer);
2028 } else {
2029 ALOGE("%s: Could not build list of opcodes for lens shading map and bad pixel "
2030 "correction.", __FUNCTION__);
2031 jniThrowRuntimeException(env, "failed to construct opcode list for lens shading "
2032 "map and bad pixel correction");
2033 return nullptr;
2034 }
2035 }
2036 }
2037
2038 {
2039 // Set up opcode List 3
2040 OpcodeListBuilder builder;
2041 status_t err = OK;
2042
2043 // Set up rectilinear distortion correction
2044 std::array<float, 6> distortion = {1.f, 0.f, 0.f, 0.f, 0.f, 0.f};
2045 bool gotDistortion = false;
2046
2047 // The capture result would have the correct intrinsic calibration
2048 // regardless of the sensor pixel mode.
2049 camera_metadata_entry entry4 =
2050 results.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
2051
2052 if (entry4.count == 5) {
2053 float cx = entry4.data.f[/*c_x*/2];
2054 float cy = entry4.data.f[/*c_y*/3];
2055 // Assuming f_x = f_y, or at least close enough.
2056 // Also assuming s = 0, or at least close enough.
2057 float f = entry4.data.f[/*f_x*/0];
2058
2059 camera_metadata_entry entry3 =
2060 results.find(ANDROID_LENS_DISTORTION);
2061 if (entry3.count == 5) {
2062 gotDistortion = true;
2063
2064 // Scale the distortion coefficients to create a zoom in warpped image so that all
2065 // pixels are drawn within input image.
2066 for (size_t i = 0; i < entry3.count; i++) {
2067 distortion[i+1] = entry3.data.f[i];
2068 }
2069
2070 if (preWidth == imageWidth && preHeight == imageHeight) {
2071 normalizeLensDistortion(distortion, cx, cy, f, preWidth, preHeight);
2072 } else {
2073 // image size == pixel array size (contains optical black pixels)
2074 // cx/cy is defined in preCorrArray so adding the offset
2075 // Also changes default xmin/ymin so that pixels are only
2076 // sampled within preCorrection array
2077 normalizeLensDistortion(
2078 distortion, cx + preXMin, cy + preYMin, f, preWidth, preHeight,
2079 preXMin, preYMin);
2080 }
2081
2082 float m_x = std::fmaxf(preWidth - cx, cx);
2083 float m_y = std::fmaxf(preHeight - cy, cy);
2084 float m_sq = m_x*m_x + m_y*m_y;
2085 float m = sqrtf(m_sq); // distance to farthest corner from optical center
2086 float f_sq = f * f;
2087 // Conversion factors from Camera2 K factors for new LENS_DISTORTION field
2088 // to DNG spec.
2089 //
2090 // Camera2 / OpenCV assume distortion is applied in a space where focal length
2091 // is factored out, while DNG assumes a normalized space where the distance
2092 // from optical center to the farthest corner is 1.
2093 // Scale from camera2 to DNG spec accordingly.
2094 // distortion[0] is always 1 with the new LENS_DISTORTION field.
2095 const double convCoeff[5] = {
2096 m_sq / f_sq,
2097 pow(m_sq, 2) / pow(f_sq, 2),
2098 pow(m_sq, 3) / pow(f_sq, 3),
2099 m / f,
2100 m / f
2101 };
2102 for (size_t i = 0; i < entry3.count; i++) {
2103 distortion[i+1] *= convCoeff[i];
2104 }
2105 } else {
2106 entry3 = results.find(ANDROID_LENS_RADIAL_DISTORTION);
2107 if (entry3.count == 6) {
2108 gotDistortion = true;
2109 // Conversion factors from Camera2 K factors to DNG spec. K factors:
2110 //
2111 // Note: these are necessary because our unit system assumes a
2112 // normalized max radius of sqrt(2), whereas the DNG spec's
2113 // WarpRectilinear opcode assumes a normalized max radius of 1.
2114 // Thus, each K coefficient must include the domain scaling
2115 // factor (the DNG domain is scaled by sqrt(2) to emulate the
2116 // domain used by the Camera2 specification).
2117 const double convCoeff[6] = {
2118 sqrt(2),
2119 2 * sqrt(2),
2120 4 * sqrt(2),
2121 8 * sqrt(2),
2122 2,
2123 2
2124 };
2125 for (size_t i = 0; i < entry3.count; i++) {
2126 distortion[i] = entry3.data.f[i] * convCoeff[i];
2127 }
2128 }
2129 }
2130 if (gotDistortion) {
2131 err = builder.addWarpRectilinearForMetadata(
2132 distortion.data(), preWidth, preHeight, cx, cy);
2133 if (err != OK) {
2134 ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
2135 jniThrowRuntimeException(env, "failed to add distortion correction.");
2136 return nullptr;
2137 }
2138 }
2139 }
2140
2141 if (builder.getCount() > 0) {
2142 size_t listSize = builder.getSize();
2143 uint8_t opcodeListBuf[listSize];
2144 err = builder.buildOpList(opcodeListBuf);
2145 if (err == OK) {
2146 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize,
2147 opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
2148 } else {
2149 ALOGE("%s: Could not build list of opcodes for distortion correction.",
2150 __FUNCTION__);
2151 jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
2152 " correction");
2153 return nullptr;
2154 }
2155 }
2156 }
2157
2158 {
2159 // Set up orientation tags.
2160 // Note: There's only one orientation field for the whole file, in IFD0
2161 // The main image and any thumbnails therefore have the same orientation.
2162 uint16_t orientation = nativeContext->getOrientation();
2163 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
2164 env, TAG_ORIENTATION, writer);
2165
2166 }
2167
2168 if (nativeContext->hasDescription()){
2169 // Set Description
2170 String8 description = nativeContext->getDescription();
2171 size_t len = description.bytes() + 1;
2172 if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
2173 reinterpret_cast<const uint8_t*>(description.c_str()),
2174 TIFF_IFD_0) != OK) {
2175 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
2176 "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
2177 }
2178 }
2179
2180 if (nativeContext->hasGpsData()) {
2181 // Set GPS tags
2182 GpsData gpsData = nativeContext->getGpsData();
2183 if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
2184 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
2185 ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
2186 TIFF_IFD_0);
2187 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
2188 return nullptr;
2189 }
2190 }
2191
2192 {
2193 uint8_t version[] = {2, 3, 0, 0};
2194 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
2195 TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
2196 }
2197
2198 {
2199 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
2200 GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
2201 TAG_GPSLATITUDEREF, writer);
2202 }
2203
2204 {
2205 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
2206 GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
2207 TAG_GPSLONGITUDEREF, writer);
2208 }
2209
2210 {
2211 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
2212 TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
2213 }
2214
2215 {
2216 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
2217 TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
2218 }
2219
2220 {
2221 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
2222 TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
2223 }
2224
2225 {
2226 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
2227 GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
2228 TAG_GPSDATESTAMP, writer);
2229 }
2230 }
2231
2232
2233 if (nativeContext->hasThumbnail()) {
2234 if (!writer->hasIfd(TIFF_IFD_SUB1)) {
2235 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
2236 ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
2237 TIFF_IFD_0);
2238 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
2239 return nullptr;
2240 }
2241 }
2242
2243 // Setup thumbnail tags
2244
2245 {
2246 // Set photometric interpretation
2247 uint16_t interpretation = 2; // RGB
2248 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
2249 &interpretation, TIFF_IFD_SUB1), env, TAG_PHOTOMETRICINTERPRETATION, writer);
2250 }
2251
2252 {
2253 // Set planar configuration
2254 uint16_t config = 1; // Chunky
2255 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
2256 TIFF_IFD_SUB1), env, TAG_PLANARCONFIGURATION, writer);
2257 }
2258
2259 {
2260 // Set samples per pixel
2261 uint16_t samples = SAMPLES_PER_RGB_PIXEL;
2262 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
2263 TIFF_IFD_SUB1), env, TAG_SAMPLESPERPIXEL, writer);
2264 }
2265
2266 {
2267 // Set bits per sample
2268 uint16_t bits[SAMPLES_PER_RGB_PIXEL];
2269 for (int i = 0; i < SAMPLES_PER_RGB_PIXEL; i++) bits[i] = BITS_PER_RGB_SAMPLE;
2270 BAIL_IF_INVALID_RET_NULL_SP(
2271 writer->addEntry(TAG_BITSPERSAMPLE, SAMPLES_PER_RGB_PIXEL, bits, TIFF_IFD_SUB1),
2272 env, TAG_BITSPERSAMPLE, writer);
2273 }
2274
2275 {
2276 // Set subfiletype
2277 uint32_t subfileType = 1; // Thumbnail image
2278 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
2279 TIFF_IFD_SUB1), env, TAG_NEWSUBFILETYPE, writer);
2280 }
2281
2282 {
2283 // Set compression
2284 uint16_t compression = 1; // None
2285 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
2286 TIFF_IFD_SUB1), env, TAG_COMPRESSION, writer);
2287 }
2288
2289 {
2290 // Set dimensions
2291 uint32_t uWidth = nativeContext->getThumbnailWidth();
2292 uint32_t uHeight = nativeContext->getThumbnailHeight();
2293 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_SUB1),
2294 env, TAG_IMAGEWIDTH, writer);
2295 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight,
2296 TIFF_IFD_SUB1), env, TAG_IMAGELENGTH, writer);
2297 }
2298
2299 {
2300 // x resolution
2301 uint32_t xres[] = { 72, 1 }; // default 72 ppi
2302 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_SUB1),
2303 env, TAG_XRESOLUTION, writer);
2304
2305 // y resolution
2306 uint32_t yres[] = { 72, 1 }; // default 72 ppi
2307 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_SUB1),
2308 env, TAG_YRESOLUTION, writer);
2309
2310 uint16_t unit = 2; // inches
2311 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit,
2312 TIFF_IFD_SUB1), env, TAG_RESOLUTIONUNIT, writer);
2313 }
2314 }
2315
2316 if (writer->addStrip(TIFF_IFD_0) != OK) {
2317 ALOGE("%s: Could not setup main image strip tags.", __FUNCTION__);
2318 jniThrowException(env, "java/lang/IllegalStateException",
2319 "Failed to setup main image strip tags.");
2320 return nullptr;
2321 }
2322
2323 if (writer->hasIfd(TIFF_IFD_SUB1)) {
2324 if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
2325 ALOGE("%s: Could not thumbnail image strip tags.", __FUNCTION__);
2326 jniThrowException(env, "java/lang/IllegalStateException",
2327 "Failed to setup thumbnail image strip tags.");
2328 return nullptr;
2329 }
2330 }
2331 return writer;
2332 }
2333
// Tears down the native side of a DngCreator: dropping the stored native
// context releases the last strong reference to it (see
// DngCreator_setNativeContext), freeing all state held for this instance.
static void DngCreator_destroy(JNIEnv* env, jobject thiz) {
    ALOGV("%s:", __FUNCTION__);
    DngCreator_setNativeContext(env, thiz, nullptr);
}
2338
DngCreator_nativeSetOrientation(JNIEnv * env,jobject thiz,jint orient)2339 static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) {
2340 ALOGV("%s:", __FUNCTION__);
2341
2342 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2343 if (context == nullptr) {
2344 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2345 jniThrowException(env, "java/lang/AssertionError",
2346 "setOrientation called with uninitialized DngCreator");
2347 return;
2348 }
2349
2350 uint16_t orientation = static_cast<uint16_t>(orient);
2351 context->setOrientation(orientation);
2352 }
2353
DngCreator_nativeSetDescription(JNIEnv * env,jobject thiz,jstring description)2354 static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring description) {
2355 ALOGV("%s:", __FUNCTION__);
2356
2357 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2358 if (context == nullptr) {
2359 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2360 jniThrowException(env, "java/lang/AssertionError",
2361 "setDescription called with uninitialized DngCreator");
2362 return;
2363 }
2364
2365 const char* desc = env->GetStringUTFChars(description, nullptr);
2366 context->setDescription(String8(desc));
2367 env->ReleaseStringUTFChars(description, desc);
2368 }
2369
DngCreator_nativeSetGpsTags(JNIEnv * env,jobject thiz,jintArray latTag,jstring latRef,jintArray longTag,jstring longRef,jstring dateTag,jintArray timeTag)2370 static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag,
2371 jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) {
2372 ALOGV("%s:", __FUNCTION__);
2373
2374 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2375 if (context == nullptr) {
2376 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2377 jniThrowException(env, "java/lang/AssertionError",
2378 "setGpsTags called with uninitialized DngCreator");
2379 return;
2380 }
2381
2382 GpsData data;
2383
2384 jsize latLen = env->GetArrayLength(latTag);
2385 jsize longLen = env->GetArrayLength(longTag);
2386 jsize timeLen = env->GetArrayLength(timeTag);
2387 if (latLen != GpsData::GPS_VALUE_LENGTH) {
2388 jniThrowException(env, "java/lang/IllegalArgumentException",
2389 "invalid latitude tag length");
2390 return;
2391 } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
2392 jniThrowException(env, "java/lang/IllegalArgumentException",
2393 "invalid longitude tag length");
2394 return;
2395 } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
2396 jniThrowException(env, "java/lang/IllegalArgumentException",
2397 "invalid time tag length");
2398 return;
2399 }
2400
2401 env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2402 reinterpret_cast<jint*>(&data.mLatitude));
2403 env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2404 reinterpret_cast<jint*>(&data.mLongitude));
2405 env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2406 reinterpret_cast<jint*>(&data.mTimestamp));
2407
2408
2409 env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef));
2410 data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
2411 env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef));
2412 data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
2413 env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1,
2414 reinterpret_cast<char*>(&data.mDate));
2415 data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';
2416
2417 context->setGpsData(data);
2418 }
2419
DngCreator_nativeSetThumbnail(JNIEnv * env,jobject thiz,jobject buffer,jint width,jint height)2420 static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width,
2421 jint height) {
2422 ALOGV("%s:", __FUNCTION__);
2423
2424 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2425 if (context == nullptr) {
2426 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2427 jniThrowException(env, "java/lang/AssertionError",
2428 "setThumbnail called with uninitialized DngCreator");
2429 return;
2430 }
2431
2432 size_t fullSize = width * height * BYTES_PER_RGB_PIXEL;
2433 jlong capacity = env->GetDirectBufferCapacity(buffer);
2434 if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) {
2435 jniThrowExceptionFmt(env, "java/lang/AssertionError",
2436 "Invalid size %d for thumbnail, expected size was %d",
2437 capacity, fullSize);
2438 return;
2439 }
2440
2441 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
2442 if (pixelBytes == nullptr) {
2443 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2444 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2445 return;
2446 }
2447
2448 if (!context->setThumbnail(pixelBytes, width, height)) {
2449 jniThrowException(env, "java/lang/IllegalStateException",
2450 "Failed to set thumbnail.");
2451 return;
2452 }
2453 }
2454
2455 // TODO: Refactor out common preamble for the two nativeWrite methods.
DngCreator_nativeWriteImage(JNIEnv * env,jobject thiz,jobject outStream,jint width,jint height,jobject inBuffer,jint rowStride,jint pixStride,jlong offset,jboolean isDirect)2456 static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width,
2457 jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset,
2458 jboolean isDirect) {
2459 ALOGV("%s:", __FUNCTION__);
2460 ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, "
2461 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2462 height, rowStride, pixStride, offset);
2463 uint32_t rStride = static_cast<uint32_t>(rowStride);
2464 uint32_t pStride = static_cast<uint32_t>(pixStride);
2465 uint32_t uWidth = static_cast<uint32_t>(width);
2466 uint32_t uHeight = static_cast<uint32_t>(height);
2467 uint64_t uOffset = static_cast<uint64_t>(offset);
2468
2469 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2470 if(env->ExceptionCheck()) {
2471 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2472 return;
2473 }
2474
2475 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2476 if (context == nullptr) {
2477 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2478 jniThrowException(env, "java/lang/AssertionError",
2479 "Write called with uninitialized DngCreator");
2480 return;
2481 }
2482 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2483
2484 if (writer.get() == nullptr) {
2485 return;
2486 }
2487
2488 // Validate DNG size
2489 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2490 return;
2491 }
2492
2493 sp<JniInputByteBuffer> inBuf;
2494 Vector<StripSource*> sources;
2495 sp<DirectStripSource> thumbnailSource;
2496 uint32_t targetIfd = TIFF_IFD_0;
2497 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2498 if (hasThumbnail) {
2499 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2500 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2501 uint32_t thumbWidth = context->getThumbnailWidth();
2502 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_SUB1,
2503 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2504 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2505 SAMPLES_PER_RGB_PIXEL);
2506 }
2507
2508 if (isDirect) {
2509 size_t fullSize = rStride * uHeight;
2510 jlong capacity = env->GetDirectBufferCapacity(inBuffer);
2511 if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) {
2512 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
2513 "Invalid size %d for Image, size given in metadata is %d at current stride",
2514 capacity, fullSize);
2515 return;
2516 }
2517
2518 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer));
2519 if (pixelBytes == nullptr) {
2520 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2521 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2522 return;
2523 }
2524
2525 ALOGV("%s: Using direct-type strip source.", __FUNCTION__);
2526 DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride,
2527 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2528 sources.add(&stripSource);
2529 if (thumbnailSource.get() != nullptr) {
2530 sources.add(thumbnailSource.get());
2531 }
2532
2533 status_t ret = OK;
2534 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2535 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2536 if (!env->ExceptionCheck()) {
2537 jniThrowExceptionFmt(env, "java/io/IOException",
2538 "Encountered error %d while writing file.", ret);
2539 }
2540 return;
2541 }
2542 } else {
2543 inBuf = new JniInputByteBuffer(env, inBuffer);
2544
2545 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2546 InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride,
2547 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2548 sources.add(&stripSource);
2549 if (thumbnailSource.get() != nullptr) {
2550 sources.add(thumbnailSource.get());
2551 }
2552
2553 status_t ret = OK;
2554 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2555 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2556 if (!env->ExceptionCheck()) {
2557 jniThrowExceptionFmt(env, "java/io/IOException",
2558 "Encountered error %d while writing file.", ret);
2559 }
2560 return;
2561 }
2562 }
2563
2564 }
2565
DngCreator_nativeWriteInputStream(JNIEnv * env,jobject thiz,jobject outStream,jobject inStream,jint width,jint height,jlong offset)2566 static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream,
2567 jobject inStream, jint width, jint height, jlong offset) {
2568 ALOGV("%s:", __FUNCTION__);
2569
2570 uint32_t rowStride = width * BYTES_PER_SAMPLE;
2571 uint32_t pixStride = BYTES_PER_SAMPLE;
2572 uint32_t uWidth = static_cast<uint32_t>(width);
2573 uint32_t uHeight = static_cast<uint32_t>(height);
2574 uint64_t uOffset = static_cast<uint32_t>(offset);
2575
2576 ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, "
2577 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2578 height, rowStride, pixStride, offset);
2579
2580 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2581 if (env->ExceptionCheck()) {
2582 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2583 return;
2584 }
2585
2586 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2587 if (context == nullptr) {
2588 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2589 jniThrowException(env, "java/lang/AssertionError",
2590 "Write called with uninitialized DngCreator");
2591 return;
2592 }
2593 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2594
2595 if (writer.get() == nullptr) {
2596 return;
2597 }
2598
2599 // Validate DNG size
2600 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2601 return;
2602 }
2603
2604 sp<DirectStripSource> thumbnailSource;
2605 uint32_t targetIfd = TIFF_IFD_0;
2606 Vector<StripSource*> sources;
2607
2608
2609 sp<JniInputStream> in = new JniInputStream(env, inStream);
2610
2611 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2612 InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride,
2613 rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2614 sources.add(&stripSource);
2615
2616 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2617 if (hasThumbnail) {
2618 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2619 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2620 uint32_t width = context->getThumbnailWidth();
2621 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_SUB1,
2622 width, context->getThumbnailHeight(), bytesPerPixel,
2623 bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2624 SAMPLES_PER_RGB_PIXEL);
2625 sources.add(thumbnailSource.get());
2626 }
2627
2628 status_t ret = OK;
2629 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2630 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2631 if (!env->ExceptionCheck()) {
2632 jniThrowExceptionFmt(env, "java/io/IOException",
2633 "Encountered error %d while writing file.", ret);
2634 }
2635 return;
2636 }
2637 }
2638
2639 } /*extern "C" */
2640
// Table mapping android.hardware.camera2.DngCreator native method names and
// JNI type descriptors to their implementations in this file. Registered via
// RegisterMethodsOrDie in register_android_hardware_camera2_DngCreator.
static const JNINativeMethod gDngCreatorMethods[] = {
    // One-time class setup (caches field IDs).
    {"nativeClassInit", "()V", (void*) DngCreator_nativeClassInit},
    // (characteristics, result metadata, capture-time string) -> void
    {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;"
            "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V",
            (void*) DngCreator_init},
    {"nativeDestroy", "()V", (void*) DngCreator_destroy},
    {"nativeSetOrientation", "(I)V", (void*) DngCreator_nativeSetOrientation},
    {"nativeSetDescription", "(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription},
    // (latTag, latRef, longTag, longRef, dateTag, timeTag) -> void
    {"nativeSetGpsTags", "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V",
            (void*) DngCreator_nativeSetGpsTags},
    // (RGB buffer, width, height) -> void
    {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail},
    // (out, width, height, pixel buffer, rowStride, pixStride, offset, isDirect) -> void
    {"nativeWriteImage", "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V",
            (void*) DngCreator_nativeWriteImage},
    // (out, in, width, height, offset) -> void
    {"nativeWriteInputStream", "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V",
            (void*) DngCreator_nativeWriteInputStream},
};
2657
register_android_hardware_camera2_DngCreator(JNIEnv * env)2658 int register_android_hardware_camera2_DngCreator(JNIEnv *env) {
2659 return RegisterMethodsOrDie(env,
2660 "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods));
2661 }
2662