1 /*
2 * Copyright 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_NDEBUG 0
18 #define LOG_TAG "DngCreator_JNI"
19 #include <inttypes.h>
20 #include <string.h>
21 #include <algorithm>
22 #include <memory>
23 #include <vector>
24
25 #include <utils/Log.h>
26 #include <utils/Errors.h>
27 #include <utils/StrongPointer.h>
28 #include <utils/RefBase.h>
29 #include <utils/Vector.h>
30 #include <utils/String8.h>
31 #include <cutils/properties.h>
32 #include <system/camera_metadata.h>
33 #include <camera/CameraMetadata.h>
34 #include <img_utils/DngUtils.h>
35 #include <img_utils/TagDefinitions.h>
36 #include <img_utils/TiffIfd.h>
37 #include <img_utils/TiffWriter.h>
38 #include <img_utils/Output.h>
39 #include <img_utils/Input.h>
40 #include <img_utils/StripSource.h>
41
42 #include "core_jni_helpers.h"
43
44 #include "android_runtime/AndroidRuntime.h"
45 #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
46
47 #include <jni.h>
48 #include <JNIHelp.h>
49
50 using namespace android;
51 using namespace img_utils;
52
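// Helper macros used below: each checks a status_t expression (or a metadata entry count),
// throws IllegalArgumentException naming the offending TIFF tag, and returns the sentinel
// appropriate to the enclosing function (false, nullptr, or -1).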
53 #define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
54 if ((expr) != OK) { \
55 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
56 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
57 return false; \
58 }
59
60
61 #define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
62 if ((expr) != OK) { \
63 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
64 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
65 return nullptr; \
66 }
67
68
69 #define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
70 if ((expr) != OK) { \
71 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
72 "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
73 return -1; \
74 }
75
76 #define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
77 if (entry.count == 0) { \
78 jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
79 "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
80 return nullptr; \
81 }
82
83
84 #define ANDROID_DNGCREATOR_CTX_JNI_ID "mNativeContext"
85
86 static struct {
87 jfieldID mNativeContext;
88 } gDngCreatorClassInfo;
89
90 static struct {
91 jmethodID mWriteMethod;
92 } gOutputStreamClassInfo;
93
94 static struct {
95 jmethodID mReadMethod;
96 jmethodID mSkipMethod;
97 } gInputStreamClassInfo;
98
99 static struct {
100 jmethodID mGetMethod;
101 } gInputByteBufferClassInfo;
102
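// Layout constants: RAW sensor data is written as 16-bit samples with one sample per pixel,
// while the RGB thumbnail uses 8-bit samples with three samples per pixel. The TIFF_IFD_*
// values are the logical IFD indices used with the TiffWriter below.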
103 enum {
104 BITS_PER_SAMPLE = 16,
105 BYTES_PER_SAMPLE = 2,
106 BYTES_PER_RGB_PIXEL = 3,
107 BITS_PER_RGB_SAMPLE = 8,
108 BYTES_PER_RGB_SAMPLE = 1,
109 SAMPLES_PER_RGB_PIXEL = 3,
110 SAMPLES_PER_RAW_PIXEL = 1,
111 TIFF_IFD_0 = 0,
112 TIFF_IFD_SUB1 = 1,
113 TIFF_IFD_GPSINFO = 2,
114 };
115
116
117 /**
118 * POD container class for GPS tag data.
119 */
120 class GpsData {
121 public:
122 enum {
123 GPS_VALUE_LENGTH = 6,
124 GPS_REF_LENGTH = 2,
125 GPS_DATE_LENGTH = 11,
126 };
127
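// Latitude, longitude, and timestamp are each stored as three unsigned rationals
// (numerator/denominator pairs, hence six uint32_t values); the reference fields are
// two-byte null-terminated strings (e.g. "N"/"S", "E"/"W"), and the date is expected to be
// a null-terminated "YYYY:MM:DD" string (11 bytes).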
128 uint32_t mLatitude[GPS_VALUE_LENGTH];
129 uint32_t mLongitude[GPS_VALUE_LENGTH];
130 uint32_t mTimestamp[GPS_VALUE_LENGTH];
131 uint8_t mLatitudeRef[GPS_REF_LENGTH];
132 uint8_t mLongitudeRef[GPS_REF_LENGTH];
133 uint8_t mDate[GPS_DATE_LENGTH];
134 };
135
136 // ----------------------------------------------------------------------------
137
138 /**
139 * Container class for the persistent native context.
140 */
141
142 class NativeContext : public LightRefBase<NativeContext> {
143 public:
144 enum {
145 DATETIME_COUNT = 20,
146 };
147
148 NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
149 virtual ~NativeContext();
150
151 TiffWriter* getWriter();
152
153 std::shared_ptr<const CameraMetadata> getCharacteristics() const;
154 std::shared_ptr<const CameraMetadata> getResult() const;
155
156 uint32_t getThumbnailWidth() const;
157 uint32_t getThumbnailHeight() const;
158 const uint8_t* getThumbnail() const;
159 bool hasThumbnail() const;
160
161 bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);
162
163 void setOrientation(uint16_t orientation);
164 uint16_t getOrientation() const;
165
166 void setDescription(const String8& desc);
167 String8 getDescription() const;
168 bool hasDescription() const;
169
170 void setGpsData(const GpsData& data);
171 GpsData getGpsData() const;
172 bool hasGpsData() const;
173
174 void setCaptureTime(const String8& formattedCaptureTime);
175 String8 getCaptureTime() const;
176 bool hasCaptureTime() const;
177
178 private:
179 Vector<uint8_t> mCurrentThumbnail;
180 TiffWriter mWriter;
181 std::shared_ptr<CameraMetadata> mCharacteristics;
182 std::shared_ptr<CameraMetadata> mResult;
183 uint32_t mThumbnailWidth;
184 uint32_t mThumbnailHeight;
185 uint16_t mOrientation;
186 bool mThumbnailSet;
187 bool mGpsSet;
188 bool mDescriptionSet;
189 bool mCaptureTimeSet;
190 String8 mDescription;
191 GpsData mGpsData;
192 String8 mFormattedCaptureTime;
193 };
194
195 NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
196 mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
197 mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
198 mThumbnailHeight(0), mOrientation(0), mThumbnailSet(false), mGpsSet(false),
199 mDescriptionSet(false), mCaptureTimeSet(false) {}
200
201 NativeContext::~NativeContext() {}
202
203 TiffWriter* NativeContext::getWriter() {
204 return &mWriter;
205 }
206
207 std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
208 return mCharacteristics;
209 }
210
211 std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
212 return mResult;
213 }
214
215 uint32_t NativeContext::getThumbnailWidth() const {
216 return mThumbnailWidth;
217 }
218
219 uint32_t NativeContext::getThumbnailHeight() const {
220 return mThumbnailHeight;
221 }
222
223 const uint8_t* NativeContext::getThumbnail() const {
224 return mCurrentThumbnail.array();
225 }
226
227 bool NativeContext::hasThumbnail() const {
228 return mThumbnailSet;
229 }
230
231 bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
232 mThumbnailWidth = width;
233 mThumbnailHeight = height;
234
235 size_t size = BYTES_PER_RGB_PIXEL * width * height;
236 if (mCurrentThumbnail.resize(size) < 0) {
237 ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
238 return false;
239 }
240
241 uint8_t* thumb = mCurrentThumbnail.editArray();
242 memcpy(thumb, buffer, size);
243 mThumbnailSet = true;
244 return true;
245 }
246
247 void NativeContext::setOrientation(uint16_t orientation) {
248 mOrientation = orientation;
249 }
250
251 uint16_t NativeContext::getOrientation() const {
252 return mOrientation;
253 }
254
255 void NativeContext::setDescription(const String8& desc) {
256 mDescription = desc;
257 mDescriptionSet = true;
258 }
259
260 String8 NativeContext::getDescription() const {
261 return mDescription;
262 }
263
264 bool NativeContext::hasDescription() const {
265 return mDescriptionSet;
266 }
267
268 void NativeContext::setGpsData(const GpsData& data) {
269 mGpsData = data;
270 mGpsSet = true;
271 }
272
273 GpsData NativeContext::getGpsData() const {
274 return mGpsData;
275 }
276
277 bool NativeContext::hasGpsData() const {
278 return mGpsSet;
279 }
280
281 void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
282 mFormattedCaptureTime = formattedCaptureTime;
283 mCaptureTimeSet = true;
284 }
285
286 String8 NativeContext::getCaptureTime() const {
287 return mFormattedCaptureTime;
288 }
289
290 bool NativeContext::hasCaptureTime() const {
291 return mCaptureTimeSet;
292 }
293
294 // End of NativeContext
295 // ----------------------------------------------------------------------------
296
297 /**
298 * Wrapper class for a Java OutputStream.
299 *
300 * This class is not intended to be used across JNI calls.
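* Writes are chunked through a 4096-byte Java byte[] (BYTE_ARRAY_LENGTH), since
* OutputStream#write(byte[], int, int) requires a Java array; the wrapped stream and the
* array are held only as JNI local references, which is why instances must not outlive the
* JNI call that created them.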
301 */
302 class JniOutputStream : public Output, public LightRefBase<JniOutputStream> {
303 public:
304 JniOutputStream(JNIEnv* env, jobject outStream);
305
306 virtual ~JniOutputStream();
307
308 status_t open();
309
310 status_t write(const uint8_t* buf, size_t offset, size_t count);
311
312 status_t close();
313 private:
314 enum {
315 BYTE_ARRAY_LENGTH = 4096
316 };
317 jobject mOutputStream;
318 JNIEnv* mEnv;
319 jbyteArray mByteArray;
320 };
321
322 JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream),
323 mEnv(env) {
324 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
325 if (mByteArray == nullptr) {
326 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
327 }
328 }
329
330 JniOutputStream::~JniOutputStream() {
331 mEnv->DeleteLocalRef(mByteArray);
332 }
333
334 status_t JniOutputStream::open() {
335 // Do nothing
336 return OK;
337 }
338
339 status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) {
340 while(count > 0) {
341 size_t len = BYTE_ARRAY_LENGTH;
342 len = (count > len) ? len : count;
343 mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset));
344
345 if (mEnv->ExceptionCheck()) {
346 return BAD_VALUE;
347 }
348
349 mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray,
350 0, len);
351
352 if (mEnv->ExceptionCheck()) {
353 return BAD_VALUE;
354 }
355
356 count -= len;
357 offset += len;
358 }
359 return OK;
360 }
361
362 status_t JniOutputStream::close() {
363 // Do nothing
364 return OK;
365 }
366
367 // End of JniOutputStream
368 // ----------------------------------------------------------------------------
369
370 /**
371 * Wrapper class for a Java InputStream.
372 *
373 * This class is not intended to be used across JNI calls.
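* Reads are chunked through a 4096-byte Java byte[] via InputStream#read(byte[], int, int),
* and skip() delegates to InputStream#skip(long); both translate pending Java exceptions
* into BAD_VALUE and end-of-stream into NOT_ENOUGH_DATA.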
374 */
375 class JniInputStream : public Input, public LightRefBase<JniInputStream> {
376 public:
377 JniInputStream(JNIEnv* env, jobject inStream);
378
379 status_t open();
380
381 status_t close();
382
383 ssize_t read(uint8_t* buf, size_t offset, size_t count);
384
385 ssize_t skip(size_t count);
386
387 virtual ~JniInputStream();
388 private:
389 enum {
390 BYTE_ARRAY_LENGTH = 4096
391 };
392 jobject mInStream;
393 JNIEnv* mEnv;
394 jbyteArray mByteArray;
395
396 };
397
398 JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
399 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
400 if (mByteArray == nullptr) {
401 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
402 }
403 }
404
405 JniInputStream::~JniInputStream() {
406 mEnv->DeleteLocalRef(mByteArray);
407 }
408
409 ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {
410
411 jint realCount = BYTE_ARRAY_LENGTH;
412 if (count < BYTE_ARRAY_LENGTH) {
413 realCount = count;
414 }
415 jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
416 realCount);
417
418 if (actual < 0) {
419 return NOT_ENOUGH_DATA;
420 }
421
422 if (mEnv->ExceptionCheck()) {
423 return BAD_VALUE;
424 }
425
426 mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
427 if (mEnv->ExceptionCheck()) {
428 return BAD_VALUE;
429 }
430 return actual;
431 }
432
433 ssize_t JniInputStream::skip(size_t count) {
434 jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
435 static_cast<jlong>(count));
436
437 if (mEnv->ExceptionCheck()) {
438 return BAD_VALUE;
439 }
440 if (actual < 0) {
441 return NOT_ENOUGH_DATA;
442 }
443 return actual;
444 }
445
446 status_t JniInputStream::open() {
447 // Do nothing
448 return OK;
449 }
450
451 status_t JniInputStream::close() {
452 // Do nothing
453 return OK;
454 }
455
456 // End of JniInputStream
457 // ----------------------------------------------------------------------------
458
459 /**
460 * Wrapper class for a non-direct Java ByteBuffer.
461 *
462 * This class is not intended to be used across JNI calls.
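* Bytes are pulled out with ByteBuffer#get(byte[], int, int), which advances the buffer's
* position and returns the buffer itself for chaining; the returned local reference is
* deleted immediately in read() to avoid leaking references in long copy loops.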
463 */
464 class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
465 public:
466 JniInputByteBuffer(JNIEnv* env, jobject inBuf);
467
468 status_t open();
469
470 status_t close();
471
472 ssize_t read(uint8_t* buf, size_t offset, size_t count);
473
474 virtual ~JniInputByteBuffer();
475 private:
476 enum {
477 BYTE_ARRAY_LENGTH = 4096
478 };
479 jobject mInBuf;
480 JNIEnv* mEnv;
481 jbyteArray mByteArray;
482 };
483
484 JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
485 mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
486 if (mByteArray == nullptr) {
487 jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
488 }
489 }
490
491 JniInputByteBuffer::~JniInputByteBuffer() {
492 mEnv->DeleteLocalRef(mByteArray);
493 }
494
495 ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
496 jint realCount = BYTE_ARRAY_LENGTH;
497 if (count < BYTE_ARRAY_LENGTH) {
498 realCount = count;
499 }
500
501 jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
502 mByteArray, 0, realCount);
503 mEnv->DeleteLocalRef(chainingBuf);
504
505 if (mEnv->ExceptionCheck()) {
506 ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
507 return BAD_VALUE;
508 }
509
510 mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
511 if (mEnv->ExceptionCheck()) {
512 ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
513 return BAD_VALUE;
514 }
515 return realCount;
516 }
517
518 status_t JniInputByteBuffer::open() {
519 // Do nothing
520 return OK;
521 }
522
523 status_t JniInputByteBuffer::close() {
524 // Do nothing
525 return OK;
526 }
527
528 // End of JniInputByteBuffer
529 // ----------------------------------------------------------------------------
530
531 /**
532 * StripSource subclass for Input types.
533 *
534 * This class is not intended to be used across JNI calls.
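* Supplies one strip of pixel data to the TiffWriter by pulling bytes from an Input:
* writeToStream() first skips `offset` bytes, then copies the image one row (mRowStride
* bytes) at a time. Only contiguous (packed) pixels are supported.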
535 */
536
537 class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
538 public:
539 InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
540 uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
541 uint32_t samplesPerPixel);
542
543 virtual ~InputStripSource();
544
545 virtual status_t writeToStream(Output& stream, uint32_t count);
546
547 virtual uint32_t getIfd() const;
548 protected:
549 uint32_t mIfd;
550 Input* mInput;
551 uint32_t mWidth;
552 uint32_t mHeight;
553 uint32_t mPixStride;
554 uint32_t mRowStride;
555 uint64_t mOffset;
556 JNIEnv* mEnv;
557 uint32_t mBytesPerSample;
558 uint32_t mSamplesPerPixel;
559 };
560
561 InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
562 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
563 uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
564 mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
565 mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
566 mSamplesPerPixel(samplesPerPixel) {}
567
568 InputStripSource::~InputStripSource() {}
569
570 status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
571 uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
572 jlong offset = mOffset;
573
574 if (fullSize != count) {
575 ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
576 fullSize);
577 jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
578 return BAD_VALUE;
579 }
580
581 // Skip offset
582 while (offset > 0) {
583 ssize_t skipped = mInput->skip(offset);
584 if (skipped <= 0) {
585 if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
586 jniThrowExceptionFmt(mEnv, "java/io/IOException",
587 "Early EOF encountered in skip, not enough pixel data for image of size %u",
588 fullSize);
589 skipped = NOT_ENOUGH_DATA;
590 } else {
591 if (!mEnv->ExceptionCheck()) {
592 jniThrowException(mEnv, "java/io/IOException",
593 "Error encountered while skip bytes in input stream.");
594 }
595 }
596
597 return skipped;
598 }
599 offset -= skipped;
600 }
601
602 Vector<uint8_t> row;
603 if (row.resize(mRowStride) < 0) {
604 jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
605 return BAD_VALUE;
606 }
607
608 uint8_t* rowBytes = row.editArray();
609
610 for (uint32_t i = 0; i < mHeight; ++i) {
611 size_t rowFillAmt = 0;
612 size_t rowSize = mRowStride;
613
614 while (rowFillAmt < mRowStride) {
615 ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
616 if (bytesRead <= 0) {
617 if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
618 ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
619 __FUNCTION__, i, bytesRead);
620 jniThrowExceptionFmt(mEnv, "java/io/IOException",
621 "Early EOF encountered, not enough pixel data for image of size %"
622 PRIu32, fullSize);
623 bytesRead = NOT_ENOUGH_DATA;
624 } else {
625 if (!mEnv->ExceptionCheck()) {
626 jniThrowException(mEnv, "java/io/IOException",
627 "Error encountered while reading");
628 }
629 }
630 return bytesRead;
631 }
632 rowFillAmt += bytesRead;
633 rowSize -= bytesRead;
634 }
635
636 if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
637 ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);
638
639 if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
640 mEnv->ExceptionCheck()) {
641 if (!mEnv->ExceptionCheck()) {
642 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
643 }
644 return BAD_VALUE;
645 }
646 } else {
647 ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
648 jniThrowException(mEnv, "java/lang/IllegalStateException",
649 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
650 return BAD_VALUE;
651
652 // TODO: Add support for non-contiguous pixels if needed.
653 }
654 }
655 return OK;
656 }
657
658 uint32_t InputStripSource::getIfd() const {
659 return mIfd;
660 }
661
662 // End of InputStripSource
663 // ----------------------------------------------------------------------------
664
665 /**
666 * StripSource subclass for direct buffer types.
667 *
668 * This class is not intended to be used across JNI calls.
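* Supplies one strip of pixel data from a buffer that is already resident in memory (for
* example a direct ByteBuffer): writeToStream() uses a single write when rows are fully
* contiguous, falls back to one write per row when only pixels are contiguous, and rejects
* per-pixel strides.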
669 */
670
671 class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> {
672 public:
673 DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width,
674 uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
675 uint32_t bytesPerSample, uint32_t samplesPerPixel);
676
677 virtual ~DirectStripSource();
678
679 virtual status_t writeToStream(Output& stream, uint32_t count);
680
681 virtual uint32_t getIfd() const;
682 protected:
683 uint32_t mIfd;
684 const uint8_t* mPixelBytes;
685 uint32_t mWidth;
686 uint32_t mHeight;
687 uint32_t mPixStride;
688 uint32_t mRowStride;
689 uint64_t mOffset;
690 JNIEnv* mEnv;
691 uint32_t mBytesPerSample;
692 uint32_t mSamplesPerPixel;
693 };
694
695 DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd,
696 uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride,
697 uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd),
698 mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride),
699 mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
700 mSamplesPerPixel(samplesPerPixel) {}
701
702 DirectStripSource::~DirectStripSource() {}
703
704 status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) {
705 uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
706
707 if (fullSize != count) {
708 ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
709 fullSize);
710 jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
711 return BAD_VALUE;
712 }
713
714
715 if (mPixStride == mBytesPerSample * mSamplesPerPixel
716 && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) {
717 ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__);
718
719 if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) {
720 if (!mEnv->ExceptionCheck()) {
721 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
722 }
723 return BAD_VALUE;
724 }
725 } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
726 ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__);
727
728 for (size_t i = 0; i < mHeight; ++i) {
729 if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK ||
730 mEnv->ExceptionCheck()) {
731 if (!mEnv->ExceptionCheck()) {
732 jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
733 }
734 return BAD_VALUE;
735 }
736 }
737 } else {
738 ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__);
739
740 jniThrowException(mEnv, "java/lang/IllegalStateException",
741 "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
742 return BAD_VALUE;
743
744 // TODO: Add support for non-contiguous pixels if needed.
745 }
746 return OK;
747
748 }
749
750 uint32_t DirectStripSource::getIfd() const {
751 return mIfd;
752 }
753
754 // End of DirectStripSource
755 // ----------------------------------------------------------------------------
756
757 /**
758 * Calculate the default crop relative to the "active area" of the image sensor (this active area
759 * will always be the pre-correction active area rectangle), and write the DefaultCropOrigin and DefaultCropSize tags.
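* For example, with a 4032x3024 pre-correction active array and the 8-pixel margin used
* below, DefaultCropOrigin becomes (8, 8) and DefaultCropSize becomes 4016x3008.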
760 */
761 static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics,
762 sp<TiffWriter> writer) {
763
764 camera_metadata_ro_entry entry =
765 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
766 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
767 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
768
769 const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation.
770
771 if (width < margin * 2 || height < margin * 2) {
772 ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too"
773 "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width);
774 jniThrowException(env, "java/lang/IllegalStateException",
775 "Pre-correction active area is too small.");
776 return BAD_VALUE;
777 }
778
779 uint32_t defaultCropOrigin[] = {margin, margin};
780 uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin,
781 height - defaultCropOrigin[1] - margin};
782
783 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin,
784 TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer);
785 BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize,
786 TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer);
787
788 return OK;
789 }
790
791 static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer,
792 const CameraMetadata& characteristics, jint width, jint height) {
793 if (width <= 0) {
794 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
795 "Image width %d is invalid", width);
796 return false;
797 }
798
799 if (height <= 0) {
800 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
801 "Image height %d is invalid", height);
802 return false;
803 }
804
805 camera_metadata_ro_entry preCorrectionEntry =
806 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
807 camera_metadata_ro_entry pixelArrayEntry =
808 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE);
809
810 int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]);
811 int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]);
812 int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]);
813 int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]);
814
815 bool matchesPixelArray = (pWidth == width && pHeight == height);
816 bool matchesPreCorrectionArray = (cWidth == width && cHeight == height);
817
818 if (!(matchesPixelArray || matchesPreCorrectionArray)) {
819 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
820 "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel "
821 "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)",
822 width, height, pWidth, pHeight, cWidth, cHeight);
823 return false;
824 }
825
826 return true;
827 }
828
829 static status_t moveEntries(sp<TiffWriter> writer, uint32_t ifdFrom, uint32_t ifdTo,
830 const Vector<uint16_t>& entries) {
831 for (size_t i = 0; i < entries.size(); ++i) {
832 uint16_t tagId = entries[i];
833 sp<TiffEntry> entry = writer->getEntry(tagId, ifdFrom);
834 if (entry.get() == nullptr) {
835 ALOGE("%s: moveEntries failed, entry %u not found in IFD %u", __FUNCTION__, tagId,
836 ifdFrom);
837 return BAD_VALUE;
838 }
839 if (writer->addEntry(entry, ifdTo) != OK) {
840 ALOGE("%s: moveEntries failed, could not add entry %u to IFD %u", __FUNCTION__, tagId,
841 ifdFrom);
842 return BAD_VALUE;
843 }
844 writer->removeEntry(tagId, ifdFrom);
845 }
846 return OK;
847 }
848
849 /**
850 * Write CFA pattern for given CFA enum into cfaOut. cfaOut must have length >= 4.
851 * Returns OK on success, or a negative error code if the CFA enum was invalid.
852 */
853 static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
854 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
855 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
856 cfaEnum);
857 switch(cfa) {
858 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
859 cfaOut[0] = 0;
860 cfaOut[1] = 1;
861 cfaOut[2] = 1;
862 cfaOut[3] = 2;
863 break;
864 }
865 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
866 cfaOut[0] = 1;
867 cfaOut[1] = 0;
868 cfaOut[2] = 2;
869 cfaOut[3] = 1;
870 break;
871 }
872 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
873 cfaOut[0] = 1;
874 cfaOut[1] = 2;
875 cfaOut[2] = 0;
876 cfaOut[3] = 1;
877 break;
878 }
879 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
880 cfaOut[0] = 2;
881 cfaOut[1] = 1;
882 cfaOut[2] = 1;
883 cfaOut[3] = 0;
884 break;
885 }
886 default: {
887 return BAD_VALUE;
888 }
889 }
890 return OK;
891 }
892
893 /**
894 * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to
895 * RGGB for an unknown enum.
896 */
897 static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) {
898 camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
899 static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
900 cfaEnum);
901 switch(cfa) {
902 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
903 return OpcodeListBuilder::CFA_RGGB;
904 }
905 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
906 return OpcodeListBuilder::CFA_GRBG;
907 }
908 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
909 return OpcodeListBuilder::CFA_GBRG;
910 }
911 case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
912 return OpcodeListBuilder::CFA_BGGR;
913 }
914 default: {
915 return OpcodeListBuilder::CFA_RGGB;
916 }
917 }
918 }
919
920 /**
921 * For each color plane, find the corresponding noise profile coefficients given in the
922 * per-channel noise profile. If multiple channels in the CFA correspond to a color in the color
923 * plane, this method takes the pair of noise profile coefficients with the higher S coefficient.
924 *
925 * perChannelNoiseProfile - numChannels * 2 noise profile coefficients.
926 * cfa - numChannels color channels corresponding to each of the per-channel noise profile
927 * coefficients.
928 * numChannels - the number of noise profile coefficient pairs and color channels given in
929 * the perChannelNoiseProfile and cfa arguments, respectively.
930 * planeColors - the color planes in the noise profile output.
931 * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile.
932 * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients.
933 *
934 * returns OK, or a negative error code on failure.
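* For example, with an RGGB CFA (cfa = {0, 1, 1, 2}) and planeColors = {0, 1, 2}, both green
* channels map to plane 1, and the (S, O) pair with the larger S coefficient is kept for
* that plane.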
935 */
936 static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa,
937 size_t numChannels, const uint8_t* planeColors, size_t numPlanes,
938 /*out*/double* noiseProfile) {
939
940 for (size_t p = 0; p < numPlanes; ++p) {
941 size_t S = p * 2;
942 size_t O = p * 2 + 1;
943
944 noiseProfile[S] = 0;
945 noiseProfile[O] = 0;
946 bool uninitialized = true;
947 for (size_t c = 0; c < numChannels; ++c) {
948 if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) {
949 noiseProfile[S] = perChannelNoiseProfile[c * 2];
950 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1];
951 uninitialized = false;
952 }
953 }
954 if (uninitialized) {
955 ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu",
956 __FUNCTION__, p);
957 return BAD_VALUE;
958 }
959 }
960 return OK;
961 }
962
963 // ----------------------------------------------------------------------------
964 extern "C" {
965
966 static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) {
967 ALOGV("%s:", __FUNCTION__);
968 return reinterpret_cast<NativeContext*>(env->GetLongField(thiz,
969 gDngCreatorClassInfo.mNativeContext));
970 }
971
972 static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) {
973 ALOGV("%s:", __FUNCTION__);
974 NativeContext* current = DngCreator_getNativeContext(env, thiz);
975
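// The Java mNativeContext field stores a raw pointer but owns a strong reference: take a
// reference on the incoming context before dropping the reference held by any previously
// stored context, then publish the new pointer.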
976 if (context != nullptr) {
977 context->incStrong((void*) DngCreator_setNativeContext);
978 }
979
980 if (current) {
981 current->decStrong((void*) DngCreator_setNativeContext);
982 }
983
984 env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext,
985 reinterpret_cast<jlong>(context.get()));
986 }
987
988 static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) {
989 ALOGV("%s:", __FUNCTION__);
990
991 gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env,
992 clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J");
993
994 jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream");
995 gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env,
996 outputStreamClazz, "write", "([BII)V");
997
998 jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream");
999 gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I");
1000 gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J");
1001
1002 jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer");
1003 gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env,
1004 inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;");
1005 }
1006
1007 static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr,
1008 jobject resultsPtr, jstring formattedCaptureTime) {
1009 ALOGV("%s:", __FUNCTION__);
1010 CameraMetadata characteristics;
1011 CameraMetadata results;
1012 if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) {
1013 jniThrowException(env, "java/lang/AssertionError",
1014 "No native metadata defined for camera characteristics.");
1015 return;
1016 }
1017 if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) {
1018 jniThrowException(env, "java/lang/AssertionError",
1019 "No native metadata defined for capture results.");
1020 return;
1021 }
1022
1023 sp<NativeContext> nativeContext = new NativeContext(characteristics, results);
1024
1025 const char* captureTime = env->GetStringUTFChars(formattedCaptureTime, nullptr);
1026
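// The formatted capture time is expected in the EXIF DateTime layout "YYYY:MM:DD HH:MM:SS",
// i.e. 19 characters plus the terminating null (DATETIME_COUNT == 20).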
1027 size_t len = strlen(captureTime) + 1;
1028 if (len != NativeContext::DATETIME_COUNT) {
1029 jniThrowException(env, "java/lang/IllegalArgumentException",
1030 "Formatted capture time string length is not required 20 characters");
1031 return;
1032 }
1033
1034 nativeContext->setCaptureTime(String8(captureTime));
1035
1036 DngCreator_setNativeContext(env, thiz, nativeContext);
1037 }
1038
1039 static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth,
1040 uint32_t imageHeight) {
1041
1042 NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz);
1043
1044 if (nativeContext == nullptr) {
1045 jniThrowException(env, "java/lang/AssertionError",
1046 "No native context, must call init before other operations.");
1047 return nullptr;
1048 }
1049
1050 CameraMetadata characteristics = *(nativeContext->getCharacteristics());
1051 CameraMetadata results = *(nativeContext->getResult());
1052
1053 sp<TiffWriter> writer = new TiffWriter();
1054
1055 uint32_t preWidth = 0;
1056 uint32_t preHeight = 0;
1057 {
1058 // Check dimensions
1059 camera_metadata_entry entry =
1060 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1061 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1062 preWidth = static_cast<uint32_t>(entry.data.i32[2]);
1063 preHeight = static_cast<uint32_t>(entry.data.i32[3]);
1064
1065 camera_metadata_entry pixelArrayEntry =
1066 characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE);
1067 uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
1068 uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
1069
1070 if (!((imageWidth == preWidth && imageHeight == preHeight) ||
1071 (imageWidth == pixWidth && imageHeight == pixHeight))) {
1072 jniThrowException(env, "java/lang/AssertionError",
1073 "Height and width of imate buffer did not match height and width of"
1074 "either the preCorrectionActiveArraySize or the pixelArraySize.");
1075 return nullptr;
1076 }
1077 }
1078
1079
1080
1081 writer->addIfd(TIFF_IFD_0);
1082
1083 status_t err = OK;
1084
1085 const uint32_t samplesPerPixel = 1;
1086 const uint32_t bitsPerSample = BITS_PER_SAMPLE;
1087
1088 OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_RGGB;
1089 uint8_t cfaPlaneColor[3] = {0, 1, 2};
1090 uint8_t cfaEnum = -1;
1091
1092 // TODO: Greensplit.
1093 // TODO: Add remaining non-essential tags
1094
1095 // Setup main image tags
1096
1097 {
1098 // Set orientation
1099 uint16_t orientation = 1; // Normal
1100 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1101 env, TAG_ORIENTATION, writer);
1102 }
1103
1104 {
1105 // Set subfiletype
1106 uint32_t subfileType = 0; // Main image
1107 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1108 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1109 }
1110
1111 {
1112 // Set bits per sample
1113 uint16_t bits = static_cast<uint16_t>(bitsPerSample);
1114 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
1115 TAG_BITSPERSAMPLE, writer);
1116 }
1117
1118 {
1119 // Set compression
1120 uint16_t compression = 1; // None
1121 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1122 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1123 }
1124
1125 {
1126 // Set dimensions
1127 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
1128 env, TAG_IMAGEWIDTH, writer);
1129 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
1130 env, TAG_IMAGELENGTH, writer);
1131 }
1132
1133 {
1134 // Set photometric interpretation
1135 uint16_t interpretation = 32803; // CFA
1136 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1137 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1138 }
1139
1140 {
1141 // Set blacklevel tags
1142 camera_metadata_entry entry =
1143 characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
1144 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BLACKLEVEL, writer);
1145 const uint32_t* blackLevel = reinterpret_cast<const uint32_t*>(entry.data.i32);
1146 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, entry.count, blackLevel,
1147 TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
1148
1149 uint16_t repeatDim[2] = {2, 2};
1150 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
1151 TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
1152 }
1153
1154 {
1155 // Set samples per pixel
1156 uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
1157 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
1158 env, TAG_SAMPLESPERPIXEL, writer);
1159 }
1160
1161 {
1162 // Set planar configuration
1163 uint16_t config = 1; // Chunky
1164 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1165 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1166 }
1167
1168 {
1169 // Set CFA pattern dimensions
1170 uint16_t repeatDim[2] = {2, 2};
1171 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
1172 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
1173 }
1174
1175 {
1176 // Set CFA pattern
1177 camera_metadata_entry entry =
1178 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1179 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_CFAPATTERN, writer);
1180
1181 const int cfaLength = 4;
1182 cfaEnum = entry.data.u8[0];
1183 uint8_t cfa[cfaLength];
1184 if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
1185 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
1186 "Invalid metadata for tag %d", TAG_CFAPATTERN);
return nullptr;
1187 }
1188
1189 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
1190 env, TAG_CFAPATTERN, writer);
1191
1192 opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
1193 }
1194
1195 {
1196 // Set CFA plane color
1197 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
1198 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
1199 }
1200
1201 {
1202 // Set CFA layout
1203 uint16_t cfaLayout = 1;
1204 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
1205 env, TAG_CFALAYOUT, writer);
1206 }
1207
1208 {
1209 // image description
1210 uint8_t imageDescription = '\0'; // empty
1211 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
1212 TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
1213 }
1214
1215 {
1216 // make
1217 char manufacturer[PROPERTY_VALUE_MAX];
1218
1219 // Use "" to represent unknown make as suggested in TIFF/EP spec.
1220 property_get("ro.product.manufacturer", manufacturer, "");
1221 uint32_t count = static_cast<uint32_t>(strlen(manufacturer)) + 1;
1222
1223 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
1224 reinterpret_cast<uint8_t*>(manufacturer), TIFF_IFD_0), env, TAG_MAKE, writer);
1225 }
1226
1227 {
1228 // model
1229 char model[PROPERTY_VALUE_MAX];
1230
1231 // Use "" to represent unknown model as suggested in TIFF/EP spec.
1232 property_get("ro.product.model", model, "");
1233 uint32_t count = static_cast<uint32_t>(strlen(model)) + 1;
1234
1235 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
1236 reinterpret_cast<uint8_t*>(model), TIFF_IFD_0), env, TAG_MODEL, writer);
1237 }
1238
1239 {
1240 // x resolution
1241 uint32_t xres[] = { 72, 1 }; // default 72 ppi
1242 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1243 env, TAG_XRESOLUTION, writer);
1244
1245 // y resolution
1246 uint32_t yres[] = { 72, 1 }; // default 72 ppi
1247 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1248 env, TAG_YRESOLUTION, writer);
1249
1250 uint16_t unit = 2; // inches
1251 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1252 env, TAG_RESOLUTIONUNIT, writer);
1253 }
1254
1255 {
1256 // software
1257 char software[PROPERTY_VALUE_MAX];
1258 property_get("ro.build.fingerprint", software, "");
1259 uint32_t count = static_cast<uint32_t>(strlen(software)) + 1;
1260 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
1261 reinterpret_cast<uint8_t*>(software), TIFF_IFD_0), env, TAG_SOFTWARE, writer);
1262 }
1263
1264 if (nativeContext->hasCaptureTime()) {
1265 // datetime
1266 String8 captureTime = nativeContext->getCaptureTime();
1267
1268 if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
1269 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1270 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1271 "Invalid metadata for tag %x", TAG_DATETIME);
1272 return nullptr;
1273 }
1274
1275 // datetime original
1276 if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
1277 reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
1278 jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1279 "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
1280 return nullptr;
1281 }
1282 }
1283
1284 {
1285 // TIFF/EP standard id
1286 uint8_t standardId[] = { 1, 0, 0, 0 };
1287 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
1288 TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
1289 }
1290
1291 {
1292 // copyright
1293 uint8_t copyright = '\0'; // empty
1294 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, &copyright,
1295 TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
1296 }
1297
1298 {
1299 // exposure time
1300 camera_metadata_entry entry =
1301 results.find(ANDROID_SENSOR_EXPOSURE_TIME);
1302 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);
1303
1304 int64_t exposureTime = *(entry.data.i64);
1305
1306 if (exposureTime < 0) {
1307 // Should be unreachable
1308 jniThrowException(env, "java/lang/IllegalArgumentException",
1309 "Negative exposure time in metadata");
1310 return nullptr;
1311 }
1312
1313 // Ensure exposure time doesn't overflow (for exposures > 4s)
1314 uint32_t denominator = 1000000000;
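// Halving numerator and denominator together keeps the rational's value (in seconds)
// approximately unchanged while bringing the numerator into uint32_t range.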
1315 while (exposureTime > UINT32_MAX) {
1316 exposureTime >>= 1;
1317 denominator >>= 1;
1318 if (denominator == 0) {
1319 // Should be unreachable
1320 jniThrowException(env, "java/lang/IllegalArgumentException",
1321 "Exposure time too long");
1322 return nullptr;
1323 }
1324 }
1325
1326 uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
1327 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
1328 TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);
1329
1330 }
1331
1332 {
1333 // ISO speed ratings
1334 camera_metadata_entry entry =
1335 results.find(ANDROID_SENSOR_SENSITIVITY);
1336 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);
1337
1338 int32_t tempIso = *(entry.data.i32);
1339 if (tempIso < 0) {
1340 jniThrowException(env, "java/lang/IllegalArgumentException",
1341 "Negative ISO value");
1342 return nullptr;
1343 }
1344
1345 if (tempIso > UINT16_MAX) {
1346 ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
1347 tempIso = UINT16_MAX;
1348 }
1349
1350 uint16_t iso = static_cast<uint16_t>(tempIso);
1351 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
1352 TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
1353 }
1354
1355 {
1356 // focal length
1357 camera_metadata_entry entry =
1358 results.find(ANDROID_LENS_FOCAL_LENGTH);
1359 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);
1360
1361 uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1362 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
1363 TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
1364 }
1365
1366 {
1367 // f number
1368 camera_metadata_entry entry =
1369 results.find(ANDROID_LENS_APERTURE);
1370 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);
1371
1372 uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1373 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
1374 TIFF_IFD_0), env, TAG_FNUMBER, writer);
1375 }
1376
1377 {
1378 // Set DNG version information
1379 uint8_t version[4] = {1, 4, 0, 0};
1380 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
1381 env, TAG_DNGVERSION, writer);
1382
1383 uint8_t backwardVersion[4] = {1, 1, 0, 0};
1384 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
1385 TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
1386 }
1387
1388 {
1389 // Set whitelevel
1390 camera_metadata_entry entry =
1391 characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL);
1392 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
1393 uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
1394 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
1395 env, TAG_WHITELEVEL, writer);
1396 }
1397
1398 {
1399 // Set default scale
1400 uint32_t defaultScale[4] = {1, 1, 1, 1};
1401 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
1402 TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
1403 }
1404
1405 bool singleIlluminant = false;
1406 {
1407 // Set calibration illuminants
1408 camera_metadata_entry entry1 =
1409 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
1410 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
1411 camera_metadata_entry entry2 =
1412 characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
1413 if (entry2.count == 0) {
1414 singleIlluminant = true;
1415 }
1416 uint16_t ref1 = entry1.data.u8[0];
1417
1418 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
1419 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);
1420
1421 if (!singleIlluminant) {
1422 uint16_t ref2 = entry2.data.u8[0];
1423 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
1424 TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
1425 }
1426 }
1427
1428 {
1429 // Set color transforms
1430 camera_metadata_entry entry1 =
1431 characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
1432 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);
1433
1434 int32_t colorTransform1[entry1.count * 2];
1435
1436 size_t ctr = 0;
1437 for(size_t i = 0; i < entry1.count; ++i) {
1438 colorTransform1[ctr++] = entry1.data.r[i].numerator;
1439 colorTransform1[ctr++] = entry1.data.r[i].denominator;
1440 }
1441
1442 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
1443 colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);
1444
1445 if (!singleIlluminant) {
1446 camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2);
1447 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
1448 int32_t colorTransform2[entry2.count * 2];
1449
1450 ctr = 0;
1451 for(size_t i = 0; i < entry2.count; ++i) {
1452 colorTransform2[ctr++] = entry2.data.r[i].numerator;
1453 colorTransform2[ctr++] = entry2.data.r[i].denominator;
1454 }
1455
1456 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
1457 colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
1458 }
1459 }
1460
1461 {
1462 // Set calibration transforms
1463 camera_metadata_entry entry1 =
1464 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
1465 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);
1466
1467 int32_t calibrationTransform1[entry1.count * 2];
1468
1469 size_t ctr = 0;
1470 for(size_t i = 0; i < entry1.count; ++i) {
1471 calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
1472 calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
1473 }
1474
1475 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
1476 calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);
1477
1478 if (!singleIlluminant) {
1479 camera_metadata_entry entry2 =
1480 characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
1481 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
1482 int32_t calibrationTransform2[entry2.count * 2];
1483
1484 ctr = 0;
1485 for(size_t i = 0; i < entry2.count; ++i) {
1486 calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
1487 calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
1488 }
1489
1490 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
1491 calibrationTransform2, TIFF_IFD_0), env, TAG_CAMERACALIBRATION2, writer);
1492 }
1493 }
1494
1495 {
1496 // Set forward transforms
1497 camera_metadata_entry entry1 =
1498 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
1499 BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);
1500
1501 int32_t forwardTransform1[entry1.count * 2];
1502
1503 size_t ctr = 0;
1504 for(size_t i = 0; i < entry1.count; ++i) {
1505 forwardTransform1[ctr++] = entry1.data.r[i].numerator;
1506 forwardTransform1[ctr++] = entry1.data.r[i].denominator;
1507 }
1508
1509 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
1510 forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);
1511
1512 if (!singleIlluminant) {
1513 camera_metadata_entry entry2 =
1514 characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2);
1515 BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
1516 int32_t forwardTransform2[entry2.count * 2];
1517
1518 ctr = 0;
1519 for(size_t i = 0; i < entry2.count; ++i) {
1520 forwardTransform2[ctr++] = entry2.data.r[i].numerator;
1521 forwardTransform2[ctr++] = entry2.data.r[i].denominator;
1522 }
1523
1524 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
1525 forwardTransform2, TIFF_IFD_0), env, TAG_FORWARDMATRIX2, writer);
1526 }
1527 }
1528
1529 {
1530 // Set camera neutral
1531 camera_metadata_entry entry =
1532 results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
1533 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
1534 uint32_t cameraNeutral[entry.count * 2];
1535
1536 size_t ctr = 0;
1537 for(size_t i = 0; i < entry.count; ++i) {
1538 cameraNeutral[ctr++] =
1539 static_cast<uint32_t>(entry.data.r[i].numerator);
1540 cameraNeutral[ctr++] =
1541 static_cast<uint32_t>(entry.data.r[i].denominator);
1542 }
1543
1544 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
1545 TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
1546 }
1547
1548
1549 {
1550 // Set dimensions
1551 if (calculateAndSetCrop(env, characteristics, writer) != OK) {
1552 return nullptr;
1553 }
1554 camera_metadata_entry entry =
1555 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1556 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
1557 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1558 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1559 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1560 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1561
1562 // If we only have a buffer containing the pre-correction rectangle, ignore the offset
1563 // relative to the pixel array.
1564 if (imageWidth == width && imageHeight == height) {
1565 xmin = 0;
1566 ymin = 0;
1567 }
1568
1569 uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
1570 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
1571 env, TAG_ACTIVEAREA, writer);
1572 }
1573
1574 {
1575 // Setup unique camera model tag
1576 char model[PROPERTY_VALUE_MAX];
1577 property_get("ro.product.model", model, "");
1578
1579 char manufacturer[PROPERTY_VALUE_MAX];
1580 property_get("ro.product.manufacturer", manufacturer, "");
1581
1582 char brand[PROPERTY_VALUE_MAX];
1583 property_get("ro.product.brand", brand, "");
1584
1585 String8 cameraModel(model);
1586 cameraModel += "-";
1587 cameraModel += manufacturer;
1588 cameraModel += "-";
1589 cameraModel += brand;
1590
1591 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
1592 reinterpret_cast<const uint8_t*>(cameraModel.string()), TIFF_IFD_0), env,
1593 TAG_UNIQUECAMERAMODEL, writer);
1594 }
1595
1596 {
1597 // Setup sensor noise model
1598 camera_metadata_entry entry =
1599 results.find(ANDROID_SENSOR_NOISE_PROFILE);
1600
1601 const size_t numPlaneColors = 3;
1602 const size_t numCfaChannels = 4;
1603
1604 uint8_t cfaOut[numCfaChannels];
1605 if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
1606 jniThrowException(env, "java/lang/IllegalArgumentException",
1607 "Invalid CFA from camera characteristics");
1608 return nullptr;
1609 }
1610
1611 double noiseProfile[numPlaneColors * 2];
1612
1613 if (entry.count > 0) {
1614 if (entry.count != numCfaChannels * 2) {
1615 ALOGW("%s: Invalid entry count %zu for noise profile returned "
1616 "in characteristics, no noise profile tag written...",
1617 __FUNCTION__, entry.count);
1618 } else {
1619 if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
1620 cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {
1621
1622 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
1623 numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
1624 writer);
1625 } else {
1626 ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
1627 " tag written...", __FUNCTION__);
1628 }
1629 }
1630 } else {
1631 ALOGW("%s: No noise profile found in result metadata. Image quality may be reduced.",
1632 __FUNCTION__);
1633 }
1634 }
1635
1636 {
1637 // Set up opcode List 2
1638 OpcodeListBuilder builder;
1639 status_t err = OK;
1640
1641 // Set up lens shading map
1642 camera_metadata_entry entry1 =
1643 characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
1644
1645 uint32_t lsmWidth = 0;
1646 uint32_t lsmHeight = 0;
1647
1648 if (entry1.count != 0) {
1649 lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
1650 lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
1651 }
1652
1653 camera_metadata_entry entry2 = results.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
1654
1655 camera_metadata_entry entry =
1656 characteristics.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
1657 BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1658 uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1659 uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1660 uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1661 uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1662 if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
1663 // GainMap rectangle is relative to the active area origin.
1664 err = builder.addGainMapsForMetadata(lsmWidth,
1665 lsmHeight,
1666 0,
1667 0,
1668 height,
1669 width,
1670 opcodeCfaLayout,
1671 entry2.data.f);
1672 if (err != OK) {
1673 ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
1674 jniThrowRuntimeException(env, "failed to add lens shading map.");
1675 return nullptr;
1676 }
1677 }
1678
1679
1680 // Set up bad pixel correction list
1681 camera_metadata_entry entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1682
1683 if ((entry3.count % 2) != 0) {
1684 ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
1685 __FUNCTION__);
1686 jniThrowRuntimeException(env, "failed to add hotpixel map.");
1687 return nullptr;
1688 }
1689
1690 // Adjust the bad pixel coordinates to be relative to the origin of the active area DNG tag
1691 std::vector<uint32_t> v;
1692 for (size_t i = 0; i < entry3.count; i+=2) {
1693 int32_t x = entry3.data.i32[i];
1694 int32_t y = entry3.data.i32[i + 1];
1695 x -= static_cast<int32_t>(xmin);
1696 y -= static_cast<int32_t>(ymin);
1697 if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
1698                     static_cast<uint32_t>(y) >= height) {
1699 continue;
1700 }
1701 v.push_back(x);
1702 v.push_back(y);
1703 }
1704         const uint32_t* badPixels = v.data();
1705 uint32_t badPixelCount = v.size();
1706
1707 if (badPixelCount > 0) {
1708 err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
1709
1710 if (err != OK) {
1711 ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
1712 jniThrowRuntimeException(env, "failed to add hotpixel map.");
1713 return nullptr;
1714 }
1715 }
1716
1717
1718 size_t listSize = builder.getSize();
1719 uint8_t opcodeListBuf[listSize];
1720 err = builder.buildOpList(opcodeListBuf);
1721 if (err == OK) {
1722 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize, opcodeListBuf,
1723 TIFF_IFD_0), env, TAG_OPCODELIST2, writer);
1724 } else {
1725             ALOGE("%s: Could not build list of opcodes for lens shading map and hot pixel"
1726                     " correction.", __FUNCTION__);
1727             jniThrowRuntimeException(env, "failed to construct opcode list for lens shading"
1728                     " map and hot pixel correction");
1729 return nullptr;
1730 }
1731 }
1732
1733 {
1734 // Set up opcode List 3
1735 OpcodeListBuilder builder;
1736 status_t err = OK;
1737
1738 // Set up rectilinear distortion correction
1739 camera_metadata_entry entry3 =
1740 results.find(ANDROID_LENS_RADIAL_DISTORTION);
1741 camera_metadata_entry entry4 =
1742 results.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
1743
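        // ANDROID_LENS_INTRINSIC_CALIBRATION is laid out as [f_x, f_y, c_x, c_y, s], so only the
        // optical center (c_x, c_y) is read here; the six radial distortion coefficients are
        // passed through to the WarpRectilinear opcode.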
1744 if (entry3.count == 6 && entry4.count == 5) {
1745 float cx = entry4.data.f[/*c_x*/2];
1746 float cy = entry4.data.f[/*c_y*/3];
1747 err = builder.addWarpRectilinearForMetadata(entry3.data.f, preWidth, preHeight, cx,
1748 cy);
1749 if (err != OK) {
1750 ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
1751 jniThrowRuntimeException(env, "failed to add distortion correction.");
1752 return nullptr;
1753 }
1754 }
1755
1756 size_t listSize = builder.getSize();
1757 uint8_t opcodeListBuf[listSize];
1758 err = builder.buildOpList(opcodeListBuf);
1759 if (err == OK) {
1760 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize, opcodeListBuf,
1761 TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
1762 } else {
1763             ALOGE("%s: Could not build list of opcodes for rectilinear distortion correction.",
1764                     __FUNCTION__);
1765             jniThrowRuntimeException(env, "failed to construct opcode list for rectilinear"
1766                     " distortion correction");
1767 return nullptr;
1768 }
1769 }
1770
1771 {
1772 // Set up orientation tags.
1773 uint16_t orientation = nativeContext->getOrientation();
1774 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1775 env, TAG_ORIENTATION, writer);
1776
1777 }
1778
1779     if (nativeContext->hasDescription()) {
1780 // Set Description
1781 String8 description = nativeContext->getDescription();
1782 size_t len = description.bytes() + 1;
1783         if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
1784                 reinterpret_cast<const uint8_t*>(description.string()), TIFF_IFD_0) != OK) {
1785             jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
1786                     "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
            return nullptr;
1787         }
1788 }
1789
1790 if (nativeContext->hasGpsData()) {
1791 // Set GPS tags
1792 GpsData gpsData = nativeContext->getGpsData();
1793 if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
1794 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
1795 ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
1796 TIFF_IFD_0);
1797 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
1798 return nullptr;
1799 }
1800 }
1801
1802 {
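            // GPS IFD version 2.3.0.0, encoded as four bytes.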
1803 uint8_t version[] = {2, 3, 0, 0};
1804 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
1805 TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
1806 }
1807
1808 {
1809 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
1810 GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
1811 TAG_GPSLATITUDEREF, writer);
1812 }
1813
1814 {
1815 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
1816 GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
1817 TAG_GPSLONGITUDEREF, writer);
1818 }
1819
1820 {
1821 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
1822 TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
1823 }
1824
1825 {
1826 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
1827 TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
1828 }
1829
1830 {
1831 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
1832 TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
1833 }
1834
1835 {
1836 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
1837 GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
1838 TAG_GPSDATESTAMP, writer);
1839 }
1840 }
1841
1842
1843 if (nativeContext->hasThumbnail()) {
1844 if (!writer->hasIfd(TIFF_IFD_SUB1)) {
1845 if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
1846 ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
1847 TIFF_IFD_0);
1848 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
1849 return nullptr;
1850 }
1851 }
1852
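        // With a thumbnail present, IFD 0 holds the thumbnail and the full-resolution RAW image
        // is written to SubIFD 1, so move the RAW-specific tags set above into the sub-IFD.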
1853 Vector<uint16_t> tagsToMove;
1854 tagsToMove.add(TAG_ORIENTATION);
1855 tagsToMove.add(TAG_NEWSUBFILETYPE);
1856 tagsToMove.add(TAG_ACTIVEAREA);
1857 tagsToMove.add(TAG_BITSPERSAMPLE);
1858 tagsToMove.add(TAG_COMPRESSION);
1859 tagsToMove.add(TAG_IMAGEWIDTH);
1860 tagsToMove.add(TAG_IMAGELENGTH);
1861 tagsToMove.add(TAG_PHOTOMETRICINTERPRETATION);
1862 tagsToMove.add(TAG_BLACKLEVEL);
1863 tagsToMove.add(TAG_BLACKLEVELREPEATDIM);
1864 tagsToMove.add(TAG_SAMPLESPERPIXEL);
1865 tagsToMove.add(TAG_PLANARCONFIGURATION);
1866 tagsToMove.add(TAG_CFAREPEATPATTERNDIM);
1867 tagsToMove.add(TAG_CFAPATTERN);
1868 tagsToMove.add(TAG_CFAPLANECOLOR);
1869 tagsToMove.add(TAG_CFALAYOUT);
1870 tagsToMove.add(TAG_XRESOLUTION);
1871 tagsToMove.add(TAG_YRESOLUTION);
1872 tagsToMove.add(TAG_RESOLUTIONUNIT);
1873 tagsToMove.add(TAG_WHITELEVEL);
1874 tagsToMove.add(TAG_DEFAULTSCALE);
1875 tagsToMove.add(TAG_DEFAULTCROPORIGIN);
1876 tagsToMove.add(TAG_DEFAULTCROPSIZE);
1877 tagsToMove.add(TAG_OPCODELIST2);
1878 tagsToMove.add(TAG_OPCODELIST3);
1879
1880 if (moveEntries(writer, TIFF_IFD_0, TIFF_IFD_SUB1, tagsToMove) != OK) {
1881 jniThrowException(env, "java/lang/IllegalStateException", "Failed to move entries");
1882 return nullptr;
1883 }
1884
1885 // Make sure both IFDs get the same orientation tag
1886 sp<TiffEntry> orientEntry = writer->getEntry(TAG_ORIENTATION, TIFF_IFD_SUB1);
1887 if (orientEntry.get() != nullptr) {
1888 writer->addEntry(orientEntry, TIFF_IFD_0);
1889 }
1890
1891 // Setup thumbnail tags
1892
1893 {
1894 // Set photometric interpretation
1895 uint16_t interpretation = 2; // RGB
1896 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1897 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1898 }
1899
1900 {
1901 // Set planar configuration
1902 uint16_t config = 1; // Chunky
1903 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1904 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1905 }
1906
1907 {
1908 // Set samples per pixel
1909 uint16_t samples = SAMPLES_PER_RGB_PIXEL;
1910 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
1911 TIFF_IFD_0), env, TAG_SAMPLESPERPIXEL, writer);
1912 }
1913
1914 {
1915 // Set bits per sample
1916 uint16_t bits = BITS_PER_RGB_SAMPLE;
1917 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0),
1918 env, TAG_BITSPERSAMPLE, writer);
1919 }
1920
1921 {
1922 // Set subfiletype
1923 uint32_t subfileType = 1; // Thumbnail image
1924 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1925 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1926 }
1927
1928 {
1929 // Set compression
1930 uint16_t compression = 1; // None
1931 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1932 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1933 }
1934
1935 {
1936 // Set dimensions
1937 uint32_t uWidth = nativeContext->getThumbnailWidth();
1938 uint32_t uHeight = nativeContext->getThumbnailHeight();
1939 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_0),
1940 env, TAG_IMAGEWIDTH, writer);
1941 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight, TIFF_IFD_0),
1942 env, TAG_IMAGELENGTH, writer);
1943 }
1944
1945 {
1946 // x resolution
1947 uint32_t xres[] = { 72, 1 }; // default 72 ppi
1948 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1949 env, TAG_XRESOLUTION, writer);
1950
1951 // y resolution
1952 uint32_t yres[] = { 72, 1 }; // default 72 ppi
1953 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1954 env, TAG_YRESOLUTION, writer);
1955
1956 uint16_t unit = 2; // inches
1957 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1958 env, TAG_RESOLUTIONUNIT, writer);
1959 }
1960 }
1961
1962 if (writer->addStrip(TIFF_IFD_0) != OK) {
1963 ALOGE("%s: Could not setup thumbnail strip tags.", __FUNCTION__);
1964 jniThrowException(env, "java/lang/IllegalStateException",
1965 "Failed to setup thumbnail strip tags.");
1966 return nullptr;
1967 }
1968
1969 if (writer->hasIfd(TIFF_IFD_SUB1)) {
1970 if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
1971             ALOGE("%s: Could not setup main image strip tags.", __FUNCTION__);
1972 jniThrowException(env, "java/lang/IllegalStateException",
1973 "Failed to setup main image strip tags.");
1974 return nullptr;
1975 }
1976 }
1977 return writer;
1978 }
1979
1980 static void DngCreator_destroy(JNIEnv* env, jobject thiz) {
1981 ALOGV("%s:", __FUNCTION__);
1982 DngCreator_setNativeContext(env, thiz, nullptr);
1983 }
1984
1985 static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) {
1986 ALOGV("%s:", __FUNCTION__);
1987
1988 NativeContext* context = DngCreator_getNativeContext(env, thiz);
1989 if (context == nullptr) {
1990 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
1991 jniThrowException(env, "java/lang/AssertionError",
1992 "setOrientation called with uninitialized DngCreator");
1993 return;
1994 }
1995
1996 uint16_t orientation = static_cast<uint16_t>(orient);
1997 context->setOrientation(orientation);
1998 }
1999
2000 static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring description) {
2001 ALOGV("%s:", __FUNCTION__);
2002
2003 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2004 if (context == nullptr) {
2005 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2006 jniThrowException(env, "java/lang/AssertionError",
2007 "setDescription called with uninitialized DngCreator");
2008 return;
2009 }
2010
2011 const char* desc = env->GetStringUTFChars(description, nullptr);
2012 context->setDescription(String8(desc));
2013 env->ReleaseStringUTFChars(description, desc);
2014 }
2015
2016 static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag,
2017 jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) {
2018 ALOGV("%s:", __FUNCTION__);
2019
2020 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2021 if (context == nullptr) {
2022 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2023 jniThrowException(env, "java/lang/AssertionError",
2024 "setGpsTags called with uninitialized DngCreator");
2025 return;
2026 }
2027
2028 GpsData data;
2029
2030 jsize latLen = env->GetArrayLength(latTag);
2031 jsize longLen = env->GetArrayLength(longTag);
2032 jsize timeLen = env->GetArrayLength(timeTag);
2033 if (latLen != GpsData::GPS_VALUE_LENGTH) {
2034 jniThrowException(env, "java/lang/IllegalArgumentException",
2035 "invalid latitude tag length");
2036 return;
2037 } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
2038 jniThrowException(env, "java/lang/IllegalArgumentException",
2039 "invalid longitude tag length");
2040 return;
2041 } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
2042 jniThrowException(env, "java/lang/IllegalArgumentException",
2043 "invalid time tag length");
2044 return;
2045 }
2046
2047 env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2048 reinterpret_cast<jint*>(&data.mLatitude));
2049 env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2050 reinterpret_cast<jint*>(&data.mLongitude));
2051 env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
2052 reinterpret_cast<jint*>(&data.mTimestamp));
2053
2054
2055 env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef));
2056 data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
2057 env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef));
2058 data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
2059 env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1,
2060 reinterpret_cast<char*>(&data.mDate));
2061 data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';
2062
2063 context->setGpsData(data);
2064 }
2065
2066 static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width,
2067 jint height) {
2068 ALOGV("%s:", __FUNCTION__);
2069
2070 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2071 if (context == nullptr) {
2072 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2073 jniThrowException(env, "java/lang/AssertionError",
2074 "setThumbnail called with uninitialized DngCreator");
2075 return;
2076 }
2077
2078 size_t fullSize = width * height * BYTES_PER_RGB_PIXEL;
2079 jlong capacity = env->GetDirectBufferCapacity(buffer);
2080 if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) {
2081 jniThrowExceptionFmt(env, "java/lang/AssertionError",
2082                 "Invalid size %" PRId64 " for thumbnail, expected size was %zu",
2083 capacity, fullSize);
2084 return;
2085 }
2086
2087 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
2088 if (pixelBytes == nullptr) {
2089 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2090 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2091 return;
2092 }
2093
2094 if (!context->setThumbnail(pixelBytes, width, height)) {
2095 jniThrowException(env, "java/lang/IllegalStateException",
2096 "Failed to set thumbnail.");
2097 return;
2098 }
2099 }
2100
2101 // TODO: Refactor out common preamble for the two nativeWrite methods.
2102 static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width,
2103 jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset,
2104 jboolean isDirect) {
2105 ALOGV("%s:", __FUNCTION__);
2106 ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, "
2107 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2108 height, rowStride, pixStride, offset);
2109 uint32_t rStride = static_cast<uint32_t>(rowStride);
2110 uint32_t pStride = static_cast<uint32_t>(pixStride);
2111 uint32_t uWidth = static_cast<uint32_t>(width);
2112 uint32_t uHeight = static_cast<uint32_t>(height);
2113 uint64_t uOffset = static_cast<uint64_t>(offset);
2114
2115 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2116     if (env->ExceptionCheck()) {
2117 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2118 return;
2119 }
2120
2121 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2122 if (context == nullptr) {
2123 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2124 jniThrowException(env, "java/lang/AssertionError",
2125 "Write called with uninitialized DngCreator");
2126 return;
2127 }
2128 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2129
2130 if (writer.get() == nullptr) {
2131 return;
2132 }
2133
2134 // Validate DNG size
2135 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2136 return;
2137 }
2138
2139 sp<JniInputByteBuffer> inBuf;
2140 Vector<StripSource*> sources;
2141 sp<DirectStripSource> thumbnailSource;
2142 uint32_t targetIfd = TIFF_IFD_0;
2143
2144 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2145
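    // When a thumbnail was set, its strip data is written to IFD 0 and the main RAW image data is
    // redirected to SubIFD 1; otherwise the main image strips are written to IFD 0 directly.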
2146 if (hasThumbnail) {
2147 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2148 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2149 uint32_t thumbWidth = context->getThumbnailWidth();
2150 thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2151 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2152 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2153 SAMPLES_PER_RGB_PIXEL);
2154 sources.add(thumbnailSource.get());
2155 targetIfd = TIFF_IFD_SUB1;
2156 }
2157
2158 if (isDirect) {
2159 size_t fullSize = rStride * uHeight;
2160 jlong capacity = env->GetDirectBufferCapacity(inBuffer);
2161 if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) {
2162 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
2163                     "Invalid size %" PRId64 " for Image, size given in metadata is %zu at current stride",
2164 capacity, fullSize);
2165 return;
2166 }
2167
2168 uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer));
2169 if (pixelBytes == nullptr) {
2170 ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2171 jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2172 return;
2173 }
2174
2175 ALOGV("%s: Using direct-type strip source.", __FUNCTION__);
2176 DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride,
2177 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2178 sources.add(&stripSource);
2179
2180 status_t ret = OK;
2181 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2182 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2183 if (!env->ExceptionCheck()) {
2184 jniThrowExceptionFmt(env, "java/io/IOException",
2185 "Encountered error %d while writing file.", ret);
2186 }
2187 return;
2188 }
2189 } else {
2190 inBuf = new JniInputByteBuffer(env, inBuffer);
2191
2192 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2193 InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride,
2194 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2195 sources.add(&stripSource);
2196
2197 status_t ret = OK;
2198 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2199 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2200 if (!env->ExceptionCheck()) {
2201 jniThrowExceptionFmt(env, "java/io/IOException",
2202 "Encountered error %d while writing file.", ret);
2203 }
2204 return;
2205 }
2206 }
2207 }
2208
2209 static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream,
2210 jobject inStream, jint width, jint height, jlong offset) {
2211 ALOGV("%s:", __FUNCTION__);
2212
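    // The InputStream variant assumes tightly packed 16-bit RAW samples, so the pixel and row
    // strides are derived directly from the image width.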
2213 uint32_t rowStride = width * BYTES_PER_SAMPLE;
2214 uint32_t pixStride = BYTES_PER_SAMPLE;
2215 uint32_t uWidth = static_cast<uint32_t>(width);
2216 uint32_t uHeight = static_cast<uint32_t>(height);
2217     uint64_t uOffset = static_cast<uint64_t>(offset);
2218
2219 ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, "
2220 "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2221 height, rowStride, pixStride, offset);
2222
2223 sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2224 if (env->ExceptionCheck()) {
2225 ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2226 return;
2227 }
2228
2229 NativeContext* context = DngCreator_getNativeContext(env, thiz);
2230 if (context == nullptr) {
2231 ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2232 jniThrowException(env, "java/lang/AssertionError",
2233 "Write called with uninitialized DngCreator");
2234 return;
2235 }
2236 sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2237
2238 if (writer.get() == nullptr) {
2239 return;
2240 }
2241
2242 // Validate DNG size
2243 if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2244 return;
2245 }
2246
2247 sp<DirectStripSource> thumbnailSource;
2248 uint32_t targetIfd = TIFF_IFD_0;
2249 bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2250 Vector<StripSource*> sources;
2251
2252 if (hasThumbnail) {
2253 ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2254 uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2255         uint32_t thumbWidth = context->getThumbnailWidth();
2256         thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_0,
2257                 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2258                 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2259 SAMPLES_PER_RGB_PIXEL);
2260 sources.add(thumbnailSource.get());
2261 targetIfd = TIFF_IFD_SUB1;
2262 }
2263
2264 sp<JniInputStream> in = new JniInputStream(env, inStream);
2265
2266 ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2267 InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride,
2268 rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2269 sources.add(&stripSource);
2270
2271 status_t ret = OK;
2272 if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2273 ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2274 if (!env->ExceptionCheck()) {
2275 jniThrowExceptionFmt(env, "java/io/IOException",
2276 "Encountered error %d while writing file.", ret);
2277 }
2278 return;
2279 }
2280 }
2281
2282 } /*extern "C" */
2283
2284 static JNINativeMethod gDngCreatorMethods[] = {
2285 {"nativeClassInit", "()V", (void*) DngCreator_nativeClassInit},
2286 {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;"
2287 "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V",
2288 (void*) DngCreator_init},
2289 {"nativeDestroy", "()V", (void*) DngCreator_destroy},
2290 {"nativeSetOrientation", "(I)V", (void*) DngCreator_nativeSetOrientation},
2291 {"nativeSetDescription", "(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription},
2292 {"nativeSetGpsTags", "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V",
2293 (void*) DngCreator_nativeSetGpsTags},
2294 {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail},
2295 {"nativeWriteImage", "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V",
2296 (void*) DngCreator_nativeWriteImage},
2297 {"nativeWriteInputStream", "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V",
2298 (void*) DngCreator_nativeWriteInputStream},
2299 };
2300
2301 int register_android_hardware_camera2_DngCreator(JNIEnv *env) {
2302 return RegisterMethodsOrDie(env,
2303 "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods));
2304 }
2305