1 /*
2  * Copyright 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "DngCreator_JNI"
19 #include <inttypes.h>
20 #include <string.h>
21 #include <algorithm>
22 #include <array>
23 #include <memory>
24 #include <vector>
25 #include <cmath>
26 
27 #include <android-base/properties.h>
28 #include <utils/Log.h>
29 #include <utils/Errors.h>
30 #include <utils/StrongPointer.h>
31 #include <utils/RefBase.h>
32 #include <utils/Vector.h>
33 #include <utils/String8.h>
34 #include <system/camera_metadata.h>
35 #include <camera/CameraMetadata.h>
36 #include <img_utils/DngUtils.h>
37 #include <img_utils/TagDefinitions.h>
38 #include <img_utils/TiffIfd.h>
39 #include <img_utils/TiffWriter.h>
40 #include <img_utils/Output.h>
41 #include <img_utils/Input.h>
42 #include <img_utils/StripSource.h>
43 
44 #include "core_jni_helpers.h"
45 
46 #include "android_runtime/AndroidRuntime.h"
47 #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
48 
49 #include <jni.h>
50 #include <nativehelper/JNIHelp.h>
51 #include <nativehelper/ScopedUtfChars.h>
52 
53 using namespace android;
54 using namespace img_utils;
55 using android::base::GetProperty;
56 
/**
 * Throws java.lang.IllegalArgumentException on the given JNIEnv and returns
 * false from the enclosing function if |expr| does not evaluate to OK.
 */
#define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return false; \
    }


/**
 * Same as BAIL_IF_INVALID_RET_BOOL, but returns nullptr (for functions that
 * return a pointer or sp<>).
 */
#define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }


/**
 * Same as BAIL_IF_INVALID_RET_BOOL, but returns -1 (for functions that return
 * an integer error sentinel).
 */
#define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
    if ((expr) != OK) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return -1; \
    }

/**
 * Throws java.lang.IllegalArgumentException and returns nullptr if the given
 * camera_metadata entry has no elements (tag missing from the metadata).
 */
#define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
    if ((entry).count == 0) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Missing metadata fields for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }

/**
 * Same as BAIL_IF_EMPTY_RET_NULL_SP, but returns false.
 */
#define BAIL_IF_EMPTY_RET_BOOL(entry, jnienv, tagId, writer)               \
    if ((entry).count == 0) {                                              \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                             "Missing metadata fields for tag %s (%x)",    \
                             (writer)->getTagName(tagId), (tagId));        \
        return false;                                                      \
    }

/**
 * Same as BAIL_IF_EMPTY_RET_NULL_SP, but returns BAD_VALUE (for functions
 * returning status_t).
 */
#define BAIL_IF_EMPTY_RET_STATUS(entry, jnienv, tagId, writer)             \
    if ((entry).count == 0) {                                              \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                             "Missing metadata fields for tag %s (%x)",    \
                             (writer)->getTagName(tagId), (tagId));        \
        return BAD_VALUE;                                                  \
    }

/**
 * Throws java.lang.IllegalArgumentException and returns nullptr if |expr| is
 * true (arbitrary boolean predicate rather than a status_t comparison).
 */
#define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \
    if (expr) { \
        jniThrowExceptionFmt(jnienv, "java/lang/IllegalArgumentException", \
                "Invalid metadata for tag %s (%x)", (writer)->getTagName(tagId), (tagId)); \
        return nullptr; \
    }
110 
// Name of the Java field on the DngCreator class that stores the pointer to
// the persistent NativeContext.
#define ANDROID_DNGCREATOR_CTX_JNI_ID     "mNativeContext"

// Cached JNI field/method IDs, resolved once and reused across calls.
static struct {
    jfieldID mNativeContext;
} gDngCreatorClassInfo;

static struct {
    jmethodID mWriteMethod;  // invoked as write(byte[], int, int) — see JniOutputStream::write
} gOutputStreamClassInfo;

static struct {
    jmethodID mReadMethod;   // invoked as read(byte[], int, int) — see JniInputStream::read
    jmethodID mSkipMethod;   // invoked as skip(long) — see JniInputStream::skip
} gInputStreamClassInfo;

static struct {
    jmethodID mGetMethod;    // invoked as get(byte[], int, int) — see JniInputByteBuffer::read
} gInputByteBufferClassInfo;

// Sample/pixel layout constants for RAW16 and RGB thumbnail data, plus the
// fixed TIFF IFD indices used when building the DNG.
enum {
    BITS_PER_SAMPLE = 16,
    BYTES_PER_SAMPLE = 2,
    BYTES_PER_RGB_PIXEL = 3,
    BITS_PER_RGB_SAMPLE = 8,
    BYTES_PER_RGB_SAMPLE = 1,
    SAMPLES_PER_RGB_PIXEL = 3,
    SAMPLES_PER_RAW_PIXEL = 1,
    TIFF_IFD_0 = 0,
    TIFF_IFD_SUB1 = 1,
    TIFF_IFD_GPSINFO = 2,
};
142 
143 
144 /**
145  * POD container class for GPS tag data.
146  */
class GpsData {
public:
    enum {
        GPS_VALUE_LENGTH = 6,   // 3 unsigned rationals (numerator/denominator pairs)
        GPS_REF_LENGTH = 2,     // reference designator string (presumably char + NUL — confirm at usage site)
        GPS_DATE_LENGTH = 11,   // date string (presumably "YYYY:MM:DD" + NUL — confirm at usage site)
    };

    // Coordinates and timestamp, each stored as 3 rationals.
    uint32_t mLatitude[GPS_VALUE_LENGTH];
    uint32_t mLongitude[GPS_VALUE_LENGTH];
    uint32_t mTimestamp[GPS_VALUE_LENGTH];
    // Hemisphere reference designators (e.g. N/S, E/W).
    uint8_t mLatitudeRef[GPS_REF_LENGTH];
    uint8_t mLongitudeRef[GPS_REF_LENGTH];
    // GPS date string.
    uint8_t mDate[GPS_DATE_LENGTH];
};
162 
163 // ----------------------------------------------------------------------------
164 
165 /**
166  * Container class for the persistent native context.
167  */
168 
class NativeContext : public LightRefBase<NativeContext> {
public:
    enum {
        DATETIME_COUNT = 20,  // length of the formatted capture-time string buffer
    };

    // Copies both metadata packets; the context owns its own copies.
    NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result);
    virtual ~NativeContext();

    // Returns the TIFF writer used to assemble the DNG output.
    TiffWriter* getWriter();

    // Static camera characteristics and per-capture result metadata.
    std::shared_ptr<const CameraMetadata> getCharacteristics() const;
    std::shared_ptr<const CameraMetadata> getResult() const;

    // Thumbnail accessors; dimensions/data are valid only after a successful
    // setThumbnail() (check hasThumbnail() first).
    uint32_t getThumbnailWidth() const;
    uint32_t getThumbnailHeight() const;
    const uint8_t* getThumbnail() const;
    bool hasThumbnail() const;

    // Copies a packed RGB thumbnail (BYTES_PER_RGB_PIXEL bytes per pixel) into
    // the context. Returns false if the internal buffer could not be resized.
    bool setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height);

    // TIFF orientation value for the output image.
    void setOrientation(uint16_t orientation);
    uint16_t getOrientation() const;

    // Optional user-supplied image description.
    void setDescription(const String8& desc);
    String8 getDescription() const;
    bool hasDescription() const;

    // Optional GPS tag data (stored by value).
    void setGpsData(const GpsData& data);
    GpsData getGpsData() const;
    bool hasGpsData() const;

    // Optional pre-formatted capture-time string.
    void setCaptureTime(const String8& formattedCaptureTime);
    String8 getCaptureTime() const;
    bool hasCaptureTime() const;

private:
    Vector<uint8_t> mCurrentThumbnail;   // packed RGB thumbnail bytes
    TiffWriter mWriter;
    std::shared_ptr<CameraMetadata> mCharacteristics;
    std::shared_ptr<CameraMetadata> mResult;
    uint32_t mThumbnailWidth;
    uint32_t mThumbnailHeight;
    uint16_t mOrientation;
    // "Set" flags guarding the corresponding optional fields below.
    bool mThumbnailSet;
    bool mGpsSet;
    bool mDescriptionSet;
    bool mCaptureTimeSet;
    String8 mDescription;
    GpsData mGpsData;
    String8 mFormattedCaptureTime;
};
221 
NativeContext(const CameraMetadata & characteristics,const CameraMetadata & result)222 NativeContext::NativeContext(const CameraMetadata& characteristics, const CameraMetadata& result) :
223         mCharacteristics(std::make_shared<CameraMetadata>(characteristics)),
224         mResult(std::make_shared<CameraMetadata>(result)), mThumbnailWidth(0),
225         mThumbnailHeight(0), mOrientation(TAG_ORIENTATION_UNKNOWN), mThumbnailSet(false),
226         mGpsSet(false), mDescriptionSet(false), mCaptureTimeSet(false) {}
227 
~NativeContext()228 NativeContext::~NativeContext() {}
229 
getWriter()230 TiffWriter* NativeContext::getWriter() {
231     return &mWriter;
232 }
233 
getCharacteristics() const234 std::shared_ptr<const CameraMetadata> NativeContext::getCharacteristics() const {
235     return mCharacteristics;
236 }
237 
getResult() const238 std::shared_ptr<const CameraMetadata> NativeContext::getResult() const {
239     return mResult;
240 }
241 
getThumbnailWidth() const242 uint32_t NativeContext::getThumbnailWidth() const {
243     return mThumbnailWidth;
244 }
245 
getThumbnailHeight() const246 uint32_t NativeContext::getThumbnailHeight() const {
247     return mThumbnailHeight;
248 }
249 
getThumbnail() const250 const uint8_t* NativeContext::getThumbnail() const {
251     return mCurrentThumbnail.array();
252 }
253 
hasThumbnail() const254 bool NativeContext::hasThumbnail() const {
255     return mThumbnailSet;
256 }
257 
setThumbnail(const uint8_t * buffer,uint32_t width,uint32_t height)258 bool NativeContext::setThumbnail(const uint8_t* buffer, uint32_t width, uint32_t height) {
259     mThumbnailWidth = width;
260     mThumbnailHeight = height;
261 
262     size_t size = BYTES_PER_RGB_PIXEL * width * height;
263     if (mCurrentThumbnail.resize(size) < 0) {
264         ALOGE("%s: Could not resize thumbnail buffer.", __FUNCTION__);
265         return false;
266     }
267 
268     uint8_t* thumb = mCurrentThumbnail.editArray();
269     memcpy(thumb, buffer, size);
270     mThumbnailSet = true;
271     return true;
272 }
273 
setOrientation(uint16_t orientation)274 void NativeContext::setOrientation(uint16_t orientation) {
275     mOrientation = orientation;
276 }
277 
getOrientation() const278 uint16_t NativeContext::getOrientation() const {
279     return mOrientation;
280 }
281 
setDescription(const String8 & desc)282 void NativeContext::setDescription(const String8& desc) {
283     mDescription = desc;
284     mDescriptionSet = true;
285 }
286 
getDescription() const287 String8 NativeContext::getDescription() const {
288     return mDescription;
289 }
290 
hasDescription() const291 bool NativeContext::hasDescription() const {
292     return mDescriptionSet;
293 }
294 
setGpsData(const GpsData & data)295 void NativeContext::setGpsData(const GpsData& data) {
296     mGpsData = data;
297     mGpsSet = true;
298 }
299 
getGpsData() const300 GpsData NativeContext::getGpsData() const {
301     return mGpsData;
302 }
303 
hasGpsData() const304 bool NativeContext::hasGpsData() const {
305     return mGpsSet;
306 }
307 
setCaptureTime(const String8 & formattedCaptureTime)308 void NativeContext::setCaptureTime(const String8& formattedCaptureTime) {
309     mFormattedCaptureTime = formattedCaptureTime;
310     mCaptureTimeSet = true;
311 }
312 
getCaptureTime() const313 String8 NativeContext::getCaptureTime() const {
314     return mFormattedCaptureTime;
315 }
316 
hasCaptureTime() const317 bool NativeContext::hasCaptureTime() const {
318     return mCaptureTimeSet;
319 }
320 
321 // End of NativeContext
322 // ----------------------------------------------------------------------------
323 
324 /**
325  * Wrapper class for a Java OutputStream.
326  *
327  * This class is not intended to be used across JNI calls.
328  */
class JniOutputStream : public Output, public LightRefBase<JniOutputStream> {
public:
    // Holds non-owning references to the JNIEnv and the Java OutputStream; both
    // must remain valid for the lifetime of this object (single JNI call).
    JniOutputStream(JNIEnv* env, jobject outStream);

    virtual ~JniOutputStream();

    // No-op; the underlying Java stream is managed by the caller.
    status_t open();

    // Writes count bytes from buf+offset to the Java stream in chunks of at
    // most BYTE_ARRAY_LENGTH.
    status_t write(const uint8_t* buf, size_t offset, size_t count);

    // No-op; the underlying Java stream is managed by the caller.
    status_t close();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096  // size of the scratch Java byte[] chunk
    };
    jobject mOutputStream;   // non-owning local ref to the Java OutputStream
    JNIEnv* mEnv;
    jbyteArray mByteArray;   // scratch array; local ref released in dtor
};
348 
// Allocates the scratch Java byte[] used to shuttle data into the stream.
// Throws OutOfMemoryError (leaving mByteArray null) if allocation fails.
JniOutputStream::JniOutputStream(JNIEnv* env, jobject outStream) : mOutputStream(outStream),
        mEnv(env) {
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

// Releases the scratch array's local reference.
JniOutputStream::~JniOutputStream() {
    mEnv->DeleteLocalRef(mByteArray);
}
360 
// No-op: the wrapped Java OutputStream's lifecycle is owned by the caller.
status_t JniOutputStream::open() {
    // Do nothing
    return OK;
}
365 
write(const uint8_t * buf,size_t offset,size_t count)366 status_t JniOutputStream::write(const uint8_t* buf, size_t offset, size_t count) {
367     while(count > 0) {
368         size_t len = BYTE_ARRAY_LENGTH;
369         len = (count > len) ? len : count;
370         mEnv->SetByteArrayRegion(mByteArray, 0, len, reinterpret_cast<const jbyte*>(buf + offset));
371 
372         if (mEnv->ExceptionCheck()) {
373             return BAD_VALUE;
374         }
375 
376         mEnv->CallVoidMethod(mOutputStream, gOutputStreamClassInfo.mWriteMethod, mByteArray,
377                 0, len);
378 
379         if (mEnv->ExceptionCheck()) {
380             return BAD_VALUE;
381         }
382 
383         count -= len;
384         offset += len;
385     }
386     return OK;
387 }
388 
// No-op: closing the wrapped Java OutputStream is the caller's responsibility.
status_t JniOutputStream::close() {
    // Do nothing
    return OK;
}
393 
394 // End of JniOutputStream
395 // ----------------------------------------------------------------------------
396 
397 /**
398  * Wrapper class for a Java InputStream.
399  *
400  * This class is not intended to be used across JNI calls.
401  */
class JniInputStream : public Input, public LightRefBase<JniInputStream> {
public:
    // Holds non-owning references to the JNIEnv and the Java InputStream; both
    // must remain valid for the lifetime of this object (single JNI call).
    JniInputStream(JNIEnv* env, jobject inStream);

    // No-op; the underlying Java stream is managed by the caller.
    status_t open();

    // No-op; the underlying Java stream is managed by the caller.
    status_t close();

    // Reads up to count bytes into buf+offset (at most BYTE_ARRAY_LENGTH per
    // call). Returns bytes read, NOT_ENOUGH_DATA at EOF, or BAD_VALUE if a
    // Java exception occurred.
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    // Skips up to count bytes. Returns bytes skipped, NOT_ENOUGH_DATA at EOF,
    // or BAD_VALUE if a Java exception occurred.
    ssize_t skip(size_t count);

    virtual ~JniInputStream();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096  // size of the scratch Java byte[] chunk
    };
    jobject mInStream;       // non-owning local ref to the Java InputStream
    JNIEnv* mEnv;
    jbyteArray mByteArray;   // scratch array; local ref released in dtor

};
424 
// Allocates the scratch Java byte[] used to shuttle data out of the stream.
// Throws OutOfMemoryError (leaving mByteArray null) if allocation fails.
JniInputStream::JniInputStream(JNIEnv* env, jobject inStream) : mInStream(inStream), mEnv(env) {
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

// Releases the scratch array's local reference.
JniInputStream::~JniInputStream() {
    mEnv->DeleteLocalRef(mByteArray);
}
435 
read(uint8_t * buf,size_t offset,size_t count)436 ssize_t JniInputStream::read(uint8_t* buf, size_t offset, size_t count) {
437 
438     jint realCount = BYTE_ARRAY_LENGTH;
439     if (count < BYTE_ARRAY_LENGTH) {
440         realCount = count;
441     }
442     jint actual = mEnv->CallIntMethod(mInStream, gInputStreamClassInfo.mReadMethod, mByteArray, 0,
443             realCount);
444 
445     if (actual < 0) {
446         return NOT_ENOUGH_DATA;
447     }
448 
449     if (mEnv->ExceptionCheck()) {
450         return BAD_VALUE;
451     }
452 
453     mEnv->GetByteArrayRegion(mByteArray, 0, actual, reinterpret_cast<jbyte*>(buf + offset));
454     if (mEnv->ExceptionCheck()) {
455         return BAD_VALUE;
456     }
457     return actual;
458 }
459 
skip(size_t count)460 ssize_t JniInputStream::skip(size_t count) {
461     jlong actual = mEnv->CallLongMethod(mInStream, gInputStreamClassInfo.mSkipMethod,
462             static_cast<jlong>(count));
463 
464     if (mEnv->ExceptionCheck()) {
465         return BAD_VALUE;
466     }
467     if (actual < 0) {
468         return NOT_ENOUGH_DATA;
469     }
470     return actual;
471 }
472 
// No-op: the wrapped Java InputStream's lifecycle is owned by the caller.
status_t JniInputStream::open() {
    // Do nothing
    return OK;
}

// No-op: closing the wrapped Java InputStream is the caller's responsibility.
status_t JniInputStream::close() {
    // Do nothing
    return OK;
}
482 
483 // End of JniInputStream
484 // ----------------------------------------------------------------------------
485 
486 /**
487  * Wrapper class for a non-direct Java ByteBuffer.
488  *
489  * This class is not intended to be used across JNI calls.
490  */
class JniInputByteBuffer : public Input, public LightRefBase<JniInputByteBuffer> {
public:
    // Holds non-owning references to the JNIEnv and the Java ByteBuffer; both
    // must remain valid for the lifetime of this object (single JNI call).
    JniInputByteBuffer(JNIEnv* env, jobject inBuf);

    // No-op; the buffer needs no setup.
    status_t open();

    // No-op; the buffer needs no teardown.
    status_t close();

    // Reads up to count bytes (at most BYTE_ARRAY_LENGTH per call) from the
    // buffer into buf+offset. Returns bytes read or BAD_VALUE on a Java
    // exception (e.g. buffer underflow).
    ssize_t read(uint8_t* buf, size_t offset, size_t count);

    virtual ~JniInputByteBuffer();
private:
    enum {
        BYTE_ARRAY_LENGTH = 4096  // size of the scratch Java byte[] chunk
    };
    jobject mInBuf;          // non-owning local ref to the Java ByteBuffer
    JNIEnv* mEnv;
    jbyteArray mByteArray;   // scratch array; local ref released in dtor
};
510 
// Allocates the scratch Java byte[] used to copy data out of the ByteBuffer.
// Throws OutOfMemoryError (leaving mByteArray null) if allocation fails.
JniInputByteBuffer::JniInputByteBuffer(JNIEnv* env, jobject inBuf) : mInBuf(inBuf), mEnv(env) {
    mByteArray = env->NewByteArray(BYTE_ARRAY_LENGTH);
    if (mByteArray == nullptr) {
        jniThrowException(env, "java/lang/OutOfMemoryError", "Could not allocate byte array.");
    }
}

// Releases the scratch array's local reference.
JniInputByteBuffer::~JniInputByteBuffer() {
    mEnv->DeleteLocalRef(mByteArray);
}
521 
read(uint8_t * buf,size_t offset,size_t count)522 ssize_t JniInputByteBuffer::read(uint8_t* buf, size_t offset, size_t count) {
523     jint realCount = BYTE_ARRAY_LENGTH;
524     if (count < BYTE_ARRAY_LENGTH) {
525         realCount = count;
526     }
527 
528     jobject chainingBuf = mEnv->CallObjectMethod(mInBuf, gInputByteBufferClassInfo.mGetMethod,
529             mByteArray, 0, realCount);
530     mEnv->DeleteLocalRef(chainingBuf);
531 
532     if (mEnv->ExceptionCheck()) {
533         ALOGE("%s: Exception while reading from input into byte buffer.", __FUNCTION__);
534         return BAD_VALUE;
535     }
536 
537     mEnv->GetByteArrayRegion(mByteArray, 0, realCount, reinterpret_cast<jbyte*>(buf + offset));
538     if (mEnv->ExceptionCheck()) {
539         ALOGE("%s: Exception while reading from byte buffer.", __FUNCTION__);
540         return BAD_VALUE;
541     }
542     return realCount;
543 }
544 
// No-op: a ByteBuffer needs no setup.
status_t JniInputByteBuffer::open() {
    // Do nothing
    return OK;
}

// No-op: a ByteBuffer needs no teardown.
status_t JniInputByteBuffer::close() {
    // Do nothing
    return OK;
}
554 
555 // End of JniInputByteBuffer
556 // ----------------------------------------------------------------------------
557 
558 /**
559  * StripSource subclass for Input types.
560  *
561  * This class is not intended to be used across JNI calls.
562  */
563 
class InputStripSource : public StripSource, public LightRefBase<InputStripSource> {
public:
    // Strides and offset are in bytes; width/height in pixels. The Input must
    // outlive this object — only a pointer is retained.
    InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width, uint32_t height,
            uint32_t pixStride, uint32_t rowStride, uint64_t offset, uint32_t bytesPerSample,
            uint32_t samplesPerPixel);

    virtual ~InputStripSource();

    // Streams exactly count bytes of pixel data from the Input to the Output;
    // count must equal width*height*bytesPerSample*samplesPerPixel.
    virtual status_t writeToStream(Output& stream, uint32_t count);

    // Index of the TIFF IFD this strip belongs to.
    virtual uint32_t getIfd() const;
protected:
    uint32_t mIfd;             // target TIFF IFD index
    Input* mInput;             // non-owning pointer to the pixel data source
    uint32_t mWidth;           // image width in pixels
    uint32_t mHeight;          // image height in pixels
    uint32_t mPixStride;       // bytes between starts of adjacent pixels
    uint32_t mRowStride;       // bytes between starts of adjacent rows
    uint64_t mOffset;          // byte offset of the first pixel in the input
    JNIEnv* mEnv;
    uint32_t mBytesPerSample;
    uint32_t mSamplesPerPixel;
};
587 
// Stores all geometry parameters and a non-owning pointer to the Input; the
// caller guarantees the Input and JNIEnv outlive this object.
InputStripSource::InputStripSource(JNIEnv* env, Input& input, uint32_t ifd, uint32_t width,
        uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
        uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd), mInput(&input),
        mWidth(width), mHeight(height), mPixStride(pixStride), mRowStride(rowStride),
        mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
        mSamplesPerPixel(samplesPerPixel) {}

InputStripSource::~InputStripSource() {}
596 
// Streams the strip's pixel data from the Java-backed Input to the given
// Output, row by row. count must equal the full image size in bytes; pixels
// must be contiguous (pixStride == bytesPerSample * samplesPerPixel). Throws
// a Java exception and returns an error status on any failure.
status_t InputStripSource::writeToStream(Output& stream, uint32_t count) {
    uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;
    jlong offset = mOffset;

    // The caller must request exactly the full image; partial strips are not
    // supported.
    if (fullSize != count) {
        ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
                fullSize);
        jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
        return BAD_VALUE;
    }

    // Skip offset
    // skip() may consume fewer bytes than requested, so loop until the entire
    // leading offset has been consumed.
    while (offset > 0) {
        ssize_t skipped = mInput->skip(offset);
        if (skipped <= 0) {
            // 0 or NOT_ENOUGH_DATA means EOF before the pixel data started;
            // any other negative value is an underlying stream error (with a
            // possible Java exception already pending).
            if (skipped == NOT_ENOUGH_DATA || skipped == 0) {
                jniThrowExceptionFmt(mEnv, "java/io/IOException",
                        "Early EOF encountered in skip, not enough pixel data for image of size %u",
                        fullSize);
                skipped = NOT_ENOUGH_DATA;
            } else {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException",
                            "Error encountered while skip bytes in input stream.");
                }
            }

            return skipped;
        }
        offset -= skipped;
    }

    // Scratch buffer holding one full row (mRowStride bytes).
    Vector<uint8_t> row;
    if (row.resize(mRowStride) < 0) {
        jniThrowException(mEnv, "java/lang/OutOfMemoryError", "Could not allocate row vector.");
        return BAD_VALUE;
    }

    uint8_t* rowBytes = row.editArray();

    for (uint32_t i = 0; i < mHeight; ++i) {
        size_t rowFillAmt = 0;
        size_t rowSize = mRowStride;

        // read() may return short counts; accumulate until the row is full.
        while (rowFillAmt < mRowStride) {
            ssize_t bytesRead = mInput->read(rowBytes, rowFillAmt, rowSize);
            if (bytesRead <= 0) {
                if (bytesRead == NOT_ENOUGH_DATA || bytesRead == 0) {
                    ALOGE("%s: Early EOF on row %" PRIu32 ", received bytesRead %zd",
                            __FUNCTION__, i, bytesRead);
                    jniThrowExceptionFmt(mEnv, "java/io/IOException",
                            "Early EOF encountered, not enough pixel data for image of size %"
                            PRIu32, fullSize);
                    bytesRead = NOT_ENOUGH_DATA;
                } else {
                    if (!mEnv->ExceptionCheck()) {
                        jniThrowException(mEnv, "java/io/IOException",
                                "Error encountered while reading");
                    }
                }
                return bytesRead;
            }
            rowFillAmt += bytesRead;
            rowSize -= bytesRead;
        }

        if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
            ALOGV("%s: Using stream per-row write for strip.", __FUNCTION__);

            // Write only the packed pixel bytes of the row (the row stride may
            // include trailing padding beyond width * pixel size).
            if (stream.write(rowBytes, 0, mBytesPerSample * mSamplesPerPixel * mWidth) != OK ||
                    mEnv->ExceptionCheck()) {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
                }
                return BAD_VALUE;
            }
        } else {
            ALOGV("%s: Using stream per-pixel write for strip.", __FUNCTION__);
            jniThrowException(mEnv, "java/lang/IllegalStateException",
                    "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
            return BAD_VALUE;

            // TODO: Add support for non-contiguous pixels if needed.
        }
    }
    return OK;
}
684 
getIfd() const685 uint32_t InputStripSource::getIfd() const {
686     return mIfd;
687 }
688 
689 // End of InputStripSource
690 // ----------------------------------------------------------------------------
691 
692 /**
693  * StripSource subclass for direct buffer types.
694  *
695  * This class is not intended to be used across JNI calls.
696  */
697 
698 class DirectStripSource : public StripSource, public LightRefBase<DirectStripSource> {
699 public:
700     DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd, uint32_t width,
701             uint32_t height, uint32_t pixStride, uint32_t rowStride, uint64_t offset,
702             uint32_t bytesPerSample, uint32_t samplesPerPixel);
703 
704     virtual ~DirectStripSource();
705 
706     virtual status_t writeToStream(Output& stream, uint32_t count);
707 
708     virtual uint32_t getIfd() const;
709 protected:
710     uint32_t mIfd;
711     const uint8_t* mPixelBytes;
712     uint32_t mWidth;
713     uint32_t mHeight;
714     uint32_t mPixStride;
715     uint32_t mRowStride;
716     uint16_t mOffset;
717     JNIEnv* mEnv;
718     uint32_t mBytesPerSample;
719     uint32_t mSamplesPerPixel;
720 };
721 
// Stores all geometry parameters and a non-owning pointer to the pixel data;
// the caller guarantees pixelBytes and the JNIEnv outlive this object.
DirectStripSource::DirectStripSource(JNIEnv* env, const uint8_t* pixelBytes, uint32_t ifd,
            uint32_t width, uint32_t height, uint32_t pixStride, uint32_t rowStride,
            uint64_t offset, uint32_t bytesPerSample, uint32_t samplesPerPixel) : mIfd(ifd),
            mPixelBytes(pixelBytes), mWidth(width), mHeight(height), mPixStride(pixStride),
            mRowStride(rowStride), mOffset(offset), mEnv(env), mBytesPerSample(bytesPerSample),
            mSamplesPerPixel(samplesPerPixel) {}

DirectStripSource::~DirectStripSource() {}
730 
// Writes the strip's pixel data from the in-memory buffer to the given Output.
// count must equal the full image size in bytes; pixels must be contiguous
// (pixStride == bytesPerSample * samplesPerPixel). Throws a Java exception and
// returns BAD_VALUE on any failure.
status_t DirectStripSource::writeToStream(Output& stream, uint32_t count) {
    uint32_t fullSize = mWidth * mHeight * mBytesPerSample * mSamplesPerPixel;

    // The caller must request exactly the full image; partial strips are not
    // supported.
    if (fullSize != count) {
        ALOGE("%s: Amount to write %u doesn't match image size %u", __FUNCTION__, count,
                fullSize);
        jniThrowException(mEnv, "java/lang/IllegalStateException", "Not enough data to write");
        return BAD_VALUE;
    }


    if (mPixStride == mBytesPerSample * mSamplesPerPixel
            && mRowStride == mWidth * mBytesPerSample * mSamplesPerPixel) {
        // Fully packed data (no row padding): emit everything in one write.
        ALOGV("%s: Using direct single-pass write for strip.", __FUNCTION__);

        if (stream.write(mPixelBytes, mOffset, fullSize) != OK || mEnv->ExceptionCheck()) {
            if (!mEnv->ExceptionCheck()) {
                jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
            }
            return BAD_VALUE;
        }
    } else if (mPixStride == mBytesPerSample * mSamplesPerPixel) {
        // Contiguous pixels but padded rows: write each row's packed pixel
        // bytes, stepping by the row stride.
        ALOGV("%s: Using direct per-row write for strip.", __FUNCTION__);

        for (size_t i = 0; i < mHeight; ++i) {
            if (stream.write(mPixelBytes, mOffset + i * mRowStride, mPixStride * mWidth) != OK ||
                        mEnv->ExceptionCheck()) {
                if (!mEnv->ExceptionCheck()) {
                    jniThrowException(mEnv, "java/io/IOException", "Failed to write pixel data");
                }
                return BAD_VALUE;
            }
        }
    } else {
        // Non-contiguous pixels are not supported for RAW16 output.
        ALOGV("%s: Using direct per-pixel write for strip.", __FUNCTION__);

        jniThrowException(mEnv, "java/lang/IllegalStateException",
                "Per-pixel strides are not supported for RAW16 -- pixels must be contiguous");
        return BAD_VALUE;

        // TODO: Add support for non-contiguous pixels if needed.
    }
    return OK;

}
776 
getIfd() const777 uint32_t DirectStripSource::getIfd() const {
778     return mIfd;
779 }
780 
781 // End of DirectStripSource
782 // ----------------------------------------------------------------------------
783 
784 // Get the appropriate tag corresponding to default / maximum resolution mode.
getAppropriateModeTag(int32_t tag,bool maximumResolution)785 static int32_t getAppropriateModeTag(int32_t tag, bool maximumResolution) {
786     if (!maximumResolution) {
787         return tag;
788     }
789     switch (tag) {
790         case ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE:
791             return ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION;
792         case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE:
793             return ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
794         case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
795             return ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION;
796         default:
797             ALOGE("%s: Tag %d doesn't have sensor info related maximum resolution counterpart",
798                   __FUNCTION__, tag);
799             return -1;
800     }
801 }
802 
// Returns true if the given image dimensions correspond to a maximum
// resolution mode capture on an ultra-high resolution sensor. May throw
// IllegalArgumentException (and return false) if the required maximum
// resolution array-size tags are missing from the characteristics.
static bool isMaximumResolutionModeImage(const CameraMetadata& characteristics, uint32_t imageWidth,
                                         uint32_t imageHeight, const sp<TiffWriter> writer,
                                         JNIEnv* env) {
    // If this isn't an ultra-high resolution sensor, return false;
    camera_metadata_ro_entry capabilitiesEntry =
            characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
    size_t capsCount = capabilitiesEntry.count;
    const uint8_t* caps = capabilitiesEntry.data.u8;
    if (std::find(caps, caps + capsCount,
                  ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) ==
        caps + capsCount) {
        // not an ultra-high resolution sensor, cannot have a maximum resolution
        // mode image.
        return false;
    }

    // If the image width and height are either the maximum resolution
    // pre-correction active array size or the maximum resolution pixel array
    // size, this image is a maximum resolution RAW_SENSOR image.

    // Check dimensions
    camera_metadata_ro_entry entry = characteristics.find(
            ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION);

    BAIL_IF_EMPTY_RET_BOOL(entry, env, TAG_IMAGEWIDTH, writer);

    // Indices 2 and 3 of the array-size entry hold the width and height.
    uint32_t preWidth = static_cast<uint32_t>(entry.data.i32[2]);
    uint32_t preHeight = static_cast<uint32_t>(entry.data.i32[3]);

    camera_metadata_ro_entry pixelArrayEntry =
            characteristics.find(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION);

    BAIL_IF_EMPTY_RET_BOOL(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);

    // Pixel-array-size entries are (width, height) pairs.
    uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
    uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);

    return (imageWidth == preWidth && imageHeight == preHeight) ||
            (imageWidth == pixWidth && imageHeight == pixHeight);
}
843 
844 /**
845  * Calculate the default crop relative to the "active area" of the image sensor (this active area
846  * will always be the pre-correction active area rectangle), and set this.
847  */
calculateAndSetCrop(JNIEnv * env,const CameraMetadata & characteristics,sp<TiffWriter> writer,bool maximumResolutionMode)848 static status_t calculateAndSetCrop(JNIEnv* env, const CameraMetadata& characteristics,
849                                     sp<TiffWriter> writer, bool maximumResolutionMode) {
850     camera_metadata_ro_entry entry = characteristics.find(
851             getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
852                                   maximumResolutionMode));
853     BAIL_IF_EMPTY_RET_STATUS(entry, env, TAG_IMAGEWIDTH, writer);
854     uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
855     uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
856 
857     const uint32_t margin = 8; // Default margin recommended by Adobe for interpolation.
858 
859     if (width < margin * 2 || height < margin * 2) {
860         ALOGE("%s: Cannot calculate default crop for image, pre-correction active area is too"
861                 "small: h=%" PRIu32 ", w=%" PRIu32, __FUNCTION__, height, width);
862         jniThrowException(env, "java/lang/IllegalStateException",
863                 "Pre-correction active area is too small.");
864         return BAD_VALUE;
865     }
866 
867     uint32_t defaultCropOrigin[] = {margin, margin};
868     uint32_t defaultCropSize[] = {width - defaultCropOrigin[0] - margin,
869                                   height - defaultCropOrigin[1] - margin};
870 
871     BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPORIGIN, 2, defaultCropOrigin,
872             TIFF_IFD_0), env, TAG_DEFAULTCROPORIGIN, writer);
873     BAIL_IF_INVALID_R(writer->addEntry(TAG_DEFAULTCROPSIZE, 2, defaultCropSize,
874             TIFF_IFD_0), env, TAG_DEFAULTCROPSIZE, writer);
875 
876     return OK;
877 }
878 
validateDngHeader(JNIEnv * env,sp<TiffWriter> writer,const CameraMetadata & characteristics,jint width,jint height)879 static bool validateDngHeader(JNIEnv* env, sp<TiffWriter> writer,
880         const CameraMetadata& characteristics, jint width, jint height) {
881     if (width <= 0) {
882         jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
883                         "Image width %d is invalid", width);
884         return false;
885     }
886 
887     if (height <= 0) {
888         jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
889                         "Image height %d is invalid", height);
890         return false;
891     }
892     bool isMaximumResolutionMode =
893             isMaximumResolutionModeImage(characteristics, static_cast<uint32_t>(width),
894                                          static_cast<uint32_t>(height), writer, env);
895 
896     camera_metadata_ro_entry preCorrectionEntry = characteristics.find(
897             getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
898                                   isMaximumResolutionMode));
899     BAIL_IF_EMPTY_RET_BOOL(preCorrectionEntry, env, TAG_IMAGEWIDTH, writer);
900 
901     camera_metadata_ro_entry pixelArrayEntry = characteristics.find(
902             getAppropriateModeTag(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, isMaximumResolutionMode));
903     BAIL_IF_EMPTY_RET_BOOL(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);
904 
905     int pWidth = static_cast<int>(pixelArrayEntry.data.i32[0]);
906     int pHeight = static_cast<int>(pixelArrayEntry.data.i32[1]);
907     int cWidth = static_cast<int>(preCorrectionEntry.data.i32[2]);
908     int cHeight = static_cast<int>(preCorrectionEntry.data.i32[3]);
909 
910     bool matchesPixelArray = (pWidth == width && pHeight == height);
911     bool matchesPreCorrectionArray = (cWidth == width && cHeight == height);
912 
913     if (!(matchesPixelArray || matchesPreCorrectionArray)) {
914         jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException", \
915                         "Image dimensions (w=%d,h=%d) are invalid, must match either the pixel "
916                         "array size (w=%d, h=%d) or the pre-correction array size (w=%d, h=%d)",
917                         width, height, pWidth, pHeight, cWidth, cHeight);
918         return false;
919     }
920 
921     return true;
922 }
923 
924 /**
925  * Write CFA pattern for given CFA enum into cfaOut.  cfaOut must have length >= 4.
926  * Returns OK on success, or a negative error code if the CFA enum was invalid.
927  */
convertCFA(uint8_t cfaEnum,uint8_t * cfaOut)928 static status_t convertCFA(uint8_t cfaEnum, /*out*/uint8_t* cfaOut) {
929     camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
930             static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
931             cfaEnum);
932     switch(cfa) {
933         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
934             cfaOut[0] = 0;
935             cfaOut[1] = 1;
936             cfaOut[2] = 1;
937             cfaOut[3] = 2;
938             break;
939         }
940         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
941             cfaOut[0] = 1;
942             cfaOut[1] = 0;
943             cfaOut[2] = 2;
944             cfaOut[3] = 1;
945             break;
946         }
947         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
948             cfaOut[0] = 1;
949             cfaOut[1] = 2;
950             cfaOut[2] = 0;
951             cfaOut[3] = 1;
952             break;
953         }
954         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
955             cfaOut[0] = 2;
956             cfaOut[1] = 1;
957             cfaOut[2] = 1;
958             cfaOut[3] = 0;
959             break;
960         }
961         // MONO and NIR are degenerate case of RGGB pattern: only Red channel
962         // will be used.
963         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO:
964         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR: {
965             cfaOut[0] = 0;
966             break;
967         }
968         default: {
969             return BAD_VALUE;
970         }
971     }
972     return OK;
973 }
974 
975 /**
976  * Convert the CFA layout enum to an OpcodeListBuilder::CfaLayout enum, defaults to
977  * RGGB for an unknown enum.
978  */
convertCFAEnumToOpcodeLayout(uint8_t cfaEnum)979 static OpcodeListBuilder::CfaLayout convertCFAEnumToOpcodeLayout(uint8_t cfaEnum) {
980     camera_metadata_enum_android_sensor_info_color_filter_arrangement_t cfa =
981             static_cast<camera_metadata_enum_android_sensor_info_color_filter_arrangement_t>(
982             cfaEnum);
983     switch(cfa) {
984         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB: {
985             return OpcodeListBuilder::CFA_RGGB;
986         }
987         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG: {
988             return OpcodeListBuilder::CFA_GRBG;
989         }
990         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG: {
991             return OpcodeListBuilder::CFA_GBRG;
992         }
993         case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR: {
994             return OpcodeListBuilder::CFA_BGGR;
995         }
996         default: {
997             return OpcodeListBuilder::CFA_RGGB;
998         }
999     }
1000 }
1001 
1002 /**
1003  * For each color plane, find the corresponding noise profile coefficients given in the
1004  * per-channel noise profile.  If multiple channels in the CFA correspond to a color in the color
1005  * plane, this method takes the pair of noise profile coefficients with the higher S coefficient.
1006  *
1007  * perChannelNoiseProfile - numChannels * 2 noise profile coefficients.
1008  * cfa - numChannels color channels corresponding to each of the per-channel noise profile
1009  *       coefficients.
1010  * numChannels - the number of noise profile coefficient pairs and color channels given in
1011  *       the perChannelNoiseProfile and cfa arguments, respectively.
1012  * planeColors - the color planes in the noise profile output.
1013  * numPlanes - the number of planes in planeColors and pairs of coefficients in noiseProfile.
1014  * noiseProfile - 2 * numPlanes doubles containing numPlanes pairs of noise profile coefficients.
1015  *
1016  * returns OK, or a negative error code on failure.
1017  */
generateNoiseProfile(const double * perChannelNoiseProfile,uint8_t * cfa,size_t numChannels,const uint8_t * planeColors,size_t numPlanes,double * noiseProfile)1018 static status_t generateNoiseProfile(const double* perChannelNoiseProfile, uint8_t* cfa,
1019         size_t numChannels, const uint8_t* planeColors, size_t numPlanes,
1020         /*out*/double* noiseProfile) {
1021 
1022     for (size_t p = 0; p < numPlanes; ++p) {
1023         size_t S = p * 2;
1024         size_t O = p * 2 + 1;
1025 
1026         noiseProfile[S] = 0;
1027         noiseProfile[O] = 0;
1028         bool uninitialized = true;
1029         for (size_t c = 0; c < numChannels; ++c) {
1030             if (cfa[c] == planeColors[p] && perChannelNoiseProfile[c * 2] > noiseProfile[S]) {
1031                 noiseProfile[S] = perChannelNoiseProfile[c * 2];
1032                 noiseProfile[O] = perChannelNoiseProfile[c * 2 + 1];
1033                 uninitialized = false;
1034             }
1035         }
1036         if (uninitialized) {
1037             ALOGE("%s: No valid NoiseProfile coefficients for color plane %zu",
1038                   __FUNCTION__, p);
1039             return BAD_VALUE;
1040         }
1041     }
1042     return OK;
1043 }
1044 
/**
 * Apply the lens distortion model to the pixel coordinate (x, y) in place.
 *
 * distortion - {k0, k1, k2, k3, p1, p2}: radial (k*) and tangential (p*)
 *              distortion coefficients.
 * (cx, cy)   - optical center, in pixels.
 * f          - focal length, in pixels.
 */
static void undistort(/*inout*/double& x, /*inout*/double& y,
        const std::array<float, 6>& distortion,
        const float cx, const float cy, const float f) {
    // Normalize relative to the optical center and focal length.
    const double xn = (x - cx) / f;
    const double yn = (y - cy) / f;

    const double xn2 = xn * xn;
    const double yn2 = yn * yn;
    const double r2 = xn2 + yn2;
    const double crossTerm = 2.0 * xn * yn;

    const float k0 = distortion[0];
    const float k1 = distortion[1];
    const float k2 = distortion[2];
    const float k3 = distortion[3];
    const float p1 = distortion[4];
    const float p2 = distortion[5];

    // Radial gain k0 + k1*r^2 + k2*r^4 + k3*r^6, evaluated via Horner's rule.
    const double radialGain = k0 + ((k3 * r2 + k2) * r2 + k1) * r2;
    const double xd = xn * radialGain + p1 * crossTerm + p2 * (r2 + 2.0 * xn2);
    const double yd = yn * radialGain + p1 * (r2 + 2.0 * yn2) + p2 * crossTerm;

    // Map back to pixel coordinates.
    x = xd * f + cx;
    y = yd * f + cy;
}
1071 
unDistortWithinPreCorrArray(double x,double y,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1072 static inline bool unDistortWithinPreCorrArray(
1073         double x, double y,
1074         const std::array<float, 6>& distortion,
1075         const float cx, const float cy, const float f,
1076         const int preCorrW, const int preCorrH, const int xMin, const int yMin) {
1077     undistort(x, y, distortion, cx, cy, f);
1078     // xMin and yMin are inclusive, and xMax and yMax are exclusive.
1079     int xMax = xMin + preCorrW;
1080     int yMax = yMin + preCorrH;
1081     if (x < xMin || y < yMin || x >= xMax || y >= yMax) {
1082         return false;
1083     }
1084     return true;
1085 }
1086 
boxWithinPrecorrectionArray(int left,int top,int right,int bottom,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1087 static inline bool boxWithinPrecorrectionArray(
1088         int left, int top, int right, int bottom,
1089         const std::array<float, 6>& distortion,
1090         const float cx, const float cy, const float f,
1091         const int preCorrW, const int preCorrH, const int xMin, const int yMin){
1092     // Top row
1093     if (!unDistortWithinPreCorrArray(left, top,
1094             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1095         return false;
1096     }
1097 
1098     if (!unDistortWithinPreCorrArray(cx, top,
1099             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1100         return false;
1101     }
1102 
1103     if (!unDistortWithinPreCorrArray(right, top,
1104             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1105         return false;
1106     }
1107 
1108     // Middle row
1109     if (!unDistortWithinPreCorrArray(left, cy,
1110             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1111         return false;
1112     }
1113 
1114     if (!unDistortWithinPreCorrArray(right, cy,
1115             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1116         return false;
1117     }
1118 
1119     // Bottom row
1120     if (!unDistortWithinPreCorrArray(left, bottom,
1121             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1122         return false;
1123     }
1124 
1125     if (!unDistortWithinPreCorrArray(cx, bottom,
1126             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1127         return false;
1128     }
1129 
1130     if (!unDistortWithinPreCorrArray(right, bottom,
1131             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1132         return false;
1133     }
1134     return true;
1135 }
1136 
scaledBoxWithinPrecorrectionArray(double scale,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin)1137 static inline bool scaledBoxWithinPrecorrectionArray(
1138         double scale/*must be <= 1.0*/,
1139         const std::array<float, 6>& distortion,
1140         const float cx, const float cy, const float f,
1141         const int preCorrW, const int preCorrH,
1142         const int xMin, const int yMin){
1143 
1144     double left = cx * (1.0 - scale);
1145     double right = (preCorrW - 1) * scale + cx * (1.0 - scale);
1146     double top = cy * (1.0 - scale);
1147     double bottom = (preCorrH - 1) * scale + cy * (1.0 - scale);
1148 
1149     return boxWithinPrecorrectionArray(left, top, right, bottom,
1150             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin);
1151 }
1152 
findPostCorrectionScale(double stepSize,double minScale,const std::array<float,6> & distortion,const float cx,const float cy,const float f,const int preCorrW,const int preCorrH,const int xMin,const int yMin,double * outScale)1153 static status_t findPostCorrectionScale(
1154         double stepSize, double minScale,
1155         const std::array<float, 6>& distortion,
1156         const float cx, const float cy, const float f,
1157         const int preCorrW, const int preCorrH, const int xMin, const int yMin,
1158         /*out*/ double* outScale) {
1159     if (outScale == nullptr) {
1160         ALOGE("%s: outScale must not be null", __FUNCTION__);
1161         return BAD_VALUE;
1162     }
1163 
1164     for (double scale = 1.0; scale > minScale; scale -= stepSize) {
1165         if (scaledBoxWithinPrecorrectionArray(
1166                 scale, distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin)) {
1167             *outScale = scale;
1168             return OK;
1169         }
1170     }
1171     ALOGE("%s: cannot find cropping scale for lens distortion: stepSize %f, minScale %f",
1172             __FUNCTION__, stepSize, minScale);
1173     return BAD_VALUE;
1174 }
1175 
1176 // Apply a scale factor to distortion coefficients so that the image is zoomed out and all pixels
1177 // are sampled within the precorrection array
normalizeLensDistortion(std::array<float,6> & distortion,float cx,float cy,float f,int preCorrW,int preCorrH,int xMin=0,int yMin=0)1178 static void normalizeLensDistortion(
1179         /*inout*/std::array<float, 6>& distortion,
1180         float cx, float cy, float f, int preCorrW, int preCorrH, int xMin = 0, int yMin = 0) {
1181     ALOGV("%s: distortion [%f, %f, %f, %f, %f, %f], (cx,cy) (%f, %f), f %f, (W,H) (%d, %d)"
1182             ", (xmin, ymin, xmax, ymax) (%d, %d, %d, %d)",
1183             __FUNCTION__, distortion[0], distortion[1], distortion[2],
1184             distortion[3], distortion[4], distortion[5],
1185             cx, cy, f, preCorrW, preCorrH,
1186             xMin, yMin, xMin + preCorrW - 1, yMin + preCorrH - 1);
1187 
1188     // Only update distortion coeffients if we can find a good bounding box
1189     double scale = 1.0;
1190     if (OK == findPostCorrectionScale(0.002, 0.5,
1191             distortion, cx, cy, f, preCorrW, preCorrH, xMin, yMin,
1192             /*out*/&scale)) {
1193         ALOGV("%s: scaling distortion coefficients by %f", __FUNCTION__, scale);
1194         // The formula:
1195         // xc = xi * (k0 + k1*r^2 + k2*r^4 + k3*r^6) + k4 * (2*xi*yi) + k5 * (r^2 + 2*xi^2)
1196         // To create effective zoom we want to replace xi by xi *m, yi by yi*m and r^2 by r^2*m^2
1197         // Factor the extra m power terms into k0~k6
1198         std::array<float, 6> scalePowers = {1, 3, 5, 7, 2, 2};
1199         for (size_t i = 0; i < 6; i++) {
1200             distortion[i] *= pow(scale, scalePowers[i]);
1201         }
1202     }
1203     return;
1204 }
1205 
1206 // ----------------------------------------------------------------------------
1207 extern "C" {
1208 
DngCreator_getNativeContext(JNIEnv * env,jobject thiz)1209 static NativeContext* DngCreator_getNativeContext(JNIEnv* env, jobject thiz) {
1210     ALOGV("%s:", __FUNCTION__);
1211     return reinterpret_cast<NativeContext*>(env->GetLongField(thiz,
1212             gDngCreatorClassInfo.mNativeContext));
1213 }
1214 
/**
 * Store a strong reference to the given NativeContext in the Java object's
 * mNativeContext long field, releasing any context previously stored there.
 * Passing a null context clears the field.
 */
static void DngCreator_setNativeContext(JNIEnv* env, jobject thiz, sp<NativeContext> context) {
    ALOGV("%s:", __FUNCTION__);
    NativeContext* current = DngCreator_getNativeContext(env, thiz);

    // Take the new reference before dropping the old one, so that setting the
    // same context twice cannot transiently destroy it.
    if (context != nullptr) {
        context->incStrong((void*) DngCreator_setNativeContext);
    }

    if (current) {
        current->decStrong((void*) DngCreator_setNativeContext);
    }

    // Store the raw pointer; the strong count taken above keeps it alive after
    // the local sp<> goes out of scope.
    env->SetLongField(thiz, gDngCreatorClassInfo.mNativeContext,
            reinterpret_cast<jlong>(context.get()));
}
1230 
/**
 * Cache the JNI field and method IDs used by this file. Invoked once during
 * class initialization; the *OrDie helpers abort if any lookup fails.
 */
static void DngCreator_nativeClassInit(JNIEnv* env, jclass clazz) {
    ALOGV("%s:", __FUNCTION__);

    // Long field on DngCreator that holds the NativeContext pointer.
    gDngCreatorClassInfo.mNativeContext = GetFieldIDOrDie(env,
            clazz, ANDROID_DNGCREATOR_CTX_JNI_ID, "J");

    // java.io.OutputStream#write(byte[], int, int)
    jclass outputStreamClazz = FindClassOrDie(env, "java/io/OutputStream");
    gOutputStreamClassInfo.mWriteMethod = GetMethodIDOrDie(env,
            outputStreamClazz, "write", "([BII)V");

    // java.io.InputStream#read(byte[], int, int) and #skip(long)
    jclass inputStreamClazz = FindClassOrDie(env, "java/io/InputStream");
    gInputStreamClassInfo.mReadMethod = GetMethodIDOrDie(env, inputStreamClazz, "read", "([BII)I");
    gInputStreamClassInfo.mSkipMethod = GetMethodIDOrDie(env, inputStreamClazz, "skip", "(J)J");

    // java.nio.ByteBuffer#get(byte[], int, int)
    jclass inputBufferClazz = FindClassOrDie(env, "java/nio/ByteBuffer");
    gInputByteBufferClassInfo.mGetMethod = GetMethodIDOrDie(env,
            inputBufferClazz, "get", "([BII)Ljava/nio/ByteBuffer;");
}
1249 
/**
 * Initialize the native side of a DngCreator: unpack the native camera
 * characteristics and capture-result metadata from their Java wrappers,
 * validate the formatted capture-time string, and attach a new NativeContext
 * to the Java object. On invalid input, throws a Java exception and returns
 * without setting a context.
 */
static void DngCreator_init(JNIEnv* env, jobject thiz, jobject characteristicsPtr,
        jobject resultsPtr, jstring formattedCaptureTime) {
    ALOGV("%s:", __FUNCTION__);
    CameraMetadata characteristics;
    CameraMetadata results;
    if (CameraMetadata_getNativeMetadata(env, characteristicsPtr, &characteristics) != OK) {
         jniThrowException(env, "java/lang/AssertionError",
                "No native metadata defined for camera characteristics.");
         return;
    }
    if (CameraMetadata_getNativeMetadata(env, resultsPtr, &results) != OK) {
        jniThrowException(env, "java/lang/AssertionError",
                "No native metadata defined for capture results.");
        return;
    }

    sp<NativeContext> nativeContext = new NativeContext(characteristics, results);

    // The UTF-8 capture time must be exactly DATETIME_COUNT - 1 characters;
    // the +1 presumably accounts for the NUL terminator counted in
    // DATETIME_COUNT (defined elsewhere) — confirm against NativeContext.
    ScopedUtfChars captureTime(env, formattedCaptureTime);
    if (captureTime.size() + 1 != NativeContext::DATETIME_COUNT) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "Formatted capture time string length is not required 20 characters");
        return;
    }

    nativeContext->setCaptureTime(String8(captureTime.c_str()));

    DngCreator_setNativeContext(env, thiz, nativeContext);
}
1279 
DngCreator_setup(JNIEnv * env,jobject thiz,uint32_t imageWidth,uint32_t imageHeight)1280 static sp<TiffWriter> DngCreator_setup(JNIEnv* env, jobject thiz, uint32_t imageWidth,
1281         uint32_t imageHeight) {
1282 
1283     NativeContext* nativeContext = DngCreator_getNativeContext(env, thiz);
1284 
1285     if (nativeContext == nullptr) {
1286         jniThrowException(env, "java/lang/AssertionError",
1287                 "No native context, must call init before other operations.");
1288         return nullptr;
1289     }
1290 
1291     CameraMetadata characteristics = *(nativeContext->getCharacteristics());
1292     CameraMetadata results = *(nativeContext->getResult());
1293 
1294     sp<TiffWriter> writer = new TiffWriter();
1295 
1296     uint32_t preXMin = 0;
1297     uint32_t preYMin = 0;
1298     uint32_t preWidth = 0;
1299     uint32_t preHeight = 0;
1300     uint8_t colorFilter = 0;
1301     bool isBayer = true;
1302     bool isMaximumResolutionMode =
1303             isMaximumResolutionModeImage(characteristics, imageWidth, imageHeight, writer, env);
1304     {
1305         // Check dimensions
1306         camera_metadata_entry entry = characteristics.find(
1307                 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1308                                       isMaximumResolutionMode));
1309         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1310         preXMin = static_cast<uint32_t>(entry.data.i32[0]);
1311         preYMin = static_cast<uint32_t>(entry.data.i32[1]);
1312         preWidth = static_cast<uint32_t>(entry.data.i32[2]);
1313         preHeight = static_cast<uint32_t>(entry.data.i32[3]);
1314 
1315         camera_metadata_entry pixelArrayEntry =
1316                 characteristics.find(getAppropriateModeTag(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1317                                                            isMaximumResolutionMode));
1318 
1319         BAIL_IF_EMPTY_RET_NULL_SP(pixelArrayEntry, env, TAG_IMAGEWIDTH, writer);
1320         uint32_t pixWidth = static_cast<uint32_t>(pixelArrayEntry.data.i32[0]);
1321         uint32_t pixHeight = static_cast<uint32_t>(pixelArrayEntry.data.i32[1]);
1322 
1323         if (!((imageWidth == preWidth && imageHeight == preHeight) ||
1324                 (imageWidth == pixWidth && imageHeight == pixHeight))) {
1325             jniThrowException(env, "java/lang/AssertionError",
1326                               "Height and width of image buffer did not match height and width of"
1327                               " either the preCorrectionActiveArraySize or the pixelArraySize.");
1328             return nullptr;
1329         }
1330 
1331         camera_metadata_entry colorFilterEntry =
1332                 characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1333         colorFilter = colorFilterEntry.data.u8[0];
1334         camera_metadata_entry capabilitiesEntry =
1335                 characteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
1336         size_t capsCount = capabilitiesEntry.count;
1337         uint8_t* caps = capabilitiesEntry.data.u8;
1338         if (std::find(caps, caps+capsCount, ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME)
1339                 != caps+capsCount) {
1340             isBayer = false;
1341         } else if (colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO ||
1342                 colorFilter == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR) {
1343             jniThrowException(env, "java/lang/AssertionError",
1344                     "A camera device with MONO/NIR color filter must have MONOCHROME capability.");
1345             return nullptr;
1346         }
1347     }
1348 
1349     writer->addIfd(TIFF_IFD_0);
1350 
1351     status_t err = OK;
1352 
1353     const uint32_t samplesPerPixel = 1;
1354     const uint32_t bitsPerSample = BITS_PER_SAMPLE;
1355 
1356     OpcodeListBuilder::CfaLayout opcodeCfaLayout = OpcodeListBuilder::CFA_NONE;
1357     uint8_t cfaPlaneColor[3] = {0, 1, 2};
1358     camera_metadata_entry cfaEntry =
1359             characteristics.find(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT);
1360     BAIL_IF_EMPTY_RET_NULL_SP(cfaEntry, env, TAG_CFAPATTERN, writer);
1361     uint8_t cfaEnum = cfaEntry.data.u8[0];
1362 
1363     // TODO: Greensplit.
1364     // TODO: Add remaining non-essential tags
1365 
1366     // Setup main image tags
1367 
1368     {
1369         // Set orientation
1370         uint16_t orientation = TAG_ORIENTATION_NORMAL;
1371         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
1372                 env, TAG_ORIENTATION, writer);
1373     }
1374 
1375     {
1376         // Set subfiletype
1377         uint32_t subfileType = 0; // Main image
1378         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
1379                 TIFF_IFD_0), env, TAG_NEWSUBFILETYPE, writer);
1380     }
1381 
1382     {
1383         // Set bits per sample
1384         uint16_t bits = static_cast<uint16_t>(bitsPerSample);
1385         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BITSPERSAMPLE, 1, &bits, TIFF_IFD_0), env,
1386                 TAG_BITSPERSAMPLE, writer);
1387     }
1388 
1389     {
1390         // Set compression
1391         uint16_t compression = 1; // None
1392         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
1393                 TIFF_IFD_0), env, TAG_COMPRESSION, writer);
1394     }
1395 
1396     {
1397         // Set dimensions
1398         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &imageWidth, TIFF_IFD_0),
1399                 env, TAG_IMAGEWIDTH, writer);
1400         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &imageHeight, TIFF_IFD_0),
1401                 env, TAG_IMAGELENGTH, writer);
1402     }
1403 
1404     {
1405         // Set photometric interpretation
1406         uint16_t interpretation = isBayer ? 32803 /* CFA */ :
1407                 34892; /* Linear Raw */;
1408         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
1409                 &interpretation, TIFF_IFD_0), env, TAG_PHOTOMETRICINTERPRETATION, writer);
1410     }
1411 
1412     {
1413         uint16_t repeatDim[2] = {2, 2};
1414         if (!isBayer) {
1415             repeatDim[0] = repeatDim[1] = 1;
1416         }
1417         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVELREPEATDIM, 2, repeatDim,
1418                 TIFF_IFD_0), env, TAG_BLACKLEVELREPEATDIM, writer);
1419 
1420         // Set blacklevel tags, using dynamic black level if available
1421         camera_metadata_entry entry =
1422                 results.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
1423         uint32_t blackLevelRational[8] = {0};
1424         if (entry.count != 0) {
1425             BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1426             for (size_t i = 0; i < entry.count; i++) {
1427                 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.f[i] * 100);
1428                 blackLevelRational[i * 2 + 1] = 100;
1429             }
1430         } else {
1431             // Fall back to static black level which is guaranteed
1432             entry = characteristics.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
1433             BAIL_IF_EXPR_RET_NULL_SP(entry.count != 4, env, TAG_BLACKLEVEL, writer);
1434             for (size_t i = 0; i < entry.count; i++) {
1435                 blackLevelRational[i * 2] = static_cast<uint32_t>(entry.data.i32[i]);
1436                 blackLevelRational[i * 2 + 1] = 1;
1437             }
1438         }
1439         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BLACKLEVEL, repeatDim[0]*repeatDim[1],
1440                 blackLevelRational, TIFF_IFD_0), env, TAG_BLACKLEVEL, writer);
1441     }
1442 
1443     {
1444         // Set samples per pixel
1445         uint16_t samples = static_cast<uint16_t>(samplesPerPixel);
1446         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples, TIFF_IFD_0),
1447                 env, TAG_SAMPLESPERPIXEL, writer);
1448     }
1449 
1450     {
1451         // Set planar configuration
1452         uint16_t config = 1; // Chunky
1453         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
1454                 TIFF_IFD_0), env, TAG_PLANARCONFIGURATION, writer);
1455     }
1456 
1457     // All CFA pattern tags are not necessary for monochrome cameras.
1458     if (isBayer) {
1459         // Set CFA pattern dimensions
1460         uint16_t repeatDim[2] = {2, 2};
1461         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAREPEATPATTERNDIM, 2, repeatDim,
1462                 TIFF_IFD_0), env, TAG_CFAREPEATPATTERNDIM, writer);
1463 
1464         // Set CFA pattern
1465         const int cfaLength = 4;
1466         uint8_t cfa[cfaLength];
1467         if ((err = convertCFA(cfaEnum, /*out*/cfa)) != OK) {
1468             jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
1469                         "Invalid metadata for tag %d", TAG_CFAPATTERN);
1470         }
1471 
1472         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPATTERN, cfaLength, cfa, TIFF_IFD_0),
1473                 env, TAG_CFAPATTERN, writer);
1474 
1475         opcodeCfaLayout = convertCFAEnumToOpcodeLayout(cfaEnum);
1476 
1477         // Set CFA plane color
1478         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFAPLANECOLOR, 3, cfaPlaneColor,
1479                 TIFF_IFD_0), env, TAG_CFAPLANECOLOR, writer);
1480 
1481         // Set CFA layout
1482         uint16_t cfaLayout = 1;
1483         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CFALAYOUT, 1, &cfaLayout, TIFF_IFD_0),
1484                 env, TAG_CFALAYOUT, writer);
1485     }
1486 
1487     {
1488         // image description
1489         uint8_t imageDescription = '\0'; // empty
1490         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEDESCRIPTION, 1, &imageDescription,
1491                 TIFF_IFD_0), env, TAG_IMAGEDESCRIPTION, writer);
1492     }
1493 
1494     {
1495         // make
1496         // Use "" to represent unknown make as suggested in TIFF/EP spec.
1497         std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1498         uint32_t count = static_cast<uint32_t>(manufacturer.size()) + 1;
1499 
1500         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MAKE, count,
1501                 reinterpret_cast<const uint8_t*>(manufacturer.c_str()), TIFF_IFD_0), env, TAG_MAKE,
1502                 writer);
1503     }
1504 
1505     {
1506         // model
1507         // Use "" to represent unknown model as suggested in TIFF/EP spec.
1508         std::string model = GetProperty("ro.product.model", "");
1509         uint32_t count = static_cast<uint32_t>(model.size()) + 1;
1510 
1511         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_MODEL, count,
1512                 reinterpret_cast<const uint8_t*>(model.c_str()), TIFF_IFD_0), env, TAG_MODEL,
1513                 writer);
1514     }
1515 
1516     {
1517         // x resolution
1518         uint32_t xres[] = { 72, 1 }; // default 72 ppi
1519         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_0),
1520                 env, TAG_XRESOLUTION, writer);
1521 
1522         // y resolution
1523         uint32_t yres[] = { 72, 1 }; // default 72 ppi
1524         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_0),
1525                 env, TAG_YRESOLUTION, writer);
1526 
1527         uint16_t unit = 2; // inches
1528         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit, TIFF_IFD_0),
1529                 env, TAG_RESOLUTIONUNIT, writer);
1530     }
1531 
1532     {
1533         // software
1534         std::string software = GetProperty("ro.build.fingerprint", "");
1535         uint32_t count = static_cast<uint32_t>(software.size()) + 1;
1536         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SOFTWARE, count,
1537                 reinterpret_cast<const uint8_t*>(software.c_str()), TIFF_IFD_0), env, TAG_SOFTWARE,
1538                 writer);
1539     }
1540 
    if (nativeContext->hasCaptureTime()) {
        // datetime
        // Capture time was provided by the caller; DATETIME_COUNT is the fixed
        // length of the pre-formatted datetime string held by NativeContext
        // (defined elsewhere in this file).
        String8 captureTime = nativeContext->getCaptureTime();

        if (writer->addEntry(TAG_DATETIME, NativeContext::DATETIME_COUNT,
                reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
            jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
                    "Invalid metadata for tag %x", TAG_DATETIME);
            return nullptr;
        }

        // datetime original
        // Same string is reused: original-capture time == file datetime here.
        if (writer->addEntry(TAG_DATETIMEORIGINAL, NativeContext::DATETIME_COUNT,
                reinterpret_cast<const uint8_t*>(captureTime.string()), TIFF_IFD_0) != OK) {
            jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
                    "Invalid metadata for tag %x", TAG_DATETIMEORIGINAL);
            return nullptr;
        }
    }

    {
        // TIFF/EP standard id
        // Version 1.0.0.0 as four bytes.
        uint8_t standardId[] = { 1, 0, 0, 0 };
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_TIFFEPSTANDARDID, 4, standardId,
                TIFF_IFD_0), env, TAG_TIFFEPSTANDARDID, writer);
    }

    {
        // copyright
        // Empty ASCII string (single NUL).
        uint8_t copyright = '\0'; // empty
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COPYRIGHT, 1, &copyright,
                TIFF_IFD_0), env, TAG_COPYRIGHT, writer);
    }
1574 
1575     {
1576         // exposure time
1577         camera_metadata_entry entry =
1578             results.find(ANDROID_SENSOR_EXPOSURE_TIME);
1579         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_EXPOSURETIME, writer);
1580 
1581         int64_t exposureTime = *(entry.data.i64);
1582 
1583         if (exposureTime < 0) {
1584             // Should be unreachable
1585             jniThrowException(env, "java/lang/IllegalArgumentException",
1586                     "Negative exposure time in metadata");
1587             return nullptr;
1588         }
1589 
1590         // Ensure exposure time doesn't overflow (for exposures > 4s)
1591         uint32_t denominator = 1000000000;
1592         while (exposureTime > UINT32_MAX) {
1593             exposureTime >>= 1;
1594             denominator >>= 1;
1595             if (denominator == 0) {
1596                 // Should be unreachable
1597                 jniThrowException(env, "java/lang/IllegalArgumentException",
1598                         "Exposure time too long");
1599                 return nullptr;
1600             }
1601         }
1602 
1603         uint32_t exposure[] = { static_cast<uint32_t>(exposureTime), denominator };
1604         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_EXPOSURETIME, 1, exposure,
1605                 TIFF_IFD_0), env, TAG_EXPOSURETIME, writer);
1606 
1607     }
1608 
1609     {
1610         // ISO speed ratings
1611         camera_metadata_entry entry =
1612             results.find(ANDROID_SENSOR_SENSITIVITY);
1613         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ISOSPEEDRATINGS, writer);
1614 
1615         int32_t tempIso = *(entry.data.i32);
1616         if (tempIso < 0) {
1617             jniThrowException(env, "java/lang/IllegalArgumentException",
1618                                     "Negative ISO value");
1619             return nullptr;
1620         }
1621 
1622         if (tempIso > UINT16_MAX) {
1623             ALOGW("%s: ISO value overflows UINT16_MAX, clamping to max", __FUNCTION__);
1624             tempIso = UINT16_MAX;
1625         }
1626 
1627         uint16_t iso = static_cast<uint16_t>(tempIso);
1628         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ISOSPEEDRATINGS, 1, &iso,
1629                 TIFF_IFD_0), env, TAG_ISOSPEEDRATINGS, writer);
1630     }
1631 
1632     {
1633         // Baseline exposure
1634         camera_metadata_entry entry =
1635                 results.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST);
1636         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_BASELINEEXPOSURE, writer);
1637 
1638         // post RAW gain should be boostValue / 100
1639         double postRAWGain = static_cast<double> (entry.data.i32[0]) / 100.f;
1640         // Baseline exposure should be in EV units so log2(gain) =
1641         // log10(gain)/log10(2)
1642         double baselineExposure = std::log(postRAWGain) / std::log(2.0f);
1643         int32_t baseExposureSRat[] = { static_cast<int32_t> (baselineExposure * 100),
1644                 100 };
1645         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_BASELINEEXPOSURE, 1,
1646                 baseExposureSRat, TIFF_IFD_0), env, TAG_BASELINEEXPOSURE, writer);
1647     }
1648 
1649     {
1650         // focal length
1651         camera_metadata_entry entry =
1652             results.find(ANDROID_LENS_FOCAL_LENGTH);
1653         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FOCALLENGTH, writer);
1654 
1655         uint32_t focalLength[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1656         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FOCALLENGTH, 1, focalLength,
1657                 TIFF_IFD_0), env, TAG_FOCALLENGTH, writer);
1658     }
1659 
1660     {
1661         // f number
1662         camera_metadata_entry entry =
1663             results.find(ANDROID_LENS_APERTURE);
1664         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_FNUMBER, writer);
1665 
1666         uint32_t fnum[] = { static_cast<uint32_t>(*(entry.data.f) * 100), 100 };
1667         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FNUMBER, 1, fnum,
1668                 TIFF_IFD_0), env, TAG_FNUMBER, writer);
1669     }
1670 
1671     {
1672         // Set DNG version information
1673         uint8_t version[4] = {1, 4, 0, 0};
1674         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGVERSION, 4, version, TIFF_IFD_0),
1675                 env, TAG_DNGVERSION, writer);
1676 
1677         uint8_t backwardVersion[4] = {1, 1, 0, 0};
1678         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DNGBACKWARDVERSION, 4, backwardVersion,
1679                 TIFF_IFD_0), env, TAG_DNGBACKWARDVERSION, writer);
1680     }
1681 
1682     {
1683         // Set whitelevel
1684         camera_metadata_entry entry =
1685                 characteristics.find(ANDROID_SENSOR_INFO_WHITE_LEVEL);
1686         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_WHITELEVEL, writer);
1687         uint32_t whiteLevel = static_cast<uint32_t>(entry.data.i32[0]);
1688         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_WHITELEVEL, 1, &whiteLevel, TIFF_IFD_0),
1689                 env, TAG_WHITELEVEL, writer);
1690     }
1691 
1692     {
1693         // Set default scale
1694         uint32_t defaultScale[4] = {1, 1, 1, 1};
1695         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_DEFAULTSCALE, 2, defaultScale,
1696                 TIFF_IFD_0), env, TAG_DEFAULTSCALE, writer);
1697     }
1698 
    // Tracks whether the device exposes only one reference illuminant; when
    // true, all of the *2 (second-illuminant) matrix tags below are omitted.
    bool singleIlluminant = false;
    if (isBayer) {
        // Set calibration illuminants
        camera_metadata_entry entry1 =
            characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CALIBRATIONILLUMINANT1, writer);
        camera_metadata_entry entry2 =
            characteristics.find(ANDROID_SENSOR_REFERENCE_ILLUMINANT2);
        if (entry2.count == 0) {
            singleIlluminant = true;
        }
        // Widen the byte-valued illuminant enum to the SHORT the TIFF tag expects.
        uint16_t ref1 = entry1.data.u8[0];

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT1, 1, &ref1,
                TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT1, writer);

        if (!singleIlluminant) {
            uint16_t ref2 = entry2.data.u8[0];
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CALIBRATIONILLUMINANT2, 1, &ref2,
                    TIFF_IFD_0), env, TAG_CALIBRATIONILLUMINANT2, writer);
        }
    }

    if (isBayer) {
        // Set color transforms
        camera_metadata_entry entry1 =
            characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_COLORMATRIX1, writer);

        // Flatten the rational matrix entries into interleaved
        // {numerator, denominator} int32 pairs for the SRATIONAL tag.
        // NOTE: runtime-sized array (compiler extension), sized from metadata.
        int32_t colorTransform1[entry1.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry1.count; ++i) {
            colorTransform1[ctr++] = entry1.data.r[i].numerator;
            colorTransform1[ctr++] = entry1.data.r[i].denominator;
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX1, entry1.count,
                colorTransform1, TIFF_IFD_0), env, TAG_COLORMATRIX1, writer);

        if (!singleIlluminant) {
            camera_metadata_entry entry2 = characteristics.find(ANDROID_SENSOR_COLOR_TRANSFORM2);
            BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_COLORMATRIX2, writer);
            int32_t colorTransform2[entry2.count * 2];

            ctr = 0;
            for(size_t i = 0; i < entry2.count; ++i) {
                colorTransform2[ctr++] = entry2.data.r[i].numerator;
                colorTransform2[ctr++] = entry2.data.r[i].denominator;
            }

            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COLORMATRIX2, entry2.count,
                    colorTransform2, TIFF_IFD_0), env, TAG_COLORMATRIX2, writer);
        }
    }
1754 
    if (isBayer) {
        // Set calibration transforms
        // Same interleaved {numerator, denominator} SRATIONAL flattening as the
        // color matrices above.
        camera_metadata_entry entry1 =
            characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_CAMERACALIBRATION1, writer);

        int32_t calibrationTransform1[entry1.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry1.count; ++i) {
            calibrationTransform1[ctr++] = entry1.data.r[i].numerator;
            calibrationTransform1[ctr++] = entry1.data.r[i].denominator;
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION1, entry1.count,
                calibrationTransform1, TIFF_IFD_0), env, TAG_CAMERACALIBRATION1, writer);

        if (!singleIlluminant) {
            // Second-illuminant calibration transform.
            camera_metadata_entry entry2 =
                characteristics.find(ANDROID_SENSOR_CALIBRATION_TRANSFORM2);
            BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_CAMERACALIBRATION2, writer);
            int32_t calibrationTransform2[entry2.count * 2];

            ctr = 0;
            for(size_t i = 0; i < entry2.count; ++i) {
                calibrationTransform2[ctr++] = entry2.data.r[i].numerator;
                calibrationTransform2[ctr++] = entry2.data.r[i].denominator;
            }

            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_CAMERACALIBRATION2, entry2.count,
                    calibrationTransform2, TIFF_IFD_0),  env, TAG_CAMERACALIBRATION2, writer);
        }
    }

    if (isBayer) {
        // Set forward transforms
        // ForwardMatrix1/2 map camera space to XYZ under each illuminant.
        camera_metadata_entry entry1 =
            characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX1);
        BAIL_IF_EMPTY_RET_NULL_SP(entry1, env, TAG_FORWARDMATRIX1, writer);

        int32_t forwardTransform1[entry1.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry1.count; ++i) {
            forwardTransform1[ctr++] = entry1.data.r[i].numerator;
            forwardTransform1[ctr++] = entry1.data.r[i].denominator;
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX1, entry1.count,
                forwardTransform1, TIFF_IFD_0), env, TAG_FORWARDMATRIX1, writer);

        if (!singleIlluminant) {
            camera_metadata_entry entry2 =
                characteristics.find(ANDROID_SENSOR_FORWARD_MATRIX2);
            BAIL_IF_EMPTY_RET_NULL_SP(entry2, env, TAG_FORWARDMATRIX2, writer);
            int32_t forwardTransform2[entry2.count * 2];

            ctr = 0;
            for(size_t i = 0; i < entry2.count; ++i) {
                forwardTransform2[ctr++] = entry2.data.r[i].numerator;
                forwardTransform2[ctr++] = entry2.data.r[i].denominator;
            }

            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_FORWARDMATRIX2, entry2.count,
                    forwardTransform2, TIFF_IFD_0),  env, TAG_FORWARDMATRIX2, writer);
        }
    }
1822 
    if (isBayer) {
        // Set camera neutral
        // AsShotNeutral: the white-balance neutral color point, flattened into
        // interleaved {numerator, denominator} uint32 RATIONAL pairs.
        camera_metadata_entry entry =
            results.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT);
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ASSHOTNEUTRAL, writer);
        uint32_t cameraNeutral[entry.count * 2];

        size_t ctr = 0;
        for(size_t i = 0; i < entry.count; ++i) {
            cameraNeutral[ctr++] =
                    static_cast<uint32_t>(entry.data.r[i].numerator);
            cameraNeutral[ctr++] =
                    static_cast<uint32_t>(entry.data.r[i].denominator);
        }

        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ASSHOTNEUTRAL, entry.count, cameraNeutral,
                TIFF_IFD_0), env, TAG_ASSHOTNEUTRAL, writer);
    }


    {
        // Set dimensions
        if (calculateAndSetCrop(env, characteristics, writer, isMaximumResolutionMode) != OK) {
            return nullptr;
        }
        // The pre-correction active array rectangle is stored as
        // {left, top, width, height} in the metadata entry.
        camera_metadata_entry entry = characteristics.find(
                getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
                                      isMaximumResolutionMode));
        BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_ACTIVEAREA, writer);
        uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
        uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
        uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
        uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);

        // If we only have a buffer containing the pre-correction rectangle, ignore the offset
        // relative to the pixel array.
        if (imageWidth == width && imageHeight == height) {
            xmin = 0;
            ymin = 0;
        }

        // ActiveArea order is {top, left, bottom, right}.
        uint32_t activeArea[] = {ymin, xmin, ymin + height, xmin + width};
        BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ACTIVEAREA, 4, activeArea, TIFF_IFD_0),
                env, TAG_ACTIVEAREA, writer);
    }
1868 
1869     {
1870         // Setup unique camera model tag
1871         std::string model = GetProperty("ro.product.model", "");
1872         std::string manufacturer = GetProperty("ro.product.manufacturer", "");
1873         std::string brand = GetProperty("ro.product.brand", "");
1874 
1875         String8 cameraModel(model.c_str());
1876         cameraModel += "-";
1877         cameraModel += manufacturer.c_str();
1878         cameraModel += "-";
1879         cameraModel += brand.c_str();
1880 
1881         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_UNIQUECAMERAMODEL, cameraModel.size() + 1,
1882                 reinterpret_cast<const uint8_t*>(cameraModel.string()), TIFF_IFD_0), env,
1883                 TAG_UNIQUECAMERAMODEL, writer);
1884     }
1885 
1886     {
1887         // Setup sensor noise model
1888         camera_metadata_entry entry =
1889             results.find(ANDROID_SENSOR_NOISE_PROFILE);
1890 
1891         const status_t numPlaneColors = isBayer ? 3 : 1;
1892         const status_t numCfaChannels = isBayer ? 4 : 1;
1893 
1894         uint8_t cfaOut[numCfaChannels];
1895         if ((err = convertCFA(cfaEnum, /*out*/cfaOut)) != OK) {
1896             jniThrowException(env, "java/lang/IllegalArgumentException",
1897                     "Invalid CFA from camera characteristics");
1898             return nullptr;
1899         }
1900 
1901         double noiseProfile[numPlaneColors * 2];
1902 
1903         if (entry.count > 0) {
1904             if (entry.count != numCfaChannels * 2) {
1905                 ALOGW("%s: Invalid entry count %zu for noise profile returned "
1906                       "in characteristics, no noise profile tag written...",
1907                       __FUNCTION__, entry.count);
1908             } else {
1909                 if ((err = generateNoiseProfile(entry.data.d, cfaOut, numCfaChannels,
1910                         cfaPlaneColor, numPlaneColors, /*out*/ noiseProfile)) == OK) {
1911 
1912                     BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NOISEPROFILE,
1913                             numPlaneColors * 2, noiseProfile, TIFF_IFD_0), env, TAG_NOISEPROFILE,
1914                             writer);
1915                 } else {
1916                     ALOGW("%s: Error converting coefficients for noise profile, no noise profile"
1917                             " tag written...", __FUNCTION__);
1918                 }
1919             }
1920         } else {
1921             ALOGW("%s: No noise profile found in result metadata.  Image quality may be reduced.",
1922                     __FUNCTION__);
1923         }
1924     }
1925 
1926     {
1927         // Set up opcode List 2
1928         OpcodeListBuilder builder;
1929         status_t err = OK;
1930 
1931         // Set up lens shading map
1932         camera_metadata_entry entry1 =
1933                 characteristics.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
1934 
1935         uint32_t lsmWidth = 0;
1936         uint32_t lsmHeight = 0;
1937 
1938         if (entry1.count != 0) {
1939             lsmWidth = static_cast<uint32_t>(entry1.data.i32[0]);
1940             lsmHeight = static_cast<uint32_t>(entry1.data.i32[1]);
1941         }
1942 
1943         camera_metadata_entry entry2 = results.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
1944 
1945         camera_metadata_entry entry = characteristics.find(
1946                 getAppropriateModeTag(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
1947                                       isMaximumResolutionMode));
1948         BAIL_IF_EMPTY_RET_NULL_SP(entry, env, TAG_IMAGEWIDTH, writer);
1949         uint32_t xmin = static_cast<uint32_t>(entry.data.i32[0]);
1950         uint32_t ymin = static_cast<uint32_t>(entry.data.i32[1]);
1951         uint32_t width = static_cast<uint32_t>(entry.data.i32[2]);
1952         uint32_t height = static_cast<uint32_t>(entry.data.i32[3]);
1953         if (entry2.count > 0 && entry2.count == lsmWidth * lsmHeight * 4) {
1954             // GainMap rectangle is relative to the active area origin.
1955             err = builder.addGainMapsForMetadata(lsmWidth,
1956                                                  lsmHeight,
1957                                                  0,
1958                                                  0,
1959                                                  height,
1960                                                  width,
1961                                                  opcodeCfaLayout,
1962                                                  entry2.data.f);
1963             if (err != OK) {
1964                 ALOGE("%s: Could not add Lens shading map.", __FUNCTION__);
1965                 jniThrowRuntimeException(env, "failed to add lens shading map.");
1966                 return nullptr;
1967             }
1968         }
1969 
1970         // Hot pixel map is specific to bayer camera per DNG spec.
1971         if (isBayer) {
1972             // Set up bad pixel correction list
1973             // We first check the capture result. If the hot pixel map is not
1974             // available, as a fallback, try the static characteristics.
1975             camera_metadata_entry entry3 = results.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1976             if (entry3.count == 0) {
1977                 entry3 = characteristics.find(ANDROID_STATISTICS_HOT_PIXEL_MAP);
1978             }
1979 
1980             if ((entry3.count % 2) != 0) {
1981                 ALOGE("%s: Hot pixel map contains odd number of values, cannot map to pairs!",
1982                         __FUNCTION__);
1983                 jniThrowRuntimeException(env, "failed to add hotpixel map.");
1984                 return nullptr;
1985             }
1986 
1987             // Adjust the bad pixel coordinates to be relative to the origin of the active area
1988             // DNG tag
1989             std::vector<uint32_t> v;
1990             for (size_t i = 0; i < entry3.count; i += 2) {
1991                 int32_t x = entry3.data.i32[i];
1992                 int32_t y = entry3.data.i32[i + 1];
1993                 x -= static_cast<int32_t>(xmin);
1994                 y -= static_cast<int32_t>(ymin);
1995                 if (x < 0 || y < 0 || static_cast<uint32_t>(x) >= width ||
1996                         static_cast<uint32_t>(y) >= height) {
1997                     continue;
1998                 }
1999                 v.push_back(x);
2000                 v.push_back(y);
2001             }
2002             const uint32_t* badPixels = &v[0];
2003             uint32_t badPixelCount = v.size();
2004 
2005             if (badPixelCount > 0) {
2006                 err = builder.addBadPixelListForMetadata(badPixels, badPixelCount, opcodeCfaLayout);
2007 
2008                 if (err != OK) {
2009                     ALOGE("%s: Could not add hotpixel map.", __FUNCTION__);
2010                     jniThrowRuntimeException(env, "failed to add hotpixel map.");
2011                     return nullptr;
2012                 }
2013             }
2014         }
2015 
2016         if (builder.getCount() > 0) {
2017             size_t listSize = builder.getSize();
2018             uint8_t opcodeListBuf[listSize];
2019             err = builder.buildOpList(opcodeListBuf);
2020             if (err == OK) {
2021                 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST2, listSize,
2022                         opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST2, writer);
2023             } else {
2024                 ALOGE("%s: Could not build list of opcodes for lens shading map and bad pixel "
2025                         "correction.", __FUNCTION__);
2026                 jniThrowRuntimeException(env, "failed to construct opcode list for lens shading "
2027                         "map and bad pixel correction");
2028                 return nullptr;
2029             }
2030         }
2031     }
2032 
2033     {
2034         // Set up opcode List 3
2035         OpcodeListBuilder builder;
2036         status_t err = OK;
2037 
2038         // Set up rectilinear distortion correction
2039         std::array<float, 6> distortion = {1.f, 0.f, 0.f, 0.f, 0.f, 0.f};
2040         bool gotDistortion = false;
2041 
2042         // The capture result would have the correct intrinsic calibration
2043         // regardless of the sensor pixel mode.
2044         camera_metadata_entry entry4 =
2045                 results.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
2046 
2047         if (entry4.count == 5) {
2048             float cx = entry4.data.f[/*c_x*/2];
2049             float cy = entry4.data.f[/*c_y*/3];
2050             // Assuming f_x = f_y, or at least close enough.
2051             // Also assuming s = 0, or at least close enough.
2052             float f = entry4.data.f[/*f_x*/0];
2053 
2054             camera_metadata_entry entry3 =
2055                     results.find(ANDROID_LENS_DISTORTION);
2056             if (entry3.count == 5) {
2057                 gotDistortion = true;
2058 
2059                 // Scale the distortion coefficients to create a zoom in warpped image so that all
2060                 // pixels are drawn within input image.
2061                 for (size_t i = 0; i < entry3.count; i++) {
2062                     distortion[i+1] = entry3.data.f[i];
2063                 }
2064 
2065                 if (preWidth == imageWidth && preHeight == imageHeight) {
2066                     normalizeLensDistortion(distortion, cx, cy, f, preWidth, preHeight);
2067                 } else {
2068                     // image size == pixel array size (contains optical black pixels)
2069                     // cx/cy is defined in preCorrArray so adding the offset
2070                     // Also changes default xmin/ymin so that pixels are only
2071                     // sampled within preCorrection array
2072                     normalizeLensDistortion(
2073                             distortion, cx + preXMin, cy + preYMin, f, preWidth, preHeight,
2074                             preXMin, preYMin);
2075                 }
2076 
2077                 float m_x = std::fmaxf(preWidth - cx, cx);
2078                 float m_y = std::fmaxf(preHeight - cy, cy);
2079                 float m_sq = m_x*m_x + m_y*m_y;
2080                 float m = sqrtf(m_sq); // distance to farthest corner from optical center
2081                 float f_sq = f * f;
2082                 // Conversion factors from Camera2 K factors for new LENS_DISTORTION field
2083                 // to DNG spec.
2084                 //
2085                 //       Camera2 / OpenCV assume distortion is applied in a space where focal length
2086                 //       is factored out, while DNG assumes a normalized space where the distance
2087                 //       from optical center to the farthest corner is 1.
2088                 //       Scale from camera2 to DNG spec accordingly.
2089                 //       distortion[0] is always 1 with the new LENS_DISTORTION field.
2090                 const double convCoeff[5] = {
2091                     m_sq / f_sq,
2092                     pow(m_sq, 2) / pow(f_sq, 2),
2093                     pow(m_sq, 3) / pow(f_sq, 3),
2094                     m / f,
2095                     m / f
2096                 };
2097                 for (size_t i = 0; i < entry3.count; i++) {
2098                     distortion[i+1] *= convCoeff[i];
2099                 }
2100             } else {
2101                 entry3 = results.find(ANDROID_LENS_RADIAL_DISTORTION);
2102                 if (entry3.count == 6) {
2103                     gotDistortion = true;
2104                     // Conversion factors from Camera2 K factors to DNG spec. K factors:
2105                     //
2106                     //      Note: these are necessary because our unit system assumes a
2107                     //      normalized max radius of sqrt(2), whereas the DNG spec's
2108                     //      WarpRectilinear opcode assumes a normalized max radius of 1.
2109                     //      Thus, each K coefficient must include the domain scaling
2110                     //      factor (the DNG domain is scaled by sqrt(2) to emulate the
2111                     //      domain used by the Camera2 specification).
2112                     const double convCoeff[6] = {
2113                         sqrt(2),
2114                         2 * sqrt(2),
2115                         4 * sqrt(2),
2116                         8 * sqrt(2),
2117                         2,
2118                         2
2119                     };
2120                     for (size_t i = 0; i < entry3.count; i++) {
2121                         distortion[i] = entry3.data.f[i] * convCoeff[i];
2122                     }
2123                 }
2124             }
2125             if (gotDistortion) {
2126                 err = builder.addWarpRectilinearForMetadata(
2127                         distortion.data(), preWidth, preHeight, cx, cy);
2128                 if (err != OK) {
2129                     ALOGE("%s: Could not add distortion correction.", __FUNCTION__);
2130                     jniThrowRuntimeException(env, "failed to add distortion correction.");
2131                     return nullptr;
2132                 }
2133             }
2134         }
2135 
2136         if (builder.getCount() > 0) {
2137             size_t listSize = builder.getSize();
2138             uint8_t opcodeListBuf[listSize];
2139             err = builder.buildOpList(opcodeListBuf);
2140             if (err == OK) {
2141                 BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_OPCODELIST3, listSize,
2142                         opcodeListBuf, TIFF_IFD_0), env, TAG_OPCODELIST3, writer);
2143             } else {
2144                 ALOGE("%s: Could not build list of opcodes for distortion correction.",
2145                         __FUNCTION__);
2146                 jniThrowRuntimeException(env, "failed to construct opcode list for distortion"
2147                         " correction");
2148                 return nullptr;
2149             }
2150         }
2151     }
2152 
2153     {
2154         // Set up orientation tags.
2155         // Note: There's only one orientation field for the whole file, in IFD0
2156         // The main image and any thumbnails therefore have the same orientation.
2157         uint16_t orientation = nativeContext->getOrientation();
2158         BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_ORIENTATION, 1, &orientation, TIFF_IFD_0),
2159                 env, TAG_ORIENTATION, writer);
2160 
2161     }
2162 
2163     if (nativeContext->hasDescription()){
2164         // Set Description
2165         String8 description = nativeContext->getDescription();
2166         size_t len = description.bytes() + 1;
2167         if (writer->addEntry(TAG_IMAGEDESCRIPTION, len,
2168                 reinterpret_cast<const uint8_t*>(description.string()), TIFF_IFD_0) != OK) {
2169             jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
2170                     "Invalid metadata for tag %x", TAG_IMAGEDESCRIPTION);
2171         }
2172     }
2173 
    if (nativeContext->hasGpsData()) {
        // Set GPS tags
        // GPS entries live in their own sub-IFD; create it on first use.
        GpsData gpsData = nativeContext->getGpsData();
        if (!writer->hasIfd(TIFF_IFD_GPSINFO)) {
            if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_GPSINFO, TiffWriter::GPSINFO) != OK) {
                ALOGE("%s: Failed to add GpsInfo IFD %u to IFD %u", __FUNCTION__, TIFF_IFD_GPSINFO,
                        TIFF_IFD_0);
                jniThrowException(env, "java/lang/IllegalStateException", "Failed to add GPSINFO");
                return nullptr;
            }
        }

        {
            // GPS tag version 2.3.0.0.
            uint8_t version[] = {2, 3, 0, 0};
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSVERSIONID, 4, version,
                    TIFF_IFD_GPSINFO), env, TAG_GPSVERSIONID, writer);
        }

        {
            // Hemisphere reference for latitude (e.g. N/S), pre-formatted in GpsData.
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDEREF,
                    GpsData::GPS_REF_LENGTH, gpsData.mLatitudeRef, TIFF_IFD_GPSINFO), env,
                    TAG_GPSLATITUDEREF, writer);
        }

        {
            // Hemisphere reference for longitude (e.g. E/W).
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDEREF,
                    GpsData::GPS_REF_LENGTH, gpsData.mLongitudeRef, TIFF_IFD_GPSINFO), env,
                    TAG_GPSLONGITUDEREF, writer);
        }

        {
            // Latitude as three RATIONALs (degrees, minutes, seconds).
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLATITUDE, 3, gpsData.mLatitude,
                    TIFF_IFD_GPSINFO), env, TAG_GPSLATITUDE, writer);
        }

        {
            // Longitude as three RATIONALs (degrees, minutes, seconds).
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSLONGITUDE, 3, gpsData.mLongitude,
                    TIFF_IFD_GPSINFO), env, TAG_GPSLONGITUDE, writer);
        }

        {
            // Timestamp as three RATIONALs (hours, minutes, seconds).
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSTIMESTAMP, 3, gpsData.mTimestamp,
                    TIFF_IFD_GPSINFO), env, TAG_GPSTIMESTAMP, writer);
        }

        {
            // Date as a fixed-length ASCII string.
            BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_GPSDATESTAMP,
                    GpsData::GPS_DATE_LENGTH, gpsData.mDate, TIFF_IFD_GPSINFO), env,
                    TAG_GPSDATESTAMP, writer);
        }
    }
2225 
2226 
2227     if (nativeContext->hasThumbnail()) {
2228         if (!writer->hasIfd(TIFF_IFD_SUB1)) {
2229             if (writer->addSubIfd(TIFF_IFD_0, TIFF_IFD_SUB1) != OK) {
2230                 ALOGE("%s: Failed to add SubIFD %u to IFD %u", __FUNCTION__, TIFF_IFD_SUB1,
2231                         TIFF_IFD_0);
2232                 jniThrowException(env, "java/lang/IllegalStateException", "Failed to add SubIFD");
2233                 return nullptr;
2234             }
2235         }
2236 
2237         // Setup thumbnail tags
2238 
2239         {
2240             // Set photometric interpretation
2241             uint16_t interpretation = 2; // RGB
2242             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PHOTOMETRICINTERPRETATION, 1,
2243                     &interpretation, TIFF_IFD_SUB1), env, TAG_PHOTOMETRICINTERPRETATION, writer);
2244         }
2245 
2246         {
2247             // Set planar configuration
2248             uint16_t config = 1; // Chunky
2249             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_PLANARCONFIGURATION, 1, &config,
2250                     TIFF_IFD_SUB1), env, TAG_PLANARCONFIGURATION, writer);
2251         }
2252 
2253         {
2254             // Set samples per pixel
2255             uint16_t samples = SAMPLES_PER_RGB_PIXEL;
2256             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_SAMPLESPERPIXEL, 1, &samples,
2257                     TIFF_IFD_SUB1), env, TAG_SAMPLESPERPIXEL, writer);
2258         }
2259 
2260         {
2261             // Set bits per sample
2262             uint16_t bits[SAMPLES_PER_RGB_PIXEL];
2263             for (int i = 0; i < SAMPLES_PER_RGB_PIXEL; i++) bits[i] = BITS_PER_RGB_SAMPLE;
2264             BAIL_IF_INVALID_RET_NULL_SP(
2265                     writer->addEntry(TAG_BITSPERSAMPLE, SAMPLES_PER_RGB_PIXEL, bits, TIFF_IFD_SUB1),
2266                     env, TAG_BITSPERSAMPLE, writer);
2267         }
2268 
2269         {
2270             // Set subfiletype
2271             uint32_t subfileType = 1; // Thumbnail image
2272             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_NEWSUBFILETYPE, 1, &subfileType,
2273                     TIFF_IFD_SUB1), env, TAG_NEWSUBFILETYPE, writer);
2274         }
2275 
2276         {
2277             // Set compression
2278             uint16_t compression = 1; // None
2279             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_COMPRESSION, 1, &compression,
2280                     TIFF_IFD_SUB1), env, TAG_COMPRESSION, writer);
2281         }
2282 
2283         {
2284             // Set dimensions
2285             uint32_t uWidth = nativeContext->getThumbnailWidth();
2286             uint32_t uHeight = nativeContext->getThumbnailHeight();
2287             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGEWIDTH, 1, &uWidth, TIFF_IFD_SUB1),
2288                     env, TAG_IMAGEWIDTH, writer);
2289             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_IMAGELENGTH, 1, &uHeight,
2290                     TIFF_IFD_SUB1), env, TAG_IMAGELENGTH, writer);
2291         }
2292 
2293         {
2294             // x resolution
2295             uint32_t xres[] = { 72, 1 }; // default 72 ppi
2296             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_XRESOLUTION, 1, xres, TIFF_IFD_SUB1),
2297                     env, TAG_XRESOLUTION, writer);
2298 
2299             // y resolution
2300             uint32_t yres[] = { 72, 1 }; // default 72 ppi
2301             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_YRESOLUTION, 1, yres, TIFF_IFD_SUB1),
2302                     env, TAG_YRESOLUTION, writer);
2303 
2304             uint16_t unit = 2; // inches
2305             BAIL_IF_INVALID_RET_NULL_SP(writer->addEntry(TAG_RESOLUTIONUNIT, 1, &unit,
2306                     TIFF_IFD_SUB1), env, TAG_RESOLUTIONUNIT, writer);
2307         }
2308     }
2309 
2310     if (writer->addStrip(TIFF_IFD_0) != OK) {
2311         ALOGE("%s: Could not setup main image strip tags.", __FUNCTION__);
2312         jniThrowException(env, "java/lang/IllegalStateException",
2313                 "Failed to setup main image strip tags.");
2314         return nullptr;
2315     }
2316 
2317     if (writer->hasIfd(TIFF_IFD_SUB1)) {
2318         if (writer->addStrip(TIFF_IFD_SUB1) != OK) {
2319             ALOGE("%s: Could not thumbnail image strip tags.", __FUNCTION__);
2320             jniThrowException(env, "java/lang/IllegalStateException",
2321                     "Failed to setup thumbnail image strip tags.");
2322             return nullptr;
2323         }
2324     }
2325     return writer;
2326 }
2327 
// Releases the native resources backing this DngCreator instance.
// Setting the stored native context to nullptr drops the strong reference
// held on the Java side, allowing the NativeContext to be destroyed.
static void DngCreator_destroy(JNIEnv* env, jobject thiz) {
    ALOGV("%s:", __FUNCTION__);
    DngCreator_setNativeContext(env, thiz, nullptr);
}
2332 
DngCreator_nativeSetOrientation(JNIEnv * env,jobject thiz,jint orient)2333 static void DngCreator_nativeSetOrientation(JNIEnv* env, jobject thiz, jint orient) {
2334     ALOGV("%s:", __FUNCTION__);
2335 
2336     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2337     if (context == nullptr) {
2338         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2339         jniThrowException(env, "java/lang/AssertionError",
2340                 "setOrientation called with uninitialized DngCreator");
2341         return;
2342     }
2343 
2344     uint16_t orientation = static_cast<uint16_t>(orient);
2345     context->setOrientation(orientation);
2346 }
2347 
DngCreator_nativeSetDescription(JNIEnv * env,jobject thiz,jstring description)2348 static void DngCreator_nativeSetDescription(JNIEnv* env, jobject thiz, jstring description) {
2349     ALOGV("%s:", __FUNCTION__);
2350 
2351     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2352     if (context == nullptr) {
2353         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2354         jniThrowException(env, "java/lang/AssertionError",
2355                 "setDescription called with uninitialized DngCreator");
2356         return;
2357     }
2358 
2359     const char* desc = env->GetStringUTFChars(description, nullptr);
2360     context->setDescription(String8(desc));
2361     env->ReleaseStringUTFChars(description, desc);
2362 }
2363 
// Validates and stores the GPS metadata (latitude, longitude, timestamp,
// date, and their reference strings) for later emission into the
// TIFF_IFD_GPSINFO sub-IFD during setup.
//
// latTag/longTag/timeTag must each contain exactly GPS_VALUE_LENGTH jints
// (presumably numerator/denominator pairs for the GPS rational entries —
// TODO confirm against GpsData's declaration). Throws
// IllegalArgumentException on a length mismatch and AssertionError if the
// native context has not been initialized.
static void DngCreator_nativeSetGpsTags(JNIEnv* env, jobject thiz, jintArray latTag,
        jstring latRef, jintArray longTag, jstring longRef, jstring dateTag, jintArray timeTag) {
    ALOGV("%s:", __FUNCTION__);

    NativeContext* context = DngCreator_getNativeContext(env, thiz);
    if (context == nullptr) {
        ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
        jniThrowException(env, "java/lang/AssertionError",
                "setGpsTags called with uninitialized DngCreator");
        return;
    }

    GpsData data;

    // Reject arrays of the wrong size up front so the fixed-size region
    // copies below cannot read or write out of bounds.
    jsize latLen = env->GetArrayLength(latTag);
    jsize longLen = env->GetArrayLength(longTag);
    jsize timeLen = env->GetArrayLength(timeTag);
    if (latLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid latitude tag length");
        return;
    } else if (longLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid longitude tag length");
        return;
    } else if (timeLen != GpsData::GPS_VALUE_LENGTH) {
        jniThrowException(env, "java/lang/IllegalArgumentException",
                "invalid time tag length");
        return;
    }

    // Copy the jint arrays verbatim into the GpsData value fields.
    env->GetIntArrayRegion(latTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mLatitude));
    env->GetIntArrayRegion(longTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mLongitude));
    env->GetIntArrayRegion(timeTag, 0, static_cast<jsize>(GpsData::GPS_VALUE_LENGTH),
            reinterpret_cast<jint*>(&data.mTimestamp));

    // Copy the reference/date strings as UTF and force NUL termination,
    // since GetStringUTFRegion does not write a terminator.
    // NOTE(review): only the first character of latRef/longRef is read;
    // callers are presumably passing "N"/"S"/"E"/"W" — confirm at the
    // Java API boundary.
    env->GetStringUTFRegion(latRef, 0, 1, reinterpret_cast<char*>(&data.mLatitudeRef));
    data.mLatitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
    env->GetStringUTFRegion(longRef, 0, 1, reinterpret_cast<char*>(&data.mLongitudeRef));
    data.mLongitudeRef[GpsData::GPS_REF_LENGTH - 1] = '\0';
    env->GetStringUTFRegion(dateTag, 0, GpsData::GPS_DATE_LENGTH - 1,
            reinterpret_cast<char*>(&data.mDate));
    data.mDate[GpsData::GPS_DATE_LENGTH - 1] = '\0';

    context->setGpsData(data);
}
2413 
DngCreator_nativeSetThumbnail(JNIEnv * env,jobject thiz,jobject buffer,jint width,jint height)2414 static void DngCreator_nativeSetThumbnail(JNIEnv* env, jobject thiz, jobject buffer, jint width,
2415         jint height) {
2416     ALOGV("%s:", __FUNCTION__);
2417 
2418     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2419     if (context == nullptr) {
2420         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2421         jniThrowException(env, "java/lang/AssertionError",
2422                 "setThumbnail called with uninitialized DngCreator");
2423         return;
2424     }
2425 
2426     size_t fullSize = width * height * BYTES_PER_RGB_PIXEL;
2427     jlong capacity = env->GetDirectBufferCapacity(buffer);
2428     if (static_cast<uint64_t>(capacity) != static_cast<uint64_t>(fullSize)) {
2429         jniThrowExceptionFmt(env, "java/lang/AssertionError",
2430                 "Invalid size %d for thumbnail, expected size was %d",
2431                 capacity, fullSize);
2432         return;
2433     }
2434 
2435     uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
2436     if (pixelBytes == nullptr) {
2437         ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2438         jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2439         return;
2440     }
2441 
2442     if (!context->setThumbnail(pixelBytes, width, height)) {
2443         jniThrowException(env, "java/lang/IllegalStateException",
2444                 "Failed to set thumbnail.");
2445         return;
2446     }
2447 }
2448 
2449 // TODO: Refactor out common preamble for the two nativeWrite methods.
DngCreator_nativeWriteImage(JNIEnv * env,jobject thiz,jobject outStream,jint width,jint height,jobject inBuffer,jint rowStride,jint pixStride,jlong offset,jboolean isDirect)2450 static void DngCreator_nativeWriteImage(JNIEnv* env, jobject thiz, jobject outStream, jint width,
2451         jint height, jobject inBuffer, jint rowStride, jint pixStride, jlong offset,
2452         jboolean isDirect) {
2453     ALOGV("%s:", __FUNCTION__);
2454     ALOGV("%s: nativeWriteImage called with: width=%d, height=%d, "
2455           "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2456           height, rowStride, pixStride, offset);
2457     uint32_t rStride = static_cast<uint32_t>(rowStride);
2458     uint32_t pStride = static_cast<uint32_t>(pixStride);
2459     uint32_t uWidth = static_cast<uint32_t>(width);
2460     uint32_t uHeight = static_cast<uint32_t>(height);
2461     uint64_t uOffset = static_cast<uint64_t>(offset);
2462 
2463     sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2464     if(env->ExceptionCheck()) {
2465         ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2466         return;
2467     }
2468 
2469     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2470     if (context == nullptr) {
2471         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2472         jniThrowException(env, "java/lang/AssertionError",
2473                 "Write called with uninitialized DngCreator");
2474         return;
2475     }
2476     sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2477 
2478     if (writer.get() == nullptr) {
2479         return;
2480     }
2481 
2482     // Validate DNG size
2483     if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2484         return;
2485     }
2486 
2487     sp<JniInputByteBuffer> inBuf;
2488     Vector<StripSource*> sources;
2489     sp<DirectStripSource> thumbnailSource;
2490     uint32_t targetIfd = TIFF_IFD_0;
2491     bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2492     if (hasThumbnail) {
2493         ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2494         uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2495         uint32_t thumbWidth = context->getThumbnailWidth();
2496         thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_SUB1,
2497                 thumbWidth, context->getThumbnailHeight(), bytesPerPixel,
2498                 bytesPerPixel * thumbWidth, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2499                 SAMPLES_PER_RGB_PIXEL);
2500     }
2501 
2502     if (isDirect) {
2503         size_t fullSize = rStride * uHeight;
2504         jlong capacity = env->GetDirectBufferCapacity(inBuffer);
2505         if (capacity < 0 || fullSize + uOffset > static_cast<uint64_t>(capacity)) {
2506             jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
2507                     "Invalid size %d for Image, size given in metadata is %d at current stride",
2508                     capacity, fullSize);
2509             return;
2510         }
2511 
2512         uint8_t* pixelBytes = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(inBuffer));
2513         if (pixelBytes == nullptr) {
2514             ALOGE("%s: Could not get native ByteBuffer", __FUNCTION__);
2515             jniThrowException(env, "java/lang/IllegalArgumentException", "Invalid ByteBuffer");
2516             return;
2517         }
2518 
2519         ALOGV("%s: Using direct-type strip source.", __FUNCTION__);
2520         DirectStripSource stripSource(env, pixelBytes, targetIfd, uWidth, uHeight, pStride,
2521                 rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2522         sources.add(&stripSource);
2523         if (thumbnailSource.get() != nullptr) {
2524             sources.add(thumbnailSource.get());
2525         }
2526 
2527         status_t ret = OK;
2528         if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2529             ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2530             if (!env->ExceptionCheck()) {
2531                 jniThrowExceptionFmt(env, "java/io/IOException",
2532                         "Encountered error %d while writing file.", ret);
2533             }
2534             return;
2535         }
2536     } else {
2537         inBuf = new JniInputByteBuffer(env, inBuffer);
2538 
2539         ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2540         InputStripSource stripSource(env, *inBuf, targetIfd, uWidth, uHeight, pStride,
2541                  rStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2542         sources.add(&stripSource);
2543         if (thumbnailSource.get() != nullptr) {
2544             sources.add(thumbnailSource.get());
2545         }
2546 
2547         status_t ret = OK;
2548         if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2549             ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2550             if (!env->ExceptionCheck()) {
2551                 jniThrowExceptionFmt(env, "java/io/IOException",
2552                         "Encountered error %d while writing file.", ret);
2553             }
2554             return;
2555         }
2556     }
2557 
2558 }
2559 
DngCreator_nativeWriteInputStream(JNIEnv * env,jobject thiz,jobject outStream,jobject inStream,jint width,jint height,jlong offset)2560 static void DngCreator_nativeWriteInputStream(JNIEnv* env, jobject thiz, jobject outStream,
2561         jobject inStream, jint width, jint height, jlong offset) {
2562     ALOGV("%s:", __FUNCTION__);
2563 
2564     uint32_t rowStride = width * BYTES_PER_SAMPLE;
2565     uint32_t pixStride = BYTES_PER_SAMPLE;
2566     uint32_t uWidth = static_cast<uint32_t>(width);
2567     uint32_t uHeight = static_cast<uint32_t>(height);
2568     uint64_t uOffset = static_cast<uint32_t>(offset);
2569 
2570     ALOGV("%s: nativeWriteInputStream called with: width=%d, height=%d, "
2571           "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, width,
2572           height, rowStride, pixStride, offset);
2573 
2574     sp<JniOutputStream> out = new JniOutputStream(env, outStream);
2575     if (env->ExceptionCheck()) {
2576         ALOGE("%s: Could not allocate buffers for output stream", __FUNCTION__);
2577         return;
2578     }
2579 
2580     NativeContext* context = DngCreator_getNativeContext(env, thiz);
2581     if (context == nullptr) {
2582         ALOGE("%s: Failed to initialize DngCreator", __FUNCTION__);
2583         jniThrowException(env, "java/lang/AssertionError",
2584                 "Write called with uninitialized DngCreator");
2585         return;
2586     }
2587     sp<TiffWriter> writer = DngCreator_setup(env, thiz, uWidth, uHeight);
2588 
2589     if (writer.get() == nullptr) {
2590         return;
2591     }
2592 
2593     // Validate DNG size
2594     if (!validateDngHeader(env, writer, *(context->getCharacteristics()), width, height)) {
2595         return;
2596     }
2597 
2598     sp<DirectStripSource> thumbnailSource;
2599     uint32_t targetIfd = TIFF_IFD_0;
2600     Vector<StripSource*> sources;
2601 
2602 
2603     sp<JniInputStream> in = new JniInputStream(env, inStream);
2604 
2605     ALOGV("%s: Using input-type strip source.", __FUNCTION__);
2606     InputStripSource stripSource(env, *in, targetIfd, uWidth, uHeight, pixStride,
2607              rowStride, uOffset, BYTES_PER_SAMPLE, SAMPLES_PER_RAW_PIXEL);
2608     sources.add(&stripSource);
2609 
2610     bool hasThumbnail = writer->hasIfd(TIFF_IFD_SUB1);
2611     if (hasThumbnail) {
2612         ALOGV("%s: Adding thumbnail strip sources.", __FUNCTION__);
2613         uint32_t bytesPerPixel = SAMPLES_PER_RGB_PIXEL * BYTES_PER_RGB_SAMPLE;
2614         uint32_t width = context->getThumbnailWidth();
2615         thumbnailSource = new DirectStripSource(env, context->getThumbnail(), TIFF_IFD_SUB1,
2616                 width, context->getThumbnailHeight(), bytesPerPixel,
2617                 bytesPerPixel * width, /*offset*/0, BYTES_PER_RGB_SAMPLE,
2618                 SAMPLES_PER_RGB_PIXEL);
2619         sources.add(thumbnailSource.get());
2620     }
2621 
2622     status_t ret = OK;
2623     if ((ret = writer->write(out.get(), sources.editArray(), sources.size())) != OK) {
2624         ALOGE("%s: write failed with error %d.", __FUNCTION__, ret);
2625         if (!env->ExceptionCheck()) {
2626             jniThrowExceptionFmt(env, "java/io/IOException",
2627                     "Encountered error %d while writing file.", ret);
2628         }
2629         return;
2630     }
2631 }
2632 
2633 } /*extern "C" */
2634 
// JNI method table mapping the native methods declared in
// android.hardware.camera2.DngCreator to their C++ implementations.
// Signatures here must match the Java declarations exactly.
static const JNINativeMethod gDngCreatorMethods[] = {
    {"nativeClassInit",        "()V", (void*) DngCreator_nativeClassInit},
    {"nativeInit", "(Landroid/hardware/camera2/impl/CameraMetadataNative;"
            "Landroid/hardware/camera2/impl/CameraMetadataNative;Ljava/lang/String;)V",
            (void*) DngCreator_init},
    {"nativeDestroy",           "()V",      (void*) DngCreator_destroy},
    {"nativeSetOrientation",    "(I)V",     (void*) DngCreator_nativeSetOrientation},
    {"nativeSetDescription",    "(Ljava/lang/String;)V", (void*) DngCreator_nativeSetDescription},
    {"nativeSetGpsTags",    "([ILjava/lang/String;[ILjava/lang/String;Ljava/lang/String;[I)V",
            (void*) DngCreator_nativeSetGpsTags},
    {"nativeSetThumbnail","(Ljava/nio/ByteBuffer;II)V", (void*) DngCreator_nativeSetThumbnail},
    {"nativeWriteImage",        "(Ljava/io/OutputStream;IILjava/nio/ByteBuffer;IIJZ)V",
            (void*) DngCreator_nativeWriteImage},
    {"nativeWriteInputStream",    "(Ljava/io/OutputStream;Ljava/io/InputStream;IIJ)V",
            (void*) DngCreator_nativeWriteInputStream},
};
2651 
// Registers the DngCreator native method table with the runtime.
// RegisterMethodsOrDie aborts the process if registration fails.
int register_android_hardware_camera2_DngCreator(JNIEnv *env) {
    return RegisterMethodsOrDie(env,
            "android/hardware/camera2/DngCreator", gDngCreatorMethods, NELEM(gDngCreatorMethods));
}
2656