1 /* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define LOG_TAG "QCamera3Channel"
31 //#define LOG_NDEBUG 0
32 #include <fcntl.h>
33 #include <stdlib.h>
34 #include <cstdlib>
35 #include <stdio.h>
36 #include <string.h>
37 #include <hardware/camera3.h>
38 #include <system/camera_metadata.h>
39 #include <gralloc_priv.h>
40 #include <utils/Log.h>
41 #include <utils/Errors.h>
42 #include <cutils/properties.h>
43 #include "QCamera3Channel.h"
44
45 using namespace android;
46
47 #define MIN_STREAMING_BUFFER_NUM (7 + 11)
48
49 namespace qcamera {
50 static const char ExifAsciiPrefix[] =
51 { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 }; // "ASCII\0\0\0"
52 static const char ExifUndefinedPrefix[] =
53 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; // "\0\0\0\0\0\0\0\0"
54
55 #define EXIF_ASCII_PREFIX_SIZE 8 //(sizeof(ExifAsciiPrefix))
56 #define FOCAL_LENGTH_DECIMAL_PRECISION 1000
57
58 /*===========================================================================
59 * FUNCTION : QCamera3Channel
60 *
61 * DESCRIPTION: constructor of QCamera3Channel
62 *
63 * PARAMETERS :
64 * @cam_handle : camera handle
65 * @cam_ops : ptr to camera ops table
66 *
67 * RETURN : none
68 *==========================================================================*/
69 QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
70 mm_camera_ops_t *cam_ops,
71 channel_cb_routine cb_routine,
72 cam_padding_info_t *paddingInfo,
73 void *userData)
74 {
75 m_camHandle = cam_handle;
76 m_camOps = cam_ops;
77 m_bIsActive = false;
78
79 m_handle = 0;
80 m_numStreams = 0;
81 memset(mStreams, 0, sizeof(mStreams));
82 mUserData = userData;
83
84 mStreamInfoBuf = NULL;
85 mChannelCB = cb_routine;
86 mPaddingInfo = paddingInfo;
87 }
88
89 /*===========================================================================
90 * FUNCTION : QCamera3Channel
91 *
92 * DESCRIPTION: default constructor of QCamera3Channel
93 *
94 * PARAMETERS : none
95 *
96 * RETURN : none
97 *==========================================================================*/
98 QCamera3Channel::QCamera3Channel()
99 {
100 m_camHandle = 0;
101 m_camOps = NULL;
102 m_bIsActive = false;
103
104 m_handle = 0;
105 m_numStreams = 0;
106 memset(mStreams, 0, sizeof(mStreams));
107 mUserData = NULL;
108
109 mStreamInfoBuf = NULL;
110 mChannelCB = NULL;
111 mPaddingInfo = NULL;
112 }
113
114 /*===========================================================================
115 * FUNCTION : ~QCamera3Channel
116 *
117 * DESCRIPTION: destructor of QCamera3Channel
118 *
119 * PARAMETERS : none
120 *
121 * RETURN : none
122 *==========================================================================*/
123 QCamera3Channel::~QCamera3Channel()
124 {
125 if (m_bIsActive)
126 stop();
127
128 for (int i = 0; i < m_numStreams; i++) {
129 if (mStreams[i] != NULL) {
130 delete mStreams[i];
131 mStreams[i] = 0;
132 }
133 }
134 if (m_handle) {
135 m_camOps->delete_channel(m_camHandle, m_handle);
136 ALOGE("%s: deleting channel %d", __func__, m_handle);
137 m_handle = 0;
138 }
139 m_numStreams = 0;
140 }
141
142 /*===========================================================================
143 * FUNCTION : init
144 *
145 * DESCRIPTION: initialization of channel
146 *
147 * PARAMETERS :
148 * @attr : channel bundle attribute setting
149 * @dataCB : data notify callback
150 * @userData: user data ptr
151 *
152 * RETURN : int32_t type of status
153 * NO_ERROR -- success
154 * non-zero failure code
155 *==========================================================================*/
156 int32_t QCamera3Channel::init(mm_camera_channel_attr_t *attr,
157 mm_camera_buf_notify_t dataCB)
158 {
159 m_handle = m_camOps->add_channel(m_camHandle,
160 attr,
161 dataCB,
162 this);
163 if (m_handle == 0) {
164 ALOGE("%s: Add channel failed", __func__);
165 return UNKNOWN_ERROR;
166 }
167 return NO_ERROR;
168 }
169
170 /*===========================================================================
171 * FUNCTION : addStream
172 *
173 * DESCRIPTION: add a stream into channel
174 *
175 * PARAMETERS :
176 * @streamType : type of stream to be added
177 * @streamFormat : format of the stream buffers
178 * @streamDim : dimensions of the stream
179 * @minStreamBufNum: number of stream buffers needed
182 *
183 * RETURN : int32_t type of status
184 * NO_ERROR -- success
185 * non-zero failure code
186 *==========================================================================*/
187 int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
188 cam_format_t streamFormat,
189 cam_dimension_t streamDim,
190 uint8_t minStreamBufNum)
191 {
192 int32_t rc = NO_ERROR;
193
194 if (m_numStreams >= 1) {
195 ALOGE("%s: Only one stream per channel supported in v3 Hal", __func__);
196 return BAD_VALUE;
197 }
198
199 if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
200 ALOGE("%s: stream number (%d) exceeds max limit (%d)",
201 __func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
202 return BAD_VALUE;
203 }
204 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
205 m_handle,
206 m_camOps,
207 mPaddingInfo,
208 this);
209 if (pStream == NULL) {
210 ALOGE("%s: No mem for Stream", __func__);
211 return NO_MEMORY;
212 }
213
214 rc = pStream->init(streamType, streamFormat, streamDim, NULL, minStreamBufNum,
215 streamCbRoutine, this);
216 if (rc == 0) {
217 mStreams[m_numStreams] = pStream;
218 m_numStreams++;
219 } else {
220 delete pStream;
221 }
222 return rc;
223 }
224
225 /*===========================================================================
226 * FUNCTION : start
227 *
228 * DESCRIPTION: start channel, which will start all streams belonging to this channel
229 *
230 * PARAMETERS :
231 *
232 * RETURN : int32_t type of status
233 * NO_ERROR -- success
234 * non-zero failure code
235 *==========================================================================*/
236 int32_t QCamera3Channel::start()
237 {
238 int32_t rc = NO_ERROR;
239
240 if (m_numStreams > 1) {
241 ALOGE("%s: bundle not supported", __func__);
242 } else if (m_numStreams == 0) {
243 return NO_INIT;
244 }
245
246 for (int i = 0; i < m_numStreams; i++) {
247 if (mStreams[i] != NULL) {
248 mStreams[i]->start();
249 }
250 }
251 rc = m_camOps->start_channel(m_camHandle, m_handle);
252
253 if (rc != NO_ERROR) {
254 for (int i = 0; i < m_numStreams; i++) {
255 if (mStreams[i] != NULL) {
256 mStreams[i]->stop();
257 }
258 }
259 } else {
260 m_bIsActive = true;
261 }
262
263 return rc;
264 }
265
266 /*===========================================================================
267 * FUNCTION : stop
268 *
269 * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
270 *
271 * PARAMETERS : none
272 *
273 * RETURN : int32_t type of status
274 * NO_ERROR -- success
275 * non-zero failure code
276 *==========================================================================*/
277 int32_t QCamera3Channel::stop()
278 {
279 int32_t rc = NO_ERROR;
280 if(!m_bIsActive) {
281 ALOGE("%s: Attempt to stop inactive channel",__func__);
282 return rc;
283 }
284
285 for (int i = 0; i < m_numStreams; i++) {
286 if (mStreams[i] != NULL) {
287 mStreams[i]->stop();
288 }
289 }
290
291 rc = m_camOps->stop_channel(m_camHandle, m_handle);
292
293 m_bIsActive = false;
294 return rc;
295 }
296
297 /*===========================================================================
298 * FUNCTION : bufDone
299 *
300 * DESCRIPTION: return a stream buf back to kernel
301 *
302 * PARAMETERS :
303 * @recvd_frame : stream buf frame to be returned
304 *
305 * RETURN : int32_t type of status
306 * NO_ERROR -- success
307 * non-zero failure code
308 *==========================================================================*/
309 int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
310 {
311 int32_t rc = NO_ERROR;
312 for (int i = 0; i < recvd_frame->num_bufs; i++) {
313 if (recvd_frame->bufs[i] != NULL) {
314 for (int j = 0; j < m_numStreams; j++) {
315 if (mStreams[j] != NULL &&
316 mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
317 rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
318 break; // break loop j
319 }
320 }
321 }
322 }
323
324 return rc;
325 }
326
327 /*===========================================================================
328 * FUNCTION : getStreamTypeMask
329 *
330 * DESCRIPTION: Get bit mask of all stream types in this channel
331 *
332 * PARAMETERS : None
333 *
334 * RETURN : Bit mask of all stream types in this channel
335 *==========================================================================*/
336 uint32_t QCamera3Channel::getStreamTypeMask()
337 {
338 uint32_t mask = 0;
339 for (int i = 0; i < m_numStreams; i++) {
340 mask |= (0x1 << mStreams[i]->getMyType());
341 }
342 return mask;
343 }
344
345 /*===========================================================================
346 * FUNCTION : getStreamID
347 *
348 * DESCRIPTION: Get StreamID of requested stream type
349 *
350 * PARAMETERS : streamMask
351 *
352 * RETURN : Stream ID
353 *==========================================================================*/
354 uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
355 {
356 uint32_t streamID = 0;
357 for (int i = 0; i < m_numStreams; i++) {
358 if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) {
359 streamID = mStreams[i]->getMyServerID();
360 break;
361 }
362 }
363 return streamID;
364 }
365
366 /*===========================================================================
367 * FUNCTION : getStreamByHandle
368 *
369 * DESCRIPTION: return stream object by stream handle
370 *
371 * PARAMETERS :
372 * @streamHandle : stream handle
373 *
374 * RETURN : stream object. NULL if not found
375 *==========================================================================*/
376 QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
377 {
378 for (int i = 0; i < m_numStreams; i++) {
379 if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
380 return mStreams[i];
381 }
382 }
383 return NULL;
384 }
385
386 /*===========================================================================
387 * FUNCTION : getStreamByIndex
388 *
389 * DESCRIPTION: return stream object by index
390 *
391 * PARAMETERS :
392 * @index : index of the stream within this channel
393 *
394 * RETURN : stream object. NULL if not found
395 *==========================================================================*/
396 QCamera3Stream *QCamera3Channel::getStreamByIndex(uint8_t index)
397 {
398 if (index < m_numStreams) {
399 return mStreams[index];
400 }
401 return NULL;
402 }
403
404 /*===========================================================================
405 * FUNCTION : streamCbRoutine
406 *
407 * DESCRIPTION: callback routine for stream
408 *
409 * PARAMETERS :
410 * @super_frame : received super buffer
411 * @stream : stream object; @userdata : user data ptr (the channel object)
412 * RETURN : none
413 *==========================================================================*/
414 void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
415 QCamera3Stream *stream, void *userdata)
416 {
417 QCamera3Channel *channel = (QCamera3Channel *)userdata;
418 if (channel == NULL) {
419 ALOGE("%s: invalid channel pointer", __func__);
420 return;
421 }
422 channel->streamCbRoutine(super_frame, stream);
423 }
424
425 /*===========================================================================
426 * FUNCTION : QCamera3RegularChannel
427 *
428 * DESCRIPTION: constructor of QCamera3RegularChannel
429 *
430 * PARAMETERS :
431 * @cam_handle : camera handle
432 * @cam_ops : ptr to camera ops table
433 * @cb_routine : callback routine to frame aggregator
434 * @stream : camera3_stream_t structure
435 * @stream_type: Channel stream type
436 *
437 * RETURN : none
438 *==========================================================================*/
439 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
440 mm_camera_ops_t *cam_ops,
441 channel_cb_routine cb_routine,
442 cam_padding_info_t *paddingInfo,
443 void *userData,
444 camera3_stream_t *stream,
445 cam_stream_type_t stream_type) :
446 QCamera3Channel(cam_handle, cam_ops, cb_routine,
447 paddingInfo, userData),
448 mCamera3Stream(stream),
449 mNumBufs(0),
450 mStreamType(stream_type)
451 {
452 }
453
454 /*===========================================================================
455 * FUNCTION : ~QCamera3RegularChannel
456 *
457 * DESCRIPTION: destructor of QCamera3RegularChannel
458 *
459 * PARAMETERS : none
460 *
461 * RETURN : none
462 *==========================================================================*/
463 QCamera3RegularChannel::~QCamera3RegularChannel()
464 {
465 }
466
467 /*===========================================================================
468 * FUNCTION : initialize
469 *
470 * DESCRIPTION: Initialize and add camera channel & stream
471 *
472 * PARAMETERS :
473 *
474 * RETURN : int32_t type of status
475 * NO_ERROR -- success
476 * non-zero failure code
477 *==========================================================================*/
478
479 int32_t QCamera3RegularChannel::initialize()
480 {
481 int32_t rc = NO_ERROR;
482 cam_format_t streamFormat;
483 cam_dimension_t streamDim;
484
485 if (NULL == mCamera3Stream) {
486 ALOGE("%s: Camera stream uninitialized", __func__);
487 return NO_INIT;
488 }
489
490 if (1 <= m_numStreams) {
491 // Only one stream per channel supported in v3 Hal
492 return NO_ERROR;
493 }
494
495 rc = init(NULL, NULL);
496 if (rc < 0) {
497 ALOGE("%s: init failed", __func__);
498 return rc;
499 }
500
501 mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM;
502
503 if (mCamera3Stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
504 if (mStreamType == CAM_STREAM_TYPE_VIDEO) {
505 streamFormat = CAM_FORMAT_YUV_420_NV12;
506 } else if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
507 streamFormat = CAM_FORMAT_YUV_420_NV21;
508 } else {
509 //TODO: Add a new flag in libgralloc for ZSL buffers, and its size needs
510 // to be properly aligned and padded.
511 streamFormat = CAM_FORMAT_YUV_420_NV21;
512 }
513 } else if(mCamera3Stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
514 streamFormat = CAM_FORMAT_YUV_420_NV21;
515 } else if (mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
516 mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW16) {
517 // Bayer pattern doesn't matter here.
518 // All CAMIF raw formats use 10 bit.
519 streamFormat = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
520 } else {
521
522 //TODO: Fail for other types of streams for now
523 ALOGE("%s: format is not IMPLEMENTATION_DEFINED or flexible", __func__);
524 return -EINVAL;
525 }
526
527 streamDim.width = mCamera3Stream->width;
528 streamDim.height = mCamera3Stream->height;
529
530 rc = QCamera3Channel::addStream(mStreamType,
531 streamFormat,
532 streamDim,
533 mNumBufs);
534
535 return rc;
536 }
537
538 /*===========================================================================
539 * FUNCTION : start
540 *
541 * DESCRIPTION: start a regular channel
542 *
543 * PARAMETERS :
544 *
545 * RETURN : int32_t type of status
546 * NO_ERROR -- success
547 * non-zero failure code
548 *==========================================================================*/
549 int32_t QCamera3RegularChannel::start()
550 {
551 int32_t rc = NO_ERROR;
552
553 if (0 < mMemory.getCnt()) {
554 rc = QCamera3Channel::start();
555 }
556
557 return rc;
558 }
559 /*===========================================================================
560 * FUNCTION : getInternalFormatBuffer
561 *
562 * DESCRIPTION: return buffer in the internal format structure
563 *
564 * PARAMETERS :
565 * @buffer : buffer handle
566 *
567 * RETURN : ptr to internal format buffer. NULL if not found
568 *==========================================================================*/
569 mm_camera_buf_def_t* QCamera3RegularChannel::getInternalFormatBuffer(
570 buffer_handle_t * buffer)
571 {
572 int32_t index;
573 if(buffer == NULL)
574 return NULL;
575 index = mMemory.getMatchBufIndex((void*)buffer);
576 if(index < 0) {
577 ALOGE("%s: Could not find object among registered buffers",__func__);
578 return NULL;
579 }
580 return mStreams[0]->getInternalFormatBuffer(index);
581 }
582
583 /*===========================================================================
584 * FUNCTION : request
585 *
586 * DESCRIPTION: process a request from camera service. Stream on if necessary.
587 *
588 * PARAMETERS :
589 * @buffer : buffer to be filled for this request
590 *
591 * RETURN : 0 on a success start of capture
592 * -EINVAL on invalid input
593 * -ENODEV on serious error
594 *==========================================================================*/
595 int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber)
596 {
597 //FIX ME: Return buffer back in case of failures below.
598
599 int32_t rc = NO_ERROR;
600 int index;
601
602 if (NULL == buffer) {
603 ALOGE("%s: Invalid buffer in channel request", __func__);
604 return BAD_VALUE;
605 }
606
607 if(!m_bIsActive) {
608 rc = registerBuffer(buffer);
609 if (NO_ERROR != rc) {
610 ALOGE("%s: On-the-fly buffer registration failed %d",
611 __func__, rc);
612 return rc;
613 }
614
615 rc = start();
616 if (NO_ERROR != rc) {
617 return rc;
618 }
619 } else {
620 ALOGV("%s: Request on an existing stream",__func__);
621 }
622
623 index = mMemory.getMatchBufIndex((void*)buffer);
624 if(index < 0) {
625 rc = registerBuffer(buffer);
626 if (NO_ERROR != rc) {
627 ALOGE("%s: On-the-fly buffer registration failed %d",
628 __func__, rc);
629 return rc;
630 }
631
632 index = mMemory.getMatchBufIndex((void*)buffer);
633 if (index < 0) {
634 ALOGE("%s: Could not find object among registered buffers",
635 __func__);
636 return DEAD_OBJECT;
637 }
638 }
639
640 rc = mStreams[0]->bufDone(index);
641 if(rc != NO_ERROR) {
642 ALOGE("%s: Failed to Q new buffer to stream",__func__);
643 return rc;
644 }
645
646 rc = mMemory.markFrameNumber(index, frameNumber);
647 return rc;
648 }
649
650 /*===========================================================================
651 * FUNCTION : registerBuffer
652 *
653 * DESCRIPTION: register streaming buffer to the channel object
654 *
655 * PARAMETERS :
656 * @buffer : buffer to be registered
657 *
658 * RETURN : int32_t type of status
659 * NO_ERROR -- success
660 * non-zero failure code
661 *==========================================================================*/
662 int32_t QCamera3RegularChannel::registerBuffer(buffer_handle_t *buffer)
663 {
664 int rc = 0;
665
666 if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
667 ALOGE("%s: Trying to register more buffers than initially requested",
668 __func__);
669 return BAD_VALUE;
670 }
671
672 if (0 == m_numStreams) {
673 rc = initialize();
674 if (rc != NO_ERROR) {
675 ALOGE("%s: Couldn't initialize camera stream %d",
676 __func__, rc);
677 return rc;
678 }
679 }
680
681 rc = mMemory.registerBuffer(buffer);
682 if (ALREADY_EXISTS == rc) {
683 return NO_ERROR;
684 } else if (NO_ERROR != rc) {
685 ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
686 return rc;
687 }
688
689 return rc;
690 }
691
692 void QCamera3RegularChannel::streamCbRoutine(
693 mm_camera_super_buf_t *super_frame,
694 QCamera3Stream *stream)
695 {
696 //FIXME Q Buf back in case of error?
697 uint8_t frameIndex;
698 buffer_handle_t *resultBuffer;
699 int32_t resultFrameNumber;
700 camera3_stream_buffer_t result;
701
702 if(!super_frame) {
703 ALOGE("%s: Invalid Super buffer",__func__);
704 return;
705 }
706
707 if(super_frame->num_bufs != 1) {
708 ALOGE("%s: Multiple streams are not supported",__func__);
709 return;
710 }
711 if(super_frame->bufs[0] == NULL ) {
712 ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
713 __func__);
714 return;
715 }
716
717 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
718 if(frameIndex >= mNumBufs) {
719 ALOGE("%s: Error, Invalid index for buffer",__func__);
720 if(stream) {
721 stream->bufDone(frameIndex);
722 }
723 return;
724 }
725
726 ////Use below data to issue framework callback
727 resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
728 resultFrameNumber = mMemory.getFrameNumber(frameIndex);
729
730 result.stream = mCamera3Stream;
731 result.buffer = resultBuffer;
732 result.status = CAMERA3_BUFFER_STATUS_OK;
733 result.acquire_fence = -1;
734 result.release_fence = -1;
735
736 mChannelCB(NULL, &result, resultFrameNumber, mUserData);
737 free(super_frame);
738 return;
739 }
740
741 QCamera3Memory* QCamera3RegularChannel::getStreamBufs(uint32_t /*len*/)
742 {
743 return &mMemory;
744 }
745
746 void QCamera3RegularChannel::putStreamBufs()
747 {
748 mMemory.unregisterBuffers();
749 }
750
751 int QCamera3RegularChannel::kMaxBuffers = 7;
752
753 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
754 mm_camera_ops_t *cam_ops,
755 channel_cb_routine cb_routine,
756 cam_padding_info_t *paddingInfo,
757 void *userData) :
758 QCamera3Channel(cam_handle, cam_ops,
759 cb_routine, paddingInfo, userData),
760 mMemory(NULL)
761 {
762 }
763
764 QCamera3MetadataChannel::~QCamera3MetadataChannel()
765 {
766 if (m_bIsActive)
767 stop();
768
769 if (mMemory) {
770 mMemory->deallocate();
771 delete mMemory;
772 mMemory = NULL;
773 }
774 }
775
776 int32_t QCamera3MetadataChannel::initialize()
777 {
778 int32_t rc;
779 cam_dimension_t streamDim;
780
781 if (mMemory || m_numStreams > 0) {
782 ALOGE("%s: metadata channel already initialized", __func__);
783 return -EINVAL;
784 }
785
786 rc = init(NULL, NULL);
787 if (rc < 0) {
788 ALOGE("%s: init failed", __func__);
789 return rc;
790 }
791
792 streamDim.width = sizeof(metadata_buffer_t);
793 streamDim.height = 1;
794 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
795 streamDim, MIN_STREAMING_BUFFER_NUM);
796 if (rc < 0) {
797 ALOGE("%s: addStream failed", __func__);
798 }
799 return rc;
800 }
801
802 int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
803 uint32_t /*frameNumber*/)
804 {
805 if (!m_bIsActive) {
806 return start();
807 }
808 else
809 return 0;
810 }
811
812 void QCamera3MetadataChannel::streamCbRoutine(
813 mm_camera_super_buf_t *super_frame,
814 QCamera3Stream * /*stream*/)
815 {
816 uint32_t requestNumber = 0;
817 if (super_frame == NULL || super_frame->num_bufs != 1) {
818 ALOGE("%s: super_frame is not valid", __func__);
819 return;
820 }
821 mChannelCB(super_frame, NULL, requestNumber, mUserData);
822 }
823
824 QCamera3Memory* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
825 {
826 int rc;
827 if (len < sizeof(metadata_buffer_t)) {
828 ALOGE("%s: size doesn't match %d vs %d", __func__,
829 len, sizeof(metadata_buffer_t));
830 return NULL;
831 }
832 mMemory = new QCamera3HeapMemory();
833 if (!mMemory) {
834 ALOGE("%s: unable to create metadata memory", __func__);
835 return NULL;
836 }
837 rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
838 if (rc < 0) {
839 ALOGE("%s: unable to allocate metadata memory", __func__);
840 delete mMemory;
841 mMemory = NULL;
842 return NULL;
843 }
844 memset(mMemory->getPtr(0), 0, sizeof(metadata_buffer_t));
845 return mMemory;
846 }
847
848 void QCamera3MetadataChannel::putStreamBufs()
849 {
850 mMemory->deallocate();
851 delete mMemory;
852 mMemory = NULL;
853 }
854 /*************************************************************************************/
855 // RAW Channel related functions
856 int QCamera3RawChannel::kMaxBuffers = 7;
857
858 QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle,
859 mm_camera_ops_t *cam_ops,
860 channel_cb_routine cb_routine,
861 cam_padding_info_t *paddingInfo,
862 void *userData,
863 camera3_stream_t *stream,
864 bool raw_16) :
865 QCamera3RegularChannel(cam_handle, cam_ops,
866 cb_routine, paddingInfo, userData, stream,
867 CAM_STREAM_TYPE_RAW),
868 mIsRaw16(raw_16)
869 {
870 char prop[PROPERTY_VALUE_MAX];
871 property_get("persist.camera.raw.dump", prop, "0");
872 mRawDump = atoi(prop);
873 }
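// Usage note (illustrative, not part of the control flow): raw dumping is
// gated by the persist.camera.raw.dump property read above. On debug builds
// it can typically be toggled at runtime with
//     adb shell setprop persist.camera.raw.dump 1
// after which the next RAW frame is written to /data/r_<frame>_<w>x<h>.raw
// by dumpRawSnapshot() below.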
874
875 QCamera3RawChannel::~QCamera3RawChannel()
876 {
877 }
878
879 void QCamera3RawChannel::streamCbRoutine(
880 mm_camera_super_buf_t *super_frame,
881 QCamera3Stream * stream)
882 {
883 /* Move this back down once verified */
884 if (mRawDump)
885 dumpRawSnapshot(super_frame->bufs[0]);
886
887 if (mIsRaw16)
888 convertToRaw16(super_frame->bufs[0]);
889
890 //Make sure the cache is coherent since extra processing was done on the buffer
891 mMemory.cleanInvalidateCache(super_frame->bufs[0]->buf_idx);
892
893 QCamera3RegularChannel::streamCbRoutine(super_frame, stream);
894 return;
895 }
896
897 void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
898 {
899 QCamera3Stream *stream = getStreamByIndex(0);
900 char buf[32];
901 memset(buf, 0, sizeof(buf));
902 cam_dimension_t dim;
903 memset(&dim, 0, sizeof(dim));
904 stream->getFrameDimension(dim);
905
906 cam_frame_len_offset_t offset;
907 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
908 stream->getFrameOffset(offset);
909 snprintf(buf, sizeof(buf), "/data/r_%d_%dx%d.raw",
910 frame->frame_idx, dim.width, dim.height);
911
912 int file_fd = open(buf, O_RDWR| O_CREAT, 0777);
913 if (file_fd >= 0) {
914 int written_len = write(file_fd, frame->buffer, offset.frame_len);
915 ALOGE("%s: written number of bytes %d", __func__, written_len);
916 close(file_fd);
917 } else {
918 ALOGE("%s: failed to open file to dump image", __func__);
919 }
920
921 }
922
923 void QCamera3RawChannel::convertToRaw16(mm_camera_buf_def_t *frame)
924 {
925 // Convert image buffer from Opaque raw format to RAW16 format
926 // 10bit Opaque raw is stored in the format of:
927 // 0000 - p5 - p4 - p3 - p2 - p1 - p0
928 // where p0 to p5 are 6 pixels (each is 10 bit) and the most significant
929 // 4 bits are 0s. Each 64bit word contains 6 pixels.
930
931 QCamera3Stream *stream = getStreamByIndex(0);
932 cam_dimension_t dim;
933 memset(&dim, 0, sizeof(dim));
934 stream->getFrameDimension(dim);
935
936 cam_frame_len_offset_t offset;
937 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
938 stream->getFrameOffset(offset);
939
940 uint32_t raw16_stride = (dim.width + 15) & ~15;
941 uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
942
943 // In-place format conversion.
944 // Raw16 format always occupies more memory than opaque raw10.
945 // Convert to Raw16 by iterating through all pixels from bottom-right
946 // to top-left of the image.
947 // Notes:
948 // 1. Cross-platform RAW16 stride is aligned to 16 pixels.
949 // 2. Opaque raw10 stride is a multiple of 6 pixels, aligned to 16 bytes.
// (A worked example of the unpacking follows this function.)
950 for (int y = dim.height-1; y >= 0; y--) {
951 uint64_t* row_start = (uint64_t *)frame->buffer +
952 y * offset.mp[0].stride / 8;
953 for (int x = dim.width-1; x >= 0; x--) {
954 uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6)));
955 raw16_buffer[y*raw16_stride+x] = raw16_pixel;
956 }
957 }
958 }
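// Worked example (illustrative only, mirrors the unpacking loop above): with
// six 10-bit pixels packed per 64-bit word as "0000-p5-p4-p3-p2-p1-p0",
// pixel x of a row lives in word x/6 at bit offset 10*(x%6). For instance:
//     uint64_t word = (6ULL<<50)|(5ULL<<40)|(4ULL<<30)|(3ULL<<20)|(2ULL<<10)|1ULL;
//     uint16_t p3   = 0x3FF & (word >> (10*3));   // yields 4, i.e. pixel p3
// which is exactly the expression used inside the nested loops above.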
959
960 /*************************************************************************************/
961
962 /*===========================================================================
963 * FUNCTION : jpegEvtHandle
964 *
965 * DESCRIPTION: Function registered to mm-jpeg-interface to handle jpeg events.
966 Construct result payload and call mChannelCb to deliver buffer
967 to framework.
968 *
969 * PARAMETERS :
970 * @status : status of jpeg job
971 * @client_hdl: jpeg client handle
972 * @jobId : jpeg job Id
973 * @p_output : ptr to jpeg output result struct
974 * @userdata : user data ptr
975 *
976 * RETURN : none
977 *==========================================================================*/
978 void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
979 uint32_t /*client_hdl*/,
980 uint32_t jobId,
981 mm_jpeg_output_t *p_output,
982 void *userdata)
983 {
984 buffer_handle_t *resultBuffer, *jpegBufferHandle;
985 int32_t resultFrameNumber;
986 int resultStatus = CAMERA3_BUFFER_STATUS_OK;
987 camera3_stream_buffer_t result;
988 camera3_jpeg_blob_t jpegHeader;
989 char* jpeg_eof = 0;
990 int maxJpegSize;
991 QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
992 if (obj) {
993
994 //Release any cached metabuffer information
995 if (obj->mMetaFrame != NULL && obj->m_pMetaChannel != NULL) {
996 ((QCamera3MetadataChannel*)(obj->m_pMetaChannel))->bufDone(obj->mMetaFrame);
997 obj->mMetaFrame = NULL;
998 obj->m_pMetaChannel = NULL;
999 } else {
1000 ALOGE("%s: Meta frame was NULL", __func__);
1001 }
1002 //Construct payload for process_capture_result. Call mChannelCb
1003
1004 qcamera_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
1005
1006 if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
1007 ALOGE("%s: Error in jobId: (%d) with status: %d", __func__, jobId, status);
1008 resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
1009 }
1010
1011 //Construct jpeg transient header of type camera3_jpeg_blob_t
1012 //Append at the end of jpeg image of buf_filled_len size
1013
1014 jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
1015 jpegHeader.jpeg_size = p_output->buf_filled_len;
1016
1017
1018 char* jpeg_buf = (char *)p_output->buf_vaddr;
1019
1020 // Gralloc buffer may have additional padding for 4K page size
1021 // Follow size guidelines based on spec since framework relies
1022 // on that to reach end of buffer and with it the header
1023
1024 //Handle is the same as resultBuffer, but kept separate for readability
1025 jpegBufferHandle =
1026 (buffer_handle_t *)obj->mMemory.getBufferHandle(obj->mCurrentBufIndex);
1027
1028 maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
1029 if (maxJpegSize > obj->mMemory.getSize(obj->mCurrentBufIndex)) {
1030 maxJpegSize = obj->mMemory.getSize(obj->mCurrentBufIndex);
1031 }
1032
1033 jpeg_eof = &jpeg_buf[maxJpegSize-sizeof(jpegHeader)];
1034 memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
1035 obj->mMemory.cleanInvalidateCache(obj->mCurrentBufIndex);
1036
1037 ////Use below data to issue framework callback
1038 resultBuffer = (buffer_handle_t *)obj->mMemory.getBufferHandle(obj->mCurrentBufIndex);
1039 resultFrameNumber = obj->mMemory.getFrameNumber(obj->mCurrentBufIndex);
1040
1041 result.stream = obj->mCamera3Stream;
1042 result.buffer = resultBuffer;
1043 result.status = resultStatus;
1044 result.acquire_fence = -1;
1045 result.release_fence = -1;
1046
1047 ALOGV("%s: Issue Callback", __func__);
1048 obj->mChannelCB(NULL, &result, resultFrameNumber, obj->mUserData);
1049
1050 // release internal data for jpeg job
1051 if (job != NULL) {
1052 obj->m_postprocessor.releaseJpegJobData(job);
1053 free(job);
1054 }
1055 return;
1057 } else {
1058 ALOGE("%s: Null userdata in jpeg callback", __func__);
1059 }
1060 }
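// Illustrative sketch (not executed here): the camera3_jpeg_blob_t trailer
// written at maxJpegSize - sizeof(jpegHeader) is how a consumer of the BLOB
// buffer recovers the real JPEG length. Assuming the consumer has the same
// buffer mapped at 'buf' and knows the size 'bufSize' the HAL used as
// maxJpegSize, it would do roughly:
//     camera3_jpeg_blob_t blob;
//     memcpy(&blob, buf + bufSize - sizeof(blob), sizeof(blob));
//     if (blob.jpeg_blob_id == CAMERA3_JPEG_BLOB_ID) {
//         size_t jpegLen = blob.jpeg_size;  // valid JPEG bytes start at buf[0]
//     }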
1061
1062 QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
1063 mm_camera_ops_t *cam_ops,
1064 channel_cb_routine cb_routine,
1065 cam_padding_info_t *paddingInfo,
1066 void *userData,
1067 camera3_stream_t *stream) :
1068 QCamera3Channel(cam_handle, cam_ops, cb_routine,
1069 paddingInfo, userData),
1070 m_postprocessor(this),
1071 mCamera3Stream(stream),
1072 mNumBufs(0),
1073 mCurrentBufIndex(-1),
1074 mYuvMemory(NULL),
1075 mMetaFrame(NULL)
1076 {
1077 mYuvWidth = stream->width;
1078 mYuvHeight = stream->height;
1079 int32_t rc = m_postprocessor.init(&mMemory, jpegEvtHandle, this);
1080 if (rc != 0) {
1081 ALOGE("Init Postprocessor failed");
1082 }
1083 }
1084
1085 /*===========================================================================
1086 * FUNCTION : stop
1087 *
1088 * DESCRIPTION: stop pic channel, which will stop all streams within, including
1089 * the reprocessing channel in postprocessor and YUV stream.
1090 *
1091 * PARAMETERS : none
1092 *
1093 * RETURN : int32_t type of status
1094 * NO_ERROR -- success
1095 * non-zero failure code
1096 *==========================================================================*/
1097 int32_t QCamera3PicChannel::stop()
1098 {
1099 int32_t rc = NO_ERROR;
1100 if(!m_bIsActive) {
1101 ALOGE("%s: Attempt to stop inactive channel",__func__);
1102 return rc;
1103 }
1104
1105 m_postprocessor.stop();
1106 rc = m_postprocessor.deinit();
1107 if (rc != 0) {
1108 ALOGE("De-init Postprocessor failed");
1109 }
1110
1111 rc |= QCamera3Channel::stop();
1112 return rc;
1113 }
1114
1115 QCamera3PicChannel::~QCamera3PicChannel()
1116 {
1117 stop();
1118 }
1119
1120 int32_t QCamera3PicChannel::initialize()
1121 {
1122 int32_t rc = NO_ERROR;
1123 cam_dimension_t streamDim;
1124 cam_stream_type_t streamType;
1125 cam_format_t streamFormat;
1126 mm_camera_channel_attr_t attr;
1127
1128 if (NULL == mCamera3Stream) {
1129 ALOGE("%s: Camera stream uninitialized", __func__);
1130 return NO_INIT;
1131 }
1132
1133 if (1 <= m_numStreams) {
1134 // Only one stream per channel supported in v3 Hal
1135 return NO_ERROR;
1136 }
1137
1138 memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
1139 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
1140 attr.look_back = 1;
1141 attr.post_frame_skip = 1;
1142 attr.water_mark = 1;
1143 attr.max_unmatched_frames = 1;
1144
1145 rc = init(&attr, NULL);
1146 if (rc < 0) {
1147 ALOGE("%s: init failed", __func__);
1148 return rc;
1149 }
1150
1151 streamType = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
1152 streamFormat = CAM_FORMAT_YUV_420_NV21;
1153 streamDim.width = mYuvWidth;
1154 streamDim.height = mYuvHeight;
1155
1156 int num_buffers = 1;
1157 mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM;
1158 rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
1159 num_buffers);
1160
1161 return rc;
1162 }
1163
1164 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
1165 uint32_t frameNumber,
1166 mm_camera_buf_def_t *pInputBuffer,
1167 metadata_buffer_t *metadata)
1168 {
1169 //FIX ME: Return buffer back in case of failures below.
1170
1171 int32_t rc = NO_ERROR;
1172 int index;
1173 // Picture stream has already been started before any request comes in
1174 if (!m_bIsActive) {
1175 ALOGE("%s: Channel not started!!", __func__);
1176 return NO_INIT;
1177 }
1178
1179 index = mMemory.getMatchBufIndex((void*)buffer);
1180 if(index < 0) {
1181 rc = registerBuffer(buffer);
1182 if (NO_ERROR != rc) {
1183 ALOGE("%s: On-the-fly buffer registration failed %d",
1184 __func__, rc);
1185 return rc;
1186 }
1187
1188 index = mMemory.getMatchBufIndex((void*)buffer);
1189 if (index < 0) {
1190 ALOGE("%s: Could not find object among registered buffers",__func__);
1191 return DEAD_OBJECT;
1192 }
1193 }
1194 rc = mMemory.markFrameNumber(index, frameNumber);
1195
1196 //Start the postprocessor for jpeg encoding. Pass mMemory as destination buffer
1197 mCurrentBufIndex = index;
1198
1199 // Start postprocessor
1200 m_postprocessor.start(this, metadata);
1201
1202 // Queue jpeg settings
1203 rc = queueJpegSetting(index, metadata);
1204
1205 if (pInputBuffer == NULL)
1206 mStreams[0]->bufDone(0);
1207 else {
1208 mm_camera_super_buf_t *src_frame = NULL;
1209 src_frame = (mm_camera_super_buf_t *)malloc(
1210 sizeof(mm_camera_super_buf_t));
1211 if (src_frame == NULL) {
1212 ALOGE("%s: No memory for src frame", __func__);
1213 return NO_MEMORY;
1214 }
1215 memset(src_frame, 0, sizeof(mm_camera_super_buf_t));
1216 src_frame->num_bufs = 1;
1217 src_frame->bufs[0] = pInputBuffer;
1218
1219 ALOGD("%s: Post-process started", __func__);
1220 ALOGD("%s: Issue call to reprocess", __func__);
1221
1222 m_postprocessor.processPPMetadata(metadata);
1223 m_postprocessor.processData(src_frame);
1224 }
1225 return rc;
1226 }
1227
1228 /*===========================================================================
1229 * FUNCTION : dataNotifyCB
1230 *
1231 * DESCRIPTION: Channel Level callback used for super buffer data notify.
1232 * This function is registered with mm-camera-interface to handle
1233 * data notify
1234 *
1235 * PARAMETERS :
1236 * @recvd_frame : stream frame received
1237 * userdata : user data ptr
1238 *
1239 * RETURN : none
1240 *==========================================================================*/
1241 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
1242 void *userdata)
1243 {
1244 ALOGV("%s: E\n", __func__);
1245 QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
1246
1247 if (channel == NULL) {
1248 ALOGE("%s: invalid channel pointer", __func__);
1249 return;
1250 }
1251
1252 if(channel->m_numStreams != 1) {
1253 ALOGE("%s: Error: Bug: This callback assumes one stream per channel",__func__);
1254 return;
1255 }
1256
1257
1258 if(channel->mStreams[0] == NULL) {
1259 ALOGE("%s: Error: Invalid Stream object",__func__);
1260 return;
1261 }
1262
1263 channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
1264
1265 ALOGV("%s: X\n", __func__);
1266 return;
1267 }
1268
1269 /*===========================================================================
1270 * FUNCTION : registerBuffer
1271 *
1272 * DESCRIPTION: register streaming buffer to the channel object
1273 *
1274 * PARAMETERS :
1275 * @buffer : buffer to be registered
1276 *
1277 * RETURN : int32_t type of status
1278 * NO_ERROR -- success
1279 * non-zero failure code
1280 *==========================================================================*/
1281 int32_t QCamera3PicChannel::registerBuffer(buffer_handle_t *buffer)
1282 {
1283 int rc = 0;
1284
1285 if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
1286 ALOGE("%s: Trying to register more buffers than initially requested",
1287 __func__);
1288 return BAD_VALUE;
1289 }
1290
1291 if (0 == m_numStreams) {
1292 rc = initialize();
1293 if (rc != NO_ERROR) {
1294 ALOGE("%s: Couldn't initialize camera stream %d",
1295 __func__, rc);
1296 return rc;
1297 }
1298 }
1299 rc = mMemory.registerBuffer(buffer);
1300 if (ALREADY_EXISTS == rc) {
1301 return NO_ERROR;
1302 } else if (NO_ERROR != rc) {
1303 ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
1304 return rc;
1305 }
1306
1307 return rc;
1308 }
1309
1310 void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
1311 QCamera3Stream *stream)
1312 {
1313 //TODO
1314 //Used only for getting YUV. Jpeg callback will be sent back from channel
1315 //directly to HWI. Refer to func jpegEvtHandle
1316
1317 //Got the yuv callback. Calling yuv callback handler in PostProc
1318 uint8_t frameIndex;
1319 mm_camera_super_buf_t* frame = NULL;
1320 if(!super_frame) {
1321 ALOGE("%s: Invalid Super buffer",__func__);
1322 return;
1323 }
1324
1325 if(super_frame->num_bufs != 1) {
1326 ALOGE("%s: Multiple streams are not supported",__func__);
1327 return;
1328 }
1329 if(super_frame->bufs[0] == NULL ) {
1330 ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
1331 __func__);
1332 return;
1333 }
1334
1335 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
1336 if(frameIndex >= mNumBufs) {
1337 ALOGE("%s: Error, Invalid index for buffer",__func__);
1338 if(stream) {
1339 stream->bufDone(frameIndex);
1340 }
1341 return;
1342 }
1343
1344 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1345 if (frame == NULL) {
1346 ALOGE("%s: Error allocating memory to save received_frame structure.",
1347 __func__);
1348 if(stream) {
1349 stream->bufDone(frameIndex);
1350 }
1351 return;
1352 }
1353 *frame = *super_frame;
1354 m_postprocessor.processData(frame);
1355 free(super_frame);
1356 return;
1357 }
1358
1359 QCamera3Memory* QCamera3PicChannel::getStreamBufs(uint32_t len)
1360 {
1361 int rc = 0;
1362
1363 mYuvMemory = new QCamera3HeapMemory();
1364 if (!mYuvMemory) {
1365 ALOGE("%s: unable to create YUV memory", __func__);
1366 return NULL;
1367 }
1368
1369 //Queue YUV buffers in the beginning mQueueAll = true
1370 rc = mYuvMemory->allocate(1, len, false);
1371 if (rc < 0) {
1372 ALOGE("%s: unable to allocate YUV memory", __func__);
1373 delete mYuvMemory;
1374 mYuvMemory = NULL;
1375 return NULL;
1376 }
1377 return mYuvMemory;
1378 }
1379
1380 void QCamera3PicChannel::putStreamBufs()
1381 {
1382 mMemory.unregisterBuffers();
1383
1384 mYuvMemory->deallocate();
1385 delete mYuvMemory;
1386 mYuvMemory = NULL;
1387 }
1388
1389 int32_t QCamera3PicChannel::queueReprocMetadata(metadata_buffer_t *metadata)
1390 {
1391 return m_postprocessor.processPPMetadata(metadata);
1392 }
1393
1394 int32_t QCamera3PicChannel::queueJpegSetting(int32_t index, metadata_buffer_t *metadata)
1395 {
1396 jpeg_settings_t *settings =
1397 (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t));
1398
1399 if (!settings) {
1400 ALOGE("%s: out of memory allocating jpeg_settings", __func__);
1401 return -ENOMEM;
1402 }
1403
1404 memset(settings, 0, sizeof(jpeg_settings_t));
1405
1406 settings->out_buf_index = index;
1407
1408 settings->jpeg_orientation = 0;
1409 if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
1410 int32_t *orientation = (int32_t *)POINTER_OF(
1411 CAM_INTF_META_JPEG_ORIENTATION, metadata);
1412 settings->jpeg_orientation = *orientation;
1413 }
1414
1415 settings->jpeg_quality = 85;
1416 if (IS_PARM_VALID(CAM_INTF_META_JPEG_QUALITY, metadata)) {
1417 uint8_t *quality = (uint8_t *)POINTER_OF(
1418 CAM_INTF_META_JPEG_QUALITY, metadata);
1419 settings->jpeg_quality = *quality;
1420 }
1421
1422 if (IS_PARM_VALID(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) {
1423 uint8_t *quality = (uint8_t *)POINTER_OF(
1424 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
1425 settings->jpeg_thumb_quality = *quality;
1426 }
1427
1428 if (IS_PARM_VALID(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) {
1429 cam_dimension_t *dimension = (cam_dimension_t *)POINTER_OF(
1430 CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
1431 settings->thumbnail_size = *dimension;
1432 }
1433
1434 settings->gps_timestamp_valid = 0;
1435 if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) {
1436 int64_t *timestamp = (int64_t *)POINTER_OF(
1437 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
1438 settings->gps_timestamp = *timestamp;
1439 settings->gps_timestamp_valid = 1;
1440 }
1441
1442 settings->gps_coordinates_valid = 0;
1443 if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) {
1444 double *coordinates = (double *)POINTER_OF(
1445 CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
1446 memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double));
1447 settings->gps_coordinates_valid = 1;
1448 }
1449
1450 if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) {
1451 char *proc_methods = (char *)POINTER_OF(
1452 CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
1453 memset(settings->gps_processing_method, 0,
1454 sizeof(settings->gps_processing_method));
1455 strncpy(settings->gps_processing_method, proc_methods,
1456 sizeof(settings->gps_processing_method) - 1);
1457 }
1458
1459 return m_postprocessor.processJpegSettingData(settings);
1460 }
1461
1462 /*===========================================================================
1463 * FUNCTION : getRational
1464 *
1465 * DESCRIPTION: compose rational struct
1466 *
1467 * PARAMETERS :
1468 * @rat : ptr to struct to store rational info
1469 * @num : numerator of the rational
1470 * @denom : denominator of the rational
1471 *
1472 * RETURN : int32_t type of status
1473 * NO_ERROR -- success
1474 * non-zero failure code
1475 *==========================================================================*/
1476 int32_t getRational(rat_t *rat, int num, int denom)
1477 {
1478 if (NULL == rat) {
1479 ALOGE("%s: NULL rat input", __func__);
1480 return BAD_VALUE;
1481 }
1482 rat->num = num;
1483 rat->denom = denom;
1484 return NO_ERROR;
1485 }
1486
1487 /*===========================================================================
1488 * FUNCTION : parseGPSCoordinate
1489 *
1490 * DESCRIPTION: parse GPS coordinate string
1491 *
1492 * PARAMETERS :
1493 * @coord_str : [input] coordinate string
1494 * @coord : [output] ptr to struct to store coordinate
1495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
1498 * non-zero failure code
1499 *==========================================================================*/
1500 int parseGPSCoordinate(const char *coord_str, rat_t* coord)
1501 {
1502 if(coord == NULL) {
1503 ALOGE("%s: error, invalid argument coord == NULL", __func__);
1504 return BAD_VALUE;
1505 }
1506 float degF = atof(coord_str);
1507 if (degF < 0) {
1508 degF = -degF;
1509 }
1510 float minF = (degF - (int) degF) * 60;
1511 float secF = (minF - (int) minF) * 60;
1512
1513 getRational(&coord[0], (int)degF, 1);
1514 getRational(&coord[1], (int)minF, 1);
1515 getRational(&coord[2], (int)(secF * 10000), 10000);
1516 return NO_ERROR;
1517 }
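// Worked example (illustrative): parseGPSCoordinate("-122.500000", coord)
// drops the sign, then splits 122.5 degrees into
//     coord[0] = 122/1       (degrees)
//     coord[1] = 30/1        (minutes: 0.5 * 60)
//     coord[2] = 0/10000     (seconds, scaled by 10000)
// The sign itself is reported separately by the callers via latRef/lonRef.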
1518
1519 /*===========================================================================
1520 * FUNCTION : getExifDateTime
1521 *
1522 * DESCRIPTION: query exif date time
1523 *
1524 * PARAMETERS :
1525 * @dateTime : string to store exif date time
1526 * @subsecTime : string to store exif subsec time
1527 * @count : length of the dateTime string
1528 * @subsecCount: length of the subsecTime string
1529 *
1530 * RETURN : int32_t type of status
1531 * NO_ERROR -- success
1532 * non-zero failure code
1533 *==========================================================================*/
1534 int32_t getExifDateTime(char *dateTime, char *subsecTime,
1535 uint32_t &count, uint32_t &subsecCount)
1536 {
1537 //get time and date from system
1538 struct timeval tv;
1539 struct tm *timeinfo;
1540
1541 gettimeofday(&tv, NULL);
1542 timeinfo = localtime(&tv.tv_sec);
1543 //Write datetime according to EXIF Spec
1544 //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
1545 snprintf(dateTime, 20, "%04d:%02d:%02d %02d:%02d:%02d",
1546 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
1547 timeinfo->tm_mday, timeinfo->tm_hour,
1548 timeinfo->tm_min, timeinfo->tm_sec);
1549 count = 20;
1550
1551 //Write subsec according to EXIF Spec
1552 snprintf(subsecTime, 7, "%06ld", tv.tv_usec);
1553 subsecCount = 7;
1554 return NO_ERROR;
1555 }
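// Example output (illustrative): for a shot taken at 2014-01-31 13:45:07
// local time, 123456 us into that second, the buffers end up holding
//     dateTime   = "2014:01:31 13:45:07"   (19 chars + '\0', count = 20)
//     subsecTime = "123456"                (6 chars + '\0', subsecCount = 7)
// matching the EXIF DateTime / SubSecTime ASCII formats.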
1556
1557 /*===========================================================================
1558 * FUNCTION : getExifFocalLength
1559 *
1560 * DESCRIPTION: get exif focal length
1561 *
1562 * PARAMETERS :
1563 * @focalLength : ptr to rational struct to store focal length
1564 *
1565 * RETURN : int32_t type of status
1566 * NO_ERROR -- success
1567 * non-zero failure code
1568 *==========================================================================*/
1569 int32_t getExifFocalLength(rat_t *focalLength, float value)
1570 {
1571 int focalLengthValue =
1572 (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION);
1573 return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
1574 }
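// Example (illustrative): a reported focal length of 3.5 mm becomes the EXIF
// rational 3500/1000, since FOCAL_LENGTH_DECIMAL_PRECISION is 1000.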
1575
1576 /*===========================================================================
1577 * FUNCTION : getExifExpTimeInfo
1578 *
1579 * DESCRIPTION: get exif exposure time information
1580 *
1581 * PARAMETERS :
1582 * @expoTimeInfo : ptr to rational struct to store exposure time
1583 * RETURN : int32_t type of status
1584 * NO_ERROR -- success
1585 * non-zero failure code
1586 *==========================================================================*/
1587 int32_t getExifExpTimeInfo(rat_t *expoTimeInfo, int64_t value)
1588 {
1589
1590 int cal_exposureTime;
1591 if (value != 0)
1592 cal_exposureTime = value;
1593 else
1594 cal_exposureTime = 60;
1595
1596 return getRational(expoTimeInfo, 1, cal_exposureTime);
1597 }
1598
1599 /*===========================================================================
1600 * FUNCTION : getExifGpsProcessingMethod
1601 *
1602 * DESCRIPTION: get GPS processing method
1603 *
1604 * PARAMETERS :
1605 * @gpsProcessingMethod : string to store GPS process method
1606 * @count : length of the string
1607 *
1608 * RETURN : int32_t type of status
1609 * NO_ERROR -- success
1610 * non-zero failure code
1611 *==========================================================================*/
1612 int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod,
1613 uint32_t &count, char* value)
1614 {
1615 if(value != NULL) {
1616 memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
1617 count = EXIF_ASCII_PREFIX_SIZE;
1618 strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, value, strlen(value));
1619 count += strlen(value);
1620 gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char
1621 return NO_ERROR;
1622 } else {
1623 return BAD_VALUE;
1624 }
1625 }
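// Layout example (illustrative): for value = "GPS" the assembled entry is the
// 8-byte "ASCII\0\0\0" prefix (ExifAsciiPrefix) followed by "GPS" and a
// terminating NUL, so count = 8 + 3 + 1 = 12 bytes.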
1626
1627 /*===========================================================================
1628 * FUNCTION : getExifLatitude
1629 *
1630 * DESCRIPTION: get exif latitude
1631 *
1632 * PARAMETERS :
1633 * @latitude : ptr to rational struct to store latitude info
1634 * @latRef : character to indicate latitude reference
1635 *
1636 * RETURN : int32_t type of status
1637 * NO_ERROR -- success
1638 * non-zero failure code
1639 *==========================================================================*/
1640 int32_t getExifLatitude(rat_t *latitude,
1641 char *latRef, double value)
1642 {
1643 char str[30];
1644 snprintf(str, sizeof(str), "%f", value);
1645 if(str != NULL) {
1646 parseGPSCoordinate(str, latitude);
1647
1648 //set Latitude Ref
1649 float latitudeValue = strtof(str, 0);
1650 if(latitudeValue < 0.0f) {
1651 latRef[0] = 'S';
1652 } else {
1653 latRef[0] = 'N';
1654 }
1655 latRef[1] = '\0';
1656 return NO_ERROR;
1657 }else{
1658 return BAD_VALUE;
1659 }
1660 }
1661
1662 /*===========================================================================
1663 * FUNCTION : getExifLongitude
1664 *
1665 * DESCRIPTION: get exif longitude
1666 *
1667 * PARAMETERS :
1668 * @longitude : ptr to rational struct to store longitude info
1669 * @lonRef : character to indicate longitude reference
1670 *
1671 * RETURN : int32_t type of status
1672 * NO_ERROR -- success
1673 * non-zero failure code
1674 *==========================================================================*/
1675 int32_t getExifLongitude(rat_t *longitude,
1676 char *lonRef, double value)
1677 {
1678 char str[30];
1679 snprintf(str, sizeof(str), "%f", value);
1680 if(str != NULL) {
1681 parseGPSCoordinate(str, longitude);
1682
1683 //set Longitude Ref
1684 float longitudeValue = strtof(str, 0);
1685 if(longitudeValue < 0.0f) {
1686 lonRef[0] = 'W';
1687 } else {
1688 lonRef[0] = 'E';
1689 }
1690 lonRef[1] = '\0';
1691 return NO_ERROR;
1692 }else{
1693 return BAD_VALUE;
1694 }
1695 }
1696
1697 /*===========================================================================
1698 * FUNCTION : getExifAltitude
1699 *
1700 * DESCRIPTION: get exif altitude
1701 *
1702 * PARAMETERS :
1703 * @altitude : ptr to rational struct to store altitude info
1704 * @altRef : character to indicate altitude reference
1705 *
1706 * RETURN : int32_t type of status
1707 * NO_ERROR -- success
1708 * non-zero failure code
1709 *==========================================================================*/
1710 int32_t getExifAltitude(rat_t *altitude,
1711 char *altRef, double value)
1712 {
1713 char str[30];
1714 snprintf(str, sizeof(str), "%f", value);
1715 if(str != NULL) {
1716 double value = atof(str);
1717 *altRef = 0;
1718 if(value < 0){
1719 *altRef = 1;
1720 value = -value;
1721 }
1722 return getRational(altitude, value*1000, 1000);
1723 }else{
1724 return BAD_VALUE;
1725 }
1726 }
1727
1728 /*===========================================================================
1729 * FUNCTION : getExifGpsDateTimeStamp
1730 *
1731 * DESCRIPTION: get exif GPS date time stamp
1732 *
1733 * PARAMETERS :
1734 * @gpsDateStamp : GPS date time stamp string
1735 * @bufLen : length of the string
1736 * @gpsTimeStamp : ptr to rational struct to store time stamp info
1737 *
1738 * RETURN : int32_t type of status
1739 * NO_ERROR -- success
1740 * non-zero failure code
1741 *==========================================================================*/
1742 int32_t getExifGpsDateTimeStamp(char *gpsDateStamp,
1743 uint32_t bufLen,
1744 rat_t *gpsTimeStamp, int64_t value)
1745 {
1746 char str[30];
1747 snprintf(str, sizeof(str), "%lld", value);
1748 if(str != NULL) {
1749 time_t unixTime = (time_t)atol(str);
1750 struct tm *UTCTimestamp = gmtime(&unixTime);
1751
1752 strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
1753
1754 getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
1755 getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
1756 getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
1757
1758 return NO_ERROR;
1759 } else {
1760 return BAD_VALUE;
1761 }
1762 }
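/*
 * Illustrative example (hypothetical input): value = 1392249600 (a UTC Unix
 * timestamp) produces gpsDateStamp = "2014:02:13" and hour/minute/second
 * rationals of {0, 1} each in gpsTimeStamp[].
 */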
1763
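/*===========================================================================
 * FUNCTION : getExifExposureValue
 *
 * DESCRIPTION: get exif exposure bias value (EV compensation index multiplied
 *              by the EV step, expressed as a signed rational)
 *
 * PARAMETERS :
 * @exposure_val : ptr to signed rational struct to store the bias
 * @exposure_comp : exposure compensation index
 * @step : EV compensation step
 *
 * RETURN : int32_t type of status
 * NO_ERROR -- success
 *==========================================================================*/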
1764 int32_t getExifExposureValue(srat_t* exposure_val, int32_t exposure_comp,
1765 cam_rational_type_t step)
1766 {
1767 exposure_val->num = exposure_comp * step.numerator;
1768 exposure_val->denom = step.denominator;
1769 return 0;
1770 }
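/*
 * Illustrative example (hypothetical values): exposure_comp = -2 with an EV
 * step of {numerator = 1, denominator = 6} gives
 *   exposure_val->num   = -2 * 1 = -2
 *   exposure_val->denom = 6
 * i.e. an EXIF ExposureBiasValue of -2/6 EV (about -0.33 EV).
 */
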
1771 /*===========================================================================
1772 * FUNCTION : getExifData
1773 *
1774 * DESCRIPTION: get exif data to be passed into jpeg encoding
1775 *
1776 * PARAMETERS :
1777 * @metadata : ptr to metadata buffer of the frame
1778 * @jpeg_settings : ptr to jpeg settings of the capture request
1779 *
 * RETURN : exif data from user setting and GPS
 *==========================================================================*/
1780 QCamera3Exif *QCamera3PicChannel::getExifData(metadata_buffer_t *metadata,
1781 jpeg_settings_t *jpeg_settings)
1782 {
1783 QCamera3Exif *exif = new QCamera3Exif();
1784 if (exif == NULL) {
1785 ALOGE("%s: No memory for QCamera3Exif", __func__);
1786 return NULL;
1787 }
1788
1789 int32_t rc = NO_ERROR;
1790 uint32_t count = 0;
1791
1792 // add exif entries
1793 {
1794 char dateTime[20];
1795 char subsecTime[7];
1796 uint32_t subsecCount;
1797 memset(dateTime, 0, sizeof(dateTime));
1798 memset(subsecTime, 0, sizeof(subsecTime));
1799 count = 20;
1800 subsecCount = 7;
1801 rc = getExifDateTime(dateTime, subsecTime, count, subsecCount);
1802 if(rc == NO_ERROR) {
1803 exif->addEntry(EXIFTAGID_DATE_TIME,
1804 EXIF_ASCII,
1805 count,
1806 (void *)dateTime);
1807 exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL,
1808 EXIF_ASCII,
1809 count,
1810 (void *)dateTime);
1811 exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED,
1812 EXIF_ASCII,
1813 count,
1814 (void *)dateTime);
1815 exif->addEntry(EXIFTAGID_SUBSEC_TIME,
1816 EXIF_ASCII,
1817 subsecCount,
1818 (void *)subsecTime);
1819 exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL,
1820 EXIF_ASCII,
1821 subsecCount,
1822 (void *)subsecTime);
1823 exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED,
1824 EXIF_ASCII,
1825 subsecCount,
1826 (void *)subsecTime);
1827 } else {
1828 ALOGE("%s: getExifDateTime failed", __func__);
1829 }
1830 }
1831
1832 if (IS_PARM_VALID(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)) {
1833 float focal_length = *(float *)POINTER_OF(
1834 CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1835 rat_t focalLength;
1836 rc = getExifFocalLength(&focalLength, focal_length);
1837 if (rc == NO_ERROR) {
1838 exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
1839 EXIF_RATIONAL,
1840 1,
1841 (void *)&(focalLength));
1842 } else {
1843 ALOGE("%s: getExifFocalLength failed", __func__);
1844 }
1845 }
1846
1847 if (IS_PARM_VALID(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)) {
1848 int16_t isoSpeed = *(int32_t *)POINTER_OF(
1849 CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
1850 exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
1851 EXIF_SHORT,
1852 1,
1853 (void *)&(isoSpeed));
1854 }
1855
1856 if (IS_PARM_VALID(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)) {
1857 int64_t sensor_exposure_time = *(int64_t *)POINTER_OF(
1858 CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1859 rat_t sensorExpTime;
1860 rc = getExifExpTimeInfo(&sensorExpTime, sensor_exposure_time);
1861 if (rc == NO_ERROR){
1862 exif->addEntry(EXIFTAGID_EXPOSURE_TIME,
1863 EXIF_RATIONAL,
1864 1,
1865 (void *)&(sensorExpTime));
1866 } else {
1867 ALOGE("%s: getExifExpTimeInfo failed", __func__);
1868 }
1869 }
1870
1871 if (strlen(jpeg_settings->gps_processing_method) > 0) {
1872 char gpsProcessingMethod[
1873 EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
1874 count = 0;
1875 rc = getExifGpsProcessingMethod(gpsProcessingMethod,
1876 count, jpeg_settings->gps_processing_method);
1877 if(rc == NO_ERROR) {
1878 exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
1879 EXIF_ASCII,
1880 count,
1881 (void *)gpsProcessingMethod);
1882 } else {
1883 ALOGE("%s: getExifGpsProcessingMethod failed", __func__);
1884 }
1885 }
1886
1887 if (jpeg_settings->gps_coordinates_valid) {
1888
1889 //latitude
1890 rat_t latitude[3];
1891 char latRef[2];
1892 rc = getExifLatitude(latitude, latRef,
1893 jpeg_settings->gps_coordinates[0]);
1894 if(rc == NO_ERROR) {
1895 exif->addEntry(EXIFTAGID_GPS_LATITUDE,
1896 EXIF_RATIONAL,
1897 3,
1898 (void *)latitude);
1899 exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
1900 EXIF_ASCII,
1901 2,
1902 (void *)latRef);
1903 } else {
1904 ALOGE("%s: getExifLatitude failed", __func__);
1905 }
1906
1907 //longitude
1908 rat_t longitude[3];
1909 char lonRef[2];
1910 rc = getExifLongitude(longitude, lonRef,
1911 jpeg_settings->gps_coordinates[1]);
1912 if(rc == NO_ERROR) {
1913 exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
1914 EXIF_RATIONAL,
1915 3,
1916 (void *)longitude);
1917
1918 exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
1919 EXIF_ASCII,
1920 2,
1921 (void *)lonRef);
1922 } else {
1923 ALOGE("%s: getExifLongitude failed", __func__);
1924 }
1925
1926 //altitude
1927 rat_t altitude;
1928 char altRef;
1929 rc = getExifAltitude(&altitude, &altRef,
1930 jpeg_settings->gps_coordinates[2]);
1931 if(rc == NO_ERROR) {
1932 exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
1933 EXIF_RATIONAL,
1934 1,
1935 (void *)&(altitude));
1936
1937 exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
1938 EXIF_BYTE,
1939 1,
1940 (void *)&altRef);
1941 } else {
1942 ALOGE("%s: getExifAltitude failed", __func__);
1943 }
1944 }
1945
1946 if (jpeg_settings->gps_timestamp_valid) {
1947
1948 char gpsDateStamp[20];
1949 rat_t gpsTimeStamp[3];
1950 rc = getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp,
1951 jpeg_settings->gps_timestamp);
1952 if(rc == NO_ERROR) {
1953 exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
1954 EXIF_ASCII,
1955 strlen(gpsDateStamp) + 1,
1956 (void *)gpsDateStamp);
1957
1958 exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
1959 EXIF_RATIONAL,
1960 3,
1961 (void *)gpsTimeStamp);
1962 } else {
1963 ALOGE("%s: getExifGpsDateTimeStamp failed", __func__);
1964 }
1965 }
1966
1967 if (IS_PARM_VALID(CAM_INTF_PARM_EV, metadata) &&
1968 IS_PARM_VALID(CAM_INTF_PARM_EV_STEP, metadata)) {
1969 int32_t exposure_comp = *(int32_t *)POINTER_OF(
1970 CAM_INTF_PARM_EV, metadata);
1971 cam_rational_type_t comp_step = *(cam_rational_type_t *)POINTER_OF(
1972 CAM_INTF_PARM_EV_STEP, metadata);
1973 srat_t exposure_val;
1974 rc = getExifExposureValue(&exposure_val, exposure_comp, comp_step);
1975 if(rc == NO_ERROR) {
1976 exif->addEntry(EXIFTAGID_EXPOSURE_BIAS_VALUE,
1977 EXIF_SRATIONAL,
1978 1,
1979 (void *)(&exposure_val));
1980 } else {
1981 ALOGE("%s: getExifExposureValue failed ", __func__);
1982 }
1983 }
1984
1985 char value[PROPERTY_VALUE_MAX];
1986 if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
1987 exif->addEntry(EXIFTAGID_MAKE,
1988 EXIF_ASCII,
1989 strlen(value) + 1,
1990 (void *)value);
1991 } else {
1992 ALOGE("%s: getExifMaker failed", __func__);
1993 }
1994
1995 if (property_get("ro.product.model", value, "QCAM-AA") > 0) {
1996 exif->addEntry(EXIFTAGID_MODEL,
1997 EXIF_ASCII,
1998 strlen(value) + 1,
1999 (void *)value);
2000 } else {
2001 ALOGE("%s: getExifModel failed", __func__);
2002 }
2003
2004 return exif;
2005 }
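
/*
 * Usage sketch (illustrative only; the real callers live elsewhere in the
 * HAL): the returned object is heap-allocated, so whoever consumes it is
 * responsible for deleting it once the entries have been handed to the
 * jpeg encoder, e.g.
 *
 *   QCamera3Exif *exif = getExifData(metadata, jpeg_settings);
 *   if (exif != NULL) {
 *       // pass the accumulated entries to the jpeg encode job ...
 *       delete exif;
 *   }
 */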
2006
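/*===========================================================================
 * FUNCTION : overrideYuvSize
 *
 * DESCRIPTION: override the YUV dimensions used by the picture channel
 *
 * PARAMETERS :
 * @width : new YUV width
 * @height : new YUV height
 *
 * RETURN : none
 *==========================================================================*/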
2007 void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
2008 {
2009 mYuvWidth = width;
2010 mYuvHeight = height;
2011 }
2012
2013 int QCamera3PicChannel::kMaxBuffers = 1;
2014
2015 /*===========================================================================
2016 * FUNCTION : QCamera3ReprocessChannel
2017 *
2018 * DESCRIPTION: constructor of QCamera3ReprocessChannel
2019 *
2020 * PARAMETERS :
2021 * @cam_handle : camera handle
2022 * @cam_ops : ptr to camera ops table
2023 * @cb_routine : channel callback routine
 * @paddingInfo : padding information for the channel streams
 * @userData : opaque user data passed back in callbacks
 * @ch_hdl : ptr to the QCamera3PicChannel that owns the postprocessor
2024 *
2025 * RETURN : none
2026 *==========================================================================*/
2027 QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
2028 mm_camera_ops_t *cam_ops,
2029 channel_cb_routine cb_routine,
2030 cam_padding_info_t *paddingInfo,
2031 void *userData, void *ch_hdl) :
2032 QCamera3Channel(cam_handle, cam_ops, cb_routine, paddingInfo, userData),
2033 picChHandle(ch_hdl),
2034 m_pSrcChannel(NULL),
2035 m_pMetaChannel(NULL),
2036 mMemory(NULL)
2037 {
2038 memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
2039 }
2040
2041
2042 /*===========================================================================
2043 * FUNCTION : initialize
2044 *
2045 * DESCRIPTION: initialize the reprocess channel with continuous super-buffer
2046 * notification attributes
2047 *
2048 * PARAMETERS : none
2049 *
2050 * RETURN : int32_t type of status
2051 * NO_ERROR -- success
2052 * non-zero failure code
2053 *==========================================================================*/
2054 int32_t QCamera3ReprocessChannel::initialize()
2055 {
2056 int32_t rc = NO_ERROR;
2057 mm_camera_channel_attr_t attr;
2058
2059 memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
2060 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
2061 attr.max_unmatched_frames = 1;
2062
2063 rc = init(&attr, NULL);
2064 if (rc < 0) {
2065 ALOGE("%s: init failed", __func__);
2066 }
2067 return rc;
2068 }
2069
2070
2071 /*===========================================================================
2072 * FUNCTION : streamCbRoutine
2073 *
2074 * DESCRIPTION: callback routine invoked when a reprocessed frame is ready;
2075 * forwards the frame to the postprocessor for jpeg encoding
2076 *
2077 * PARAMETERS :
2078 * @super_frame : super buffer carrying the reprocessed frame
2079 * @stream : reprocess stream that produced the frame
2080 *
2081 * RETURN : none
2082 *==========================================================================*/
2083 void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
2084 QCamera3Stream *stream)
2085 {
2086 //Got the pproc data callback. Now send to jpeg encoding
2087 uint8_t frameIndex;
2088 mm_camera_super_buf_t* frame = NULL;
2089 QCamera3PicChannel *obj = (QCamera3PicChannel *)picChHandle;
2090
2091 if(!super_frame) {
2092 ALOGE("%s: Invalid Super buffer",__func__);
2093 return;
2094 }
2095
2096 if(super_frame->num_bufs != 1) {
2097 ALOGE("%s: Multiple streams are not supported",__func__);
2098 return;
2099 }
2100 if(super_frame->bufs[0] == NULL ) {
2101 ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
2102 __func__);
2103 return;
2104 }
2105
2106 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
2107 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
2108 if (frame == NULL) {
2109 ALOGE("%s: Error allocating memory to save received_frame structure.",
2110 __func__);
2111 if(stream) {
2112 stream->bufDone(frameIndex);
2113 }
2114 return;
2115 }
2116 *frame = *super_frame;
2117 obj->m_postprocessor.processPPData(frame);
2118 free(super_frame);
2119 return;
2120 }
2121
2122 /*===========================================================================
2123 * FUNCTION : QCamera3ReprocessChannel
2124 *
2125 * DESCRIPTION: default constructor of QCamera3ReprocessChannel
2126 *
2127 * PARAMETERS : none
2128 *
2129 * RETURN : none
2130 *==========================================================================*/
2131 QCamera3ReprocessChannel::QCamera3ReprocessChannel() :
2132 m_pSrcChannel(NULL),
2133 m_pMetaChannel(NULL)
2134 {
2135 }
2136
2137 /*===========================================================================
2138 * FUNCTION : getStreamBufs
2139 *
2140 * DESCRIPTION: allocate and return the stream buffers of the reprocess channel
2141 *
2142 * PARAMETERS :
2143 * @len : size of each buffer to allocate
 *
2144 * RETURN : QCamera3Memory * on success, NULL on failure
2145 *==========================================================================*/
2146 QCamera3Memory* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
2147 {
2148 int rc = 0;
2149
2150 mMemory = new QCamera3HeapMemory();
2151 if (!mMemory) {
2152 ALOGE("%s: unable to create reproc memory", __func__);
2153 return NULL;
2154 }
2155
2156 //Queue YUV buffers in the beginning mQueueAll = true
2157 rc = mMemory->allocate(2, len, true);
2158 if (rc < 0) {
2159 ALOGE("%s: unable to allocate reproc memory", __func__);
2160 delete mMemory;
2161 mMemory = NULL;
2162 return NULL;
2163 }
2164 return mMemory;
2165 }
2166
2167 /*===========================================================================
2168 * FUNCTION : putStreamBufs
2169 *
2170 * DESCRIPTION: release the stream buffers of the reprocess channel
2171 *
2172 * PARAMETERS : none
2173 *
2174 * RETURN : none
2175 *==========================================================================*/
2176 void QCamera3ReprocessChannel::putStreamBufs()
2177 {
2178 mMemory->deallocate();
2179 delete mMemory;
2180 mMemory = NULL;
2181 }
2182
2183 /*===========================================================================
2184 * FUNCTION : ~QCamera3ReprocessChannel
2185 *
2186 * DESCRIPTION: destructor of QCamera3ReprocessChannel
2187 *
2188 * PARAMETERS : none
2189 *
2190 * RETURN : none
2191 *==========================================================================*/
2192 QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
2193 {
2194 }
2195
2196 /*===========================================================================
2197 * FUNCTION : getStreamBySrcHandle
2198 *
2199 * DESCRIPTION: find reprocess stream by its source stream handle
2200 *
2201 * PARAMETERS :
2202 * @srcHandle : source stream handle
2203 *
2204 * RETURN : ptr to reprocess stream if found. NULL if not found
2205 *==========================================================================*/
2206 QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle)
2207 {
2208 QCamera3Stream *pStream = NULL;
2209
2210 for (int i = 0; i < m_numStreams; i++) {
2211 if (mSrcStreamHandles[i] == srcHandle) {
2212 pStream = mStreams[i];
2213 break;
2214 }
2215 }
2216 return pStream;
2217 }
2218
2219 /*===========================================================================
2220 * FUNCTION : getSrcStreamBySrcHandle
2221 *
2222 * DESCRIPTION: find source stream by source stream handle
2223 *
2224 * PARAMETERS :
2225 * @srcHandle : source stream handle
2226 *
2227 * RETURN : ptr to source stream if found. NULL if not found
2228 *==========================================================================*/
2229 QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle)
2230 {
2231 QCamera3Stream *pStream = NULL;
2232
2233 for (int i = 0; i < m_numStreams; i++) {
2234 if (mSrcStreamHandles[i] == srcHandle) {
2235 pStream = m_pSrcChannel->getStreamByIndex(i);
2236 break;
2237 }
2238 }
2239 return pStream;
2240 }
2241
2242 /*===========================================================================
2243 * FUNCTION : metadataBufDone
2244 *
2245 * DESCRIPTION: buf done method for a metadata buffer
2246 *
2247 * PARAMETERS :
2248 * @recvd_frame : received metadata frame
2249 *
2250 * RETURN : int32_t type of status
2251 *==========================================================================*/
2252 int32_t QCamera3ReprocessChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
2253 {
2254 int32_t rc;
2255 rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);
2256 free(recvd_frame);
2257 recvd_frame = NULL;
2258 return rc;
2259 }
2260
2261 /*===========================================================================
2262 * FUNCTION : doReprocess
2263 *
2264 * DESCRIPTION: request to do a reprocess on the frame
2265 *
2266 * PARAMETERS :
2267 * @frame : frame to be reprocessed
 * @meta_frame : metadata super buffer that accompanies the frame
2268 *
2269 * RETURN : int32_t type of status
2270 * NO_ERROR -- success
2271 * non-zero failure code
2272 *==========================================================================*/
2273 int32_t QCamera3ReprocessChannel::doReprocess(mm_camera_super_buf_t *frame,
2274 mm_camera_super_buf_t *meta_frame)
2275 {
2276 int32_t rc = 0;
2277 if (m_numStreams < 1) {
2278 ALOGE("%s: No reprocess stream is created", __func__);
2279 return -1;
2280 }
2281 if (m_pSrcChannel == NULL) {
2282 ALOGE("%s: No source channel for reprocess", __func__);
2283 return -1;
2284 }
2285 for (int i = 0; i < frame->num_bufs; i++) {
2286 QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
2287 if (pStream != NULL) {
2288 cam_stream_parm_buffer_t param;
2289 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2290 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
2291 param.reprocess.buf_index = frame->bufs[i]->buf_idx;
2292 if (meta_frame != NULL) {
2293 param.reprocess.meta_present = 1;
2294 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
2295 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
2296 }
2297 rc = pStream->setParameter(param);
2298 if (rc != NO_ERROR) {
2299 ALOGE("%s: stream setParameter for reprocess failed", __func__);
2300 break;
2301 }
2302 }
2303 }
2304 return rc;
2305 }
2306
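/*===========================================================================
 * FUNCTION : doReprocessOffline
 *
 * DESCRIPTION: request an offline reprocess of the frame: map the input
 *              buffer to the reprocess stream, copy the private metadata and
 *              the matching crop region into the stream parameter, and issue
 *              CAM_STREAM_PARAM_TYPE_DO_REPROCESS
 *
 * PARAMETERS :
 * @frame : frame to be reprocessed
 * @metadata : metadata buffer associated with the frame
 *
 * RETURN : int32_t type of status
 * NO_ERROR -- success
 * non-zero failure code
 *==========================================================================*/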
2307 int32_t QCamera3ReprocessChannel::doReprocessOffline(mm_camera_super_buf_t *frame,
2308 metadata_buffer_t *metadata)
2309 {
2310 int32_t rc = 0;
2311 OfflineBuffer mappedBuffer;
2312 if (m_numStreams < 1) {
2313 ALOGE("%s: No reprocess stream is created", __func__);
2314 return -1;
2315 }
2316 if (m_pSrcChannel == NULL) {
2317 ALOGE("%s: No source channel for reprocess", __func__);
2318 return -1;
2319 }
2320
2321 uint32_t buf_idx = 0;
2322 for (int i = 0; i < frame->num_bufs; i++) {
2323 QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
2324 QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);
2325 if (pStream != NULL && pSrcStream != NULL) {
2326
2327 rc = mStreams[i]->mapBuf(
2328 CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
2329 buf_idx, -1,
2330 frame->bufs[i]->fd, frame->bufs[i]->frame_len);
2331
2332 if (rc == NO_ERROR) {
2333 memset(&mappedBuffer, 0, sizeof(OfflineBuffer));
2334 mappedBuffer.index = buf_idx;
2335 mappedBuffer.stream = pStream;
2336 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
2337 mOfflineBuffers.push_back(mappedBuffer);
2338
2339 cam_stream_parm_buffer_t param;
2340 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2341 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
2342 param.reprocess.buf_index = buf_idx;
2343
2344 param.reprocess.meta_present = 1;
2345 char* private_data = (char *)POINTER_OF(
2346 CAM_INTF_META_PRIVATE_DATA, metadata);
2347 memcpy(param.reprocess.private_data, private_data,
2348 MAX_METADATA_PAYLOAD_SIZE);
2349
2350 // Find crop info for reprocess stream
2351 cam_crop_data_t *crop_data = (cam_crop_data_t *)
2352 POINTER_OF(CAM_INTF_META_CROP_DATA, metadata);
2353 for (int j = 0; j < crop_data->num_of_streams; j++) {
2354 if (crop_data->crop_info[j].stream_id ==
2355 pSrcStream->getMyServerID()) {
2356 param.reprocess.crop_rect =
2357 crop_data->crop_info[j].crop;
2358 break;
2359 }
2360 }
2361 rc = pStream->setParameter(param);
2362 if (rc != NO_ERROR) {
2363 ALOGE("%s: stream setParameter for reprocess failed", __func__);
2364 break;
2365 }
2366 }
2367 }
2368 }
2369 return rc;
2370 }
2371
2372 /*===========================================================================
2373 * FUNCTION : stop
2374 *
2375 * DESCRIPTION: Unmap offline buffers and stop channel
2376 *
2377 * PARAMETERS : none
2378 *
2379 * RETURN : int32_t type of status
2380 * NO_ERROR -- success
2381 * non-zero failure code
2382 *==========================================================================*/
2383 int32_t QCamera3ReprocessChannel::stop()
2384 {
2385 if (!mOfflineBuffers.empty()) {
2386 QCamera3Stream *stream = NULL;
2387 List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
2388 int error = NO_ERROR;
2389 for( ; it != mOfflineBuffers.end(); it++) {
2390 stream = (*it).stream;
2391 if (NULL != stream) {
2392 error = stream->unmapBuf((*it).type,
2393 (*it).index,
2394 -1);
2395 if (NO_ERROR != error) {
2396 ALOGE("%s: Error during offline buffer unmap %d",
2397 __func__, error);
2398 }
2399 }
2400 }
2401 mOfflineBuffers.clear();
2402 }
2403
2404 return QCamera3Channel::stop();
2405 }
2406
2407 /*===========================================================================
2408 * FUNCTION : doReprocess
2409 *
2410 * DESCRIPTION: request to do a reprocess on the frame
2411 *
2412 * PARAMETERS :
2413 * @buf_fd : fd to the input buffer that needs reprocess
2414 * @buf_length : length of the input buffer
2415 * @ret_val : result of reprocess.
2416 * Example: could be a face ID when registering a face image.
 * @meta_frame : metadata super buffer required for the reprocess
2417 *
2418 * RETURN : int32_t type of status
2419 * NO_ERROR -- success
2420 * non-zero failure code
2421 *==========================================================================*/
2422 int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd,
2423 uint32_t buf_length,
2424 int32_t &ret_val,
2425 mm_camera_super_buf_t *meta_frame)
2426 {
2427 int32_t rc = 0;
2428 if (m_numStreams < 1) {
2429 ALOGE("%s: No reprocess stream is created", __func__);
2430 return -1;
2431 }
2432 if (meta_frame == NULL) {
2433 ALOGE("%s: Did not get corresponding metadata in time", __func__);
2434 return -1;
2435 }
2436
2437 uint32_t buf_idx = 0;
2438 for (int i = 0; i < m_numStreams; i++) {
2439 rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
2440 buf_idx, -1,
2441 buf_fd, buf_length);
2442
2443 if (rc == NO_ERROR) {
2444 cam_stream_parm_buffer_t param;
2445 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2446 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
2447 param.reprocess.buf_index = buf_idx;
2448 param.reprocess.meta_present = 1;
2449 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
2450 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
2451 rc = mStreams[i]->setParameter(param);
2452 if (rc == NO_ERROR) {
2453 ret_val = param.reprocess.ret_val;
2454 }
2455 mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
2456 buf_idx, -1);
2457 }
2458 }
2459 return rc;
2460 }
2461
2462 /*===========================================================================
2463 * FUNCTION : addReprocStreamsFromSource
2464 *
2465 * DESCRIPTION: add reprocess streams from input source channel
2466 *
2467 * PARAMETERS :
2468 * @pp_config : post-process feature configuration
2469 * @pSrcChannel : ptr to input source channel that needs reprocess
2470 * @pMetaChannel : ptr to metadata channel to get corresp. metadata
2472 *
2473 * RETURN : int32_t type of status
2474 * NO_ERROR -- success
2475 * non-zero failure code
2476 *==========================================================================*/
2477 int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
2478 QCamera3Channel *pSrcChannel,
2479 QCamera3Channel *pMetaChannel)
2480 {
2481 int32_t rc = 0;
2482 QCamera3Stream *pSrcStream = pSrcChannel->getStreamByIndex(0);
2483 if (pSrcStream == NULL) {
2484 ALOGE("%s: source channel doesn't have a stream", __func__);
2485 return BAD_VALUE;
2486 }
2487 cam_stream_reproc_config_t reprocess_config;
2488 cam_dimension_t streamDim;
2489 cam_stream_type_t streamType;
2490 cam_format_t streamFormat;
2491 cam_frame_len_offset_t frameOffset;
2492 int num_buffers = 2;
2493
2494 streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
2495 pSrcStream->getFormat(streamFormat);
2496 pSrcStream->getFrameDimension(streamDim);
2497 pSrcStream->getFrameOffset(frameOffset);
2498 reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
2499
2500 reprocess_config.offline.input_fmt = streamFormat;
2501 reprocess_config.offline.input_dim = streamDim;
2502 reprocess_config.offline.input_buf_planes.plane_info = frameOffset;
2503 reprocess_config.offline.num_of_bufs = num_buffers;
2504 reprocess_config.offline.input_stream_type = pSrcStream->getMyType();
2505
2506
2507 reprocess_config.pp_feature_config = pp_config;
2508 mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
2509
2510 // pp feature config
2511 if (pp_config.feature_mask & CAM_QCOM_FEATURE_ROTATION) {
2512 if (pp_config.rotation == ROTATE_90 ||
2513 pp_config.rotation == ROTATE_270) {
2514 // rotated by 90 or 270, need to switch width and height
2515 int32_t temp = streamDim.height;
2516 streamDim.height = streamDim.width;
2517 streamDim.width = temp;
2518 }
2519 }
2520
2521 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
2522 m_handle,
2523 m_camOps,
2524 mPaddingInfo,
2525 (QCamera3Channel*)this);
2526 if (pStream == NULL) {
2527 ALOGE("%s: No mem for Stream", __func__);
2528 return NO_MEMORY;
2529 }
2530
2531 rc = pStream->init(streamType, streamFormat, streamDim, &reprocess_config,
2532 num_buffers,QCamera3Channel::streamCbRoutine, this);
2533
2534
2535 if (rc == 0) {
2536 mStreams[m_numStreams] = pStream;
2537 m_numStreams++;
2538 } else {
2539 ALOGE("%s: failed to create reprocess stream", __func__);
2540 delete pStream;
2541 }
2542
2543 if (rc == NO_ERROR) {
2544 m_pSrcChannel = pSrcChannel;
2545 m_pMetaChannel = pMetaChannel;
2546 }
2547 if(m_camOps->request_super_buf(m_camHandle,m_handle,1) < 0) {
2548 ALOGE("%s: Request for super buffer failed",__func__);
2549 }
2550 return rc;
2551 }
2552
2553 cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};
2554
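/*===========================================================================
 * QCamera3SupportChannel
 *
 * An internal channel with no framework-facing callback: it streams VGA
 * CALLBACK buffers from heap memory and immediately returns every frame in
 * its stream callback. (Its apparent purpose is to keep the backend supplied
 * with a small YUV stream when the configured app streams alone cannot; this
 * is an inference from the code in this file, not a statement from the
 * original authors.)
 *==========================================================================*/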
2555 QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
2556 mm_camera_ops_t *cam_ops,
2557 cam_padding_info_t *paddingInfo,
2558 void *userData) :
2559 QCamera3Channel(cam_handle, cam_ops,
2560 NULL, paddingInfo, userData),
2561 mMemory(NULL)
2562 {
2563 }
2564
2565 QCamera3SupportChannel::~QCamera3SupportChannel()
2566 {
2567 if (m_bIsActive)
2568 stop();
2569
2570 if (mMemory) {
2571 mMemory->deallocate();
2572 delete mMemory;
2573 mMemory = NULL;
2574 }
2575 }
2576
2577 int32_t QCamera3SupportChannel::initialize()
2578 {
2579 int32_t rc;
2580
2581 if (mMemory || m_numStreams > 0) {
2582 ALOGE("%s: Support channel already initialized", __func__);
2583 return -EINVAL;
2584 }
2585
2586 rc = init(NULL, NULL);
2587 if (rc < 0) {
2588 ALOGE("%s: init failed", __func__);
2589 return rc;
2590 }
2591
2592 // Hardcode to VGA size for now
2593 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_CALLBACK,
2594 CAM_FORMAT_YUV_420_NV21, kDim, MIN_STREAMING_BUFFER_NUM);
2595 if (rc < 0) {
2596 ALOGE("%s: addStream failed", __func__);
2597 }
2598 return rc;
2599 }
2600
2601 int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
2602 uint32_t /*frameNumber*/)
2603 {
2604 return NO_ERROR;
2605 }
2606
2607 void QCamera3SupportChannel::streamCbRoutine(
2608 mm_camera_super_buf_t *super_frame,
2609 QCamera3Stream * /*stream*/)
2610 {
2611 if (super_frame == NULL || super_frame->num_bufs != 1) {
2612 ALOGE("%s: super_frame is not valid", __func__);
2613 return;
2614 }
2615 bufDone(super_frame);
2616 free(super_frame);
2617 }
2618
2619 QCamera3Memory* QCamera3SupportChannel::getStreamBufs(uint32_t len)
2620 {
2621 int rc;
2622
2623 mMemory = new QCamera3HeapMemory();
2624 if (!mMemory) {
2625 ALOGE("%s: unable to create heap memory", __func__);
2626 return NULL;
2627 }
2628 rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
2629 if (rc < 0) {
2630 ALOGE("%s: unable to allocate heap memory", __func__);
2631 delete mMemory;
2632 mMemory = NULL;
2633 return NULL;
2634 }
2635 return mMemory;
2636 }
2637
2638 void QCamera3SupportChannel::putStreamBufs()
2639 {
2640 mMemory->deallocate();
2641 delete mMemory;
2642 mMemory = NULL;
2643 }
2644
2645 }; // namespace qcamera
2646