1 /* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define LOG_TAG "QCamera3Channel"
31 //#define LOG_NDEBUG 0
32 #include <fcntl.h>
33 #include <stdlib.h>
34 #include <cstdlib>
35 #include <stdio.h>
36 #include <string.h>
37 #include <hardware/camera3.h>
38 #include <system/camera_metadata.h>
39 #include <gralloc_priv.h>
40 #include <utils/Log.h>
41 #include <utils/Errors.h>
42 #include <cutils/properties.h>
43 #include "QCamera3Channel.h"
44
45 using namespace android;
46
47 #define MIN_STREAMING_BUFFER_NUM (7 + 11)
48
49 namespace qcamera {
50 static const char ExifAsciiPrefix[] =
51 { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 }; // "ASCII\0\0\0"
52
53 #define EXIF_ASCII_PREFIX_SIZE 8 //(sizeof(ExifAsciiPrefix))
54 #define FOCAL_LENGTH_DECIMAL_PRECISION 1000
55
56 /*===========================================================================
57 * FUNCTION : QCamera3Channel
58 *
59 * DESCRIPTION: constructor of QCamera3Channel
60 *
61 * PARAMETERS :
62 * @cam_handle : camera handle
63 * @cam_ops : ptr to camera ops table
64 *
65 * RETURN : none
66 *==========================================================================*/
67 QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
68 mm_camera_ops_t *cam_ops,
69 channel_cb_routine cb_routine,
70 cam_padding_info_t *paddingInfo,
71 void *userData)
72 {
73 m_camHandle = cam_handle;
74 m_camOps = cam_ops;
75 m_bIsActive = false;
76
77 m_handle = 0;
78 m_numStreams = 0;
79 memset(mStreams, 0, sizeof(mStreams));
80 mUserData = userData;
81
82 mStreamInfoBuf = NULL;
83 mChannelCB = cb_routine;
84 mPaddingInfo = paddingInfo;
85 }
86
87 /*===========================================================================
88 * FUNCTION : QCamera3Channel
89 *
90 * DESCRIPTION: default constructor of QCamera3Channel
91 *
92 * PARAMETERS : none
93 *
94 * RETURN : none
95 *==========================================================================*/
96 QCamera3Channel::QCamera3Channel()
97 {
98 m_camHandle = 0;
99 m_camOps = NULL;
100 m_bIsActive = false;
101
102 m_handle = 0;
103 m_numStreams = 0;
104 memset(mStreams, 0, sizeof(mStreams));
105 mUserData = NULL;
106
107 mStreamInfoBuf = NULL;
108 mChannelCB = NULL;
109 mPaddingInfo = NULL;
110 }
111
112 /*===========================================================================
113 * FUNCTION : ~QCamera3Channel
114 *
115 * DESCRIPTION: destructor of QCamera3Channel
116 *
117 * PARAMETERS : none
118 *
119 * RETURN : none
120 *==========================================================================*/
121 QCamera3Channel::~QCamera3Channel()
122 {
123 if (m_bIsActive)
124 stop();
125
126 for (int i = 0; i < m_numStreams; i++) {
127 if (mStreams[i] != NULL) {
128 delete mStreams[i];
129 mStreams[i] = 0;
130 }
131 }
132 if (m_handle) {
133 m_camOps->delete_channel(m_camHandle, m_handle);
134 ALOGE("%s: deleting channel %d", __func__, m_handle);
135 m_handle = 0;
136 }
137 m_numStreams = 0;
138 }
139
140 /*===========================================================================
141 * FUNCTION : init
142 *
143 * DESCRIPTION: initialization of channel
144 *
145 * PARAMETERS :
146 * @attr : channel bundle attribute setting
147 * @dataCB : data notify callback
148 * @userData: user data ptr
149 *
150 * RETURN : int32_t type of status
151 * NO_ERROR -- success
152 * non-zero failure code
153 *==========================================================================*/
154 int32_t QCamera3Channel::init(mm_camera_channel_attr_t *attr,
155 mm_camera_buf_notify_t dataCB)
156 {
157 m_handle = m_camOps->add_channel(m_camHandle,
158 attr,
159 dataCB,
160 this);
161 if (m_handle == 0) {
162 ALOGE("%s: Add channel failed", __func__);
163 return UNKNOWN_ERROR;
164 }
165 return NO_ERROR;
166 }
167
168 /*===========================================================================
169 * FUNCTION : addStream
170 *
171 * DESCRIPTION: add a stream into channel
172 *
173 * PARAMETERS :
174 * @streamType : type of stream to add
175 * @streamFormat : format of the stream buffers
176 * @streamDim : dimensions of the stream
177 * @minStreamBufNum: minimum number of stream buffers needed
180 *
181 * RETURN : int32_t type of status
182 * NO_ERROR -- success
183 * non-zero failure code
184 *==========================================================================*/
185 int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
186 cam_format_t streamFormat,
187 cam_dimension_t streamDim,
188 uint8_t minStreamBufNum)
189 {
190 int32_t rc = NO_ERROR;
191
192 if (m_numStreams >= 1) {
193 ALOGE("%s: Only one stream per channel supported in v3 Hal", __func__);
194 return BAD_VALUE;
195 }
196
197 if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
198 ALOGE("%s: stream number (%d) exceeds max limit (%d)",
199 __func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
200 return BAD_VALUE;
201 }
202 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
203 m_handle,
204 m_camOps,
205 mPaddingInfo,
206 this);
207 if (pStream == NULL) {
208 ALOGE("%s: No mem for Stream", __func__);
209 return NO_MEMORY;
210 }
211
212 rc = pStream->init(streamType, streamFormat, streamDim, NULL, minStreamBufNum,
213 streamCbRoutine, this);
214 if (rc == 0) {
215 mStreams[m_numStreams] = pStream;
216 m_numStreams++;
217 } else {
218 delete pStream;
219 }
220 return rc;
221 }
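/* Usage sketch (illustrative only, not called anywhere in this file): adding a
 * preview stream to an already init()ed channel, using only types and formats
 * that appear elsewhere in this file:
 *
 *   cam_dimension_t dim;
 *   dim.width = 1920;
 *   dim.height = 1080;
 *   channel->addStream(CAM_STREAM_TYPE_PREVIEW, CAM_FORMAT_YUV_420_NV21,
 *                      dim, MIN_STREAMING_BUFFER_NUM);
 */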
222
223 /*===========================================================================
224 * FUNCTION : start
225 *
226 * DESCRIPTION: start channel, which will start all streams belonging to this channel
227 *
228 * PARAMETERS :
229 *
230 * RETURN : int32_t type of status
231 * NO_ERROR -- success
232 * non-zero failure code
233 *==========================================================================*/
234 int32_t QCamera3Channel::start()
235 {
236 int32_t rc = NO_ERROR;
237
238 if (m_numStreams > 1) {
239 ALOGE("%s: bundle not supported", __func__);
240 } else if (m_numStreams == 0) {
241 return NO_INIT;
242 }
243
244 if(m_bIsActive) {
245 ALOGD("%s: Attempt to start active channel", __func__);
246 return rc;
247 }
248
249 for (int i = 0; i < m_numStreams; i++) {
250 if (mStreams[i] != NULL) {
251 mStreams[i]->start();
252 }
253 }
254 rc = m_camOps->start_channel(m_camHandle, m_handle);
255
256 if (rc != NO_ERROR) {
257 for (int i = 0; i < m_numStreams; i++) {
258 if (mStreams[i] != NULL) {
259 mStreams[i]->stop();
260 }
261 }
262 } else {
263 m_bIsActive = true;
264 }
265
266 return rc;
267 }
268
269 /*===========================================================================
270 * FUNCTION : stop
271 *
272 * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
273 *
274 * PARAMETERS : none
275 *
276 * RETURN : int32_t type of status
277 * NO_ERROR -- success
278 * non-zero failure code
279 *==========================================================================*/
280 int32_t QCamera3Channel::stop()
281 {
282 int32_t rc = NO_ERROR;
283 if(!m_bIsActive) {
284 ALOGE("%s: Attempt to stop inactive channel",__func__);
285 return rc;
286 }
287
288 for (int i = 0; i < m_numStreams; i++) {
289 if (mStreams[i] != NULL) {
290 mStreams[i]->stop();
291 }
292 }
293
294 rc = m_camOps->stop_channel(m_camHandle, m_handle);
295
296 m_bIsActive = false;
297 return rc;
298 }
299
300 /*===========================================================================
301 * FUNCTION : bufDone
302 *
303 * DESCRIPTION: return a stream buf back to kernel
304 *
305 * PARAMETERS :
306 * @recvd_frame : stream buf frame to be returned
307 *
308 * RETURN : int32_t type of status
309 * NO_ERROR -- success
310 * non-zero failure code
311 *==========================================================================*/
312 int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
313 {
314 int32_t rc = NO_ERROR;
315 for (int i = 0; i < recvd_frame->num_bufs; i++) {
316 if (recvd_frame->bufs[i] != NULL) {
317 for (int j = 0; j < m_numStreams; j++) {
318 if (mStreams[j] != NULL &&
319 mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
320 rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
321 break; // break loop j
322 }
323 }
324 }
325 }
326
327 return rc;
328 }
329
330 /*===========================================================================
331 * FUNCTION : getStreamTypeMask
332 *
333 * DESCRIPTION: Get bit mask of all stream types in this channel
334 *
335 * PARAMETERS : None
336 *
337 * RETURN : Bit mask of all stream types in this channel
338 *==========================================================================*/
339 uint32_t QCamera3Channel::getStreamTypeMask()
340 {
341 uint32_t mask = 0;
342 for (int i = 0; i < m_numStreams; i++) {
343 mask |= (0x1 << mStreams[i]->getMyType());
344 }
345 return mask;
346 }
347
348 /*===========================================================================
349 * FUNCTION : getStreamID
350 *
351 * DESCRIPTION: Get StreamID of requested stream type
352 *
353 * PARAMETERS : streamMask
354 *
355 * RETURN : Stream ID
356 *==========================================================================*/
357 uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
358 {
359 uint32_t streamID = 0;
360 for (int i = 0; i < m_numStreams; i++) {
361 if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) {
362 streamID = mStreams[i]->getMyServerID();
363 break;
364 }
365 }
366 return streamID;
367 }
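/* Example (illustrative): a channel holding a single preview stream reports a
 * mask with only that stream's bit set, and the same single-bit mask can be
 * passed back to getStreamID() to look up its server ID:
 *
 *   uint32_t mask = channel->getStreamTypeMask(); // e.g. 1 << CAM_STREAM_TYPE_PREVIEW
 *   uint32_t id   = channel->getStreamID(mask);   // server ID of that stream
 */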
368
369 /*===========================================================================
370 * FUNCTION : getStreamByHandle
371 *
372 * DESCRIPTION: return stream object by stream handle
373 *
374 * PARAMETERS :
375 * @streamHandle : stream handle
376 *
377 * RETURN : stream object. NULL if not found
378 *==========================================================================*/
379 QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
380 {
381 for (int i = 0; i < m_numStreams; i++) {
382 if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
383 return mStreams[i];
384 }
385 }
386 return NULL;
387 }
388
389 /*===========================================================================
390 * FUNCTION : getStreamByIndex
391 *
392 * DESCRIPTION: return stream object by index
393 *
394 * PARAMETERS :
395 * @streamHandle : stream handle
396 *
397 * RETURN : stream object. NULL if not found
398 *==========================================================================*/
399 QCamera3Stream *QCamera3Channel::getStreamByIndex(uint8_t index)
400 {
401 if (index < m_numStreams) {
402 return mStreams[index];
403 }
404 return NULL;
405 }
406
407 /*===========================================================================
408 * FUNCTION : streamCbRoutine
409 *
410 * DESCRIPTION: callback routine for stream
411 *
412 * PARAMETERS :
413 * @super_frame : super buffer of the received frame
414 * @stream : stream object; @userdata : user data ptr (the channel)
415 * RETURN : none
416 *==========================================================================*/
417 void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
418 QCamera3Stream *stream, void *userdata)
419 {
420 QCamera3Channel *channel = (QCamera3Channel *)userdata;
421 if (channel == NULL) {
422 ALOGE("%s: invalid channel pointer", __func__);
423 return;
424 }
425 channel->streamCbRoutine(super_frame, stream);
426 }
427
428 /*===========================================================================
429 * FUNCTION : QCamera3RegularChannel
430 *
431 * DESCRIPTION: constructor of QCamera3RegularChannel
432 *
433 * PARAMETERS :
434 * @cam_handle : camera handle
435 * @cam_ops : ptr to camera ops table
436 * @cb_routine : callback routine to frame aggregator
437 * @stream : camera3_stream_t structure
438 * @stream_type: Channel stream type
439 *
440 * RETURN : none
441 *==========================================================================*/
442 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
443 mm_camera_ops_t *cam_ops,
444 channel_cb_routine cb_routine,
445 cam_padding_info_t *paddingInfo,
446 void *userData,
447 camera3_stream_t *stream,
448 cam_stream_type_t stream_type) :
449 QCamera3Channel(cam_handle, cam_ops, cb_routine,
450 paddingInfo, userData),
451 mCamera3Stream(stream),
452 mNumBufs(0),
453 mStreamType(stream_type)
454 {
455 }
456
457 /*===========================================================================
458 * FUNCTION : ~QCamera3RegularChannel
459 *
460 * DESCRIPTION: destructor of QCamera3RegularChannel
461 *
462 * PARAMETERS : none
463 *
464 * RETURN : none
465 *==========================================================================*/
466 QCamera3RegularChannel::~QCamera3RegularChannel()
467 {
468 mMemory.unregisterBuffers();
469 }
470
471 /*===========================================================================
472 * FUNCTION : initialize
473 *
474 * DESCRIPTION: Initialize and add camera channel & stream
475 *
476 * PARAMETERS :
477 *
478 * RETURN : int32_t type of status
479 * NO_ERROR -- success
480 * non-zero failure code
481 *==========================================================================*/
482
483 int32_t QCamera3RegularChannel::initialize()
484 {
485 int32_t rc = NO_ERROR;
486 cam_format_t streamFormat;
487 cam_dimension_t streamDim;
488
489 if (NULL == mCamera3Stream) {
490 ALOGE("%s: Camera stream uninitialized", __func__);
491 return NO_INIT;
492 }
493
494 if (1 <= m_numStreams) {
495 // Only one stream per channel supported in v3 Hal
496 return NO_ERROR;
497 }
498
499 rc = init(NULL, NULL);
500 if (rc < 0) {
501 ALOGE("%s: init failed", __func__);
502 return rc;
503 }
504
505 mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM;
506
507 if (mCamera3Stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
508 if (mStreamType == CAM_STREAM_TYPE_VIDEO) {
509 streamFormat = CAM_FORMAT_YUV_420_NV12;
510 } else if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
511 streamFormat = CAM_FORMAT_YUV_420_NV21;
512 } else {
513 //TODO: Add a new flag in libgralloc for ZSL buffers, and its size needs
514 // to be properly aligned and padded.
515 streamFormat = CAM_FORMAT_YUV_420_NV21;
516 }
517 } else if(mCamera3Stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
518 streamFormat = CAM_FORMAT_YUV_420_NV21;
519 } else if (mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
520 mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW16) {
521 // Bayer pattern doesn't matter here.
522 // All CAMIF raw formats use 10 bits.
523 streamFormat = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
524 } else {
525
526 //TODO: Fail for other types of streams for now
527 ALOGE("%s: format is not IMPLEMENTATION_DEFINED or flexible", __func__);
528 return -EINVAL;
529 }
530
531 streamDim.width = mCamera3Stream->width;
532 streamDim.height = mCamera3Stream->height;
533
534 rc = QCamera3Channel::addStream(mStreamType,
535 streamFormat,
536 streamDim,
537 mNumBufs);
538
539 return rc;
540 }
541
542 /*===========================================================================
543 * FUNCTION : start
544 *
545 * DESCRIPTION: start a regular channel
546 *
547 * PARAMETERS :
548 *
549 * RETURN : int32_t type of status
550 * NO_ERROR -- success
551 * non-zero failure code
552 *==========================================================================*/
553 int32_t QCamera3RegularChannel::start()
554 {
555 int32_t rc = NO_ERROR;
556
557 if (0 < mMemory.getCnt()) {
558 rc = QCamera3Channel::start();
559 }
560
561 return rc;
562 }
563 /*===========================================================================
564 * FUNCTION : getInternalFormatBuffer
565 *
566 * DESCRIPTION: return buffer in the internal format structure
567 *
568 * PARAMETERS :
569 * @buffer : buffer handle
570 *
571 * RETURN : ptr to the matching internal buffer. NULL if not found
572 *==========================================================================*/
573 mm_camera_buf_def_t* QCamera3RegularChannel::getInternalFormatBuffer(
574 buffer_handle_t * buffer)
575 {
576 int32_t index;
577 if(buffer == NULL)
578 return NULL;
579 index = mMemory.getMatchBufIndex((void*)buffer);
580 if(index < 0) {
581 ALOGE("%s: Could not find object among registered buffers",__func__);
582 return NULL;
583 }
584 return mStreams[0]->getInternalFormatBuffer(index);
585 }
586
587 /*===========================================================================
588 * FUNCTION : request
589 *
590 * DESCRIPTION: process a request from camera service. Stream on if necessary.
591 *
592 * PARAMETERS :
593 * @buffer : buffer to be filled for this request
594 *
595 * RETURN : 0 on a success start of capture
596 * -EINVAL on invalid input
597 * -ENODEV on serious error
598 *==========================================================================*/
599 int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber)
600 {
601 //FIX ME: Return buffer back in case of failures below.
602
603 int32_t rc = NO_ERROR;
604 int index;
605
606 if (NULL == buffer) {
607 ALOGE("%s: Invalid buffer in channel request", __func__);
608 return BAD_VALUE;
609 }
610
611 if(!m_bIsActive) {
612 rc = registerBuffer(buffer);
613 if (NO_ERROR != rc) {
614 ALOGE("%s: On-the-fly buffer registration failed %d",
615 __func__, rc);
616 return rc;
617 }
618
619 rc = start();
620 if (NO_ERROR != rc) {
621 return rc;
622 }
623 } else {
624 ALOGV("%s: Request on an existing stream",__func__);
625 }
626
627 index = mMemory.getMatchBufIndex((void*)buffer);
628 if(index < 0) {
629 rc = registerBuffer(buffer);
630 if (NO_ERROR != rc) {
631 ALOGE("%s: On-the-fly buffer registration failed %d",
632 __func__, rc);
633 return rc;
634 }
635
636 index = mMemory.getMatchBufIndex((void*)buffer);
637 if (index < 0) {
638 ALOGE("%s: Could not find object among registered buffers",
639 __func__);
640 return DEAD_OBJECT;
641 }
642 }
643
644 rc = mStreams[0]->bufDone(index);
645 if(rc != NO_ERROR) {
646 ALOGE("%s: Failed to Q new buffer to stream",__func__);
647 return rc;
648 }
649
650 rc = mMemory.markFrameNumber(index, frameNumber);
651 return rc;
652 }
653
654 /*===========================================================================
655 * FUNCTION : registerBuffer
656 *
657 * DESCRIPTION: register streaming buffer to the channel object
658 *
659 * PARAMETERS :
660 * @buffer : buffer to be registered
661 *
662 * RETURN : int32_t type of status
663 * NO_ERROR -- success
664 * non-zero failure code
665 *==========================================================================*/
666 int32_t QCamera3RegularChannel::registerBuffer(buffer_handle_t *buffer)
667 {
668 int rc = 0;
669
670 if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
671 ALOGE("%s: Trying to register more buffers than initially requested",
672 __func__);
673 return BAD_VALUE;
674 }
675
676 if (0 == m_numStreams) {
677 rc = initialize();
678 if (rc != NO_ERROR) {
679 ALOGE("%s: Couldn't initialize camera stream %d",
680 __func__, rc);
681 return rc;
682 }
683 }
684
685 rc = mMemory.registerBuffer(buffer);
686 if (ALREADY_EXISTS == rc) {
687 return NO_ERROR;
688 } else if (NO_ERROR != rc) {
689 ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
690 return rc;
691 }
692
693 return rc;
694 }
695
696 void QCamera3RegularChannel::streamCbRoutine(
697 mm_camera_super_buf_t *super_frame,
698 QCamera3Stream *stream)
699 {
700 //FIXME Q Buf back in case of error?
701 uint8_t frameIndex;
702 buffer_handle_t *resultBuffer;
703 int32_t resultFrameNumber;
704 camera3_stream_buffer_t result;
705
706 if(!super_frame) {
707 ALOGE("%s: Invalid Super buffer",__func__);
708 return;
709 }
710
711 if(super_frame->num_bufs != 1) {
712 ALOGE("%s: Multiple streams are not supported",__func__);
713 return;
714 }
715 if(super_frame->bufs[0] == NULL ) {
716 ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
717 __func__);
718 return;
719 }
720
721 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
722 if(frameIndex >= mNumBufs) {
723 ALOGE("%s: Error, Invalid index for buffer",__func__);
724 if(stream) {
725 stream->bufDone(frameIndex);
726 }
727 return;
728 }
729
730 // Use below data to issue framework callback
731 resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
732 resultFrameNumber = mMemory.getFrameNumber(frameIndex);
733
734 result.stream = mCamera3Stream;
735 result.buffer = resultBuffer;
736 result.status = CAMERA3_BUFFER_STATUS_OK;
737 result.acquire_fence = -1;
738 result.release_fence = -1;
739
740 mChannelCB(NULL, &result, resultFrameNumber, mUserData);
741 free(super_frame);
742 return;
743 }
744
745 QCamera3Memory* QCamera3RegularChannel::getStreamBufs(uint32_t /*len*/)
746 {
747 return &mMemory;
748 }
749
750 int QCamera3RegularChannel::kMaxBuffers = 7;
751
752 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
753 mm_camera_ops_t *cam_ops,
754 channel_cb_routine cb_routine,
755 cam_padding_info_t *paddingInfo,
756 void *userData) :
757 QCamera3Channel(cam_handle, cam_ops,
758 cb_routine, paddingInfo, userData),
759 mMemory(NULL)
760 {
761 }
762
763 QCamera3MetadataChannel::~QCamera3MetadataChannel()
764 {
765 if (m_bIsActive)
766 stop();
767
768 if (mMemory) {
769 mMemory->deallocate();
770 delete mMemory;
771 mMemory = NULL;
772 }
773 }
774
775 int32_t QCamera3MetadataChannel::initialize()
776 {
777 int32_t rc;
778 cam_dimension_t streamDim;
779
780 if (mMemory || m_numStreams > 0) {
781 ALOGE("%s: metadata channel already initialized", __func__);
782 return -EINVAL;
783 }
784
785 rc = init(NULL, NULL);
786 if (rc < 0) {
787 ALOGE("%s: init failed", __func__);
788 return rc;
789 }
790
791 streamDim.width = sizeof(metadata_buffer_t),
792 streamDim.height = 1;
793 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
794 streamDim, MIN_STREAMING_BUFFER_NUM);
795 if (rc < 0) {
796 ALOGE("%s: addStream failed", __func__);
797 }
798 return rc;
799 }
800
801 int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
802 uint32_t /*frameNumber*/)
803 {
804 if (!m_bIsActive) {
805 return start();
806 }
807 else
808 return 0;
809 }
810
811 void QCamera3MetadataChannel::streamCbRoutine(
812 mm_camera_super_buf_t *super_frame,
813 QCamera3Stream * /*stream*/)
814 {
815 uint32_t requestNumber = 0;
816 if (super_frame == NULL || super_frame->num_bufs != 1) {
817 ALOGE("%s: super_frame is not valid", __func__);
818 return;
819 }
820 mChannelCB(super_frame, NULL, requestNumber, mUserData);
821 }
822
823 QCamera3Memory* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
824 {
825 int rc;
826 if (len < sizeof(metadata_buffer_t)) {
827 ALOGE("%s: size doesn't match %d vs %d", __func__,
828 len, sizeof(metadata_buffer_t));
829 return NULL;
830 }
831 mMemory = new QCamera3HeapMemory();
832 if (!mMemory) {
833 ALOGE("%s: unable to create metadata memory", __func__);
834 return NULL;
835 }
836 rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
837 if (rc < 0) {
838 ALOGE("%s: unable to allocate metadata memory", __func__);
839 delete mMemory;
840 mMemory = NULL;
841 return NULL;
842 }
843 memset(mMemory->getPtr(0), 0, sizeof(metadata_buffer_t));
844 return mMemory;
845 }
846
847 void QCamera3MetadataChannel::putStreamBufs()
848 {
849 mMemory->deallocate();
850 delete mMemory;
851 mMemory = NULL;
852 }
853 /*************************************************************************************/
854 // RAW Channel related functions
855 int QCamera3RawChannel::kMaxBuffers = 7;
856
857 QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle,
858 mm_camera_ops_t *cam_ops,
859 channel_cb_routine cb_routine,
860 cam_padding_info_t *paddingInfo,
861 void *userData,
862 camera3_stream_t *stream,
863 bool raw_16) :
864 QCamera3RegularChannel(cam_handle, cam_ops,
865 cb_routine, paddingInfo, userData, stream,
866 CAM_STREAM_TYPE_RAW),
867 mIsRaw16(raw_16)
868 {
869 char prop[PROPERTY_VALUE_MAX];
870 property_get("persist.camera.raw.dump", prop, "0");
871 mRawDump = atoi(prop);
872 }
873
874 QCamera3RawChannel::~QCamera3RawChannel()
875 {
876 }
877
878 void QCamera3RawChannel::streamCbRoutine(
879 mm_camera_super_buf_t *super_frame,
880 QCamera3Stream * stream)
881 {
882 /* Move this back down once verified */
883 if (mRawDump)
884 dumpRawSnapshot(super_frame->bufs[0]);
885
886 if (mIsRaw16)
887 convertToRaw16(super_frame->bufs[0]);
888
889 // Make sure of cache coherency, since extra CPU processing was done on the buffer
890 mMemory.cleanInvalidateCache(super_frame->bufs[0]->buf_idx);
891
892 QCamera3RegularChannel::streamCbRoutine(super_frame, stream);
893 return;
894 }
895
896 void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
897 {
898 QCamera3Stream *stream = getStreamByIndex(0);
899 char buf[32];
900 memset(buf, 0, sizeof(buf));
901 cam_dimension_t dim;
902 memset(&dim, 0, sizeof(dim));
903 stream->getFrameDimension(dim);
904
905 cam_frame_len_offset_t offset;
906 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
907 stream->getFrameOffset(offset);
908 snprintf(buf, sizeof(buf), "/data/r_%d_%dx%d.raw",
909 frame->frame_idx, dim.width, dim.height);
910
911 int file_fd = open(buf, O_RDWR| O_CREAT, 0777);
912 if (file_fd >= 0) {
913 int written_len = write(file_fd, frame->buffer, offset.frame_len);
914 ALOGE("%s: written number of bytes %d", __func__, written_len);
915 close(file_fd);
916 } else {
917 ALOGE("%s: failed to open file to dump image", __func__);
918 }
919
920 }
921
922 void QCamera3RawChannel::convertToRaw16(mm_camera_buf_def_t *frame)
923 {
924 // Convert image buffer from Opaque raw format to RAW16 format
925 // 10bit Opaque raw is stored in the format of:
926 // 0000 - p5 - p4 - p3 - p2 - p1 - p0
927 // where p0 to p5 are 6 pixels (each 10 bits) and the most significant
928 // 4 bits are 0s, i.e. each 64-bit word packs 6 pixels.
929
930 QCamera3Stream *stream = getStreamByIndex(0);
931 cam_dimension_t dim;
932 memset(&dim, 0, sizeof(dim));
933 stream->getFrameDimension(dim);
934
935 cam_frame_len_offset_t offset;
936 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
937 stream->getFrameOffset(offset);
938
939 uint32_t raw16_stride = (dim.width + 15) & ~15;
940 uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
941
942 // In-place format conversion.
943 // Raw16 format always occupy more memory than opaque raw10.
944 // Convert to Raw16 by iterating through all pixels from bottom-right
945 // to top-left of the image.
946 // Two points to note:
947 // 1. The cross-platform RAW16 stride is aligned to 16 pixels.
948 // 2. The opaque RAW10 stride is in groups of 6 pixels, aligned to 16 bytes.
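// Worked example of the extraction below (assuming the packed layout described
// above): for pixel x = 7 in a row, x/6 = 1 selects the second 64-bit word and
// 10*(x%6) = 10 shifts pixel p1 down, so the pixel value is
//   0x3FF & (row_start[1] >> 10);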
949 for (int y = dim.height-1; y >= 0; y--) {
950 uint64_t* row_start = (uint64_t *)frame->buffer +
951 y * offset.mp[0].stride / 8;
952 for (int x = dim.width-1; x >= 0; x--) {
953 uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6)));
954 raw16_buffer[y*raw16_stride+x] = raw16_pixel;
955 }
956 }
957 }
958
959 /*************************************************************************************/
960
961 /*===========================================================================
962 * FUNCTION : jpegEvtHandle
963 *
964 * DESCRIPTION: Function registered with mm-jpeg-interface to handle jpeg events.
965 * Construct the result payload and call mChannelCB to deliver the
966 * buffer to the framework.
967 *
968 * PARAMETERS :
969 * @status : status of jpeg job
970 * @client_hdl: jpeg client handle
971 * @jobId : jpeg job Id
972 * @p_output : ptr to jpeg output result struct
973 * @userdata : user data ptr
974 *
975 * RETURN : none
976 *==========================================================================*/
977 void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
978 uint32_t /*client_hdl*/,
979 uint32_t jobId,
980 mm_jpeg_output_t *p_output,
981 void *userdata)
982 {
983 buffer_handle_t *resultBuffer, *jpegBufferHandle;
984 int32_t resultFrameNumber;
985 int resultStatus = CAMERA3_BUFFER_STATUS_OK;
986 camera3_stream_buffer_t result;
987 camera3_jpeg_blob_t jpegHeader;
988 char* jpeg_eof = 0;
989 int maxJpegSize;
990 QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
991 if (obj) {
992
993 //Release any cached metabuffer information
994 if (obj->mMetaFrame != NULL && obj->m_pMetaChannel != NULL) {
995 ((QCamera3MetadataChannel*)(obj->m_pMetaChannel))->bufDone(obj->mMetaFrame);
996 obj->mMetaFrame = NULL;
997 obj->m_pMetaChannel = NULL;
998 } else {
999 ALOGE("%s: Meta frame was NULL", __func__);
1000 }
1001 //Construct payload for process_capture_result. Call mChannelCb
1002
1003 qcamera_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
1004
1005 if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
1006 ALOGE("%s: Error in jobId: (%d) with status: %d", __func__, jobId, status);
1007 resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
1008 }
1009
1010 //Construct jpeg transient header of type camera3_jpeg_blob_t
1011 //Append at the end of jpeg image of buf_filled_len size
1012
1013 jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
1014 jpegHeader.jpeg_size = p_output->buf_filled_len;
1015
1016
1017 char* jpeg_buf = (char *)p_output->buf_vaddr;
1018
1019 // The gralloc buffer may have additional padding (e.g. for 4K page size).
1020 // Follow the size guidelines from the spec, since the framework relies
1021 // on that size to reach the end of the buffer and, with it, the header.
1022
1023 //Handle is the same as resultBuffer, but kept separate for readability
1024 jpegBufferHandle =
1025 (buffer_handle_t *)obj->mMemory.getBufferHandle(obj->mCurrentBufIndex);
1026
1027 maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
1028 if (maxJpegSize > obj->mMemory.getSize(obj->mCurrentBufIndex)) {
1029 maxJpegSize = obj->mMemory.getSize(obj->mCurrentBufIndex);
1030 }
1031
1032 jpeg_eof = &jpeg_buf[maxJpegSize-sizeof(jpegHeader)];
1033 memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
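// Resulting buffer layout (sketch), with maxJpegSize the usable size computed above:
//
//   [ JPEG bitstream: buf_filled_len bytes ][ ...padding... ][ camera3_jpeg_blob_t ]
//   ^ start of buffer                        blob written at maxJpegSize - sizeof(jpegHeader)
//
// The framework locates the blob at the end of the buffer to recover the actual JPEG size.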
1034 obj->mMemory.cleanInvalidateCache(obj->mCurrentBufIndex);
1035
1036 // Use below data to issue framework callback
1037 resultBuffer = (buffer_handle_t *)obj->mMemory.getBufferHandle(obj->mCurrentBufIndex);
1038 resultFrameNumber = obj->mMemory.getFrameNumber(obj->mCurrentBufIndex);
1039
1040 result.stream = obj->mCamera3Stream;
1041 result.buffer = resultBuffer;
1042 result.status = resultStatus;
1043 result.acquire_fence = -1;
1044 result.release_fence = -1;
1045
1046 ALOGV("%s: Issue Callback", __func__);
1047 obj->mChannelCB(NULL, &result, resultFrameNumber, obj->mUserData);
1048
1049 // release internal data for jpeg job
1050 if (job != NULL) {
1051 obj->m_postprocessor.releaseJpegJobData(job);
1052 free(job);
1053 }
1054 return;
1055 // }
1056 } else {
1057 ALOGE("%s: Null userdata in jpeg callback", __func__);
1058 }
1059 }
1060
1061 QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
1062 mm_camera_ops_t *cam_ops,
1063 channel_cb_routine cb_routine,
1064 cam_padding_info_t *paddingInfo,
1065 void *userData,
1066 camera3_stream_t *stream) :
1067 QCamera3Channel(cam_handle, cam_ops, cb_routine,
1068 paddingInfo, userData),
1069 m_postprocessor(this),
1070 mCamera3Stream(stream),
1071 mNumBufs(0),
1072 mCurrentBufIndex(-1),
1073 mYuvMemory(NULL),
1074 mMetaFrame(NULL)
1075 {
1076 mYuvWidth = stream->width;
1077 mYuvHeight = stream->height;
1078 int32_t rc = m_postprocessor.init(&mMemory, jpegEvtHandle, this);
1079 if (rc != 0) {
1080 ALOGE("Init Postprocessor failed");
1081 }
1082 }
1083
1084 /*===========================================================================
1085 * FUNCTION : stop
1086 *
1087 * DESCRIPTION: stop pic channel, which will stop all streams within, including
1088 * the reprocessing channel in postprocessor and YUV stream.
1089 *
1090 * PARAMETERS : none
1091 *
1092 * RETURN : int32_t type of status
1093 * NO_ERROR -- success
1094 * non-zero failure code
1095 *==========================================================================*/
1096 int32_t QCamera3PicChannel::stop()
1097 {
1098 int32_t rc = NO_ERROR;
1099 if(!m_bIsActive) {
1100 ALOGE("%s: Attempt to stop inactive channel",__func__);
1101 return rc;
1102 }
1103
1104 m_postprocessor.stop();
1105
1106 rc |= QCamera3Channel::stop();
1107 return rc;
1108 }
1109
1110 QCamera3PicChannel::~QCamera3PicChannel()
1111 {
1112 stop();
1113
1114 int32_t rc = m_postprocessor.deinit();
1115 if (rc != 0) {
1116 ALOGE("De-init Postprocessor failed");
1117 }
1118 }
1119
1120 int32_t QCamera3PicChannel::initialize()
1121 {
1122 int32_t rc = NO_ERROR;
1123 cam_dimension_t streamDim;
1124 cam_stream_type_t streamType;
1125 cam_format_t streamFormat;
1126 mm_camera_channel_attr_t attr;
1127
1128 if (NULL == mCamera3Stream) {
1129 ALOGE("%s: Camera stream uninitialized", __func__);
1130 return NO_INIT;
1131 }
1132
1133 if (1 <= m_numStreams) {
1134 // Only one stream per channel supported in v3 Hal
1135 return NO_ERROR;
1136 }
1137
1138 memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
1139 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
1140 attr.look_back = 1;
1141 attr.post_frame_skip = 1;
1142 attr.water_mark = 1;
1143 attr.max_unmatched_frames = 1;
1144
1145 rc = init(&attr, NULL);
1146 if (rc < 0) {
1147 ALOGE("%s: init failed", __func__);
1148 return rc;
1149 }
1150
1151 streamType = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
1152 streamFormat = CAM_FORMAT_YUV_420_NV21;
1153 streamDim.width = mYuvWidth;
1154 streamDim.height = mYuvHeight;
1155
1156 int num_buffers = 1;
1157 mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM;
1158 rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
1159 num_buffers);
1160
1161 return rc;
1162 }
1163
1164 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
1165 uint32_t frameNumber,
1166 mm_camera_buf_def_t *pInputBuffer,
1167 metadata_buffer_t *metadata)
1168 {
1169 //FIX ME: Return buffer back in case of failures below.
1170
1171 int32_t rc = NO_ERROR;
1172 int index;
1173 // Picture stream has already been started before any request comes in
1174 if (!m_bIsActive) {
1175 ALOGE("%s: Channel not started!!", __func__);
1176 return NO_INIT;
1177 }
1178
1179 index = mMemory.getMatchBufIndex((void*)buffer);
1180 if(index < 0) {
1181 rc = registerBuffer(buffer);
1182 if (NO_ERROR != rc) {
1183 ALOGE("%s: On-the-fly buffer registration failed %d",
1184 __func__, rc);
1185 return rc;
1186 }
1187
1188 index = mMemory.getMatchBufIndex((void*)buffer);
1189 if (index < 0) {
1190 ALOGE("%s: Could not find object among registered buffers",__func__);
1191 return DEAD_OBJECT;
1192 }
1193 }
1194 rc = mMemory.markFrameNumber(index, frameNumber);
1195
1196 //Start the postprocessor for jpeg encoding. Pass mMemory as destination buffer
1197 mCurrentBufIndex = index;
1198
1199 // Start postprocessor
1200 m_postprocessor.start(this, metadata);
1201
1202 // Queue jpeg settings
1203 rc = queueJpegSetting(index, metadata);
1204
1205 if (pInputBuffer == NULL)
1206 mStreams[0]->bufDone(0);
1207 else {
1208 mm_camera_super_buf_t *src_frame = NULL;
1209 src_frame = (mm_camera_super_buf_t *)malloc(
1210 sizeof(mm_camera_super_buf_t));
1211 if (src_frame == NULL) {
1212 ALOGE("%s: No memory for src frame", __func__);
1213 return NO_MEMORY;
1214 }
1215 memset(src_frame, 0, sizeof(mm_camera_super_buf_t));
1216 src_frame->num_bufs = 1;
1217 src_frame->bufs[0] = pInputBuffer;
1218
1219 ALOGD("%s: Post-process started", __func__);
1220 ALOGD("%s: Issue call to reprocess", __func__);
1221
1222 m_postprocessor.processPPMetadata(metadata);
1223 m_postprocessor.processData(src_frame);
1224 }
1225 return rc;
1226 }
1227
1228 /*===========================================================================
1229 * FUNCTION : dataNotifyCB
1230 *
1231 * DESCRIPTION: Channel Level callback used for super buffer data notify.
1232 * This function is registered with mm-camera-interface to handle
1233 * data notify
1234 *
1235 * PARAMETERS :
1236 * @recvd_frame : stream frame received
1237 * userdata : user data ptr
1238 *
1239 * RETURN : none
1240 *==========================================================================*/
1241 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
1242 void *userdata)
1243 {
1244 ALOGV("%s: E\n", __func__);
1245 QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
1246
1247 if (channel == NULL) {
1248 ALOGE("%s: invalid channel pointer", __func__);
1249 return;
1250 }
1251
1252 if(channel->m_numStreams != 1) {
1253 ALOGE("%s: Error: Bug: This callback assumes one stream per channel",__func__);
1254 return;
1255 }
1256
1257
1258 if(channel->mStreams[0] == NULL) {
1259 ALOGE("%s: Error: Invalid Stream object",__func__);
1260 return;
1261 }
1262
1263 channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
1264
1265 ALOGV("%s: X\n", __func__);
1266 return;
1267 }
1268
1269 /*===========================================================================
1270 * FUNCTION : registerBuffer
1271 *
1272 * DESCRIPTION: register streaming buffer to the channel object
1273 *
1274 * PARAMETERS :
1275 * @buffer : buffer to be registered
1276 *
1277 * RETURN : int32_t type of status
1278 * NO_ERROR -- success
1279 * non-zero failure code
1280 *==========================================================================*/
1281 int32_t QCamera3PicChannel::registerBuffer(buffer_handle_t *buffer)
1282 {
1283 int rc = 0;
1284
1285 if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
1286 ALOGE("%s: Trying to register more buffers than initially requested",
1287 __func__);
1288 return BAD_VALUE;
1289 }
1290
1291 if (0 == m_numStreams) {
1292 rc = initialize();
1293 if (rc != NO_ERROR) {
1294 ALOGE("%s: Couldn't initialize camera stream %d",
1295 __func__, rc);
1296 return rc;
1297 }
1298 }
1299 rc = mMemory.registerBuffer(buffer);
1300 if (ALREADY_EXISTS == rc) {
1301 return NO_ERROR;
1302 } else if (NO_ERROR != rc) {
1303 ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
1304 return rc;
1305 }
1306
1307 return rc;
1308 }
1309
1310 void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
1311 QCamera3Stream *stream)
1312 {
1313 //TODO
1314 //Used only for getting YUV. Jpeg callback will be sent back from channel
1315 //directly to HWI. Refer to func jpegEvtHandle
1316
1317 //Got the yuv callback. Calling yuv callback handler in PostProc
1318 uint8_t frameIndex;
1319 mm_camera_super_buf_t* frame = NULL;
1320 if(!super_frame) {
1321 ALOGE("%s: Invalid Super buffer",__func__);
1322 return;
1323 }
1324
1325 if(super_frame->num_bufs != 1) {
1326 ALOGE("%s: Multiple streams are not supported",__func__);
1327 return;
1328 }
1329 if(super_frame->bufs[0] == NULL ) {
1330 ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
1331 __func__);
1332 return;
1333 }
1334
1335 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
1336 if(frameIndex >= mNumBufs) {
1337 ALOGE("%s: Error, Invalid index for buffer",__func__);
1338 if(stream) {
1339 stream->bufDone(frameIndex);
1340 }
1341 return;
1342 }
1343
1344 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1345 if (frame == NULL) {
1346 ALOGE("%s: Error allocating memory to save received_frame structure.",
1347 __func__);
1348 if(stream) {
1349 stream->bufDone(frameIndex);
1350 }
1351 return;
1352 }
1353 *frame = *super_frame;
1354 m_postprocessor.processData(frame);
1355 free(super_frame);
1356 return;
1357 }
1358
1359 QCamera3Memory* QCamera3PicChannel::getStreamBufs(uint32_t len)
1360 {
1361 int rc = 0;
1362
1363 mYuvMemory = new QCamera3HeapMemory();
1364 if (!mYuvMemory) {
1365 ALOGE("%s: unable to create YUV memory", __func__);
1366 return NULL;
1367 }
1368
1369 //Queue YUV buffers in the beginning mQueueAll = true
1370 rc = mYuvMemory->allocate(1, len, false);
1371 if (rc < 0) {
1372 ALOGE("%s: unable to allocate YUV memory", __func__);
1373 delete mYuvMemory;
1374 mYuvMemory = NULL;
1375 return NULL;
1376 }
1377 return mYuvMemory;
1378 }
1379
1380 void QCamera3PicChannel::putStreamBufs()
1381 {
1382 mMemory.unregisterBuffers();
1383
1384 mYuvMemory->deallocate();
1385 delete mYuvMemory;
1386 mYuvMemory = NULL;
1387 }
1388
1389 int32_t QCamera3PicChannel::queueReprocMetadata(metadata_buffer_t *metadata)
1390 {
1391 return m_postprocessor.processPPMetadata(metadata);
1392 }
1393
1394 int32_t QCamera3PicChannel::queueJpegSetting(int32_t index, metadata_buffer_t *metadata)
1395 {
1396 jpeg_settings_t *settings =
1397 (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t));
1398
1399 if (!settings) {
1400 ALOGE("%s: out of memory allocating jpeg_settings", __func__);
1401 return -ENOMEM;
1402 }
1403
1404 memset(settings, 0, sizeof(jpeg_settings_t));
1405
1406 settings->out_buf_index = index;
1407
1408 settings->jpeg_orientation = 0;
1409 if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
1410 int32_t *orientation = (int32_t *)POINTER_OF(
1411 CAM_INTF_META_JPEG_ORIENTATION, metadata);
1412 settings->jpeg_orientation = *orientation;
1413 }
1414
1415 settings->jpeg_quality = 85;
1416 if (IS_PARM_VALID(CAM_INTF_META_JPEG_QUALITY, metadata)) {
1417 uint8_t *quality = (uint8_t *)POINTER_OF(
1418 CAM_INTF_META_JPEG_QUALITY, metadata);
1419 settings->jpeg_quality = *quality;
1420 }
1421
1422 if (IS_PARM_VALID(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) {
1423 uint8_t *quality = (uint8_t *)POINTER_OF(
1424 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
1425 settings->jpeg_thumb_quality = *quality;
1426 }
1427
1428 if (IS_PARM_VALID(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) {
1429 cam_dimension_t *dimension = (cam_dimension_t *)POINTER_OF(
1430 CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
1431 settings->thumbnail_size = *dimension;
1432 }
1433
1434 settings->gps_timestamp_valid = 0;
1435 if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) {
1436 int64_t *timestamp = (int64_t *)POINTER_OF(
1437 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
1438 settings->gps_timestamp = *timestamp;
1439 settings->gps_timestamp_valid = 1;
1440 }
1441
1442 settings->gps_coordinates_valid = 0;
1443 if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) {
1444 double *coordinates = (double *)POINTER_OF(
1445 CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
1446 memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double));
1447 settings->gps_coordinates_valid = 1;
1448 }
1449
1450 if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) {
1451 char *proc_methods = (char *)POINTER_OF(
1452 CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
1453 memset(settings->gps_processing_method, 0,
1454 sizeof(settings->gps_processing_method));
1455 strncpy(settings->gps_processing_method, proc_methods,
1456 sizeof(settings->gps_processing_method));
1457 }
1458
1459 return m_postprocessor.processJpegSettingData(settings);
1460 }
1461
1462 /*===========================================================================
1463 * FUNCTION : getRational
1464 *
1465 * DESCRIPTION: compose rational struct
1466 *
1467 * PARAMETERS :
1468 * @rat : ptr to struct to store rational info
1469 * @num : numerator of the rational
1470 * @denom : denominator of the rational
1471 *
1472 * RETURN : int32_t type of status
1473 * NO_ERROR -- success
1474 * non-zero failure code
1475 *==========================================================================*/
1476 int32_t getRational(rat_t *rat, int num, int denom)
1477 {
1478 if (NULL == rat) {
1479 ALOGE("%s: NULL rat input", __func__);
1480 return BAD_VALUE;
1481 }
1482 rat->num = num;
1483 rat->denom = denom;
1484 return NO_ERROR;
1485 }
1486
1487 /*===========================================================================
1488 * FUNCTION : parseGPSCoordinate
1489 *
1490 * DESCRIPTION: parse GPS coordinate string
1491 *
1492 * PARAMETERS :
1493 * @coord_str : [input] coordinate string
1494 * @coord : [output] ptr to struct to store coordinate
1495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
1498 * non-zero failure code
1499 *==========================================================================*/
1500 int parseGPSCoordinate(const char *coord_str, rat_t* coord)
1501 {
1502 if(coord == NULL) {
1503 ALOGE("%s: error, invalid argument coord == NULL", __func__);
1504 return BAD_VALUE;
1505 }
1506 float degF = atof(coord_str);
1507 if (degF < 0) {
1508 degF = -degF;
1509 }
1510 float minF = (degF - (int) degF) * 60;
1511 float secF = (minF - (int) minF) * 60;
1512
1513 getRational(&coord[0], (int)degF, 1);
1514 getRational(&coord[1], (int)minF, 1);
1515 getRational(&coord[2], (int)(secF * 10000), 10000);
1516 return NO_ERROR;
1517 }
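/* Worked example (illustrative): parseGPSCoordinate("37.7749", coord) gives
 * degF = 37.7749, minF = 46.494, secF = 29.64, so coord becomes roughly
 * { 37/1, 46/1, 296400/10000 }, i.e. 37 deg 46 min 29.64 sec (the seconds term
 * may differ by one unit due to float rounding).
 */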
1518
1519 /*===========================================================================
1520 * FUNCTION : getExifDateTime
1521 *
1522 * DESCRIPTION: query exif date time
1523 *
1524 * PARAMETERS :
1525 * @dateTime : string to store exif date time
1526 * @subsecTime : string to store exif subsec time
1527 * @count : length of the dateTime string
1528 * @subsecCount: length of the subsecTime string
1529 *
1530 * RETURN : int32_t type of status
1531 * NO_ERROR -- success
1532 * non-zero failure code
1533 *==========================================================================*/
1534 int32_t getExifDateTime(char *dateTime, char *subsecTime,
1535 uint32_t &count, uint32_t &subsecCount)
1536 {
1537 //get time and date from system
1538 struct timeval tv;
1539 struct tm *timeinfo;
1540
1541 gettimeofday(&tv, NULL);
1542 timeinfo = localtime(&tv.tv_sec);
1543 //Write datetime according to EXIF Spec
1544 //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
1545 snprintf(dateTime, 20, "%04d:%02d:%02d %02d:%02d:%02d",
1546 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
1547 timeinfo->tm_mday, timeinfo->tm_hour,
1548 timeinfo->tm_min, timeinfo->tm_sec);
1549 count = 20;
1550
1551 //Write subsec according to EXIF Spec
1552 snprintf(subsecTime, 7, "%06ld", tv.tv_usec);
1553 subsecCount = 7;
1554 return NO_ERROR;
1555 }
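// Example output (illustrative): dateTime = "2014:03:21 14:05:09" (19 chars + '\0',
// count = 20) and subsecTime = "042318" (6 chars + '\0', subsecCount = 7).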
1556
1557 /*===========================================================================
1558 * FUNCTION : getExifFocalLength
1559 *
1560 * DESCRIPTION: get exif focal length
1561 *
1562 * PARAMETERS :
1563 * @focalLength : ptr to rational struct to store focal length
1564 *
1565 * RETURN : int32_t type of status
1566 * NO_ERROR -- success
1567 * non-zero failure code
1568 *==========================================================================*/
1569 int32_t getExifFocalLength(rat_t *focalLength, float value)
1570 {
1571 int focalLengthValue =
1572 (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION);
1573 return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
1574 }
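// Example (illustrative): value = 3.5 (mm) gives focalLengthValue = 3500 and the
// EXIF rational 3500/1000, i.e. three decimal places of precision are kept.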
1575
1576 /*===========================================================================
1577 * FUNCTION : getExifExpTimeInfo
1578 *
1579 * DESCRIPTION: get exif exposure time information
1580 *
1581 * PARAMETERS :
1582 * @expoTimeInfo : exposure time value
1583 * RETURN : int32_t type of status
1584 * NO_ERROR -- success
1585 * non-zero failure code
1586 *==========================================================================*/
1587 int32_t getExifExpTimeInfo(rat_t *expoTimeInfo, int64_t value)
1588 {
1589
1590 int cal_exposureTime;
1591 if (value != 0)
1592 cal_exposureTime = value;
1593 else
1594 cal_exposureTime = 60;
1595
1596 return getRational(expoTimeInfo, 1, cal_exposureTime);
1597 }
1598
1599 /*===========================================================================
1600 * FUNCTION : getExifGpsProcessingMethod
1601 *
1602 * DESCRIPTION: get GPS processing method
1603 *
1604 * PARAMETERS :
1605 * @gpsProcessingMethod : string to store GPS process method
1606 * @count : length of the string
1607 *
1608 * RETURN : int32_t type of status
1609 * NO_ERROR -- success
1610 * non-zero failure code
1611 *==========================================================================*/
1612 int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod,
1613 uint32_t &count, char* value)
1614 {
1615 if(value != NULL) {
1616 memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
1617 count = EXIF_ASCII_PREFIX_SIZE;
1618 strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, value, strlen(value));
1619 count += strlen(value);
1620 gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char
1621 return NO_ERROR;
1622 } else {
1623 return BAD_VALUE;
1624 }
1625 }
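/* Resulting buffer layout (sketch): the 8-byte "ASCII\0\0\0" EXIF prefix, then the
 * processing-method string, then a terminating '\0', with count covering all of it:
 *
 *   [ 'A','S','C','I','I', 0, 0, 0 ][ value bytes ][ '\0' ]
 */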
1626
1627 /*===========================================================================
1628 * FUNCTION : getExifLatitude
1629 *
1630 * DESCRIPTION: get exif latitude
1631 *
1632 * PARAMETERS :
1633 * @latitude : ptr to rational struct to store latitude info
1634 * @latRef : character to indicate latitude reference
1635 *
1636 * RETURN : int32_t type of status
1637 * NO_ERROR -- success
1638 * non-zero failure code
1639 *==========================================================================*/
1640 int32_t getExifLatitude(rat_t *latitude,
1641 char *latRef, double value)
1642 {
1643 char str[30];
1644 snprintf(str, sizeof(str), "%f", value);
1645 if(str != NULL) {
1646 parseGPSCoordinate(str, latitude);
1647
1648 //set Latitude Ref
1649 float latitudeValue = strtof(str, 0);
1650 if(latitudeValue < 0.0f) {
1651 latRef[0] = 'S';
1652 } else {
1653 latRef[0] = 'N';
1654 }
1655 latRef[1] = '\0';
1656 return NO_ERROR;
1657 }else{
1658 return BAD_VALUE;
1659 }
1660 }
1661
1662 /*===========================================================================
1663 * FUNCTION : getExifLongitude
1664 *
1665 * DESCRIPTION: get exif longitude
1666 *
1667 * PARAMETERS :
1668 * @longitude : ptr to rational struct to store longitude info
1669 * @lonRef : character to indicate longitude reference
1670 *
1671 * RETURN : int32_t type of status
1672 * NO_ERROR -- success
1673 * non-zero failure code
1674 *==========================================================================*/
1675 int32_t getExifLongitude(rat_t *longitude,
1676 char *lonRef, double value)
1677 {
1678 char str[30];
1679 snprintf(str, sizeof(str), "%f", value);
1680 if(str != NULL) {
1681 parseGPSCoordinate(str, longitude);
1682
1683 //set Longitude Ref
1684 float longitudeValue = strtof(str, 0);
1685 if(longitudeValue < 0.0f) {
1686 lonRef[0] = 'W';
1687 } else {
1688 lonRef[0] = 'E';
1689 }
1690 lonRef[1] = '\0';
1691 return NO_ERROR;
1692 }else{
1693 return BAD_VALUE;
1694 }
1695 }
1696
1697 /*===========================================================================
1698 * FUNCTION : getExifAltitude
1699 *
1700 * DESCRIPTION: get exif altitude
1701 *
1702 * PARAMETERS :
1703 * @altitude : ptr to rational struct to store altitude info
1704 * @altRef : character to indicate altitude reference
1705 *
1706 * RETURN : int32_t type of status
1707 * NO_ERROR -- success
1708 * non-zero failure code
1709 *==========================================================================*/
1710 int32_t getExifAltitude(rat_t *altitude,
1711 char *altRef, double value)
1712 {
1713 char str[30];
1714 snprintf(str, sizeof(str), "%f", value);
1715 if(str != NULL) {
1716 double value = atof(str);
1717 *altRef = 0;
1718 if(value < 0){
1719 *altRef = 1;
1720 value = -value;
1721 }
1722 return getRational(altitude, value*1000, 1000);
1723 }else{
1724 return BAD_VALUE;
1725 }
1726 }
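// Note: altRef follows the EXIF GPSAltitudeRef convention as used here:
// 0 = altitude above sea level, 1 = below sea level (the value is negated).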
1727
1728 /*===========================================================================
1729 * FUNCTION : getExifGpsDateTimeStamp
1730 *
1731 * DESCRIPTION: get exif GPS date time stamp
1732 *
1733 * PARAMETERS :
1734 * @gpsDateStamp : GPS date time stamp string
1735 * @bufLen : length of the string
1736 * @gpsTimeStamp : ptr to rational struct to store time stamp info
1737 *
1738 * RETURN : int32_t type of status
1739 * NO_ERROR -- success
1740 * non-zero failure code
1741 *==========================================================================*/
1742 int32_t getExifGpsDateTimeStamp(char *gpsDateStamp,
1743 uint32_t bufLen,
1744 rat_t *gpsTimeStamp, int64_t value)
1745 {
1746 char str[30];
1747 snprintf(str, sizeof(str), "%lld", value);
1748 if(str != NULL) {
1749 time_t unixTime = (time_t)atol(str);
1750 struct tm *UTCTimestamp = gmtime(&unixTime);
1751
1752 strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
1753
1754 getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
1755 getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
1756 getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
1757
1758 return NO_ERROR;
1759 } else {
1760 return BAD_VALUE;
1761 }
1762 }
1763
1764 int32_t getExifExposureValue(srat_t* exposure_val, int32_t exposure_comp,
1765 cam_rational_type_t step)
1766 {
1767 exposure_val->num = exposure_comp * step.numerator;
1768 exposure_val->denom = step.denominator;
1769 return 0;
1770 }
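
/* Worked example for getExifExposureValue (illustrative only): an exposure
 * compensation of +2 steps with a step size of 1/3 EV produces the signed
 * rational 2/3.
 */
#if 0
    srat_t exposure_val;
    cam_rational_type_t step;
    step.numerator = 1;
    step.denominator = 3;
    getExifExposureValue(&exposure_val, 2, step); // exposure_val == {2, 3}
#endif
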
1771 /*===========================================================================
1772 * FUNCTION : getExifData
1773 *
1774 * DESCRIPTION: get exif data to be passed into jpeg encoding
1775 *
1776 * PARAMETERS : @metadata : frame metadata; @jpeg_settings : jpeg settings
1777 *
1778 * RETURN : exif data from user setting and GPS
1779 *==========================================================================*/
1780 QCamera3Exif *QCamera3PicChannel::getExifData(metadata_buffer_t *metadata,
1781 jpeg_settings_t *jpeg_settings)
1782 {
1783 QCamera3Exif *exif = new QCamera3Exif();
1784 if (exif == NULL) {
1785 ALOGE("%s: No memory for QCamera3Exif", __func__);
1786 return NULL;
1787 }
1788
1789 int32_t rc = NO_ERROR;
1790 uint32_t count = 0;
1791
1792 // add exif entries
1793 {
1794 char dateTime[20];
1795 char subsecTime[7];
1796 uint32_t subsecCount;
1797 memset(dateTime, 0, sizeof(dateTime));
1798 memset(subsecTime, 0, sizeof(subsecTime));
1799 count = 20;
1800 subsecCount = 7;
1801 rc = getExifDateTime(dateTime, subsecTime, count, subsecCount);
1802 if(rc == NO_ERROR) {
1803 exif->addEntry(EXIFTAGID_DATE_TIME,
1804 EXIF_ASCII,
1805 count,
1806 (void *)dateTime);
1807 exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL,
1808 EXIF_ASCII,
1809 count,
1810 (void *)dateTime);
1811 exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED,
1812 EXIF_ASCII,
1813 count,
1814 (void *)dateTime);
1815 exif->addEntry(EXIFTAGID_SUBSEC_TIME,
1816 EXIF_ASCII,
1817 subsecCount,
1818 (void *)subsecTime);
1819 exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL,
1820 EXIF_ASCII,
1821 subsecCount,
1822 (void *)subsecTime);
1823 exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED,
1824 EXIF_ASCII,
1825 subsecCount,
1826 (void *)subsecTime);
1827 } else {
1828 ALOGE("%s: getExifDateTime failed", __func__);
1829 }
1830 }
1831
1832 if (IS_PARM_VALID(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)) {
1833 float focal_length = *(float *)POINTER_OF(
1834 CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
1835 rat_t focalLength;
1836 rc = getExifFocalLength(&focalLength, focal_length);
1837 if (rc == NO_ERROR) {
1838 exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
1839 EXIF_RATIONAL,
1840 1,
1841 (void *)&(focalLength));
1842 } else {
1843 ALOGE("%s: getExifFocalLength failed", __func__);
1844 }
1845 }
1846
1847 if (IS_PARM_VALID(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)) {
1848 int16_t isoSpeed = (int16_t)(*(int32_t *)POINTER_OF(
1849 CAM_INTF_META_SENSOR_SENSITIVITY, metadata));
1850 exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
1851 EXIF_SHORT,
1852 1,
1853 (void *)&(isoSpeed));
1854 }
1855
1856 if (IS_PARM_VALID(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)) {
1857 int64_t sensor_exposure_time = *(int64_t *)POINTER_OF(
1858 CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
1859 rat_t sensorExpTime;
1860 rc = getExifExpTimeInfo(&sensorExpTime, sensor_exposure_time);
1861 if (rc == NO_ERROR){
1862 exif->addEntry(EXIFTAGID_EXPOSURE_TIME,
1863 EXIF_RATIONAL,
1864 1,
1865 (void *)&(sensorExpTime));
1866 } else {
1867 ALOGE("%s: getExifExpTimeInfo failed", __func__);
1868 }
1869 }
1870
1871 if (strlen(jpeg_settings->gps_processing_method) > 0) {
1872 char gpsProcessingMethod[
1873 EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
1874 count = 0;
1875 rc = getExifGpsProcessingMethod(gpsProcessingMethod,
1876 count, jpeg_settings->gps_processing_method);
1877 if(rc == NO_ERROR) {
1878 exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
1879 EXIF_ASCII,
1880 count,
1881 (void *)gpsProcessingMethod);
1882 } else {
1883 ALOGE("%s: getExifGpsProcessingMethod failed", __func__);
1884 }
1885 }
1886
1887 if (jpeg_settings->gps_coordinates_valid) {
1888
1889 //latitude
1890 rat_t latitude[3];
1891 char latRef[2];
1892 rc = getExifLatitude(latitude, latRef,
1893 jpeg_settings->gps_coordinates[0]);
1894 if(rc == NO_ERROR) {
1895 exif->addEntry(EXIFTAGID_GPS_LATITUDE,
1896 EXIF_RATIONAL,
1897 3,
1898 (void *)latitude);
1899 exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
1900 EXIF_ASCII,
1901 2,
1902 (void *)latRef);
1903 } else {
1904 ALOGE("%s: getExifLatitude failed", __func__);
1905 }
1906
1907 //longitude
1908 rat_t longitude[3];
1909 char lonRef[2];
1910 rc = getExifLongitude(longitude, lonRef,
1911 jpeg_settings->gps_coordinates[1]);
1912 if(rc == NO_ERROR) {
1913 exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
1914 EXIF_RATIONAL,
1915 3,
1916 (void *)longitude);
1917
1918 exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
1919 EXIF_ASCII,
1920 2,
1921 (void *)lonRef);
1922 } else {
1923 ALOGE("%s: getExifLongitude failed", __func__);
1924 }
1925
1926 //altitude
1927 rat_t altitude;
1928 char altRef;
1929 rc = getExifAltitude(&altitude, &altRef,
1930 jpeg_settings->gps_coordinates[2]);
1931 if(rc == NO_ERROR) {
1932 exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
1933 EXIF_RATIONAL,
1934 1,
1935 (void *)&(altitude));
1936
1937 exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
1938 EXIF_BYTE,
1939 1,
1940 (void *)&altRef);
1941 } else {
1942 ALOGE("%s: getExifAltitude failed", __func__);
1943 }
1944 }
1945
1946 if (jpeg_settings->gps_timestamp_valid) {
1947
1948 char gpsDateStamp[20];
1949 rat_t gpsTimeStamp[3];
1950 rc = getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp,
1951 jpeg_settings->gps_timestamp);
1952 if(rc == NO_ERROR) {
1953 exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
1954 EXIF_ASCII,
1955 strlen(gpsDateStamp) + 1,
1956 (void *)gpsDateStamp);
1957
1958 exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
1959 EXIF_RATIONAL,
1960 3,
1961 (void *)gpsTimeStamp);
1962 } else {
1963 ALOGE("%s: getExifGpsDataTimeStamp failed", __func__);
1964 }
1965 }
1966
1967 if (IS_PARM_VALID(CAM_INTF_PARM_EV, metadata) &&
1968 IS_PARM_VALID(CAM_INTF_PARM_EV_STEP, metadata)) {
1969 int32_t exposure_comp = *(int32_t *)POINTER_OF(
1970 CAM_INTF_PARM_EV, metadata);
1971 cam_rational_type_t comp_step = *(cam_rational_type_t *)POINTER_OF(
1972 CAM_INTF_PARM_EV_STEP, metadata);
1973 srat_t exposure_val;
1974 rc = getExifExposureValue(&exposure_val, exposure_comp, comp_step);
1975 if(rc == NO_ERROR) {
1976 exif->addEntry(EXIFTAGID_EXPOSURE_BIAS_VALUE,
1977 EXIF_SRATIONAL,
1978 1,
1979 (void *)(&exposure_val));
1980 } else {
1981 ALOGE("%s: getExifExposureValue failed ", __func__);
1982 }
1983 }
1984
1985 char value[PROPERTY_VALUE_MAX];
1986 if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
1987 exif->addEntry(EXIFTAGID_MAKE,
1988 EXIF_ASCII,
1989 strlen(value) + 1,
1990 (void *)value);
1991 } else {
1992 ALOGE("%s: getExifMaker failed", __func__);
1993 }
1994
1995 if (property_get("ro.product.model", value, "QCAM-AA") > 0) {
1996 exif->addEntry(EXIFTAGID_MODEL,
1997 EXIF_ASCII,
1998 strlen(value) + 1,
1999 (void *)value);
2000 } else {
2001 ALOGE("%s: getExifModel failed", __func__);
2002 }
2003
2004 return exif;
2005 }
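
/* Caller-side sketch for getExifData (illustrative only; pExif and the way the
 * result is consumed by the jpeg encode path are assumptions):
 */
#if 0
    QCamera3Exif *pExif = getExifData(metadata, jpeg_settings);
    if (pExif != NULL) {
        // pass the populated entries to the jpeg encoder configuration,
        // then release the object once the encode job has copied them
        delete pExif;
    }
#endif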
2006
2007 void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
2008 {
2009 mYuvWidth = width;
2010 mYuvHeight = height;
2011 }
2012
2013 int QCamera3PicChannel::kMaxBuffers = 1;
2014
2015 /*===========================================================================
2016 * FUNCTION : QCamera3ReprocessChannel
2017 *
2018 * DESCRIPTION: constructor of QCamera3ReprocessChannel
2019 *
2020 * PARAMETERS :
2021 * @cam_handle : camera handle
2022 * @cam_ops : ptr to camera ops table
2023 * @cb_routine : callback routine invoked when a frame is available
2024 *
2025 * RETURN : none
2026 *==========================================================================*/
2027 QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
2028 mm_camera_ops_t *cam_ops,
2029 channel_cb_routine cb_routine,
2030 cam_padding_info_t *paddingInfo,
2031 void *userData, void *ch_hdl) :
2032 QCamera3Channel(cam_handle, cam_ops, cb_routine, paddingInfo, userData),
2033 picChHandle(ch_hdl),
2034 m_pSrcChannel(NULL),
2035 m_pMetaChannel(NULL),
2036 mMemory(NULL)
2037 {
2038 memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
2039 }
2040
2041
2042 /*===========================================================================
2043 * FUNCTION : initialize
2044 *
2045 * DESCRIPTION: initialize reprocess channel with default channel attributes
2046 *
2047 * PARAMETERS : none
2048 *
2049 * RETURN : int32_t type of status
2050 * NO_ERROR -- success
2051 * non-zero failure code
2052 *
2053 *==========================================================================*/
2054 int32_t QCamera3ReprocessChannel::initialize()
2055 {
2056 int32_t rc = NO_ERROR;
2057 mm_camera_channel_attr_t attr;
2058
2059 memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
2060 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
2061 attr.max_unmatched_frames = 1;
2062
2063 rc = init(&attr, NULL);
2064 if (rc < 0) {
2065 ALOGE("%s: init failed", __func__);
2066 }
2067 return rc;
2068 }
2069
2070
2071 /*===========================================================================
2072 * FUNCTION : streamCbRoutine
2073 *
2074 * DESCRIPTION: callback routine for a reprocessed frame; forwards the frame
2075 * to the postprocessor for jpeg encoding
2076 *
2077 * PARAMETERS :
2078 * @super_frame : super buffer containing the reprocessed frame
2079 * @stream : reprocess stream the frame arrived on
2080 *
2081 * RETURN : none
2082 *==========================================================================*/
2083 void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
2084 QCamera3Stream *stream)
2085 {
2086 //Got the pproc data callback. Now send to jpeg encoding
2087 uint8_t frameIndex;
2088 mm_camera_super_buf_t* frame = NULL;
2089 QCamera3PicChannel *obj = (QCamera3PicChannel *)picChHandle;
2090
2091 if(!super_frame) {
2092 ALOGE("%s: Invalid Super buffer",__func__);
2093 return;
2094 }
2095
2096 if(super_frame->num_bufs != 1) {
2097 ALOGE("%s: Multiple streams are not supported",__func__);
2098 return;
2099 }
2100 if(super_frame->bufs[0] == NULL ) {
2101 ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
2102 __func__);
2103 return;
2104 }
2105
2106 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
2107 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
2108 if (frame == NULL) {
2109 ALOGE("%s: Error allocating memory to save received_frame structure.",
2110 __func__);
2111 if(stream) {
2112 stream->bufDone(frameIndex);
2113 }
2114 return;
2115 }
2116 *frame = *super_frame;
2117 obj->m_postprocessor.processPPData(frame);
2118 free(super_frame);
2119 return;
2120 }
2121
2122 /*===========================================================================
2123 * FUNCTION : QCamera3ReprocessChannel
2124 *
2125 * DESCRIPTION: default constructor of QCamera3ReprocessChannel
2126 *
2127 * PARAMETERS : none
2128 *
2129 * RETURN : none
2130 *==========================================================================*/
2131 QCamera3ReprocessChannel::QCamera3ReprocessChannel() :
2132 m_pSrcChannel(NULL),
2133 m_pMetaChannel(NULL)
2134 {
2135 }
2136
2137 /*===========================================================================
2138 * FUNCTION : getStreamBufs
2139 *
2140 * DESCRIPTION: allocate and register the buffers of the reprocess channel
2141 *
2142 * PARAMETERS : @len : length of each stream buffer
2143 *
2144 * RETURN : QCamera3Memory *
2145 *==========================================================================*/
2146 QCamera3Memory* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
2147 {
2148 int rc = 0;
2149
2150 mMemory = new QCamera3HeapMemory();
2151 if (!mMemory) {
2152 ALOGE("%s: unable to create reproc memory", __func__);
2153 return NULL;
2154 }
2155
2156 //Queue all YUV buffers in the beginning (queueAll = true)
2157 rc = mMemory->allocate(2, len, true);
2158 if (rc < 0) {
2159 ALOGE("%s: unable to allocate reproc memory", __func__);
2160 delete mMemory;
2161 mMemory = NULL;
2162 return NULL;
2163 }
2164 return mMemory;
2165 }
2166
2167 /*===========================================================================
2168 * FUNCTION : putStreamBufs
2169 *
2170 * DESCRIPTION: release the buffers of the reprocess channel
2171 *
2172 * PARAMETERS : none
2173 *
2174 * RETURN : none
2175 *==========================================================================*/
2176 void QCamera3ReprocessChannel::putStreamBufs()
2177 {
2178 mMemory->deallocate();
2179 delete mMemory;
2180 mMemory = NULL;
2181 }
2182
2183 /*===========================================================================
2184 * FUNCTION : ~QCamera3ReprocessChannel
2185 *
2186 * DESCRIPTION: destructor of QCamera3ReprocessChannel
2187 *
2188 * PARAMETERS : none
2189 *
2190 * RETURN : none
2191 *==========================================================================*/
2192 QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
2193 {
2194 }
2195
2196 /*===========================================================================
2197 * FUNCTION : getStreamBySrcHandle
2198 *
2199 * DESCRIPTION: find reprocess stream by its source stream handle
2200 *
2201 * PARAMETERS :
2202 * @srcHandle : source stream handle
2203 *
2204 * RETURN : ptr to reprocess stream if found. NULL if not found
2205 *==========================================================================*/
2206 QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle)
2207 {
2208 QCamera3Stream *pStream = NULL;
2209
2210 for (int i = 0; i < m_numStreams; i++) {
2211 if (mSrcStreamHandles[i] == srcHandle) {
2212 pStream = mStreams[i];
2213 break;
2214 }
2215 }
2216 return pStream;
2217 }
2218
2219 /*===========================================================================
2220 * FUNCTION : getSrcStreamBySrcHandle
2221 *
2222 * DESCRIPTION: find source stream by source stream handle
2223 *
2224 * PARAMETERS :
2225 * @srcHandle : source stream handle
2226 *
2227 * RETURN : ptr to source stream if found. NULL if not found
2228 *==========================================================================*/
2229 QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle)
2230 {
2231 QCamera3Stream *pStream = NULL;
2232
2233 for (int i = 0; i < m_numStreams; i++) {
2234 if (mSrcStreamHandles[i] == srcHandle) {
2235 pStream = m_pSrcChannel->getStreamByIndex(i);
2236 break;
2237 }
2238 }
2239 return pStream;
2240 }
2241
2242 /*===========================================================================
2243 * FUNCTION : metadataBufDone
2244 *
2245 * DESCRIPTION: buf done method for a metadata buffer
2246 *
2247 * PARAMETERS :
2248 * @recvd_frame : received metadata frame
2249 *
2250 * RETURN : int32_t type of status
2251 *==========================================================================*/
2252 int32_t QCamera3ReprocessChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
2253 {
2254 int32_t rc;
2255 rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);
2256 free(recvd_frame);
2257 recvd_frame = NULL;
2258 return rc;
2259 }
2260
2261 /*===========================================================================
2262 * FUNCTION : doReprocess
2263 *
2264 * DESCRIPTION: request to do a reprocess on the frame
2265 *
2266 * PARAMETERS :
2267 * @frame : frame to be reprocessed
2268 *
2269 * RETURN : int32_t type of status
2270 * NO_ERROR -- success
2271 * non-zero failure code
2272 *==========================================================================*/
2273 int32_t QCamera3ReprocessChannel::doReprocess(mm_camera_super_buf_t *frame,
2274 mm_camera_super_buf_t *meta_frame)
2275 {
2276 int32_t rc = 0;
2277 if (m_numStreams < 1) {
2278 ALOGE("%s: No reprocess stream is created", __func__);
2279 return -1;
2280 }
2281 if (m_pSrcChannel == NULL) {
2282 ALOGE("%s: No source channel for reprocess", __func__);
2283 return -1;
2284 }
2285 for (int i = 0; i < frame->num_bufs; i++) {
2286 QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
2287 if (pStream != NULL) {
2288 cam_stream_parm_buffer_t param;
2289 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2290 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
2291 param.reprocess.buf_index = frame->bufs[i]->buf_idx;
2292 if (meta_frame != NULL) {
2293 param.reprocess.meta_present = 1;
2294 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
2295 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
2296 }
2297 rc = pStream->setParameter(param);
2298 if (rc != NO_ERROR) {
2299 ALOGE("%s: stream setParameter for reprocess failed", __func__);
2300 break;
2301 }
2302 }
2303 }
2304 return rc;
2305 }
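
/* Usage sketch for doReprocess (illustrative only; pReprocChannel and the
 * matched frame/metadata pair are placeholders supplied by the caller):
 */
#if 0
    // The caller is expected to hold a super buffer from the source channel
    // and the corresponding metadata super buffer before triggering reprocess.
    int32_t rc = pReprocChannel->doReprocess(frame, meta_frame);
#endif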
2306
2307 int32_t QCamera3ReprocessChannel::doReprocessOffline(mm_camera_super_buf_t *frame,
2308 metadata_buffer_t *metadata)
2309 {
2310 int32_t rc = 0;
2311 OfflineBuffer mappedBuffer;
2312 if (m_numStreams < 1) {
2313 ALOGE("%s: No reprocess stream is created", __func__);
2314 return -1;
2315 }
2316 if (m_pSrcChannel == NULL) {
2317 ALOGE("%s: No source channel for reprocess", __func__);
2318 return -1;
2319 }
2320
2321 uint32_t buf_idx = 0;
2322 for (int i = 0; i < frame->num_bufs; i++) {
2323 QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
2324 QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);
2325 if (pStream != NULL && pSrcStream != NULL) {
2326
2327 rc = mStreams[i]->mapBuf(
2328 CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
2329 buf_idx, -1,
2330 frame->bufs[i]->fd, frame->bufs[i]->frame_len);
2331
2332 if (rc == NO_ERROR) {
2333 memset(&mappedBuffer, 0, sizeof(OfflineBuffer));
2334 mappedBuffer.index = buf_idx;
2335 mappedBuffer.stream = pStream;
2336 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
2337 mOfflineBuffers.push_back(mappedBuffer);
2338
2339 cam_stream_parm_buffer_t param;
2340 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2341 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
2342 param.reprocess.buf_index = buf_idx;
2343
2344 param.reprocess.meta_present = 1;
2345 char* private_data = (char *)POINTER_OF(
2346 CAM_INTF_META_PRIVATE_DATA, metadata);
2347 memcpy(param.reprocess.private_data, private_data,
2348 MAX_METADATA_PAYLOAD_SIZE);
2349
2350 // Find crop info for reprocess stream
2351 cam_crop_data_t *crop_data = (cam_crop_data_t *)
2352 POINTER_OF(CAM_INTF_META_CROP_DATA, metadata);
2353 for (int j = 0; j < crop_data->num_of_streams; j++) {
2354 if (crop_data->crop_info[j].stream_id ==
2355 pSrcStream->getMyServerID()) {
2356 param.reprocess.crop_rect =
2357 crop_data->crop_info[j].crop;
2358 break;
2359 }
2360 }
2361 rc = pStream->setParameter(param);
2362 if (rc != NO_ERROR) {
2363 ALOGE("%s: stream setParameter for reprocess failed", __func__);
2364 break;
2365 }
2366 }
2367 }
2368 }
2369 return rc;
2370 }
2371
2372 /*===========================================================================
2373 * FUNCTION : stop
2374 *
2375 * DESCRIPTION: Unmap offline buffers and stop channel
2376 *
2377 * PARAMETERS : none
2378 *
2379 * RETURN : int32_t type of status
2380 * NO_ERROR -- success
2381 * non-zero failure code
2382 *==========================================================================*/
2383 int32_t QCamera3ReprocessChannel::stop()
2384 {
2385 if (!mOfflineBuffers.empty()) {
2386 QCamera3Stream *stream = NULL;
2387 List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
2388 int error = NO_ERROR;
2389 for( ; it != mOfflineBuffers.end(); it++) {
2390 stream = (*it).stream;
2391 if (NULL != stream) {
2392 error = stream->unmapBuf((*it).type,
2393 (*it).index,
2394 -1);
2395 if (NO_ERROR != error) {
2396 ALOGE("%s: Error during offline buffer unmap %d",
2397 __func__, error);
2398 }
2399 }
2400 }
2401 mOfflineBuffers.clear();
2402 }
2403
2404 return QCamera3Channel::stop();
2405 }
2406
2407 /*===========================================================================
2408 * FUNCTION : doReprocess
2409 *
2410 * DESCRIPTION: request to do a reprocess on the frame
2411 *
2412 * PARAMETERS :
2413 * @buf_fd : fd to the input buffer that needs reprocess
2414 * @buf_length : length of the input buffer
2415 * @ret_val : result of reprocess.
2416 * Example: could be the face ID when registering a face image.
2417 *
2418 * RETURN : int32_t type of status
2419 * NO_ERROR -- success
2420 * non-zero failure code
2421 *==========================================================================*/
2422 int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd,
2423 uint32_t buf_length,
2424 int32_t &ret_val,
2425 mm_camera_super_buf_t *meta_frame)
2426 {
2427 int32_t rc = 0;
2428 if (m_numStreams < 1) {
2429 ALOGE("%s: No reprocess stream is created", __func__);
2430 return -1;
2431 }
2432 if (meta_frame == NULL) {
2433 ALOGE("%s: Did not get corresponding metadata in time", __func__);
2434 return -1;
2435 }
2436
2437 uint32_t buf_idx = 0;
2438 for (int i = 0; i < m_numStreams; i++) {
2439 rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
2440 buf_idx, -1,
2441 buf_fd, buf_length);
2442
2443 if (rc == NO_ERROR) {
2444 cam_stream_parm_buffer_t param;
2445 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2446 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
2447 param.reprocess.buf_index = buf_idx;
2448 param.reprocess.meta_present = 1;
2449 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
2450 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
2451 rc = mStreams[i]->setParameter(param);
2452 if (rc == NO_ERROR) {
2453 ret_val = param.reprocess.ret_val;
2454 }
2455 mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
2456 buf_idx, -1);
2457 }
2458 }
2459 return rc;
2460 }
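
/* Usage sketch for the fd-based doReprocess (illustrative only; inputFd,
 * inputLen and pReprocChannel are placeholders owned by the caller):
 */
#if 0
    int32_t faceId = -1;
    int32_t rc = pReprocChannel->doReprocess(inputFd, inputLen, faceId,
                                             meta_frame);
    if (rc == NO_ERROR) {
        // faceId now holds ret_val from the reprocess, e.g. the id assigned
        // when registering a face image
    }
#endif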
2461
2462 /*===========================================================================
2463 * FUNCTION : addReprocStreamsFromSource
2464 *
2465 * DESCRIPTION: add reprocess streams from input source channel
2466 *
2467 * PARAMETERS :
2468 * @pp_config : pp feature configuration
2469 * @pSrcChannel : ptr to input source channel that needs reprocess
2470 * @pMetaChannel : ptr to metadata channel to get corresp. metadata
2472 *
2473 * RETURN : int32_t type of status
2474 * NO_ERROR -- success
2475 * non-zero failure code
2476 *==========================================================================*/
2477 int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
2478 QCamera3Channel *pSrcChannel,
2479 QCamera3Channel *pMetaChannel)
2480 {
2481 int32_t rc = 0;
2482 QCamera3Stream *pSrcStream = pSrcChannel->getStreamByIndex(0);
2483 if (pSrcStream == NULL) {
2484 ALOGE("%s: source channel doesn't have a stream", __func__);
2485 return BAD_VALUE;
2486 }
2487 cam_stream_reproc_config_t reprocess_config;
2488 cam_dimension_t streamDim;
2489 cam_stream_type_t streamType;
2490 cam_format_t streamFormat;
2491 cam_frame_len_offset_t frameOffset;
2492 int num_buffers = 2;
2493
2494 streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
2495 pSrcStream->getFormat(streamFormat);
2496 pSrcStream->getFrameDimension(streamDim);
2497 pSrcStream->getFrameOffset(frameOffset);
2498 reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
2499
2500 reprocess_config.offline.input_fmt = streamFormat;
2501 reprocess_config.offline.input_dim = streamDim;
2502 reprocess_config.offline.input_buf_planes.plane_info = frameOffset;
2503 reprocess_config.offline.num_of_bufs = num_buffers;
2504 reprocess_config.offline.input_stream_type = pSrcStream->getMyType();
2505
2506
2507 reprocess_config.pp_feature_config = pp_config;
2508 mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
2509
2510 // pp feature config
2511 if (pp_config.feature_mask & CAM_QCOM_FEATURE_ROTATION) {
2512 if (pp_config.rotation == ROTATE_90 ||
2513 pp_config.rotation == ROTATE_270) {
2514 // rotated by 90 or 270, need to switch width and height
2515 int32_t temp = streamDim.height;
2516 streamDim.height = streamDim.width;
2517 streamDim.width = temp;
2518 }
2519 }
2520
2521 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
2522 m_handle,
2523 m_camOps,
2524 mPaddingInfo,
2525 (QCamera3Channel*)this);
2526 if (pStream == NULL) {
2527 ALOGE("%s: No mem for Stream", __func__);
2528 return NO_MEMORY;
2529 }
2530
2531 rc = pStream->init(streamType, streamFormat, streamDim, &reprocess_config,
2532 num_buffers,QCamera3Channel::streamCbRoutine, this);
2533
2534
2535 if (rc == 0) {
2536 mStreams[m_numStreams] = pStream;
2537 m_numStreams++;
2538 } else {
2539 ALOGE("%s: failed to create reprocess stream", __func__);
2540 delete pStream;
2541 }
2542
2543 if (rc == NO_ERROR) {
2544 m_pSrcChannel = pSrcChannel;
2545 m_pMetaChannel = pMetaChannel;
2546 }
2547 if(m_camOps->request_super_buf(m_camHandle,m_handle,1) < 0) {
2548 ALOGE("%s: Request for super buffer failed",__func__);
2549 }
2550 return rc;
2551 }
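
/* Setup sketch for a reprocess channel (illustrative only; pReprocChannel,
 * pSrcChannel and pMetaChannel are placeholders owned by the caller):
 */
#if 0
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(pp_config));
    pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
    pp_config.rotation = ROTATE_90;

    int32_t rc = pReprocChannel->initialize();
    if (rc == NO_ERROR) {
        rc = pReprocChannel->addReprocStreamsFromSource(pp_config,
                pSrcChannel, pMetaChannel);
    }
#endif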
2552
2553 cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};
2554
2555 QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
2556 mm_camera_ops_t *cam_ops,
2557 cam_padding_info_t *paddingInfo,
2558 void *userData) :
2559 QCamera3Channel(cam_handle, cam_ops,
2560 NULL, paddingInfo, userData),
2561 mMemory(NULL)
2562 {
2563 }
2564
2565 QCamera3SupportChannel::~QCamera3SupportChannel()
2566 {
2567 if (m_bIsActive)
2568 stop();
2569
2570 if (mMemory) {
2571 mMemory->deallocate();
2572 delete mMemory;
2573 mMemory = NULL;
2574 }
2575 }
2576
2577 int32_t QCamera3SupportChannel::initialize()
2578 {
2579 int32_t rc;
2580
2581 if (mMemory || m_numStreams > 0) {
2582 ALOGE("%s: Support channel already initialized", __func__);
2583 return -EINVAL;
2584 }
2585
2586 rc = init(NULL, NULL);
2587 if (rc < 0) {
2588 ALOGE("%s: init failed", __func__);
2589 return rc;
2590 }
2591
2592 // Hardcode to VGA size for now
2593 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_CALLBACK,
2594 CAM_FORMAT_YUV_420_NV21, kDim, MIN_STREAMING_BUFFER_NUM);
2595 if (rc < 0) {
2596 ALOGE("%s: addStream failed", __func__);
2597 }
2598 return rc;
2599 }
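
/* Lifecycle sketch for the support channel (illustrative only; the constructor
 * arguments and the base-class start() call are assumptions about the
 * surrounding channel framework):
 */
#if 0
    QCamera3SupportChannel *pSupport = new QCamera3SupportChannel(
            cam_handle, cam_ops, &padding_info, user_data);
    if (pSupport && pSupport->initialize() == NO_ERROR) {
        pSupport->start();   // streams an internal VGA CALLBACK stream
    }
#endif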
2600
2601 int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
2602 uint32_t /*frameNumber*/)
2603 {
2604 return NO_ERROR;
2605 }
2606
2607 void QCamera3SupportChannel::streamCbRoutine(
2608 mm_camera_super_buf_t *super_frame,
2609 QCamera3Stream * /*stream*/)
2610 {
2611 if (super_frame == NULL || super_frame->num_bufs != 1) {
2612 ALOGE("%s: super_frame is not valid", __func__);
2613 return;
2614 }
2615 bufDone(super_frame);
2616 free(super_frame);
2617 }
2618
2619 QCamera3Memory* QCamera3SupportChannel::getStreamBufs(uint32_t len)
2620 {
2621 int rc;
2622
2623 mMemory = new QCamera3HeapMemory();
2624 if (!mMemory) {
2625 ALOGE("%s: unable to create heap memory", __func__);
2626 return NULL;
2627 }
2628 rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
2629 if (rc < 0) {
2630 ALOGE("%s: unable to allocate heap memory", __func__);
2631 delete mMemory;
2632 mMemory = NULL;
2633 return NULL;
2634 }
2635 return mMemory;
2636 }
2637
2638 void QCamera3SupportChannel::putStreamBufs()
2639 {
2640 mMemory->deallocate();
2641 delete mMemory;
2642 mMemory = NULL;
2643 }
2644
2645 }; // namespace qcamera
2646