1 /* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 *     * Redistributions of source code must retain the above copyright
7 *       notice, this list of conditions and the following disclaimer.
8 *     * Redistributions in binary form must reproduce the above
9 *       copyright notice, this list of conditions and the following
10 *       disclaimer in the documentation and/or other materials provided
11 *       with the distribution.
12 *     * Neither the name of The Linux Foundation nor the names of its
13 *       contributors may be used to endorse or promote products derived
14 *       from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29 
30 #define ATRACE_TAG ATRACE_TAG_CAMERA
31 #define LOG_TAG "QCamera3Channel"
32 //#define LOG_NDEBUG 0
33 #include <fcntl.h>
34 #include <stdlib.h>
35 #include <cstdlib>
36 #include <stdio.h>
37 #include <string.h>
38 #include <hardware/camera3.h>
39 #include <system/camera_metadata.h>
40 #include <gralloc_priv.h>
41 #include <utils/Log.h>
42 #include <utils/Errors.h>
43 #include <utils/Trace.h>
44 #include <cutils/properties.h>
45 #include "QCamera3Channel.h"
46 #include "QCamera3HWI.h"
47 
48 using namespace android;
49 
50 #define MIN_STREAMING_BUFFER_NUM (7 + 11)
51 
52 namespace qcamera {
53 static const char ExifAsciiPrefix[] =
54     { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };          // "ASCII\0\0\0"
55 static const char ExifUndefinedPrefix[] =
56     { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };   // "\0\0\0\0\0\0\0\0"
57 
58 #define EXIF_ASCII_PREFIX_SIZE           8   //(sizeof(ExifAsciiPrefix))
59 #define FOCAL_LENGTH_DECIMAL_PRECISION   100
60 
61 #define VIDEO_FORMAT    CAM_FORMAT_YUV_420_NV12
62 #define SNAPSHOT_FORMAT CAM_FORMAT_YUV_420_NV21
63 #define PREVIEW_FORMAT  CAM_FORMAT_YUV_420_NV21
64 #define DEFAULT_FORMAT  CAM_FORMAT_YUV_420_NV21
65 #define CALLBACK_FORMAT CAM_FORMAT_YUV_420_NV21
66 #define RAW_FORMAT      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG
67 
68 /*===========================================================================
69  * FUNCTION   : QCamera3Channel
70  *
71  * DESCRIPTION: constructor of QCamera3Channel
72  *
73  * PARAMETERS :
74  *   @cam_handle : camera handle
75  *   @cam_ops    : ptr to camera ops table
76  *
77  * RETURN     : none
78  *==========================================================================*/
79 QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
80                                mm_camera_ops_t *cam_ops,
81                                channel_cb_routine cb_routine,
82                                cam_padding_info_t *paddingInfo,
83                                uint32_t postprocess_mask,
84                                void *userData)
85 {
86     m_camHandle = cam_handle;
87     m_camOps = cam_ops;
88     m_bIsActive = false;
89 
90     m_handle = 0;
91     m_numStreams = 0;
92     memset(mStreams, 0, sizeof(mStreams));
93     mUserData = userData;
94 
95     mStreamInfoBuf = NULL;
96     mChannelCB = cb_routine;
97     mPaddingInfo = paddingInfo;
98 
99     mPostProcMask = postprocess_mask;
100 
101     char prop[PROPERTY_VALUE_MAX];
102     property_get("persist.camera.yuv.dump", prop, "0");
103     mYUVDump = atoi(prop);
104     mIsType = IS_TYPE_NONE;
105 }
106 
107 /*===========================================================================
108  * FUNCTION   : QCamera3Channel
109  *
110  * DESCRIPTION: default constructor of QCamera3Channel
111  *
112  * PARAMETERS : none
113  *
114  * RETURN     : none
115  *==========================================================================*/
116 QCamera3Channel::QCamera3Channel()
117 {
118     m_camHandle = 0;
119     m_camOps = NULL;
120     m_bIsActive = false;
121 
122     m_handle = 0;
123     m_numStreams = 0;
124     memset(mStreams, 0, sizeof(mStreams));
125     mUserData = NULL;
126 
127     mStreamInfoBuf = NULL;
128     mChannelCB = NULL;
129     mPaddingInfo = NULL;
130 
131     mPostProcMask = 0;
132 }
133 
134 /*===========================================================================
135  * FUNCTION   : ~QCamera3Channel
136  *
137  * DESCRIPTION: destructor of QCamera3Channel
138  *
139  * PARAMETERS : none
140  *
141  * RETURN     : none
142  *==========================================================================*/
143 QCamera3Channel::~QCamera3Channel()
144 {
145     if (m_bIsActive)
146         stop();
147 
148     for (int i = 0; i < m_numStreams; i++) {
149         if (mStreams[i] != NULL) {
150             delete mStreams[i];
151             mStreams[i] = 0;
152         }
153     }
154     if (m_handle) {
155         m_camOps->delete_channel(m_camHandle, m_handle);
156         ALOGE("%s: deleting channel %d", __func__, m_handle);
157         m_handle = 0;
158     }
159     m_numStreams = 0;
160 }
161 
162 /*===========================================================================
163  * FUNCTION   : init
164  *
165  * DESCRIPTION: initialization of channel
166  *
167  * PARAMETERS :
168  *   @attr    : channel bundle attribute setting
169  *   @dataCB  : data notify callback
170  *   @userData: user data ptr
171  *
172  * RETURN     : int32_t type of status
173  *              NO_ERROR  -- success
174  *              non-zero failure code
175  *==========================================================================*/
176 int32_t QCamera3Channel::init(mm_camera_channel_attr_t *attr,
177                              mm_camera_buf_notify_t dataCB)
178 {
179     m_handle = m_camOps->add_channel(m_camHandle,
180                                       attr,
181                                       dataCB,
182                                       this);
183     if (m_handle == 0) {
184         ALOGE("%s: Add channel failed", __func__);
185         return UNKNOWN_ERROR;
186     }
187     return NO_ERROR;
188 }
189 
190 /*===========================================================================
191  * FUNCTION   : addStream
192  *
193  * DESCRIPTION: add a stream into channel
194  *
195  * PARAMETERS :
196  *   @allocator      : stream related buffer allocator
197  *   @streamInfoBuf  : ptr to buf that contains stream info
198  *   @minStreamBufNum: number of stream buffers needed
199  *   @paddingInfo    : padding information
200  *   @stream_cb      : stream data notify callback
201  *   @userdata       : user data ptr
202  *
203  * RETURN     : int32_t type of status
204  *              NO_ERROR  -- success
205  *              non-zero failure code
206  *==========================================================================*/
207 int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
208                                   cam_format_t streamFormat,
209                                   cam_dimension_t streamDim,
210                                   uint8_t minStreamBufNum,
211                                   uint32_t postprocessMask,
212                                   cam_is_type_t isType)
213 {
214     int32_t rc = NO_ERROR;
215 
216     if (m_numStreams >= 1) {
217         ALOGE("%s: Only one stream per channel supported in v3 Hal", __func__);
218         return BAD_VALUE;
219     }
220 
221     if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
222         ALOGE("%s: stream number (%d) exceeds max limit (%d)",
223               __func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
224         return BAD_VALUE;
225     }
226     QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
227                                                m_handle,
228                                                m_camOps,
229                                                mPaddingInfo,
230                                                this);
231     if (pStream == NULL) {
232         ALOGE("%s: No mem for Stream", __func__);
233         return NO_MEMORY;
234     }
235 
236     rc = pStream->init(streamType, streamFormat, streamDim, NULL, minStreamBufNum,
237                        postprocessMask, isType, streamCbRoutine, this);
238     if (rc == 0) {
239         mStreams[m_numStreams] = pStream;
240         m_numStreams++;
241     } else {
242         delete pStream;
243     }
244     return rc;
245 }
246 
247 /*===========================================================================
248  * FUNCTION   : start
249  *
250  * DESCRIPTION: start channel, which will start all streams belonging to this channel
251  *
252  * PARAMETERS :
253  *
254  * RETURN     : int32_t type of status
255  *              NO_ERROR  -- success
256  *              non-zero failure code
257  *==========================================================================*/
258 int32_t QCamera3Channel::start()
259 {
260     ATRACE_CALL();
261     int32_t rc = NO_ERROR;
262 
263     if (m_numStreams > 1) {
264         ALOGE("%s: bundle not supported", __func__);
265     } else if (m_numStreams == 0) {
266         return NO_INIT;
267     }
268 
269     if(m_bIsActive) {
270         ALOGD("%s: Attempt to start active channel", __func__);
271         return rc;
272     }
273 
274     for (int i = 0; i < m_numStreams; i++) {
275         if (mStreams[i] != NULL) {
276             mStreams[i]->start();
277         }
278     }
279     rc = m_camOps->start_channel(m_camHandle, m_handle);
280 
281     if (rc != NO_ERROR) {
282         for (int i = 0; i < m_numStreams; i++) {
283             if (mStreams[i] != NULL) {
284                 mStreams[i]->stop();
285             }
286         }
287     } else {
288         m_bIsActive = true;
289     }
290 
291     return rc;
292 }
293 
294 /*===========================================================================
295  * FUNCTION   : stop
296  *
297  * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
298  *
299  * PARAMETERS : none
300  *
301  * RETURN     : int32_t type of status
302  *              NO_ERROR  -- success
303  *              non-zero failure code
304  *==========================================================================*/
305 int32_t QCamera3Channel::stop()
306 {
307     ATRACE_CALL();
308     int32_t rc = NO_ERROR;
309     if(!m_bIsActive) {
310         ALOGE("%s: Attempt to stop inactive channel",__func__);
311         return rc;
312     }
313 
314     for (int i = 0; i < m_numStreams; i++) {
315         if (mStreams[i] != NULL) {
316             mStreams[i]->stop();
317         }
318     }
319 
320     rc = m_camOps->stop_channel(m_camHandle, m_handle);
321 
322     m_bIsActive = false;
323     return rc;
324 }
325 
326 /*===========================================================================
327  * FUNCTION   : bufDone
328  *
329  * DESCRIPTION: return a stream buf back to kernel
330  *
331  * PARAMETERS :
332  *   @recvd_frame  : stream buf frame to be returned
333  *
334  * RETURN     : int32_t type of status
335  *              NO_ERROR  -- success
336  *              non-zero failure code
337  *==========================================================================*/
338 int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
339 {
340     int32_t rc = NO_ERROR;
341     for (int i = 0; i < recvd_frame->num_bufs; i++) {
342          if (recvd_frame->bufs[i] != NULL) {
343              for (int j = 0; j < m_numStreams; j++) {
344                  if (mStreams[j] != NULL &&
345                      mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
346                      rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
347                      break; // break loop j
348                  }
349              }
350          }
351     }
352 
353     return rc;
354 }
355 
356 /*===========================================================================
357  * FUNCTION   : getStreamTypeMask
358  *
359  * DESCRIPTION: Get bit mask of all stream types in this channel
360  *
361  * PARAMETERS : None
362  *
363  * RETURN     : Bit mask of all stream types in this channel
364  *==========================================================================*/
365 uint32_t QCamera3Channel::getStreamTypeMask()
366 {
367     uint32_t mask = 0;
368     for (int i = 0; i < m_numStreams; i++) {
369        mask |= (0x1 << mStreams[i]->getMyType());
370     }
371     return mask;
372 }
373 
374 /*===========================================================================
375  * FUNCTION   : getStreamID
376  *
377  * DESCRIPTION: Get StreamID of requested stream type
378  *
379  * PARAMETERS : streamMask
380  *
381  * RETURN     : Stream ID
382  *==========================================================================*/
383 uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
384 {
385     uint32_t streamID = 0;
386     for (int i = 0; i < m_numStreams; i++) {
387         if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) {
388             streamID = mStreams[i]->getMyServerID();
389             break;
390         }
391     }
392     return streamID;
393 }
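/* Usage sketch (hypothetical caller, not part of this file): for a channel
 * wrapping a single preview stream, the two helpers above compose as
 *
 *     uint32_t mask = channel->getStreamTypeMask();   // 1 << CAM_STREAM_TYPE_PREVIEW
 *     uint32_t id   = channel->getStreamID(1 << CAM_STREAM_TYPE_PREVIEW);
 *
 * getStreamID() expects a mask with exactly one stream-type bit set and
 * returns 0 when the channel holds no stream of that type.
 */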
394 
395 /*===========================================================================
396  * FUNCTION   : getStreamByHandle
397  *
398  * DESCRIPTION: return stream object by stream handle
399  *
400  * PARAMETERS :
401  *   @streamHandle : stream handle
402  *
403  * RETURN     : stream object. NULL if not found
404  *==========================================================================*/
405 QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
406 {
407     for (int i = 0; i < m_numStreams; i++) {
408         if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
409             return mStreams[i];
410         }
411     }
412     return NULL;
413 }
414 
415 /*===========================================================================
416  * FUNCTION   : getStreamByIndex
417  *
418  * DESCRIPTION: return stream object by index
419  *
420  * PARAMETERS :
421  *   @index : index of the stream within the channel
422  *
423  * RETURN     : stream object. NULL if not found
424  *==========================================================================*/
425 QCamera3Stream *QCamera3Channel::getStreamByIndex(uint8_t index)
426 {
427     if (index < m_numStreams) {
428         return mStreams[index];
429     }
430     return NULL;
431 }
432 
433 /*===========================================================================
434  * FUNCTION   : streamCbRoutine
435  *
436  * DESCRIPTION: callback routine for stream
437  *
438  * PARAMETERS :
439  *   @super_frame : super buffer frame from the stream
440  *   @stream      : stream object; @userdata : the owning QCamera3Channel
441  * RETURN     : none
442  *==========================================================================*/
443 void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
444                 QCamera3Stream *stream, void *userdata)
445 {
446     QCamera3Channel *channel = (QCamera3Channel *)userdata;
447     if (channel == NULL) {
448         ALOGE("%s: invalid channel pointer", __func__);
449         return;
450     }
451     channel->streamCbRoutine(super_frame, stream);
452 }
453 
454 /*===========================================================================
455  * FUNCTION   : dumpYUV
456  *
457  * DESCRIPTION: function to dump the YUV data from ISP/pproc
458  *
459  * PARAMETERS :
460  *   @frame   : frame to be dumped
461  *   @dim     : dimension of the stream
462  *   @offset  : offset of the data
463  *   @name    : 1 if it is ISP output/pproc input, 2 if it is pproc output
464  *
465  * RETURN  :
466  *==========================================================================*/
467 void QCamera3Channel::dumpYUV(mm_camera_buf_def_t *frame, cam_dimension_t dim,
468                               cam_frame_len_offset_t offset, uint8_t name)
469 {
470    char buf[64];
471    memset(buf, 0, sizeof(buf));
472    static int counter = 0;
473    /* Note that the image dimension will be the unrotated stream dimension.
474     * If you feel that the image would have been rotated during reprocess
475     * then swap the dimensions while opening the file
476     * */
477    snprintf(buf, sizeof(buf), "/data/local/tmp/%d_%d_%d_%dx%d.yuv",
478             name, counter, frame->frame_idx, dim.width, dim.height);
479    counter++;
480    int file_fd = open(buf, O_RDWR| O_CREAT, 0644);
481    if (file_fd >= 0) {
482       int written_len = write(file_fd, frame->buffer, offset.frame_len);
483       ALOGE("%s: written number of bytes %d", __func__, written_len);
484       close(file_fd);
485    } else {
486       ALOGE("%s: failed to open file to dump image", __func__);
487    }
488 
489 }
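/* Usage note (assumption, based on the constructor above): mYUVDump is seeded
 * from the persist.camera.yuv.dump property, so callers are expected to gate
 * dumpYUV() on it, e.g. after "adb shell setprop persist.camera.yuv.dump 1".
 * Each dump lands in /data/local/tmp/<name>_<counter>_<frame_idx>_<w>x<h>.yuv
 * as formatted by the snprintf() above.
 */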
490 
491 /*===========================================================================
492  * FUNCTION   : QCamera3RegularChannel
493  *
494  * DESCRIPTION: constructor of QCamera3RegularChannel
495  *
496  * PARAMETERS :
497  *   @cam_handle : camera handle
498  *   @cam_ops    : ptr to camera ops table
499  *   @cb_routine : callback routine to frame aggregator
500  *   @stream     : camera3_stream_t structure
501  *   @stream_type: Channel stream type
502  *
503  * RETURN     : none
504  *==========================================================================*/
505 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
506                     mm_camera_ops_t *cam_ops,
507                     channel_cb_routine cb_routine,
508                     cam_padding_info_t *paddingInfo,
509                     void *userData,
510                     camera3_stream_t *stream,
511                     cam_stream_type_t stream_type,
512                     uint32_t postprocess_mask) :
513                         QCamera3Channel(cam_handle, cam_ops, cb_routine,
514                                 paddingInfo, postprocess_mask, userData),
515                         mCamera3Stream(stream),
516                         mNumBufs(0),
517                         mStreamType(stream_type)
518 {
519 }
520 
521 /*===========================================================================
522  * FUNCTION   : ~QCamera3RegularChannel
523  *
524  * DESCRIPTION: destructor of QCamera3RegularChannel
525  *
526  * PARAMETERS : none
527  *
528  * RETURN     : none
529  *==========================================================================*/
530 QCamera3RegularChannel::~QCamera3RegularChannel()
531 {
532 }
533 
534 /*===========================================================================
535  * FUNCTION   : initialize
536  *
537  * DESCRIPTION: Initialize and add camera channel & stream
538  *
539  * PARAMETERS :
540  *
541  * RETURN     : int32_t type of status
542  *              NO_ERROR  -- success
543  *              non-zero failure code
544  *==========================================================================*/
545 
546 int32_t QCamera3RawChannel::initialize(cam_is_type_t isType)
547 {
548     return QCamera3RegularChannel::initialize(isType);
549 }
550 int32_t QCamera3RegularChannel::initialize(cam_is_type_t isType)
551 {
552     ATRACE_CALL();
553     int32_t rc = NO_ERROR;
554     cam_format_t streamFormat;
555     cam_dimension_t streamDim;
556 
557     if (NULL == mCamera3Stream) {
558         ALOGE("%s: Camera stream uninitialized", __func__);
559         return NO_INIT;
560     }
561 
562     if (1 <= m_numStreams) {
563         // Only one stream per channel supported in v3 Hal
564         return NO_ERROR;
565     }
566 
567     rc = init(NULL, NULL);
568     if (rc < 0) {
569         ALOGE("%s: init failed", __func__);
570         return rc;
571     }
572 
573     mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM;
574     mIsType  = isType;
575 
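    /* Format selection summary (restating the branches below): gralloc
     * IMPLEMENTATION_DEFINED maps to VIDEO_FORMAT, PREVIEW_FORMAT or
     * DEFAULT_FORMAT depending on the stream type, YCbCr_420_888 maps to
     * CALLBACK_FORMAT, and RAW_OPAQUE/RAW10/RAW16 all map to RAW_FORMAT
     * (10-bit MIPI Bayer); any other gralloc format is rejected with -EINVAL.
     */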
576     if (mCamera3Stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
577         if (mStreamType ==  CAM_STREAM_TYPE_VIDEO) {
578             streamFormat = VIDEO_FORMAT;
579         } else if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
580             streamFormat = PREVIEW_FORMAT;
581         } else {
582             //TODO: Add a new flag in libgralloc for ZSL buffers, and its size needs
583             // to be properly aligned and padded.
584             streamFormat = DEFAULT_FORMAT;
585         }
586     } else if(mCamera3Stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
587          streamFormat = CALLBACK_FORMAT;
588     } else if (mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
589          mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW10 ||
590          mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW16) {
591          // Bayer pattern doesn't matter here.
592          // All CAMIF raw format uses 10bit.
593          streamFormat = RAW_FORMAT;
594     } else {
595         //TODO: Fail for other types of streams for now
596         ALOGE("%s: format is not IMPLEMENTATION_DEFINED or flexible", __func__);
597         return -EINVAL;
598     }
599 
600     streamDim.width = mCamera3Stream->width;
601     streamDim.height = mCamera3Stream->height;
602 
603     rc = QCamera3Channel::addStream(mStreamType,
604             streamFormat,
605             streamDim,
606             mNumBufs,
607             mPostProcMask,
608             mIsType);
609 
610     return rc;
611 }
612 
613 /*===========================================================================
614 * FUNCTION   : start
615 *
616 * DESCRIPTION: start a regular channel
617 *
618 * PARAMETERS :
619 *
620 * RETURN     : int32_t type of status
621 *              NO_ERROR  -- success
622  *              non-zero failure code
623 *==========================================================================*/
624 int32_t QCamera3RegularChannel::start()
625 {
626     ATRACE_CALL();
627     int32_t rc = NO_ERROR;
628 
629     if (0 < mMemory.getCnt()) {
630         rc = QCamera3Channel::start();
631     }
632     return rc;
633 }
634 
635 /*===========================================================================
636  * FUNCTION   : request
637  *
638  * DESCRIPTION: process a request from camera service. Stream on if necessary.
639  *
640  * PARAMETERS :
641  *   @buffer  : buffer to be filled for this request
642  *
643  * RETURN     : 0 on a success start of capture
644  *              -EINVAL on invalid input
645  *              -ENODEV on serious error
646  *==========================================================================*/
647 int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber)
648 {
649     ATRACE_CALL();
650     //FIX ME: Return buffer back in case of failures below.
651 
652     int32_t rc = NO_ERROR;
653     int index;
654 
655     if (NULL == buffer) {
656         ALOGE("%s: Invalid buffer in channel request", __func__);
657         return BAD_VALUE;
658     }
659 
660     if(!m_bIsActive) {
661         rc = registerBuffer(buffer, mIsType);
662         if (NO_ERROR != rc) {
663             ALOGE("%s: On-the-fly buffer registration failed %d",
664                     __func__, rc);
665             return rc;
666         }
667 
668         rc = start();
669         if (NO_ERROR != rc) {
670             return rc;
671         }
672     } else {
673         CDBG("%s: Request on an existing stream",__func__);
674     }
675 
676     index = mMemory.getMatchBufIndex((void*)buffer);
677     if(index < 0) {
678         rc = registerBuffer(buffer, mIsType);
679         if (NO_ERROR != rc) {
680             ALOGE("%s: On-the-fly buffer registration failed %d",
681                     __func__, rc);
682             return rc;
683         }
684 
685         index = mMemory.getMatchBufIndex((void*)buffer);
686         if (index < 0) {
687             ALOGE("%s: Could not find object among registered buffers",
688                     __func__);
689             return DEAD_OBJECT;
690         }
691     }
692 
693     rc = mStreams[0]->bufDone(index);
694     if(rc != NO_ERROR) {
695         ALOGE("%s: Failed to Q new buffer to stream",__func__);
696         return rc;
697     }
698 
699     rc = mMemory.markFrameNumber(index, frameNumber);
700     return rc;
701 }
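/* Flow sketch (summary of the code above, simplified): on the first request
 * the channel registers the gralloc buffer on the fly, starts streaming, and
 * queues the buffer to the stream; on subsequent requests it only looks up
 * the buffer index (registering it if unseen), queues it via bufDone(), and
 * tags it with the frame number so streamCbRoutine() can report it back.
 */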
702 
703 /*===========================================================================
704  * FUNCTION   : registerBuffer
705  *
706  * DESCRIPTION: register streaming buffer to the channel object
707  *
708  * PARAMETERS :
709  *   @buffer     : buffer to be registered
710  *
711  * RETURN     : int32_t type of status
712  *              NO_ERROR  -- success
713  *              non-zero failure code
714  *==========================================================================*/
715 int32_t QCamera3RegularChannel::registerBuffer(buffer_handle_t *buffer,
716         cam_is_type_t isType)
717 {
718     ATRACE_CALL();
719     int rc = 0;
720     mIsType = isType;
721     cam_stream_type_t streamType;
722 
723     if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
724         ALOGE("%s: Trying to register more buffers than initially requested",
725                 __func__);
726         return BAD_VALUE;
727     }
728 
729     if (0 == m_numStreams) {
730         rc = initialize(mIsType);
731         if (rc != NO_ERROR) {
732             ALOGE("%s: Couldn't initialize camera stream %d",
733                     __func__, rc);
734             return rc;
735         }
736     }
737 
738     streamType = mStreams[0]->getMyType();
739     rc = mMemory.registerBuffer(buffer, streamType);
740     if (ALREADY_EXISTS == rc) {
741         return NO_ERROR;
742     } else if (NO_ERROR != rc) {
743         ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
744         return rc;
745     }
746 
747     return rc;
748 }
749 
750 void QCamera3RegularChannel::streamCbRoutine(
751                             mm_camera_super_buf_t *super_frame,
752                             QCamera3Stream *stream)
753 {
754     ATRACE_CALL();
755     //FIXME Q Buf back in case of error?
756     uint8_t frameIndex;
757     buffer_handle_t *resultBuffer;
758     int32_t resultFrameNumber;
759     camera3_stream_buffer_t result;
760 
761     if (NULL == stream) {
762         ALOGE("%s: Invalid stream", __func__);
763         return;
764     }
765 
766     if(!super_frame) {
767          ALOGE("%s: Invalid Super buffer",__func__);
768          return;
769     }
770 
771     if(super_frame->num_bufs != 1) {
772          ALOGE("%s: Multiple streams are not supported",__func__);
773          return;
774     }
775     if(super_frame->bufs[0] == NULL ) {
776          ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
777                   __func__);
778          return;
779     }
780 
781     frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
782     if(frameIndex >= mNumBufs) {
783          ALOGE("%s: Error, Invalid index for buffer",__func__);
784          stream->bufDone(frameIndex);
785          return;
786     }
787 
788     ////Use below data to issue framework callback
789     resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
790     resultFrameNumber = mMemory.getFrameNumber(frameIndex);
791 
792     result.stream = mCamera3Stream;
793     result.buffer = resultBuffer;
794     result.status = CAMERA3_BUFFER_STATUS_OK;
795     result.acquire_fence = -1;
796     result.release_fence = -1;
797     int32_t rc = stream->bufRelease(frameIndex);
798     if (NO_ERROR != rc) {
799         ALOGE("%s: Error %d releasing stream buffer %d",
800                 __func__, rc, frameIndex);
801     }
802 
803     rc = mMemory.unregisterBuffer(frameIndex);
804     if (NO_ERROR != rc) {
805         ALOGE("%s: Error %d unregistering stream buffer %d",
806                 __func__, rc, frameIndex);
807     }
808 
809     if (0 <= resultFrameNumber){
810         mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, mUserData);
811     } else {
812         ALOGE("%s: Bad frame number", __func__);
813     }
814 
815     free(super_frame);
816     return;
817 }
818 
819 QCamera3Memory* QCamera3RegularChannel::getStreamBufs(uint32_t /*len*/)
820 {
821     return &mMemory;
822 }
823 
824 void QCamera3RegularChannel::putStreamBufs()
825 {
826     mMemory.unregisterBuffers();
827 }
828 
829 int QCamera3RegularChannel::kMaxBuffers = MAX_INFLIGHT_REQUESTS;
830 
831 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
832                     mm_camera_ops_t *cam_ops,
833                     channel_cb_routine cb_routine,
834                     cam_padding_info_t *paddingInfo,
835                     uint32_t postprocess_mask,
836                     void *userData) :
837                         QCamera3Channel(cam_handle, cam_ops,
838                                 cb_routine, paddingInfo, postprocess_mask, userData),
839                         mMemory(NULL)
840 {
841 }
842 
843 QCamera3MetadataChannel::~QCamera3MetadataChannel()
844 {
845     if (m_bIsActive)
846         stop();
847 
848     if (mMemory) {
849         mMemory->deallocate();
850         delete mMemory;
851         mMemory = NULL;
852     }
853 }
854 
855 int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType)
856 {
857     ATRACE_CALL();
858     int32_t rc;
859     cam_dimension_t streamDim;
860 
861     if (mMemory || m_numStreams > 0) {
862         ALOGE("%s: metadata channel already initialized", __func__);
863         return -EINVAL;
864     }
865 
866     rc = init(NULL, NULL);
867     if (rc < 0) {
868         ALOGE("%s: init failed", __func__);
869         return rc;
870     }
871     mIsType = isType;
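    // The metadata "image" is a flat metadata_buffer_t, so the stream is
    // described as a single row: width = sizeof(metadata_buffer_t), height = 1.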
872     streamDim.width = sizeof(metadata_buffer_t);
873     streamDim.height = 1;
874     rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
875         streamDim, MIN_STREAMING_BUFFER_NUM, mPostProcMask, mIsType);
876     if (rc < 0) {
877         ALOGE("%s: addStream failed", __func__);
878     }
879     return rc;
880 }
881 
882 int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
883                                                 uint32_t /*frameNumber*/)
884 {
885     if (!m_bIsActive) {
886         return start();
887     }
888     else
889         return 0;
890 }
891 
892 void QCamera3MetadataChannel::streamCbRoutine(
893                         mm_camera_super_buf_t *super_frame,
894                         QCamera3Stream * /*stream*/)
895 {
896     ATRACE_CALL();
897     uint32_t requestNumber = 0;
898     if (super_frame == NULL || super_frame->num_bufs != 1) {
899         ALOGE("%s: super_frame is not valid", __func__);
900         return;
901     }
902     mChannelCB(super_frame, NULL, requestNumber, mUserData);
903 }
904 
905 QCamera3Memory* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
906 {
907     int rc;
908     if (len < sizeof(metadata_buffer_t)) {
909         ALOGE("%s: Metadata buffer size less than structure %d vs %d",
910                 __func__,
911                 len,
912                 sizeof(metadata_buffer_t));
913         return NULL;
914     }
915     mMemory = new QCamera3HeapMemory();
916     if (!mMemory) {
917         ALOGE("%s: unable to create metadata memory", __func__);
918         return NULL;
919     }
920     rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
921     if (rc < 0) {
922         ALOGE("%s: unable to allocate metadata memory", __func__);
923         delete mMemory;
924         mMemory = NULL;
925         return NULL;
926     }
927     memset(mMemory->getPtr(0), 0, sizeof(metadata_buffer_t));
928     return mMemory;
929 }
930 
931 void QCamera3MetadataChannel::putStreamBufs()
932 {
933     mMemory->deallocate();
934     delete mMemory;
935     mMemory = NULL;
936 }
937 /*************************************************************************************/
938 // RAW Channel related functions
939 int QCamera3RawChannel::kMaxBuffers = MAX_INFLIGHT_REQUESTS;
940 
941 QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle,
942                     mm_camera_ops_t *cam_ops,
943                     channel_cb_routine cb_routine,
944                     cam_padding_info_t *paddingInfo,
945                     void *userData,
946                     camera3_stream_t *stream,
947                     uint32_t postprocess_mask,
948                     bool raw_16) :
949                         QCamera3RegularChannel(cam_handle, cam_ops,
950                                 cb_routine, paddingInfo, userData, stream,
951                                 CAM_STREAM_TYPE_RAW, postprocess_mask),
952                         mIsRaw16(raw_16)
953 {
954     char prop[PROPERTY_VALUE_MAX];
955     property_get("persist.camera.raw.debug.dump", prop, "0");
956     mRawDump = atoi(prop);
957 }
958 
959 QCamera3RawChannel::~QCamera3RawChannel()
960 {
961 }
962 
963 void QCamera3RawChannel::streamCbRoutine(
964                         mm_camera_super_buf_t *super_frame,
965                         QCamera3Stream * stream)
966 {
967     ATRACE_CALL();
968     /* Move this back down once verified */
969     if (mRawDump)
970         dumpRawSnapshot(super_frame->bufs[0]);
971 
972     if (mIsRaw16) {
973         if (RAW_FORMAT == CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG)
974             convertMipiToRaw16(super_frame->bufs[0]);
975         else
976             convertLegacyToRaw16(super_frame->bufs[0]);
977     }
978 
979     //Make sure cache coherence because extra processing is done
980     mMemory.cleanInvalidateCache(super_frame->bufs[0]->buf_idx);
981 
982     QCamera3RegularChannel::streamCbRoutine(super_frame, stream);
983     return;
984 }
985 
986 void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
987 {
988    QCamera3Stream *stream = getStreamByIndex(0);
989    char buf[32];
990    memset(buf, 0, sizeof(buf));
991    cam_dimension_t dim;
992    memset(&dim, 0, sizeof(dim));
993    stream->getFrameDimension(dim);
994 
995    cam_frame_len_offset_t offset;
996    memset(&offset, 0, sizeof(cam_frame_len_offset_t));
997    stream->getFrameOffset(offset);
998    snprintf(buf, sizeof(buf), "/data/local/tmp/r_%d_%dx%d.raw",
999             frame->frame_idx, dim.width, dim.height);
1000 
1001    int file_fd = open(buf, O_RDWR| O_CREAT, 0644);
1002    if (file_fd >= 0) {
1003       int written_len = write(file_fd, frame->buffer, offset.frame_len);
1004       ALOGE("%s: written number of bytes %d", __func__, written_len);
1005       close(file_fd);
1006    } else {
1007       ALOGE("%s: failed to open file to dump image", __func__);
1008    }
1009 
1010 }
1011 
1012 void QCamera3RawChannel::convertLegacyToRaw16(mm_camera_buf_def_t *frame)
1013 {
1014     // Convert image buffer from Opaque raw format to RAW16 format
1015     // 10bit Opaque raw is stored in the format of:
1016     // 0000 - p5 - p4 - p3 - p2 - p1 - p0
1017     // where p0 to p5 are 6 pixels (each is 10 bit) and the most significant
1018     // 4 bits are 0s. Each 64bit word contains 6 pixels.
1019 
1020     QCamera3Stream *stream = getStreamByIndex(0);
1021     cam_dimension_t dim;
1022     memset(&dim, 0, sizeof(dim));
1023     stream->getFrameDimension(dim);
1024 
1025     cam_frame_len_offset_t offset;
1026     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1027     stream->getFrameOffset(offset);
1028 
1029     uint32_t raw16_stride = (dim.width + 15) & ~15;
1030     uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
1031 
1032     // In-place format conversion.
1033     // Raw16 format always occupy more memory than opaque raw10.
1034     // Convert to Raw16 by iterating through all pixels from bottom-right
1035     // to top-left of the image.
1036     // Special notes:
1037     // 1. Cross-platform raw16's stride is 16 pixels.
1038     // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes.
1039     for (int y = dim.height-1; y >= 0; y--) {
1040         uint64_t* row_start = (uint64_t *)frame->buffer +
1041             y * offset.mp[0].stride / 8;
1042         for (int x = dim.width-1;  x >= 0; x--) {
1043             uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6)));
1044             raw16_buffer[y*raw16_stride+x] = raw16_pixel;
1045         }
1046     }
1047 }
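/* Worked example (hypothetical values): in the legacy opaque layout one 64-bit
 * word carries six 10-bit pixels p0..p5 in its low 60 bits, so pixel x of a
 * row is recovered exactly as in the loop above:
 *
 *     uint64_t word  = row_start[x / 6];
 *     uint16_t pixel = (uint16_t)((word >> (10 * (x % 6))) & 0x3FF);
 */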
1048 
1049 void QCamera3RawChannel::convertMipiToRaw16(mm_camera_buf_def_t *frame)
1050 {
1051     // Convert image buffer from mipi10 raw format to RAW16 format
1052     // mipi10 opaque raw is stored in the format of:
1053     // P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2)
1054     // 4 pixels occupy 5 bytes, no padding needed
1055 
1056     QCamera3Stream *stream = getStreamByIndex(0);
1057     cam_dimension_t dim;
1058     memset(&dim, 0, sizeof(dim));
1059     stream->getFrameDimension(dim);
1060 
1061     cam_frame_len_offset_t offset;
1062     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1063     stream->getFrameOffset(offset);
1064 
1065     uint32_t raw16_stride = (dim.width + 15) & ~15;
1066     uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
1067 
1068     // In-place format conversion.
1069     // Raw16 format always occupy more memory than opaque raw10.
1070     // Convert to Raw16 by iterating through all pixels from bottom-right
1071     // to top-left of the image.
1072     // Special notes:
1073     // 1. Cross-platform raw16's stride is 16 pixels.
1074     // 2. mipi raw10's stride is 4 pixels, and aligned to 16 bytes.
1075     for (int y = dim.height-1; y >= 0; y--) {
1076         uint8_t* row_start = (uint8_t *)frame->buffer +
1077             y * offset.mp[0].stride;
1078         for (int x = dim.width-1;  x >= 0; x--) {
1079             uint8_t upper_8bit = row_start[5*(x/4)+x%4];
1080             uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> (2*(x%4))) & 0x3);
1081             uint16_t raw16_pixel = (((uint16_t)upper_8bit)<<2 | lower_2bit);
1082             raw16_buffer[y*raw16_stride+x] = raw16_pixel;
1083         }
1084     }
1085 
1086 }
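/* Worked example (hypothetical values): a 5-byte MIPI RAW10 group packs four
 * pixels as four "upper 8 bit" bytes followed by one byte holding the four
 * 2-bit remainders, so per the byte layout described in the header comment
 * pixel x of a row unpacks as
 *
 *     uint8_t  hi  = row_start[5 * (x / 4) + (x % 4)];
 *     uint8_t  lo  = (row_start[5 * (x / 4) + 4] >> (2 * (x % 4))) & 0x3;
 *     uint16_t pix = ((uint16_t)hi << 2) | lo;
 */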
1087 
1088 
1089 /*************************************************************************************/
1090 // RAW Dump Channel related functions
1091 
1092 int QCamera3RawDumpChannel::kMaxBuffers = 3;
1093 /*===========================================================================
1094  * FUNCTION   : QCamera3RawDumpChannel
1095  *
1096  * DESCRIPTION: Constructor for RawDumpChannel
1097  *
1098  * PARAMETERS :
1099  *   @cam_handle    : Handle for Camera
1100  *   @cam_ops       : Function pointer table
1101  *   @rawDumpSize   : Dimensions for the Raw stream
1102  *   @paddingInfo   : Padding information for stream
1103  *   @userData      : Cookie for parent
1104  *   @postprocess_mask : PP feature mask for this stream
1105  *
1106  * RETURN           : NA
1107  *==========================================================================*/
1108 QCamera3RawDumpChannel::QCamera3RawDumpChannel(uint32_t cam_handle,
1109                     mm_camera_ops_t *cam_ops,
1110                     cam_dimension_t rawDumpSize,
1111                     cam_padding_info_t *paddingInfo,
1112                     void *userData,
1113                     uint32_t postprocess_mask) :
1114                         QCamera3Channel(cam_handle, cam_ops, NULL,
1115                                 paddingInfo, postprocess_mask, userData),
1116                         mDim(rawDumpSize),
1117                         mMemory(NULL)
1118 {
1119     char prop[PROPERTY_VALUE_MAX];
1120     property_get("persist.camera.raw.dump", prop, "0");
1121     mRawDump = atoi(prop);
1122 }
1123 
1124 /*===========================================================================
1125  * FUNCTION   : ~QCamera3RawDumpChannel
1126  *
1127  * DESCRIPTION: Destructor for RawDumpChannel
1128  *
1129  * PARAMETERS :
1130  *
1131  * RETURN           : NA
1132  *==========================================================================*/
1133 
1134 QCamera3RawDumpChannel::~QCamera3RawDumpChannel()
1135 {
1136 }
1137 
1138 /*===========================================================================
1139  * FUNCTION   : dumpRawSnapshot
1140  *
1141  * DESCRIPTION: Helper function to dump Raw frames
1142  *
1143  * PARAMETERS :
1144  *  @frame      : stream buf frame to be dumped
1145  *
1146  *  RETURN      : NA
1147  *==========================================================================*/
1148 void QCamera3RawDumpChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
1149 {
1150     QCamera3Stream *stream = getStreamByIndex(0);
1151     char buf[128];
1152     struct timeval tv;
1153     struct tm *timeinfo;
1154 
1155     cam_dimension_t dim;
1156     memset(&dim, 0, sizeof(dim));
1157     stream->getFrameDimension(dim);
1158 
1159     cam_frame_len_offset_t offset;
1160     memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1161     stream->getFrameOffset(offset);
1162 
1163     gettimeofday(&tv, NULL);
1164     timeinfo = localtime(&tv.tv_sec);
1165 
1166     memset(buf, 0, sizeof(buf));
1167     snprintf(buf, sizeof(buf),
1168                  "/data/%04d-%02d-%02d-%02d-%02d-%02d-%06ld_%d_%dx%d.raw",
1169                  timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
1170                  timeinfo->tm_mday, timeinfo->tm_hour,
1171                  timeinfo->tm_min, timeinfo->tm_sec,tv.tv_usec,
1172                  frame->frame_idx, dim.width, dim.height);
1173 
1174     int file_fd = open(buf, O_RDWR| O_CREAT, 0777);
1175     if (file_fd >= 0) {
1176         int written_len = write(file_fd, frame->buffer, offset.frame_len);
1177         CDBG("%s: written number of bytes %d", __func__, written_len);
1178         close(file_fd);
1179     } else {
1180         ALOGE("%s: failed to open file to dump image", __func__);
1181     }
1182 }
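/* Usage note (assumption, based on the constructor above): this internal dump
 * path is gated by mRawDump, which is seeded from the persist.camera.raw.dump
 * property, and each frame is written under /data with a timestamped name of
 * the form YYYY-MM-DD-HH-MM-SS-usec_<frame_idx>_<w>x<h>.raw.
 */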
1183 
1184 /*===========================================================================
1185  * FUNCTION   : streamCbRoutine
1186  *
1187  * DESCRIPTION: Callback routine invoked for each frame generated for
1188  *              Rawdump channel
1189  *
1190  * PARAMETERS :
1191  *   @super_frame  : stream buf frame generated
1192  *   @stream       : Underlying Stream object cookie
1193  *
1194  * RETURN          : NA
1195  *==========================================================================*/
1196 void QCamera3RawDumpChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
1197                                                 QCamera3Stream *stream)
1198 {
1199     CDBG("%s: E",__func__);
1200     if (super_frame == NULL || super_frame->num_bufs != 1) {
1201         ALOGE("%s: super_frame is not valid", __func__);
1202         return;
1203     }
1204 
1205     if (mRawDump)
1206         dumpRawSnapshot(super_frame->bufs[0]);
1207 
1208     bufDone(super_frame);
1209     free(super_frame);
1210 }
1211 
1212 /*===========================================================================
1213  * FUNCTION   : getStreamBufs
1214  *
1215  * DESCRIPTION: Callback function provided to interface to get buffers.
1216  *
1217  * PARAMETERS :
1218  *   @len       : Length of each buffer to be allocated
1219  *
1220  * RETURN     : NULL on buffer allocation failure
1221  *              QCamera3Memory object on success
1222  *==========================================================================*/
1223 QCamera3Memory* QCamera3RawDumpChannel::getStreamBufs(uint32_t len)
1224 {
1225     int rc;
1226     mMemory = new QCamera3HeapMemory();
1227 
1228     if (!mMemory) {
1229         ALOGE("%s: unable to create heap memory", __func__);
1230         return NULL;
1231     }
1232     rc = mMemory->allocate(kMaxBuffers, len, true);
1233     if (rc < 0) {
1234         ALOGE("%s: unable to allocate heap memory", __func__);
1235         delete mMemory;
1236         mMemory = NULL;
1237         return NULL;
1238     }
1239     return mMemory;
1240 }
1241 
1242 /*===========================================================================
1243  * FUNCTION   : putStreamBufs
1244  *
1245  * DESCRIPTION: Callback function provided to interface to return buffers.
1246  *              Although no handles are actually returned, the implicit
1247  *              assumption is that the interface will no longer use the
1248  *              buffers and the channel can deallocate them if necessary.
1249  *
1250  * PARAMETERS : NA
1251  *
1252  * RETURN     : NA
1253  *==========================================================================*/
1254 void QCamera3RawDumpChannel::putStreamBufs()
1255 {
1256     mMemory->deallocate();
1257     delete mMemory;
1258     mMemory = NULL;
1259 }
1260 
1261 /*===========================================================================
1262  * FUNCTION : request
1263  *
1264  * DESCRIPTION: Request function used as trigger
1265  *
1266  * PARAMETERS :
1267 * @buffer      : will be NULL since this is an internal channel
1268 * @frameNumber : undefined since this is an internal stream
1269  *
1270  * RETURN     : int32_t type of status
1271  *              NO_ERROR  -- success
1272  *              non-zero failure code
1273  *==========================================================================*/
1274 int32_t QCamera3RawDumpChannel::request(buffer_handle_t * /*buffer*/,
1275                                                 uint32_t /*frameNumber*/)
1276 {
1277     if (!m_bIsActive) {
1278         return QCamera3Channel::start();
1279     }
1280     else
1281         return 0;
1282 }
1283 
1284 /*===========================================================================
1285  * FUNCTION : initialize
1286  *
1287  * DESCRIPTION: Initializes channel params and creates underlying stream
1288  *
1289  * PARAMETERS : NA
1290  *
1291  * RETURN     : int32_t type of status
1292  *              NO_ERROR  -- success
1293  *              non-zero failure code
1294  *==========================================================================*/
1295 int32_t QCamera3RawDumpChannel::initialize(cam_is_type_t isType)
1296 {
1297     int32_t rc;
1298 
1299     rc = init(NULL, NULL);
1300     if (rc < 0) {
1301         ALOGE("%s: init failed", __func__);
1302         return rc;
1303     }
1304     mIsType = isType;
1305     rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_RAW,
1306         CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG, mDim, kMaxBuffers,
1307         mPostProcMask, mIsType);
1308     if (rc < 0) {
1309         ALOGE("%s: addStream failed", __func__);
1310     }
1311     return rc;
1312 }
1313 /*************************************************************************************/
1314 
1315 /*===========================================================================
1316  * FUNCTION   : jpegEvtHandle
1317  *
1318  * DESCRIPTION: Function registered to mm-jpeg-interface to handle jpeg events.
1319  *              Construct result payload and call mChannelCB to deliver buffer
1320  *              to framework.
1321  *
1322  * PARAMETERS :
1323  *   @status    : status of jpeg job
1324  *   @client_hdl: jpeg client handle
1325  *   @jobId     : jpeg job Id
1326  *   @p_output  : ptr to jpeg output result struct
1327  *   @userdata  : user data ptr
1328  *
1329  * RETURN     : none
1330  *==========================================================================*/
1331 void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
1332                                               uint32_t /*client_hdl*/,
1333                                               uint32_t jobId,
1334                                               mm_jpeg_output_t *p_output,
1335                                               void *userdata)
1336 {
1337     ATRACE_CALL();
1338     buffer_handle_t *resultBuffer, *jpegBufferHandle;
1339     int32_t resultFrameNumber;
1340     int resultStatus = CAMERA3_BUFFER_STATUS_OK;
1341     camera3_stream_buffer_t result;
1342     camera3_jpeg_blob_t jpegHeader;
1343     char* jpeg_eof = 0;
1344     int maxJpegSize;
1345     int32_t bufIdx;
1346 
1347     QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
1348     if (obj) {
1349         //Construct payload for process_capture_result. Call mChannelCb
1350 
1351         qcamera_hal3_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
1352 
1353         if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
1354             ALOGE("%s: Error in jobId: (%d) with status: %d", __func__, jobId, status);
1355             resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
1356         }
1357 
1358         bufIdx = job->jpeg_settings->out_buf_index;
1359         CDBG("%s: jpeg out_buf_index: %d", __func__, bufIdx);
1360 
1361         //Construct jpeg transient header of type camera3_jpeg_blob_t
1362         //Append at the end of jpeg image of buf_filled_len size
1363 
1364         jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
1365         jpegHeader.jpeg_size = p_output->buf_filled_len;
1366 
1367 
1368         char* jpeg_buf = (char *)p_output->buf_vaddr;
1369 
1370         // Gralloc buffer may have additional padding for 4K page size
1371         // Follow size guidelines based on spec since framework relies
1372         // on that to reach end of buffer and with it the header
1373 
1374        //Handle same as resultBuffer, but for readability
1375         jpegBufferHandle =
1376             (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
1377 
1378         maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
1379         if (maxJpegSize > obj->mMemory.getSize(bufIdx)) {
1380             maxJpegSize = obj->mMemory.getSize(bufIdx);
1381         }
1382 
1383         jpeg_eof = &jpeg_buf[maxJpegSize-sizeof(jpegHeader)];
1384         memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
1385         obj->mMemory.cleanInvalidateCache(bufIdx);
1386 
1387         ////Use below data to issue framework callback
1388         resultBuffer = (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
1389         resultFrameNumber = obj->mMemory.getFrameNumber(bufIdx);
1390         int32_t rc = obj->mMemory.unregisterBuffer(bufIdx);
1391         if (NO_ERROR != rc) {
1392             ALOGE("%s: Error %d unregistering stream buffer %d",
1393                     __func__, rc, bufIdx);
1394         }
1395 
1396         result.stream = obj->mCamera3Stream;
1397         result.buffer = resultBuffer;
1398         result.status = resultStatus;
1399         result.acquire_fence = -1;
1400         result.release_fence = -1;
1401 
1402         // Release any snapshot buffers before calling
1403         // the user callback. The callback can potentially
1404         // unblock pending requests to snapshot stream.
1405         if (NULL != job) {
1406             int32_t snapshotIdx = -1;
1407             mm_camera_super_buf_t* src_frame = NULL;
1408 
1409             if (job->src_reproc_frame)
1410                 src_frame = job->src_reproc_frame;
1411             else
1412                 src_frame = job->src_frame;
1413 
1414             if (src_frame) {
1415                 if (obj->mStreams[0]->getMyHandle() ==
1416                         src_frame->bufs[0]->stream_id) {
1417                     snapshotIdx = src_frame->bufs[0]->buf_idx;
1418                 } else {
1419                     ALOGE("%s: Snapshot stream id %d and source frame %d don't match!",
1420                             __func__, obj->mStreams[0]->getMyHandle(),
1421                             src_frame->bufs[0]->stream_id);
1422                 }
1423             }
1424             if (0 <= snapshotIdx) {
1425                 Mutex::Autolock lock(obj->mFreeBuffersLock);
1426                 obj->mFreeBufferList.push_back(snapshotIdx);
1427             } else {
1428                 ALOGE("%s: Snapshot buffer not found!", __func__);
1429             }
1430         }
1431 
1432         CDBG("%s: Issue Callback", __func__);
1433         obj->mChannelCB(NULL, &result, resultFrameNumber, obj->mUserData);
1434 
1435         // release internal data for jpeg job
1436         if (job != NULL) {
1437             if ((NULL != job->fwk_frame) || (NULL != job->fwk_src_buffer)) {
1438                 obj->mOfflineMetaMemory.deallocate();
1439                 obj->mOfflineMemory.unregisterBuffers();
1440             }
1441             obj->m_postprocessor.releaseOfflineBuffers();
1442             obj->m_postprocessor.releaseJpegJobData(job);
1443             free(job);
1444         }
1445 
1446         return;
1447         // }
1448     } else {
1449         ALOGE("%s: Null userdata in jpeg callback", __func__);
1450     }
1451 }
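/* Consumer-side sketch (hypothetical, not part of the HAL): the framework
 * recovers the encoded length by reading back the camera3_jpeg_blob_t that
 * the handler above copied to the very end of the gralloc buffer:
 *
 *     camera3_jpeg_blob_t blob;
 *     memcpy(&blob, jpeg_buf + maxJpegSize - sizeof(blob), sizeof(blob));
 *     size_t jpegLen = 0;
 *     if (blob.jpeg_blob_id == CAMERA3_JPEG_BLOB_ID)
 *         jpegLen = blob.jpeg_size;   // valid JPEG bytes start at jpeg_buf[0]
 */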
1452 
1453 QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
1454                     mm_camera_ops_t *cam_ops,
1455                     channel_cb_routine cb_routine,
1456                     cam_padding_info_t *paddingInfo,
1457                     void *userData,
1458                     camera3_stream_t *stream,
1459                     uint32_t postprocess_mask,
1460                     bool is4KVideo,
1461                     QCamera3Channel *metadataChannel) :
1462                         QCamera3Channel(cam_handle, cam_ops, cb_routine,
1463                         paddingInfo, postprocess_mask, userData),
1464                         m_postprocessor(this),
1465                         mCamera3Stream(stream),
1466                         mNumBufsRegistered(CAM_MAX_NUM_BUFS_PER_STREAM),
1467                         mNumSnapshotBufs(0),
1468                         mCurrentBufIndex(-1),
1469                         mPostProcStarted(false),
1470                         mInputBufferConfig(false),
1471                         mYuvMemory(NULL),
1472                         m_pMetaChannel(metadataChannel),
1473                         mMetaFrame(NULL)
1474 {
1475     QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
1476     m_max_pic_dim = hal_obj->calcMaxJpegDim();
1477     mYuvWidth = stream->width;
1478     mYuvHeight = stream->height;
1479     // Use same pixelformat for 4K video case
1480     mStreamFormat = is4KVideo ? VIDEO_FORMAT : SNAPSHOT_FORMAT;
1481     mStreamType = CAM_STREAM_TYPE_SNAPSHOT;
1482     int32_t rc = m_postprocessor.init(&mMemory, jpegEvtHandle, mPostProcMask,
1483             this);
1484     if (rc != 0) {
1485         ALOGE("Init Postprocessor failed");
1486     }
1487 }
1488 
1489 /*===========================================================================
1490  * FUNCTION   : stop
1491  *
1492  * DESCRIPTION: stop pic channel, which will stop all streams within, including
1493  *              the reprocessing channel in postprocessor and YUV stream.
1494  *
1495  * PARAMETERS : none
1496  *
1497  * RETURN     : int32_t type of status
1498  *              NO_ERROR  -- success
1499  *              non-zero failure code
1500  *==========================================================================*/
1501 int32_t QCamera3PicChannel::stop()
1502 {
1503     int32_t rc = NO_ERROR;
1504     if(!m_bIsActive) {
1505         ALOGE("%s: Attempt to stop inactive channel",__func__);
1506         return rc;
1507     }
1508 
1509     m_postprocessor.stop();
1510     mPostProcStarted = false;
1511     rc |= QCamera3Channel::stop();
1512     return rc;
1513 }
1514 
1515 QCamera3PicChannel::~QCamera3PicChannel()
1516 {
1517    stop();
1518 
1519    int32_t rc = m_postprocessor.stop();
1520    if (rc != NO_ERROR) {
1521        ALOGE("%s: Postprocessor stop failed", __func__);
1522    }
1523 
1524    rc = m_postprocessor.deinit();
1525    if (rc != 0) {
1526        ALOGE("De-init Postprocessor failed");
1527    }
1528 
1529    if (0 < mOfflineMetaMemory.getCnt()) {
1530        mOfflineMetaMemory.deallocate();
1531    }
1532    if (0 < mOfflineMemory.getCnt()) {
1533        mOfflineMemory.unregisterBuffers();
1534    }
1535 }
1536 
1537 int32_t QCamera3PicChannel::initialize(cam_is_type_t isType)
1538 {
1539     int32_t rc = NO_ERROR;
1540     cam_dimension_t streamDim;
1541     cam_stream_type_t streamType;
1542     cam_format_t streamFormat;
1543     mm_camera_channel_attr_t attr;
1544 
1545     if (NULL == mCamera3Stream) {
1546         ALOGE("%s: Camera stream uninitialized", __func__);
1547         return NO_INIT;
1548     }
1549 
1550     if (1 <= m_numStreams) {
1551         // Only one stream per channel supported in v3 Hal
1552         return NO_ERROR;
1553     }
1554 
1555     memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
1556     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
1557     attr.look_back = 1;
1558     attr.post_frame_skip = 1;
1559     attr.water_mark = 1;
1560     attr.max_unmatched_frames = 1;
1561 
1562     rc = init(&attr, NULL);
1563     if (rc < 0) {
1564         ALOGE("%s: init failed", __func__);
1565         return rc;
1566     }
1567     mIsType = isType;
1568 
1569     streamType = mStreamType;
1570     streamFormat = mStreamFormat;
1571     streamDim.width = mYuvWidth;
1572     streamDim.height = mYuvHeight;
1573 
1574     mNumSnapshotBufs = mCamera3Stream->max_buffers;
1575     rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
1576             (uint8_t)mCamera3Stream->max_buffers, mPostProcMask, mIsType);
1577 
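    // Note: mFreeBufferList tracks the indices of snapshot buffers that are
    // not currently queued to the stream; initially every buffer is free.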
1578     Mutex::Autolock lock(mFreeBuffersLock);
1579     mFreeBufferList.clear();
1580     for (uint32_t i = 0; i < mCamera3Stream->max_buffers; i++) {
1581         mFreeBufferList.push_back(i);
1582     }
1583 
1584     return rc;
1585 }
1586 
1587 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
1588         uint32_t frameNumber,
1589         camera3_stream_buffer_t *pInputBuffer,
1590         metadata_buffer_t *metadata)
1591 {
1592     ATRACE_CALL();
1593     //FIX ME: Return buffer back in case of failures below.
1594 
1595     int32_t rc = NO_ERROR;
1596     int index;
1597     //extract rotation information
1598 
1599     reprocess_config_t reproc_cfg;
1600     memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
1601     reproc_cfg.padding = mPaddingInfo;
1602     //to ensure a big enough buffer size set the height and width
1603     //padding to max(height padding, width padding)
1604     if (reproc_cfg.padding->height_padding > reproc_cfg.padding->width_padding) {
1605        reproc_cfg.padding->width_padding = reproc_cfg.padding->height_padding;
1606     } else {
1607        reproc_cfg.padding->height_padding = reproc_cfg.padding->width_padding;
1608     }
1609     if (NULL != pInputBuffer) {
1610         reproc_cfg.input_stream_dim.width = pInputBuffer->stream->width;
1611         reproc_cfg.input_stream_dim.height = pInputBuffer->stream->height;
1612     } else {
1613         reproc_cfg.input_stream_dim.width = mYuvWidth;
1614         reproc_cfg.input_stream_dim.height = mYuvHeight;
1615         reproc_cfg.src_channel = this;
1616     }
1617     reproc_cfg.output_stream_dim.width = mCamera3Stream->width;
1618     reproc_cfg.output_stream_dim.height = mCamera3Stream->height;
1619     reproc_cfg.stream_type = mStreamType;
1620     reproc_cfg.stream_format = mStreamFormat;
1621     rc = mm_stream_calc_offset_snapshot(mStreamFormat, &reproc_cfg.input_stream_dim,
1622             reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
1623     if (rc != 0) {
1624         ALOGE("%s: Snapshot stream plane info calculation failed!", __func__);
1625         return rc;
1626     }
1627     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
1628           int32_t *rotation = (int32_t *)POINTER_OF_PARAM(
1629             CAM_INTF_META_JPEG_ORIENTATION, metadata);
1630           if (*rotation == 0) {
1631              reproc_cfg.rotation = ROTATE_0;
1632           } else if (*rotation == 90) {
1633              reproc_cfg.rotation = ROTATE_90;
1634           } else if (*rotation == 180) {
1635              reproc_cfg.rotation = ROTATE_180;
1636           } else if (*rotation == 270) {
1637              reproc_cfg.rotation = ROTATE_270;
1638           }
1639     }
1640 
1641     // Picture stream has already been started before any request comes in
1642     if (!m_bIsActive) {
1643         ALOGE("%s: Channel not started!!", __func__);
1644         return NO_INIT;
1645     }
1646 
1647     index = mMemory.getMatchBufIndex((void*)buffer);
1648     if(index < 0) {
1649         rc = registerBuffer(buffer, mIsType);
1650         if (NO_ERROR != rc) {
1651             ALOGE("%s: On-the-fly buffer registration failed %d",
1652                     __func__, rc);
1653             return rc;
1654         }
1655 
1656         index = mMemory.getMatchBufIndex((void*)buffer);
1657         if (index < 0) {
1658             ALOGE("%s: Could not find object among registered buffers",__func__);
1659             return DEAD_OBJECT;
1660         }
1661     }
1662     CDBG("%s: buffer index %d, frameNumber: %u", __func__, index, frameNumber);
1663 
1664     rc = mMemory.markFrameNumber(index, frameNumber);
1665 
1666     //Start the postprocessor for jpeg encoding. Pass mMemory as destination buffer
1667     mCurrentBufIndex = index;
1668 
1669     // Start postprocessor
1670     // This component needs to be re-configured
1671     // once we switch from input(framework) buffer
1672     // reprocess to standard capture!
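    // Note: mInputBufferConfig is set to (NULL == pInputBuffer) below, so it is
    // true when the postprocessor was last started for a regular capture.
    // restartNeeded therefore flags any switch between framework input-buffer
    // reprocess and regular capture since the previous start.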
1673     bool restartNeeded = ((!mInputBufferConfig) != (NULL != pInputBuffer));
1674     if((!mPostProcStarted) || restartNeeded) {
1675         m_postprocessor.start(reproc_cfg, metadata);
1676         mPostProcStarted = true;
1677         mInputBufferConfig = (NULL == pInputBuffer);
1678     }
1679 
1680     // Queue jpeg settings
1681     rc = queueJpegSetting(index, metadata);
1682 
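    // Two request paths follow: a regular capture queues one free internal
    // snapshot buffer to the stream, while a framework input-buffer request
    // registers the input buffer, pairs it with a copy of the request metadata,
    // and hands the combined frame directly to the postprocessor.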
1683     if (pInputBuffer == NULL) {
1684         Mutex::Autolock lock(mFreeBuffersLock);
1685         if (!mFreeBufferList.empty()) {
1686             List<uint32_t>::iterator it = mFreeBufferList.begin();
1687             uint32_t freeBuffer = *it;
1688             mStreams[0]->bufDone(freeBuffer);
1689             mFreeBufferList.erase(it);
1690         } else {
1691             ALOGE("%s: No snapshot buffers available!", __func__);
1692             rc = NOT_ENOUGH_DATA;
1693         }
1694     } else {
1695         if (0 < mOfflineMetaMemory.getCnt()) {
1696             mOfflineMetaMemory.deallocate();
1697         }
1698         if (0 < mOfflineMemory.getCnt()) {
1699             mOfflineMemory.unregisterBuffers();
1700         }
1701 
1702         int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
1703         if(input_index < 0) {
1704             rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType);
1705             if (NO_ERROR != rc) {
1706                 ALOGE("%s: On-the-fly input buffer registration failed %d",
1707                         __func__, rc);
1708                 return rc;
1709             }
1710 
1711             input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
1712             if (input_index < 0) {
1713                 ALOGE("%s: Could not find object among registered buffers",__func__);
1714                 return DEAD_OBJECT;
1715             }
1716         }
1717         qcamera_fwk_input_pp_data_t *src_frame = NULL;
1718         src_frame = (qcamera_fwk_input_pp_data_t *)malloc(
1719                 sizeof(qcamera_fwk_input_pp_data_t));
1720         if (src_frame == NULL) {
1721             ALOGE("%s: No memory for src frame", __func__);
1722             return NO_MEMORY;
1723         }
1724         memset(src_frame, 0, sizeof(qcamera_fwk_input_pp_data_t));
1725         src_frame->src_frame = *pInputBuffer;
1726         rc = mOfflineMemory.getBufDef(reproc_cfg.input_stream_plane_info.plane_info,
1727                 src_frame->input_buffer, input_index);
1728         if (rc != 0) {
1729             free(src_frame);
1730             return rc;
1731         }
1732         if (mYUVDump) {
1733            dumpYUV(&src_frame->input_buffer, reproc_cfg.input_stream_dim,
1734                    reproc_cfg.input_stream_plane_info.plane_info, 1);
1735         }
1736         cam_dimension_t dim = {sizeof(metadata_buffer_t), 1};
1737         cam_stream_buf_plane_info_t meta_planes;
1738         rc = mm_stream_calc_offset_metadata(&dim, mPaddingInfo, &meta_planes);
1739         if (rc != 0) {
1740             ALOGE("%s: Metadata stream plane info calculation failed!", __func__);
1741             free(src_frame);
1742             return rc;
1743         }
1744 
1745         rc = mOfflineMetaMemory.allocate(1, sizeof(metadata_buffer_t), false);
1746         if (NO_ERROR != rc) {
1747             ALOGE("%s: Couldn't allocate offline metadata buffer!", __func__);
1748             free(src_frame);
1749             return rc;
1750         }
1751         mm_camera_buf_def_t meta_buf;
1752         cam_frame_len_offset_t offset = meta_planes.plane_info;
1753         rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, 0);
1754         if (NO_ERROR != rc) {
1755             free(src_frame);
1756             return rc;
1757         }
1758         memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
1759         src_frame->metadata_buffer = meta_buf;
1760         src_frame->reproc_config = reproc_cfg;
1761 
1762         CDBG_HIGH("%s: Post-process started", __func__);
1763         CDBG_HIGH("%s: Issue call to reprocess", __func__);
1764 
1765         m_postprocessor.processData(src_frame);
1766     }
1767     return rc;
1768 }
1769 
1770 
1771 /*===========================================================================
1772  * FUNCTION : metadataBufDone
1773  *
1774  * DESCRIPTION: Buffer done method for a metadata buffer
1775  *
1776  * PARAMETERS :
1777  * @recvd_frame : received metadata frame
1778  *
1779  * RETURN     : int32_t type of status
1780  *              NO_ERROR  -- success
1781  *              non-zero failure code
1782  *==========================================================================*/
1783 int32_t QCamera3PicChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
1784 {
1785     int32_t rc = NO_ERROR;
1786     if ((NULL == m_pMetaChannel) || (NULL == recvd_frame)) {
1787         ALOGE("%s: Metadata channel or metadata buffer invalid", __func__);
1788         return BAD_VALUE;
1789     }
1790 
1791     rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);
1792 
1793     return rc;
1794 }
1795 
1796 /*===========================================================================
1797  * FUNCTION   : dataNotifyCB
1798  *
1799  * DESCRIPTION: Channel Level callback used for super buffer data notify.
1800  *              This function is registered with mm-camera-interface to handle
1801  *              data notify
1802  *
1803  * PARAMETERS :
1804  *   @recvd_frame   : stream frame received
1805  *   @userdata      : user data ptr
1806  *
1807  * RETURN     : none
1808  *==========================================================================*/
1809 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
1810                                  void *userdata)
1811 {
1812     ATRACE_CALL();
1813     CDBG("%s: E\n", __func__);
1814     QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
1815 
1816     if (channel == NULL) {
1817         ALOGE("%s: invalid channel pointer", __func__);
1818         return;
1819     }
1820 
1821     if(channel->m_numStreams != 1) {
1822         ALOGE("%s: Error: Bug: This callback assumes one stream per channel",__func__);
1823         return;
1824     }
1825 
1826 
1827     if(channel->mStreams[0] == NULL) {
1828         ALOGE("%s: Error: Invalid Stream object",__func__);
1829         return;
1830     }
1831 
1832     channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
1833 
1834     CDBG("%s: X\n", __func__);
1835     return;
1836 }
1837 
1838 /*===========================================================================
1839  * FUNCTION   : registerBuffer
1840  *
1841  * DESCRIPTION: register streaming buffer to the channel object
1842  *
1843  * PARAMETERS :
1844  *   @buffer     : buffer to be registered
1845  *
1846  * RETURN     : int32_t type of status
1847  *              NO_ERROR  -- success
1848  *              non-zero failure code
1849  *==========================================================================*/
1850 int32_t QCamera3PicChannel::registerBuffer(buffer_handle_t *buffer, cam_is_type_t isType)
1851 {
1852     int rc = 0;
1853     mIsType = isType;
1854     if ((uint32_t)mMemory.getCnt() > (mNumBufsRegistered - 1)) {
1855         ALOGE("%s: Trying to register more buffers than initially requested",
1856                 __func__);
1857         return BAD_VALUE;
1858     }
1859 
1860     if (0 == m_numStreams) {
1861         rc = initialize(mIsType);
1862         if (rc != NO_ERROR) {
1863             ALOGE("%s: Couldn't initialize camera stream %d",
1864                     __func__, rc);
1865             return rc;
1866         }
1867     }
1868 
1869     rc = mMemory.registerBuffer(buffer, mStreamType);
1870     if (ALREADY_EXISTS == rc) {
1871         return NO_ERROR;
1872     } else if (NO_ERROR != rc) {
1873         ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
1874         return rc;
1875     }
1876 
1877     CDBG("%s: X",__func__);
1878 
1879     return rc;
1880 }
1881 
1882 void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
1883                             QCamera3Stream *stream)
1884 {
1885     ATRACE_CALL();
1886     //TODO
1887     //Used only for getting YUV. Jpeg callback will be sent back from channel
1888     //directly to HWI. Refer to func jpegEvtHandle
1889 
1890     //Got the yuv callback. Calling yuv callback handler in PostProc
1891     uint8_t frameIndex;
1892     mm_camera_super_buf_t* frame = NULL;
1893     if(!super_frame) {
1894          ALOGE("%s: Invalid Super buffer",__func__);
1895          return;
1896     }
1897 
1898     if(super_frame->num_bufs != 1) {
1899          ALOGE("%s: Multiple streams are not supported",__func__);
1900          return;
1901     }
1902     if(super_frame->bufs[0] == NULL ) {
1903          ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
1904                   __func__);
1905          return;
1906     }
1907 
1908     frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
1909     CDBG("%s: recvd buf_idx: %u for further processing",
1910         __func__, (uint32_t)frameIndex);
1911     if(frameIndex >= mNumSnapshotBufs) {
1912          ALOGE("%s: Error, Invalid index for buffer",__func__);
1913          if(stream) {
1914              Mutex::Autolock lock(mFreeBuffersLock);
1915              mFreeBufferList.push_back(frameIndex);
1916              stream->bufDone(frameIndex);
1917          }
1918          return;
1919     }
1920 
1921     frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1922     if (frame == NULL) {
1923        ALOGE("%s: Error allocating memory to save received_frame structure.",
1924                                                                     __func__);
1925        if(stream) {
1926            Mutex::Autolock lock(mFreeBuffersLock);
1927            mFreeBufferList.push_back(frameIndex);
1928            stream->bufDone(frameIndex);
1929        }
1930        return;
1931     }
1932     *frame = *super_frame;
1933 
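    // Debug-only path: when YUV dumping is enabled, write the received frame
    // to file before passing it to the postprocessor.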
1934     if(mYUVDump) {
1935         cam_dimension_t dim;
1936         memset(&dim, 0, sizeof(dim));
1937         stream->getFrameDimension(dim);
1938         cam_frame_len_offset_t offset;
1939         memset(&offset, 0, sizeof(cam_frame_len_offset_t));
1940         stream->getFrameOffset(offset);
1941         dumpYUV(frame->bufs[0], dim, offset, 1);
1942     }
1943 
1944     m_postprocessor.processData(frame);
1945     free(super_frame);
1946     return;
1947 }
1948 
1949 QCamera3Memory* QCamera3PicChannel::getStreamBufs(uint32_t len)
1950 {
1951     int rc = 0;
1952 
1953     mYuvMemory = new QCamera3HeapMemory();
1954     if (!mYuvMemory) {
1955         ALOGE("%s: unable to create YUV memory", __func__);
1956         return NULL;
1957     }
1958 
1959     //Queue YUV buffers in the beginning mQueueAll = true
1960     rc = mYuvMemory->allocate(mCamera3Stream->max_buffers, len, false);
1961     if (rc < 0) {
1962         ALOGE("%s: unable to allocate YUV memory", __func__);
1963         delete mYuvMemory;
1964         mYuvMemory = NULL;
1965         return NULL;
1966     }
1967     return mYuvMemory;
1968 }
1969 
1970 void QCamera3PicChannel::putStreamBufs()
1971 {
1972     mMemory.unregisterBuffers();
1973 
1974     mYuvMemory->deallocate();
1975     delete mYuvMemory;
1976     mYuvMemory = NULL;
1977 }
1978 
1979 int32_t QCamera3PicChannel::queueReprocMetadata(mm_camera_super_buf_t *metadata)
1980 {
1981     return m_postprocessor.processPPMetadata(metadata);
1982 }
1983 
1984 int32_t QCamera3PicChannel::queueJpegSetting(int32_t index, metadata_buffer_t *metadata)
1985 {
1986     jpeg_settings_t *settings =
1987             (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t));
1988 
1989     if (!settings) {
1990         ALOGE("%s: out of memory allocating jpeg_settings", __func__);
1991         return -ENOMEM;
1992     }
1993 
1994     memset(settings, 0, sizeof(jpeg_settings_t));
1995 
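    // Defaults below apply when the corresponding metadata entry is absent:
    // orientation 0 and main image quality 85; all other fields stay zeroed.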
1996     settings->out_buf_index = index;
1997 
1998     settings->jpeg_orientation = 0;
1999     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
2000         int32_t *orientation = (int32_t *)POINTER_OF_PARAM(
2001                 CAM_INTF_META_JPEG_ORIENTATION, metadata);
2002         settings->jpeg_orientation = *orientation;
2003     }
2004 
2005     settings->jpeg_quality = 85;
2006     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_QUALITY, metadata)) {
2007         uint8_t *quality = (uint8_t *)POINTER_OF_PARAM(
2008                 CAM_INTF_META_JPEG_QUALITY, metadata);
2009         settings->jpeg_quality = *quality;
2010     }
2011 
2012     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) {
2013         uint8_t *quality = (uint8_t *)POINTER_OF_PARAM(
2014                 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
2015         settings->jpeg_thumb_quality = *quality;
2016     }
2017 
2018     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) {
2019         cam_dimension_t *dimension = (cam_dimension_t *)POINTER_OF_PARAM(
2020                 CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
2021         settings->thumbnail_size = *dimension;
2022     }
2023 
2024     settings->gps_timestamp_valid = 0;
2025     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) {
2026         int64_t *timestamp = (int64_t *)POINTER_OF_PARAM(
2027                 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
2028         settings->gps_timestamp = *timestamp;
2029         settings->gps_timestamp_valid = 1;
2030     }
2031 
2032     settings->gps_coordinates_valid = 0;
2033     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) {
2034         double *coordinates = (double *)POINTER_OF_PARAM(
2035                 CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
2036         memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double));
2037         settings->gps_coordinates_valid = 1;
2038     }
2039 
2040     if (IS_PARAM_AVAILABLE(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) {
2041         char *proc_methods = (char *)POINTER_OF_PARAM(
2042                 CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
2043         memset(settings->gps_processing_method, 0,
2044                 sizeof(settings->gps_processing_method));
2045         strncpy(settings->gps_processing_method, proc_methods,
2046                 sizeof(settings->gps_processing_method) - 1);
2047     }
2048 
2049     return m_postprocessor.processJpegSettingData(settings);
2050 }
2051 
2052 /*===========================================================================
2053  * FUNCTION   : getRational
2054  *
2055  * DESCRIPTION: compose rational struct
2056  *
2057  * PARAMETERS :
2058  *   @rat     : ptr to struct to store rational info
2059  *   @num     : numerator of the rational
2060  *   @denom   : denominator of the rational
2061  *
2062  * RETURN     : int32_t type of status
2063  *              NO_ERROR  -- success
2064  *              non-zero failure code
2065  *==========================================================================*/
2066 int32_t getRational(rat_t *rat, int num, int denom)
2067 {
2068     if (NULL == rat) {
2069         ALOGE("%s: NULL rat input", __func__);
2070         return BAD_VALUE;
2071     }
2072     rat->num = num;
2073     rat->denom = denom;
2074     return NO_ERROR;
2075 }
2076 
2077 /*===========================================================================
2078  * FUNCTION   : parseGPSCoordinate
2079  *
2080  * DESCRIPTION: parse GPS coordinate string
2081  *
2082  * PARAMETERS :
2083  *   @coord_str : [input] coordinate string
2084  *   @coord     : [output]  ptr to struct to store coordinate
2085  *
2086  * RETURN     : int32_t type of status
2087  *              NO_ERROR  -- success
2088  *              non-zero failure code
2089  *==========================================================================*/
2090 int parseGPSCoordinate(const char *coord_str, rat_t* coord)
2091 {
2092     if(coord == NULL) {
2093         ALOGE("%s: error, invalid argument coord == NULL", __func__);
2094         return BAD_VALUE;
2095     }
2096     float degF = atof(coord_str);
2097     if (degF < 0) {
2098         degF = -degF;
2099     }
2100     float minF = (degF - (int) degF) * 60;
2101     float secF = (minF - (int) minF) * 60;
2102 
2103     getRational(&coord[0], (int)degF, 1);
2104     getRational(&coord[1], (int)minF, 1);
2105     getRational(&coord[2], (int)(secF * 10000), 10000);
2106     return NO_ERROR;
2107 }
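// Example (for illustration): a coordinate string such as "37.4220" is split
// into 37 degrees, 25 minutes and roughly 19.2 seconds, stored as the rationals
// 37/1, 25/1 and seconds*10000/10000 (exact digits depend on float rounding);
// the sign is dropped here and encoded separately via the reference character.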
2108 
2109 /*===========================================================================
2110  * FUNCTION   : getExifDateTime
2111  *
2112  * DESCRIPTION: query exif date time
2113  *
2114  * PARAMETERS :
2115  *   @dateTime   : string to store exif date time
2116  *   @subsecTime : string to store exif subsec time
2117  *   @count      : length of the dateTime string
2118  *   @subsecCount: length of the subsecTime string
2119  *
2120  * RETURN     : int32_t type of status
2121  *              NO_ERROR  -- success
2122  *              non-zero failure code
2123  *==========================================================================*/
2124 int32_t getExifDateTime(char *dateTime, char *subsecTime,
2125         uint32_t &count, uint32_t &subsecCount)
2126 {
2127     //get time and date from system
2128     struct timeval tv;
2129     struct tm *timeinfo;
2130 
2131     gettimeofday(&tv, NULL);
2132     timeinfo = localtime(&tv.tv_sec);
2133     //Write datetime according to EXIF Spec
2134     //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
2135     snprintf(dateTime, 20, "%04d:%02d:%02d %02d:%02d:%02d",
2136              timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
2137              timeinfo->tm_mday, timeinfo->tm_hour,
2138              timeinfo->tm_min, timeinfo->tm_sec);
2139     count = 20;
2140 
2141     //Write subsec according to EXIF Spec
2142     snprintf(subsecTime, 7, "%06ld", tv.tv_usec);
2143     subsecCount = 7;
2144     return NO_ERROR;
2145 }
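// Example (for illustration): a capture taken at 14:03:07.042318 local time on
// 2015-06-01 yields dateTime "2015:06:01 14:03:07" (count 20, including the
// terminating NUL) and subsecTime "042318" (subsecCount 7).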
2146 
2147 /*===========================================================================
2148  * FUNCTION   : getExifFocalLength
2149  *
2150  * DESCRIPTION: get exif focal length
2151  *
2152  * PARAMETERS :
2153  *   @focalLength : ptr to rational struct to store focal length
2154  *
2155  * RETURN     : int32_t type of status
2156  *              NO_ERROR  -- success
2157  *              non-zero failure code
2158  *==========================================================================*/
2159 int32_t getExifFocalLength(rat_t *focalLength, float value)
2160 {
2161     int focalLengthValue =
2162         (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION);
2163     return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
2164 }
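// Example (for illustration): a focal length of 4.5 mm is encoded as the EXIF
// rational 450/100, i.e. two decimal places of precision.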
2165 
2166 /*===========================================================================
2167   * FUNCTION   : getExifExpTimeInfo
2168   *
2169   * DESCRIPTION: get exif exposure time information
2170   *
2171   * PARAMETERS :
2172   *   @expoTimeInfo     : exposure time value
2173   * RETURN     : int32_t type of status
2174   *              NO_ERROR  -- success
2175   *              non-zero failure code
2176   *==========================================================================*/
2177 int32_t getExifExpTimeInfo(rat_t *expoTimeInfo, int64_t value)
2178 {
2179 
2180     int cal_exposureTime;
2181     if (value != 0)
2182         cal_exposureTime = value;
2183     else
2184         cal_exposureTime = 60;
2185 
2186     return getRational(expoTimeInfo, 1, cal_exposureTime);
2187 }
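// Note: the incoming value is used directly as the denominator of the
// exposure-time rational, e.g. a value of 125 yields 1/125 s; a value of 0
// falls back to 1/60 s.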
2188 
2189 /*===========================================================================
2190  * FUNCTION   : getExifGpsProcessingMethod
2191  *
2192  * DESCRIPTION: get GPS processing method
2193  *
2194  * PARAMETERS :
2195  *   @gpsProcessingMethod : string to store GPS process method
2196  *   @count               : length of the string
2197  *
2198  * RETURN     : int32_t type of status
2199  *              NO_ERROR  -- success
2200  *              non-zero failure code
2201  *==========================================================================*/
2202 int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod,
2203                                    uint32_t &count, char* value)
2204 {
2205     if(value != NULL) {
2206         memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
2207         count = EXIF_ASCII_PREFIX_SIZE;
2208         strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, value, strlen(value));
2209         count += strlen(value);
2210         gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char
2211         return NO_ERROR;
2212     } else {
2213         return BAD_VALUE;
2214     }
2215 }
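// Example (for illustration): for value "GPS" the output holds the 8-byte
// "ASCII\0\0\0" prefix followed by 'G', 'P', 'S' and a terminating NUL, and
// count is set to 8 + 3 + 1 = 12.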
2216 
2217 /*===========================================================================
2218  * FUNCTION   : getExifLatitude
2219  *
2220  * DESCRIPTION: get exif latitude
2221  *
2222  * PARAMETERS :
2223  *   @latitude : ptr to rational struct to store latitude info
2224  *   @latRef   : character to indicate latitude reference
2225  *
2226  * RETURN     : int32_t type of status
2227  *              NO_ERROR  -- success
2228  *              non-zero failure code
2229  *==========================================================================*/
2230 int32_t getExifLatitude(rat_t *latitude,
2231                                            char *latRef, double value)
2232 {
2233     char str[30];
2234     snprintf(str, sizeof(str), "%f", value);
2235     if(str != NULL) {
2236         parseGPSCoordinate(str, latitude);
2237 
2238         //set Latitude Ref
2239         float latitudeValue = strtof(str, 0);
2240         if(latitudeValue < 0.0f) {
2241             latRef[0] = 'S';
2242         } else {
2243             latRef[0] = 'N';
2244         }
2245         latRef[1] = '\0';
2246         return NO_ERROR;
2247     }else{
2248         return BAD_VALUE;
2249     }
2250 }
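// Note: the sign of the latitude only selects the reference character
// ('S' for negative, 'N' otherwise); the magnitude is converted to
// degree/minute/second rationals by parseGPSCoordinate().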
2251 
2252 /*===========================================================================
2253  * FUNCTION   : getExifLongitude
2254  *
2255  * DESCRIPTION: get exif longitude
2256  *
2257  * PARAMETERS :
2258  *   @longitude : ptr to rational struct to store longitude info
2259  *   @lonRef    : character to indicate longitude reference
2260  *
2261  * RETURN     : int32_t type of status
2262  *              NO_ERROR  -- success
2263  *              non-zero failure code
2264  *==========================================================================*/
2265 int32_t getExifLongitude(rat_t *longitude,
2266                                             char *lonRef, double value)
2267 {
2268     char str[30];
2269     snprintf(str, sizeof(str), "%f", value);
2270     if(str != NULL) {
2271         parseGPSCoordinate(str, longitude);
2272 
2273         //set Longitude Ref
2274         float longitudeValue = strtof(str, 0);
2275         if(longitudeValue < 0.0f) {
2276             lonRef[0] = 'W';
2277         } else {
2278             lonRef[0] = 'E';
2279         }
2280         lonRef[1] = '\0';
2281         return NO_ERROR;
2282     }else{
2283         return BAD_VALUE;
2284     }
2285 }
2286 
2287 /*===========================================================================
2288  * FUNCTION   : getExifAltitude
2289  *
2290  * DESCRIPTION: get exif altitude
2291  *
2292  * PARAMETERS :
2293  *   @altitude : ptr to rational struct to store altitude info
2294  *   @altRef   : character to indicate altitude reference
2295  *
2296  * RETURN     : int32_t type of status
2297  *              NO_ERROR  -- success
2298  *              non-zero failure code
2299  *==========================================================================*/
2300 int32_t getExifAltitude(rat_t *altitude,
2301                                            char *altRef, double value)
2302 {
2303     char str[30];
2304     snprintf(str, sizeof(str), "%f", value);
2305     if(str != NULL) {
2306         double value = atof(str);
2307         *altRef = 0;
2308         if(value < 0){
2309             *altRef = 1;
2310             value = -value;
2311         }
2312         return getRational(altitude, value*1000, 1000);
2313     }else{
2314         return BAD_VALUE;
2315     }
2316 }
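// Note: altRef follows the EXIF GPSAltitudeRef convention, 0 for at/above sea
// level and 1 for below sea level; the altitude itself is stored as a rational
// with a 1/1000 denominator.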
2317 
2318 /*===========================================================================
2319  * FUNCTION   : getExifGpsDateTimeStamp
2320  *
2321  * DESCRIPTION: get exif GPS date time stamp
2322  *
2323  * PARAMETERS :
2324  *   @gpsDateStamp : GPS date time stamp string
2325  *   @bufLen       : length of the string
2326  *   @gpsTimeStamp : ptr to rational struct to store time stamp info
2327  *
2328  * RETURN     : int32_t type of status
2329  *              NO_ERROR  -- success
2330  *              non-zero failure code
2331  *==========================================================================*/
2332 int32_t getExifGpsDateTimeStamp(char *gpsDateStamp,
2333                                            uint32_t bufLen,
2334                                            rat_t *gpsTimeStamp, int64_t value)
2335 {
2336     char str[30];
2337     snprintf(str, sizeof(str), "%lld", value);
2338     if(str != NULL) {
2339         time_t unixTime = (time_t)atol(str);
2340         struct tm *UTCTimestamp = gmtime(&unixTime);
2341 
2342         strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
2343 
2344         getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
2345         getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
2346         getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
2347 
2348         return NO_ERROR;
2349     } else {
2350         return BAD_VALUE;
2351     }
2352 }
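// Example (for illustration): a UTC timestamp for 2015-06-01 14:03:07 yields
// gpsDateStamp "2015:06:01" and gpsTimeStamp rationals 14/1, 3/1 and 7/1.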
2353 
2354 int32_t getExifExposureValue(srat_t* exposure_val, int32_t exposure_comp,
2355                              cam_rational_type_t step)
2356 {
2357     exposure_val->num = exposure_comp * step.numerator;
2358     exposure_val->denom = step.denominator;
2359     return 0;
2360 }
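// Example (for illustration): an exposure compensation index of 2 with a step
// of 1/3 EV produces the signed rational 2/3 (num = 2 * 1, denom = 3).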
2361 /*===========================================================================
2362  * FUNCTION   : getExifData
2363  *
2364  * DESCRIPTION: get exif data to be passed into jpeg encoding
2365  *
2366  * PARAMETERS : @metadata and @jpeg_settings of the capture request
2367  *
2368  * RETURN     : exif data from user setting and GPS
2369  *==========================================================================*/
2370 QCamera3Exif *QCamera3PicChannel::getExifData(metadata_buffer_t *metadata,
2371         jpeg_settings_t *jpeg_settings)
2372 {
2373     QCamera3Exif *exif = new QCamera3Exif();
2374     if (exif == NULL) {
2375         ALOGE("%s: No memory for QCamera3Exif", __func__);
2376         return NULL;
2377     }
2378 
2379     int32_t rc = NO_ERROR;
2380     uint32_t count = 0;
2381 
2382     // add exif entries
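    // Each block below adds a tag only when its source (request metadata, jpeg
    // settings, or a system property) is available; helper failures are logged
    // and that tag is skipped, so a partial set of entries can still be returned.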
2383     {
2384         char dateTime[20];
2385         char subsecTime[7];
2386         uint32_t subsecCount;
2387         memset(dateTime, 0, sizeof(dateTime));
2388         memset(subsecTime, 0, sizeof(subsecTime));
2389         count = 20;
2390         subsecCount = 7;
2391         rc = getExifDateTime(dateTime, subsecTime, count, subsecCount);
2392         if(rc == NO_ERROR) {
2393             exif->addEntry(EXIFTAGID_DATE_TIME,
2394                     EXIF_ASCII,
2395                     count,
2396                     (void *)dateTime);
2397             exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL,
2398                     EXIF_ASCII,
2399                     count,
2400                     (void *)dateTime);
2401             exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED,
2402                     EXIF_ASCII,
2403                     count,
2404                     (void *)dateTime);
2405             exif->addEntry(EXIFTAGID_SUBSEC_TIME,
2406                     EXIF_ASCII,
2407                     subsecCount,
2408                     (void *)subsecTime);
2409             exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL,
2410                     EXIF_ASCII,
2411                     subsecCount,
2412                     (void *)subsecTime);
2413             exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED,
2414                     EXIF_ASCII,
2415                     subsecCount,
2416                     (void *)subsecTime);
2417         } else {
2418             ALOGE("%s: getExifDateTime failed", __func__);
2419         }
2420     }
2421 
2422     if (IS_PARAM_AVAILABLE(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)) {
2423         float focal_length = *(float *)POINTER_OF_PARAM(
2424                 CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
2425         rat_t focalLength;
2426         rc = getExifFocalLength(&focalLength, focal_length);
2427         if (rc == NO_ERROR) {
2428             exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
2429                     EXIF_RATIONAL,
2430                     1,
2431                     (void *)&(focalLength));
2432         } else {
2433             ALOGE("%s: getExifFocalLength failed", __func__);
2434         }
2435     }
2436 
2437     if (IS_PARAM_AVAILABLE(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)) {
2438         int16_t isoSpeed = *(int32_t *)POINTER_OF_PARAM(
2439                 CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
2440         exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
2441                    EXIF_SHORT,
2442                    1,
2443                    (void *)&(isoSpeed));
2444     }
2445 
2446     if (IS_PARAM_AVAILABLE(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)) {
2447         int64_t sensor_exposure_time = *(int64_t *)POINTER_OF_PARAM(
2448                 CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
2449         rat_t sensorExpTime;
2450         rc = getExifExpTimeInfo(&sensorExpTime, sensor_exposure_time);
2451         if (rc == NO_ERROR){
2452             exif->addEntry(EXIFTAGID_EXPOSURE_TIME,
2453                     EXIF_RATIONAL,
2454                     1,
2455                     (void *)&(sensorExpTime));
2456         } else {
2457             ALOGE("%s: getExifExpTimeInfo failed", __func__);
2458         }
2459     }
2460 
2461     if (strlen(jpeg_settings->gps_processing_method) > 0) {
2462         char gpsProcessingMethod[
2463                     EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
2464         count = 0;
2465         rc = getExifGpsProcessingMethod(gpsProcessingMethod,
2466                 count, jpeg_settings->gps_processing_method);
2467         if(rc == NO_ERROR) {
2468             exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
2469                     EXIF_ASCII,
2470                     count,
2471                     (void *)gpsProcessingMethod);
2472         } else {
2473             ALOGE("%s: getExifGpsProcessingMethod failed", __func__);
2474         }
2475     }
2476 
2477     if (jpeg_settings->gps_coordinates_valid) {
2478 
2479         //latitude
2480         rat_t latitude[3];
2481         char latRef[2];
2482         rc = getExifLatitude(latitude, latRef,
2483                 jpeg_settings->gps_coordinates[0]);
2484         if(rc == NO_ERROR) {
2485             exif->addEntry(EXIFTAGID_GPS_LATITUDE,
2486                            EXIF_RATIONAL,
2487                            3,
2488                            (void *)latitude);
2489             exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
2490                            EXIF_ASCII,
2491                            2,
2492                            (void *)latRef);
2493         } else {
2494             ALOGE("%s: getExifLatitude failed", __func__);
2495         }
2496 
2497         //longitude
2498         rat_t longitude[3];
2499         char lonRef[2];
2500         rc = getExifLongitude(longitude, lonRef,
2501                 jpeg_settings->gps_coordinates[1]);
2502         if(rc == NO_ERROR) {
2503             exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
2504                            EXIF_RATIONAL,
2505                            3,
2506                            (void *)longitude);
2507 
2508             exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
2509                            EXIF_ASCII,
2510                            2,
2511                            (void *)lonRef);
2512         } else {
2513             ALOGE("%s: getExifLongitude failed", __func__);
2514         }
2515 
2516         //altitude
2517         rat_t altitude;
2518         char altRef;
2519         rc = getExifAltitude(&altitude, &altRef,
2520                 jpeg_settings->gps_coordinates[2]);
2521         if(rc == NO_ERROR) {
2522             exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
2523                            EXIF_RATIONAL,
2524                            1,
2525                            (void *)&(altitude));
2526 
2527             exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
2528                            EXIF_BYTE,
2529                            1,
2530                            (void *)&altRef);
2531         } else {
2532             ALOGE("%s: getExifAltitude failed", __func__);
2533         }
2534     }
2535 
2536     if (jpeg_settings->gps_timestamp_valid) {
2537 
2538         char gpsDateStamp[20];
2539         rat_t gpsTimeStamp[3];
2540         rc = getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp,
2541                 jpeg_settings->gps_timestamp);
2542         if(rc == NO_ERROR) {
2543             exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
2544                            EXIF_ASCII,
2545                            strlen(gpsDateStamp) + 1,
2546                            (void *)gpsDateStamp);
2547 
2548             exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
2549                            EXIF_RATIONAL,
2550                            3,
2551                            (void *)gpsTimeStamp);
2552         } else {
2553             ALOGE("%s: getExifGpsDataTimeStamp failed", __func__);
2554         }
2555     }
2556 
2557     if (IS_PARAM_AVAILABLE(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) &&
2558             IS_PARAM_AVAILABLE(CAM_INTF_PARM_EV_STEP, metadata)) {
2559         int32_t exposure_comp = *(int32_t *)POINTER_OF_PARAM(
2560                 CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata);
2561         cam_rational_type_t comp_step = *(cam_rational_type_t *)POINTER_OF_PARAM(
2562                 CAM_INTF_PARM_EV_STEP, metadata);
2563         srat_t exposure_val;
2564         rc = getExifExposureValue(&exposure_val, exposure_comp, comp_step);
2565         if(rc == NO_ERROR) {
2566             exif->addEntry(EXIFTAGID_EXPOSURE_BIAS_VALUE,
2567                        EXIF_SRATIONAL,
2568                        1,
2569                        (void *)(&exposure_val));
2570         } else {
2571             ALOGE("%s: getExifExposureValue failed ", __func__);
2572         }
2573     }
2574 
2575     char value[PROPERTY_VALUE_MAX];
2576     if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
2577         exif->addEntry(EXIFTAGID_MAKE,
2578                        EXIF_ASCII,
2579                        strlen(value) + 1,
2580                        (void *)value);
2581     } else {
2582         ALOGE("%s: getExifMaker failed", __func__);
2583     }
2584 
2585     if (property_get("ro.product.model", value, "QCAM-AA") > 0) {
2586         exif->addEntry(EXIFTAGID_MODEL,
2587                        EXIF_ASCII,
2588                        strlen(value) + 1,
2589                        (void *)value);
2590     } else {
2591         ALOGE("%s: getExifModel failed", __func__);
2592     }
2593 
2594     return exif;
2595 }
2596 
2597 /* There can be MAX_INFLIGHT_REQUESTS number of requests that could get queued up. Hence
2598  allocating same number of picture channel buffers */
2599 int QCamera3PicChannel::kMaxBuffers = MAX_INFLIGHT_REQUESTS;
2600 
2601 void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
2602 {
2603    mYuvWidth = width;
2604    mYuvHeight = height;
2605 }
2606 
2607 /*===========================================================================
2608  * FUNCTION   : QCamera3ReprocessChannel
2609  *
2610  * DESCRIPTION: constructor of QCamera3ReprocessChannel
2611  *
2612  * PARAMETERS :
2613  *   @cam_handle : camera handle
2614  *   @cam_ops    : ptr to camera ops table
2615  *   @pp_mask    : post-process feature mask
2616  *
2617  * RETURN     : none
2618  *==========================================================================*/
2619 QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
2620                                                  mm_camera_ops_t *cam_ops,
2621                                                  channel_cb_routine cb_routine,
2622                                                  cam_padding_info_t *paddingInfo,
2623                                                  uint32_t postprocess_mask,
2624                                                  void *userData, void *ch_hdl) :
2625     QCamera3Channel(cam_handle, cam_ops, cb_routine, paddingInfo, postprocess_mask,
2626                     userData),
2627     picChHandle(ch_hdl),
2628     mOfflineBuffersIndex(-1),
2629     m_pSrcChannel(NULL),
2630     m_pMetaChannel(NULL),
2631     mMemory(NULL)
2632 {
2633     memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
2634     mOfflineMetaIndex = MAX_INFLIGHT_REQUESTS -1;
2635 }
2636 
2637 
2638 /*===========================================================================
2639  * FUNCTION   : initialize
2640  *
2641  * DESCRIPTION: initialize the reprocess channel and its channel attributes
2642  *
2643  * PARAMETERS :
2644  *   @isType : image stabilization (IS) type
2645  *
2646  * RETURN     : int32_t type of status
2647  *              NO_ERROR  -- success
2648  *              non-zero failure code
2649  *==========================================================================*/
2650 int32_t QCamera3ReprocessChannel::initialize(cam_is_type_t isType)
2651 {
2652     int32_t rc = NO_ERROR;
2653     mm_camera_channel_attr_t attr;
2654 
2655     memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
2656     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
2657     attr.max_unmatched_frames = 1;
2658 
2659     rc = init(&attr, NULL);
2660     if (rc < 0) {
2661         ALOGE("%s: init failed", __func__);
2662     }
2663     mIsType = isType;
2664     return rc;
2665 }
2666 
2667 
2668 /*===========================================================================
2669  * FUNCTION   : streamCbRoutine
2670  *
2671  * DESCRIPTION: stream callback for the reprocess stream; forwards the
2672  *              reprocessed frame to the postprocessor for jpeg encoding
2673  *
2674  * PARAMETERS :
2675  *   @super_frame : super buffer received from the reprocess stream
2676  *   @stream      : reprocess stream object
2677  *
2678  * RETURN     : none
2679  *==========================================================================*/
2680 void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
2681                                   QCamera3Stream *stream)
2682 {
2683     //Got the pproc data callback. Now send to jpeg encoding
2684     uint8_t frameIndex;
2685     mm_camera_super_buf_t* frame = NULL;
2686     QCamera3PicChannel *obj = (QCamera3PicChannel *)picChHandle;
2687 
2688     if(!super_frame) {
2689          ALOGE("%s: Invalid Super buffer",__func__);
2690          return;
2691     }
2692 
2693     if(super_frame->num_bufs != 1) {
2694          ALOGE("%s: Multiple streams are not supported",__func__);
2695          return;
2696     }
2697     if(super_frame->bufs[0] == NULL ) {
2698          ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
2699                   __func__);
2700          return;
2701     }
2702 
2703     frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
2704     frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
2705     if (frame == NULL) {
2706        ALOGE("%s: Error allocating memory to save received_frame structure.",
2707                                                                     __func__);
2708        if(stream) {
2709            stream->bufDone(frameIndex);
2710        }
2711        return;
2712     }
2713     CDBG("%s: bufIndex: %u recvd from post proc",
2714         __func__, (uint32_t)frameIndex);
2715     *frame = *super_frame;
2716     if(mYUVDump) {
2717         cam_dimension_t dim;
2718         memset(&dim, 0, sizeof(dim));
2719         stream->getFrameDimension(dim);
2720         cam_frame_len_offset_t offset;
2721         memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2722         stream->getFrameOffset(offset);
2723         dumpYUV(frame->bufs[0], dim, offset, 2);
2724     }
2725     obj->m_postprocessor.processPPData(frame);
2726     free(super_frame);
2727     return;
2728 }
2729 
2730 /*===========================================================================
2731  * FUNCTION   : QCamera3ReprocessChannel
2732  *
2733  * DESCRIPTION: default constructor of QCamera3ReprocessChannel
2734  *
2735  * PARAMETERS : none
2736  *
2737  * RETURN     : none
2738  *==========================================================================*/
2739 QCamera3ReprocessChannel::QCamera3ReprocessChannel() :
2740     m_pSrcChannel(NULL),
2741     m_pMetaChannel(NULL)
2742 {
2743 }
2744 
2745 /*===========================================================================
2746  * FUNCTION   : getStreamBufs
2747  *
2748  * DESCRIPTION: register the buffers of the reprocess channel
2749  * DESCRIPTION: allocate the output buffers of the reprocess channel
2750  *
2751  * PARAMETERS :
2752  *   @len : length of each buffer to allocate
2753  * RETURN     : QCamera3Memory *
2754 QCamera3Memory* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
2755 {
2756    int rc = 0;
2757 
2758     mMemory = new QCamera3HeapMemory();
2759     if (!mMemory) {
2760         ALOGE("%s: unable to create reproc memory", __func__);
2761         return NULL;
2762     }
2763 
2764     //Queue YUV buffers in the beginning mQueueAll = true
2765     /* There can be MAX_INFLIGHT_REQUESTS number of requests that could get queued up.
2766      * Hence allocating same number of reprocess channel's output buffers */
2767     rc = mMemory->allocate(MAX_INFLIGHT_REQUESTS, len, true);
2768     if (rc < 0) {
2769         ALOGE("%s: unable to allocate reproc memory", __func__);
2770         delete mMemory;
2771         mMemory = NULL;
2772         return NULL;
2773     }
2774     return mMemory;
2775 }
2776 
2777 /*===========================================================================
2778  * FUNCTION   : putStreamBufs
2779  *
2780  * DESCRIPTION: release the output buffers of the reprocess channel
2781  *
2782  * PARAMETERS : none
2783  *
2784  * RETURN     : none
2785  *==========================================================================*/
2786 void QCamera3ReprocessChannel::putStreamBufs()
2787 {
2788     mMemory->deallocate();
2789     delete mMemory;
2790     mMemory = NULL;
2791 }
2792 
2793 /*===========================================================================
2794  * FUNCTION   : ~QCamera3ReprocessChannel
2795  *
2796  * DESCRIPTION: destructor of QCamera3ReprocessChannel
2797  *
2798  * PARAMETERS : none
2799  *
2800  * RETURN     : none
2801  *==========================================================================*/
2802 QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
2803 {
2804 }
2805 
2806 /*===========================================================================
2807  * FUNCTION   : getStreamBySrcHandle
2808  *
2809  * DESCRIPTION: find reprocess stream by its source stream handle
2810  *
2811  * PARAMETERS :
2812  *   @srcHandle : source stream handle
2813  *
2814  * RETURN     : ptr to reprocess stream if found. NULL if not found
2815  *==========================================================================*/
2816 QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle)
2817 {
2818     QCamera3Stream *pStream = NULL;
2819 
2820     for (int i = 0; i < m_numStreams; i++) {
2821         if (mSrcStreamHandles[i] == srcHandle) {
2822             pStream = mStreams[i];
2823             break;
2824         }
2825     }
2826     return pStream;
2827 }
2828 
2829 /*===========================================================================
2830  * FUNCTION   : getSrcStreamBySrcHandle
2831  *
2832  * DESCRIPTION: find source stream by source stream handle
2833  *
2834  * PARAMETERS :
2835  *   @srcHandle : source stream handle
2836  *
2837  * RETURN     : ptr to source stream if found. NULL if not found
2838  *==========================================================================*/
2839 QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle)
2840 {
2841     QCamera3Stream *pStream = NULL;
2842 
2843     if (NULL == m_pSrcChannel) {
2844         return NULL;
2845     }
2846 
2847     for (int i = 0; i < m_numStreams; i++) {
2848         if (mSrcStreamHandles[i] == srcHandle) {
2849             pStream = m_pSrcChannel->getStreamByIndex(i);
2850             break;
2851         }
2852     }
2853     return pStream;
2854 }
2855 
2856 /*===========================================================================
2857  * FUNCTION   : stop
2858  *
2859  * DESCRIPTION: stop channel
2860  *
2861  * PARAMETERS : none
2862  *
2863  * RETURN     : int32_t type of status
2864  *              NO_ERROR  -- success
2865  *              non-zero failure code
2866  *==========================================================================*/
2867 int32_t QCamera3ReprocessChannel::stop()
2868 {
2869     unmapOfflineBuffers(true);
2870 
2871     return QCamera3Channel::stop();
2872 }
2873 
2874 /*===========================================================================
2875  * FUNCTION   : unmapOfflineBuffers
2876  *
2877  * DESCRIPTION: Unmaps offline buffers
2878  *
2879  * PARAMETERS :
2880  *   @all : true to unmap all offline buffers, false to unmap only the oldest entry
2881  * RETURN     : int32_t type of status
2882  *              NO_ERROR  -- success
2883  *              non-zero failure code
2884  *==========================================================================*/
2885 int32_t QCamera3ReprocessChannel::unmapOfflineBuffers(bool all)
2886 {
2887     int rc = NO_ERROR;
2888     if (!mOfflineBuffers.empty()) {
2889         QCamera3Stream *stream = NULL;
2890         List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
2891         for (; it != mOfflineBuffers.end(); it++) {
2892            stream = (*it).stream;
2893            if (NULL != stream) {
2894                rc = stream->unmapBuf((*it).type,
2895                                      (*it).index,
2896                                         -1);
2897                if (NO_ERROR != rc) {
2898                    ALOGE("%s: Error during offline buffer unmap %d",
2899                          __func__, rc);
2900                }
2901                CDBG("%s: Unmapped buffer with index %d", __func__, (*it).index);
2902            }
2903            if (!all) {
2904                mOfflineBuffers.erase(it);
2905                break;
2906            }
2907         }
2908         if (all) {
2909            mOfflineBuffers.clear();
2910         }
2911     }
2912 
2913     if (!mOfflineMetaBuffers.empty()) {
2914         QCamera3Stream *stream = NULL;
2915         List<OfflineBuffer>::iterator it = mOfflineMetaBuffers.begin();
2916         for (; it != mOfflineMetaBuffers.end(); it++) {
2917            stream = (*it).stream;
2918            if (NULL != stream) {
2919                rc = stream->unmapBuf((*it).type,
2920                                      (*it).index,
2921                                         -1);
2922                if (NO_ERROR != rc) {
2923                    ALOGE("%s: Error during offline buffer unmap %d",
2924                          __func__, rc);
2925                }
2926                CDBG("%s: Unmapped meta buffer with index %d", __func__, (*it).index);
2927            }
2928            if (!all) {
2929                mOfflineMetaBuffers.erase(it);
2930                break;
2931            }
2932         }
2933         if (all) {
2934            mOfflineMetaBuffers.clear();
2935         }
2936     }
2937     return rc;
2938 }
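
/* Usage note (illustrative sketch, not from the original sources): stop()
 * above releases every outstanding offline mapping via
 * unmapOfflineBuffers(true). A per-frame caller would instead drop only the
 * oldest entry from each offline list once that request's reprocess round
 * trip has completed, e.g. (hypothetical call site):
 *
 *     reprocChannel->unmapOfflineBuffers(false);
 */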

/*===========================================================================
 * FUNCTION   : extractFrameCropAndRotation
 *
 * DESCRIPTION: Extract output crop, rotation and frame data for the reprocess
 *              stream if present
 *
 * PARAMETERS :
 *   @frame         : input frame from source stream
 *   @meta_buffer   : metadata buffer corresponding to the input frame
 *   @jpeg_settings : jpeg settings carrying the requested jpeg orientation
 *   @fwk_frame     : [out] framework input frame populated for reprocessing
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::extractFrameCropAndRotation(mm_camera_super_buf_t *frame,
        mm_camera_buf_def_t *meta_buffer, jpeg_settings_t *jpeg_settings,
        qcamera_fwk_input_pp_data_t &fwk_frame)
{
    if ((NULL == meta_buffer) || (NULL == frame) || (NULL == jpeg_settings)) {
        return BAD_VALUE;
    }

    metadata_buffer_t *meta = (metadata_buffer_t *)meta_buffer->buffer;
    if (NULL == meta) {
        return BAD_VALUE;
    }

    for (int i = 0; i < frame->num_bufs; i++) {
        QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
        QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);

        if (pStream != NULL && pSrcStream != NULL) {
            // Find rotation info for reprocess stream
            if (jpeg_settings->jpeg_orientation == 0) {
                fwk_frame.reproc_config.rotation = ROTATE_0;
            } else if (jpeg_settings->jpeg_orientation == 90) {
                fwk_frame.reproc_config.rotation = ROTATE_90;
            } else if (jpeg_settings->jpeg_orientation == 180) {
                fwk_frame.reproc_config.rotation = ROTATE_180;
            } else if (jpeg_settings->jpeg_orientation == 270) {
                fwk_frame.reproc_config.rotation = ROTATE_270;
            }

            // Find crop info for reprocess stream from the entry that matches
            // the source stream's server ID
            cam_crop_data_t *crop_data = (cam_crop_data_t *)
                POINTER_OF_PARAM(CAM_INTF_META_CROP_DATA, meta);
            if (NULL != crop_data) {
                for (int j = 0; j < crop_data->num_of_streams; j++) {
                    if (crop_data->crop_info[j].stream_id ==
                            pSrcStream->getMyServerID()) {
                        fwk_frame.reproc_config.output_crop =
                            crop_data->crop_info[j].crop;
                        CDBG("%s: Found offline reprocess crop %dx%d %dx%d",
                              __func__,
                              crop_data->crop_info[j].crop.left,
                              crop_data->crop_info[j].crop.top,
                              crop_data->crop_info[j].crop.width,
                              crop_data->crop_info[j].crop.height);
                        break;
                    }
                }
            }
            fwk_frame.input_buffer = *frame->bufs[i];
            fwk_frame.metadata_buffer = *meta_buffer;
            break;
        } else {
            ALOGE("%s: Source/Re-process streams are invalid", __func__);
            return BAD_VALUE;
        }
    }

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : extractCrop
 *
 * DESCRIPTION: Extract framework output crop if present
 *
 * PARAMETERS :
 *   @frame     : input frame for reprocessing
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::extractCrop(qcamera_fwk_input_pp_data_t *frame)
{
    if (NULL == frame) {
        ALOGE("%s: Incorrect input frame", __func__);
        return BAD_VALUE;
    }

    if (NULL == frame->metadata_buffer.buffer) {
        ALOGE("%s: No metadata available", __func__);
        return BAD_VALUE;
    }

    // Find crop info for reprocess stream
    metadata_buffer_t *meta = (metadata_buffer_t *) frame->metadata_buffer.buffer;
    if (IS_META_AVAILABLE(CAM_INTF_META_CROP_DATA, meta)) {
        cam_crop_data_t *crop_data = (cam_crop_data_t *)
                POINTER_OF_PARAM(CAM_INTF_META_CROP_DATA, meta);
        if (1 == crop_data->num_of_streams) {
            frame->reproc_config.output_crop = crop_data->crop_info[0].crop;
            CDBG("%s: Found offline reprocess crop %dx%d %dx%d", __func__,
                    crop_data->crop_info[0].crop.left,
                    crop_data->crop_info[0].crop.top,
                    crop_data->crop_info[0].crop.width,
                    crop_data->crop_info[0].crop.height);
        } else {
            ALOGE("%s: Incorrect number of offline crop data entries %d",
                    __func__,
                    crop_data->num_of_streams);
            return BAD_VALUE;
        }
    } else {
        CDBG_HIGH("%s: Crop data not present", __func__);
    }

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : doReprocessOffline
 *
 * DESCRIPTION: request to do a reprocess on the frame
 *
 * PARAMETERS :
 *   @frame     : input frame for reprocessing
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::doReprocessOffline(qcamera_fwk_input_pp_data_t *frame)
{
    int32_t rc = 0;
    OfflineBuffer mappedBuffer;

    if (m_numStreams < 1) {
        ALOGE("%s: No reprocess stream is created", __func__);
        return -1;
    }

    if (NULL == frame) {
        ALOGE("%s: Incorrect input frame", __func__);
        return BAD_VALUE;
    }

    if (NULL == frame->metadata_buffer.buffer) {
        ALOGE("%s: No metadata available", __func__);
        return BAD_VALUE;
    }

    if (NULL == frame->input_buffer.buffer) {
        ALOGE("%s: No input buffer available", __func__);
        return BAD_VALUE;
    }

    if ((0 == m_numStreams) || (NULL == mStreams[0])) {
        ALOGE("%s: Reprocess stream not initialized!", __func__);
        return NO_INIT;
    }

    QCamera3Stream *pStream = mStreams[0];
    int32_t max_idx = MAX_INFLIGHT_REQUESTS - 1;
    // loop back the indices if max burst count reached
    if (mOfflineBuffersIndex == max_idx) {
        mOfflineBuffersIndex = -1;
    }
    uint32_t buf_idx = mOfflineBuffersIndex + 1;

    rc = pStream->mapBuf(
            CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
            buf_idx, -1,
            frame->input_buffer.fd, frame->input_buffer.frame_len);
    if (NO_ERROR == rc) {
        mappedBuffer.index = buf_idx;
        mappedBuffer.stream = pStream;
        mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
        mOfflineBuffers.push_back(mappedBuffer);
        mOfflineBuffersIndex = buf_idx;
        CDBG("%s: Mapped buffer with index %d", __func__, mOfflineBuffersIndex);
    }

    max_idx = MAX_INFLIGHT_REQUESTS * 2 - 1;
    // loop back the indices if max burst count reached
    if (mOfflineMetaIndex == max_idx) {
        mOfflineMetaIndex = MAX_INFLIGHT_REQUESTS - 1;
    }
    uint32_t meta_buf_idx = mOfflineMetaIndex + 1;

    rc |= pStream->mapBuf(
            CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,
            meta_buf_idx, -1,
            frame->metadata_buffer.fd, frame->metadata_buffer.frame_len);
    if (NO_ERROR == rc) {
        mappedBuffer.index = meta_buf_idx;
        mappedBuffer.stream = pStream;
        mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF;
        mOfflineMetaBuffers.push_back(mappedBuffer);
        mOfflineMetaIndex = meta_buf_idx;
        CDBG("%s: Mapped meta buffer with index %d", __func__, mOfflineMetaIndex);
    }

    if (rc == NO_ERROR) {
        cam_stream_parm_buffer_t param;
        memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
        param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
        param.reprocess.buf_index = buf_idx;
        param.reprocess.frame_idx = frame->input_buffer.frame_idx;
        param.reprocess.meta_present = 1;
        param.reprocess.meta_buf_index = meta_buf_idx;
        param.reprocess.frame_pp_config.rotation = frame->reproc_config.rotation;
        param.reprocess.frame_pp_config.crop.input_crop = frame->reproc_config.output_crop;
        param.reprocess.frame_pp_config.crop.crop_enabled = 1;
        rc = pStream->setParameter(param);
        if (rc != NO_ERROR) {
            ALOGE("%s: stream setParameter for reprocess failed", __func__);
        }
    } else {
        ALOGE("%s: Input/meta buffer memory map failed: %d", __func__, rc);
    }

    return rc;
}
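
/* Illustrative call sequence (a sketch, not part of the original code): the
 * expected flow is to populate the framework frame from the source super
 * buffer and its metadata first, then hand it to this channel. Variable
 * names below are hypothetical.
 *
 *     qcamera_fwk_input_pp_data_t fwk_frame;
 *     memset(&fwk_frame, 0, sizeof(fwk_frame));
 *     rc = reprocChannel->extractFrameCropAndRotation(src_frame, meta_buf,
 *             jpeg_settings, fwk_frame);
 *     if (rc == NO_ERROR) {
 *         rc = reprocChannel->doReprocessOffline(&fwk_frame);
 *     }
 */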

/*===========================================================================
 * FUNCTION   : doReprocess
 *
 * DESCRIPTION: request to do a reprocess on the frame
 *
 * PARAMETERS :
 *   @buf_fd     : fd to the input buffer that needs reprocess
 *   @buf_length : length of the input buffer
 *   @ret_val    : result of reprocess.
 *                 Example: Could be faceID in case of register face image.
 *   @meta_frame : metadata frame corresponding to the input buffer
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd,
                                              uint32_t buf_length,
                                              int32_t &ret_val,
                                              mm_camera_super_buf_t *meta_frame)
{
    int32_t rc = 0;
    if (m_numStreams < 1) {
        ALOGE("%s: No reprocess stream is created", __func__);
        return -1;
    }
    if (meta_frame == NULL) {
        ALOGE("%s: Did not get corresponding metadata in time", __func__);
        return -1;
    }

    uint32_t buf_idx = 0;
    for (int i = 0; i < m_numStreams; i++) {
        rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
                                 buf_idx, -1,
                                 buf_fd, buf_length);

        if (rc == NO_ERROR) {
            cam_stream_parm_buffer_t param;
            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
            param.reprocess.buf_index = buf_idx;
            param.reprocess.meta_present = 1;
            param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
            param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
            rc = mStreams[i]->setParameter(param);
            if (rc == NO_ERROR) {
                ret_val = param.reprocess.ret_val;
            }
            mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
                                  buf_idx, -1);
        }
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : addReprocStreamsFromSource
 *
 * DESCRIPTION: add reprocess streams from input source channel
 *
 * PARAMETERS :
 *   @pp_config      : pp feature configuration
 *   @src_config     : source reprocess configuration
 *   @is_type        : image stabilization type on the stream
 *   @pMetaChannel   : ptr to metadata channel to get corresp. metadata
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
        const reprocess_config_t &src_config, cam_is_type_t is_type,
        QCamera3Channel *pMetaChannel)
{
    int32_t rc = 0;
    cam_stream_reproc_config_t reprocess_config;
    cam_stream_type_t streamType;

    /* There can be MAX_INFLIGHT_REQUESTS number of requests that could get queued up.
     * Hence allocating same number of reprocess channel's output buffers */
    int num_buffers = MAX_INFLIGHT_REQUESTS;
    cam_dimension_t streamDim = src_config.output_stream_dim;

    if (NULL != src_config.src_channel) {
        QCamera3Stream *pSrcStream = src_config.src_channel->getStreamByIndex(0);
        if (pSrcStream == NULL) {
            ALOGE("%s: source channel doesn't have a stream", __func__);
            return BAD_VALUE;
        }
        mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
    }

    streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
    reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;

    reprocess_config.offline.input_fmt = src_config.stream_format;
    reprocess_config.offline.input_dim = src_config.input_stream_dim;
    reprocess_config.offline.input_buf_planes.plane_info =
            src_config.input_stream_plane_info.plane_info;
    reprocess_config.offline.num_of_bufs = num_buffers;
    reprocess_config.offline.input_type = src_config.stream_type;

    reprocess_config.pp_feature_config = pp_config;
    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
            m_handle,
            m_camOps,
            mPaddingInfo,
            (QCamera3Channel*)this);
    if (pStream == NULL) {
        ALOGE("%s: No mem for Stream", __func__);
        return NO_MEMORY;
    }

    rc = pStream->init(streamType, src_config.stream_format,
            streamDim, &reprocess_config,
            num_buffers,
            reprocess_config.pp_feature_config.feature_mask,
            is_type,
            QCamera3Channel::streamCbRoutine, this);

    if (rc == 0) {
        mStreams[m_numStreams] = pStream;
        m_numStreams++;
    } else {
        ALOGE("%s: failed to create reprocess stream", __func__);
        delete pStream;
    }

    if (rc == NO_ERROR) {
        m_pSrcChannel = src_config.src_channel;
        m_pMetaChannel = pMetaChannel;
    }
    if (m_camOps->request_super_buf(m_camHandle, m_handle, 1, 0) < 0) {
        ALOGE("%s: Request for super buffer failed", __func__);
    }
    return rc;
}
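
/* Illustrative configuration sketch (not part of the original code): before
 * adding the reprocess stream, a caller fills a reprocess_config_t describing
 * the source stream and a cam_pp_feature_config_t with the requested
 * post-processing features. All variable names below are hypothetical.
 *
 *     reprocess_config_t src_cfg;
 *     memset(&src_cfg, 0, sizeof(src_cfg));
 *     src_cfg.src_channel = srcChannel;             // channel feeding input frames
 *     src_cfg.stream_type = srcStreamType;          // type of the input stream
 *     src_cfg.stream_format = srcStreamFormat;
 *     src_cfg.input_stream_dim = srcDim;
 *     src_cfg.input_stream_plane_info = srcPlaneInfo;
 *     src_cfg.output_stream_dim = outDim;
 *
 *     cam_pp_feature_config_t pp_cfg;
 *     memset(&pp_cfg, 0, sizeof(pp_cfg));
 *     pp_cfg.feature_mask = requestedFeatureMask;   // desired pp features
 *
 *     rc = reprocChannel->addReprocStreamsFromSource(pp_cfg, src_cfg,
 *             isType, metadataChannel);
 */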

cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};

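/*===========================================================================
 * FUNCTION   : QCamera3SupportChannel
 *
 * DESCRIPTION: constructor of QCamera3SupportChannel
 *
 * PARAMETERS :
 *   @cam_handle       : camera handle
 *   @cam_ops          : ptr to camera ops table
 *   @paddingInfo      : padding information for the stream
 *   @postprocess_mask : post-processing feature mask
 *   @userData         : user data pointer
 *
 * RETURN     : none
 *==========================================================================*/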
QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
                    mm_camera_ops_t *cam_ops,
                    cam_padding_info_t *paddingInfo,
                    uint32_t postprocess_mask,
                    void *userData) :
                        QCamera3Channel(cam_handle, cam_ops,
                                NULL, paddingInfo, postprocess_mask, userData),
                        mMemory(NULL)
{
}

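/*===========================================================================
 * FUNCTION   : ~QCamera3SupportChannel
 *
 * DESCRIPTION: destructor of QCamera3SupportChannel; stops the channel if it
 *              is still active and releases the stream buffer memory
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/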
QCamera3SupportChannel::~QCamera3SupportChannel()
{
    if (m_bIsActive)
        stop();

    if (mMemory) {
        mMemory->deallocate();
        delete mMemory;
        mMemory = NULL;
    }
}

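/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: initialize the support channel and add its internal stream
 *
 * PARAMETERS :
 *   @isType : image stabilization type on the stream
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/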
int32_t QCamera3SupportChannel::initialize(cam_is_type_t isType)
{
    int32_t rc;

    if (mMemory || m_numStreams > 0) {
        ALOGE("%s: support channel already initialized", __func__);
        return -EINVAL;
    }

    rc = init(NULL, NULL);
    if (rc < 0) {
        ALOGE("%s: init failed", __func__);
        return rc;
    }
    mIsType = isType;
    // Hardcode to VGA size for now
    rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_CALLBACK,
        CAM_FORMAT_YUV_420_NV21, kDim, MIN_STREAMING_BUFFER_NUM,
        mPostProcMask, mIsType);
    if (rc < 0) {
        ALOGE("%s: addStream failed", __func__);
    }
    return rc;
}

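/*===========================================================================
 * FUNCTION   : request
 *
 * DESCRIPTION: request handler; no-op, since the support channel serves its
 *              stream from internally allocated buffers rather than from
 *              framework buffers
 *
 * PARAMETERS :
 *   @buffer      : unused
 *   @frameNumber : unused
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *==========================================================================*/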
int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
                                                uint32_t /*frameNumber*/)
{
    return NO_ERROR;
}

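/*===========================================================================
 * FUNCTION   : streamCbRoutine
 *
 * DESCRIPTION: stream callback; validates the incoming super buffer and
 *              immediately returns it to the stream
 *
 * PARAMETERS :
 *   @super_frame : incoming frame bundle from the stream
 *   @stream      : stream the callback was issued for (unused)
 *
 * RETURN     : none
 *==========================================================================*/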
void QCamera3SupportChannel::streamCbRoutine(
                        mm_camera_super_buf_t *super_frame,
                        QCamera3Stream * /*stream*/)
{
    if (super_frame == NULL || super_frame->num_bufs != 1) {
        ALOGE("%s: super_frame is not valid", __func__);
        return;
    }
    bufDone(super_frame);
    free(super_frame);
}

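/*===========================================================================
 * FUNCTION   : getStreamBufs
 *
 * DESCRIPTION: allocate heap-backed stream buffers for the support stream
 *
 * PARAMETERS :
 *   @len : size of each buffer in bytes
 *
 * RETURN     : ptr to the allocated memory object. NULL on failure
 *==========================================================================*/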
QCamera3Memory* QCamera3SupportChannel::getStreamBufs(uint32_t len)
{
    int rc;

    mMemory = new QCamera3HeapMemory();
    if (!mMemory) {
        ALOGE("%s: unable to create heap memory", __func__);
        return NULL;
    }
    rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
    if (rc < 0) {
        ALOGE("%s: unable to allocate heap memory", __func__);
        delete mMemory;
        mMemory = NULL;
        return NULL;
    }
    return mMemory;
}

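/*===========================================================================
 * FUNCTION   : putStreamBufs
 *
 * DESCRIPTION: release the heap-backed stream buffers
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/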
void QCamera3SupportChannel::putStreamBufs()
{
    if (mMemory) {
        mMemory->deallocate();
        delete mMemory;
        mMemory = NULL;
    }
}

}; // namespace qcamera