1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define LOG_TAG "QCameraPostProc"
31
32 // System dependencies
33 #include <fcntl.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #include <utils/Errors.h>
37
38 // Camera dependencies
39 #include "QCamera2HWI.h"
40 #include "QCameraPostProc.h"
41 #include "QCameraTrace.h"
42
43 extern "C" {
44 #include "mm_camera_dbg.h"
45 }
46
47 namespace qcamera {
48
49 const char *QCameraPostProcessor::STORE_LOCATION = "/sdcard/img_%d.jpg";
50
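// Helper: free every allocated JPEG output buffer in the given array and
// reset each pointer to NULL. Note that the macro declares its own loop
// variable, so it should be used at most once per scope.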
51 #define FREE_JPEG_OUTPUT_BUFFER(ptr,cnt) \
52 int jpeg_bufs; \
53 for (jpeg_bufs = 0; jpeg_bufs < (int)cnt; jpeg_bufs++) { \
54 if (ptr[jpeg_bufs] != NULL) { \
55 free(ptr[jpeg_bufs]); \
56 ptr[jpeg_bufs] = NULL; \
57 } \
58 }
59
60 /*===========================================================================
61 * FUNCTION : QCameraPostProcessor
62 *
63 * DESCRIPTION: constructor of QCameraPostProcessor.
64 *
65 * PARAMETERS :
66 * @cam_ctrl : ptr to HWI object
67 *
68 * RETURN : None
69 *==========================================================================*/
70 QCameraPostProcessor::QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl)
71 : m_parent(cam_ctrl),
72 mJpegCB(NULL),
73 mJpegUserData(NULL),
74 mJpegClientHandle(0),
75 mJpegSessionId(0),
76 m_pJpegExifObj(NULL),
77 m_bThumbnailNeeded(TRUE),
78 mPPChannelCount(0),
79 m_bInited(FALSE),
80 m_inputPPQ(releaseOngoingPPData, this),
81 m_ongoingPPQ(releaseOngoingPPData, this),
82 m_inputJpegQ(releaseJpegData, this),
83 m_ongoingJpegQ(releaseJpegData, this),
84 m_inputRawQ(releaseRawData, this),
85 mSaveFrmCnt(0),
86 mUseSaveProc(false),
87 mUseJpegBurst(false),
88 mJpegMemOpt(true),
89 m_JpegOutputMemCount(0),
90 mNewJpegSessionNeeded(true),
91 m_bufCountPPQ(0),
92 m_PPindex(0)
93 {
94 memset(&mJpegHandle, 0, sizeof(mJpegHandle));
95 memset(&mJpegMpoHandle, 0, sizeof(mJpegMpoHandle));
96 memset(&m_pJpegOutputMem, 0, sizeof(m_pJpegOutputMem));
97 memset(mPPChannels, 0, sizeof(mPPChannels));
98 m_DataMem = NULL;
99 mOfflineDataBufs = NULL;
100 pthread_mutex_init(&m_reprocess_lock,NULL);
101 }
102
103 /*===========================================================================
104 * FUNCTION : ~QCameraPostProcessor
105 *
106 * DESCRIPTION: destructor of QCameraPostProcessor.
107 *
108 * PARAMETERS : None
109 *
110 * RETURN : None
111 *==========================================================================*/
112 QCameraPostProcessor::~QCameraPostProcessor()
113 {
114 FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem,m_JpegOutputMemCount);
115 if (m_pJpegExifObj != NULL) {
116 delete m_pJpegExifObj;
117 m_pJpegExifObj = NULL;
118 }
119 for (int8_t i = 0; i < mPPChannelCount; i++) {
120 QCameraChannel *pChannel = mPPChannels[i];
121 if ( pChannel != NULL ) {
122 pChannel->stop();
123 delete pChannel;
124 pChannel = NULL;
125 }
126 }
127 mPPChannelCount = 0;
128 pthread_mutex_destroy(&m_reprocess_lock);
129 }
130
131 /*===========================================================================
132 * FUNCTION : setJpegHandle
133 *
134 * DESCRIPTION: set JPEG client handles
135 *
136 * PARAMETERS :
137 * @pJpegHandle : JPEG ops handle
138 * @pJpegMpoHandle : MPO JPEG ops handle
139 * @clientHandle : JPEG client handle
140 *
141 * RETURN : int32_t type of status
142 * NO_ERROR -- success
143 * non-zero failure code
144 *==========================================================================*/
145 int32_t QCameraPostProcessor::setJpegHandle(mm_jpeg_ops_t *pJpegHandle,
146 mm_jpeg_mpo_ops_t *pJpegMpoHandle, uint32_t clientHandle)
147 {
148 LOGH("E mJpegClientHandle: %d, clientHandle: %d",
149 mJpegClientHandle, clientHandle);
150
151 if(pJpegHandle) {
152 memcpy(&mJpegHandle, pJpegHandle, sizeof(mm_jpeg_ops_t));
153 }
154
155 if(pJpegMpoHandle) {
156 memcpy(&mJpegMpoHandle, pJpegMpoHandle, sizeof(mm_jpeg_mpo_ops_t));
157 }
158 mJpegClientHandle = clientHandle;
159 LOGH("X mJpegClientHandle: %d, clientHandle: %d",
160 mJpegClientHandle, clientHandle);
161 return NO_ERROR;
162 }
163
164 /*===========================================================================
165 * FUNCTION : init
166 *
167 * DESCRIPTION: initialization of postprocessor
168 *
169 * PARAMETERS :
170 * @jpeg_cb : callback to handle jpeg event from mm-camera-interface
171 * @user_data : user data ptr for jpeg callback
172 *
173 * RETURN : int32_t type of status
174 * NO_ERROR -- success
175 * non-zero failure code
176 *==========================================================================*/
177 int32_t QCameraPostProcessor::init(jpeg_encode_callback_t jpeg_cb, void *user_data)
178 {
179 mJpegCB = jpeg_cb;
180 mJpegUserData = user_data;
181 m_dataProcTh.launch(dataProcessRoutine, this);
182 m_saveProcTh.launch(dataSaveRoutine, this);
183 m_parent->mParameters.setReprocCount();
184 m_bInited = TRUE;
185 return NO_ERROR;
186 }
187
188 /*===========================================================================
189 * FUNCTION : deinit
190 *
191 * DESCRIPTION: de-initialization of postprocessor
192 *
193 * PARAMETERS : None
194 *
195 * RETURN : int32_t type of status
196 * NO_ERROR -- success
197 * non-zero failure code
198 *==========================================================================*/
199 int32_t QCameraPostProcessor::deinit()
200 {
201 if (m_bInited == TRUE) {
202 m_dataProcTh.exit();
203 m_saveProcTh.exit();
204 m_bInited = FALSE;
205 }
206 return NO_ERROR;
207 }
208
209 /*===========================================================================
210 * FUNCTION : start
211 *
212 * DESCRIPTION: start postprocessor. Data process thread and data notify thread
213 * will be launched.
214 *
215 * PARAMETERS :
216 * @pSrcChannel : source channel obj ptr that possibly needs reprocess
217 *
218 * RETURN : int32_t type of status
219 * NO_ERROR -- success
220 * non-zero failure code
221 *
222 * NOTE : if any reprocess is needed, a reprocess channel/stream
223 * will be started.
224 *==========================================================================*/
225 int32_t QCameraPostProcessor::start(QCameraChannel *pSrcChannel)
226 {
227 char prop[PROPERTY_VALUE_MAX];
228 int32_t rc = NO_ERROR;
229 QCameraChannel *pInputChannel = pSrcChannel;
230
231 LOGH("E ");
232 if (m_bInited == FALSE) {
233 LOGE("postproc not initialized yet");
234 return UNKNOWN_ERROR;
235 }
236
237 if (m_DataMem != NULL) {
238 m_DataMem->release(m_DataMem);
239 m_DataMem = NULL;
240 }
241
242 if (pInputChannel == NULL) {
243 LOGE("Input Channel for pproc is NULL.");
244 return UNKNOWN_ERROR;
245 }
246
247 if ( m_parent->needReprocess() ) {
248 for (int8_t i = 0; i < mPPChannelCount; i++) {
249 // Delete previous reproc channel
250 QCameraReprocessChannel *pChannel = mPPChannels[i];
251 if (pChannel != NULL) {
252 pChannel->stop();
253 delete pChannel;
254 pChannel = NULL;
255 }
256 }
257 mPPChannelCount = 0;
258
259 m_bufCountPPQ = 0;
260 if (!m_parent->isLongshotEnabled()) {
261 m_parent->mParameters.setReprocCount();
262 }
263
264 if (m_parent->mParameters.getManualCaptureMode() >=
265 CAM_MANUAL_CAPTURE_TYPE_3) {
266 mPPChannelCount = m_parent->mParameters.getReprocCount() - 1;
267 } else {
268 mPPChannelCount = m_parent->mParameters.getReprocCount();
269 }
270
271 // Create all reproc channels and start channel
272 for (int8_t i = 0; i < mPPChannelCount; i++) {
273 mPPChannels[i] = m_parent->addReprocChannel(pInputChannel, i);
274 if (mPPChannels[i] == NULL) {
275 LOGE("cannot add multi reprocess channel i = %d", i);
276 return UNKNOWN_ERROR;
277 }
278 rc = mPPChannels[i]->start();
279 if (rc != 0) {
280 LOGE("cannot start multi reprocess channel i = %d", i);
281 delete mPPChannels[i];
282 mPPChannels[i] = NULL;
283 return UNKNOWN_ERROR;
284 }
285 pInputChannel = static_cast<QCameraChannel *>(mPPChannels[i]);
286 }
287 }
288
289 property_get("persist.camera.longshot.save", prop, "0");
290 mUseSaveProc = atoi(prop) > 0 ? true : false;
291
292 m_PPindex = 0;
293 m_InputMetadata.clear();
294 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, TRUE, FALSE);
295 m_parent->m_cbNotifier.startSnapshots();
296 LOGH("X rc = %d", rc);
297 return rc;
298 }
299
300 /*===========================================================================
301 * FUNCTION : stop
302 *
303 * DESCRIPTION: stop postprocessor. Data process and notify thread will be stopped.
304 *
305 * PARAMETERS : None
306 *
307 * RETURN : int32_t type of status
308 * NO_ERROR -- success
309 * non-zero failure code
310 *
311 * NOTE : reprocess channel will be stopped and deleted if there is any
312 *==========================================================================*/
313 int32_t QCameraPostProcessor::stop()
314 {
315 if (m_bInited == TRUE) {
316 m_parent->m_cbNotifier.stopSnapshots();
317
318 if (m_DataMem != NULL) {
319 m_DataMem->release(m_DataMem);
320 m_DataMem = NULL;
321 }
322
323 // dataProc thread needs to process "stop" as a sync call, because aborting the jpeg job must be synchronous
324 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
325 }
326 // stop reproc channel if exists
327 for (int8_t i = 0; i < mPPChannelCount; i++) {
328 QCameraReprocessChannel *pChannel = mPPChannels[i];
329 if (pChannel != NULL) {
330 pChannel->stop();
331 delete pChannel;
332 pChannel = NULL;
333 }
334 }
335 mPPChannelCount = 0;
336 m_PPindex = 0;
337 m_InputMetadata.clear();
338
339 if (mOfflineDataBufs != NULL) {
340 mOfflineDataBufs->deallocate();
341 delete mOfflineDataBufs;
342 mOfflineDataBufs = NULL;
343 }
344 return NO_ERROR;
345 }
346
347 /*===========================================================================
348 * FUNCTION : createJpegSession
349 *
350 * DESCRIPTION: start the JPEG session in parallel with reprocessing to reduce the KPI
351 *
352 * PARAMETERS :
353 * @pSrcChannel : source channel obj ptr that possibly needs reprocess
354 *
355 * RETURN : int32_t type of status
356 * NO_ERROR -- success
357 * non-zero failure code
358 *==========================================================================*/
359 int32_t QCameraPostProcessor::createJpegSession(QCameraChannel *pSrcChannel)
360 {
361 int32_t rc = NO_ERROR;
362
363 LOGH("E ");
364 if (m_bInited == FALSE) {
365 LOGE("postproc not initialized yet");
366 return UNKNOWN_ERROR;
367 }
368
369 if (pSrcChannel == NULL) {
370 LOGE("Input Channel for pproc is NULL.");
371 return UNKNOWN_ERROR;
372 }
373
374 if (mPPChannelCount > 0) {
375 QCameraChannel *pChannel = NULL;
376 int ppChannel_idx = mPPChannelCount - 1;
377 pChannel = m_parent->needReprocess() ? mPPChannels[ppChannel_idx] :
378 pSrcChannel;
379 QCameraStream *pSnapshotStream = NULL;
380 QCameraStream *pThumbStream = NULL;
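// A separate thumbnail stream is used only when the thumbnail is not
// generated from the main image and, in ZSL mode, only when the snapshot
// and preview flip modes match (see the condition below).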
381 bool thumb_stream_needed = ((!m_parent->isZSLMode() ||
382 (m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) ==
383 m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_PREVIEW))) &&
384 !m_parent->mParameters.generateThumbFromMain());
385
386 if (pChannel == NULL) {
387 LOGE("Input Channel for pproc is NULL for index %d.",
388 ppChannel_idx);
389 return UNKNOWN_ERROR;
390 }
391
392 for (uint32_t i = 0; i < pChannel->getNumOfStreams(); ++i) {
393 QCameraStream *pStream = pChannel->getStreamByIndex(i);
394
395 if ( NULL == pStream ) {
396 break;
397 }
398
399 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
400 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
401 pSnapshotStream = pStream;
402 }
403
404 if ((thumb_stream_needed) &&
405 (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
406 pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
407 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
408 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
409 pThumbStream = pStream;
410 }
411 }
412
413 // If thumbnail is not part of the reprocess channel, then
414 // try to get it from the source channel
415 if ((thumb_stream_needed) && (NULL == pThumbStream) &&
416 (pChannel == mPPChannels[ppChannel_idx])) {
417 for (uint32_t i = 0; i < pSrcChannel->getNumOfStreams(); ++i) {
418 QCameraStream *pStream = pSrcChannel->getStreamByIndex(i);
419
420 if ( NULL == pStream ) {
421 break;
422 }
423
424 if (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
425 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
426 pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
427 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
428 pThumbStream = pStream;
429 }
430 }
431 }
432
433 if ( NULL != pSnapshotStream ) {
434 mm_jpeg_encode_params_t encodeParam;
435 memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
436 rc = getJpegEncodingConfig(encodeParam, pSnapshotStream, pThumbStream);
437 if (rc != NO_ERROR) {
438 LOGE("error getting encoding config");
439 return rc;
440 }
441 LOGH("[KPI Perf] : call jpeg create_session");
442
443 rc = mJpegHandle.create_session(mJpegClientHandle,
444 &encodeParam,
445 &mJpegSessionId);
446 if (rc != NO_ERROR) {
447 LOGE("error creating a new jpeg encoding session");
448 return rc;
449 }
450 mNewJpegSessionNeeded = false;
451 }
452 }
453 LOGH("X ");
454 return rc;
455 }
456
457 /*===========================================================================
458 * FUNCTION : getJpegEncodingConfig
459 *
460 * DESCRIPTION: function to prepare encoding job information
461 *
462 * PARAMETERS :
463 * @encode_parm : param to be filled with encoding configuration
464 *
465 * RETURN : int32_t type of status
466 * NO_ERROR -- success
467 * non-zero failure code
468 *==========================================================================*/
469 int32_t QCameraPostProcessor::getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
470 QCameraStream *main_stream,
471 QCameraStream *thumb_stream)
472 {
473 LOGD("E");
474 int32_t ret = NO_ERROR;
475 size_t out_size;
476
477 char prop[PROPERTY_VALUE_MAX];
478 property_get("persist.camera.jpeg_burst", prop, "0");
479 mUseJpegBurst = (atoi(prop) > 0) && !mUseSaveProc;
480 encode_parm.burst_mode = mUseJpegBurst;
481
482 cam_rect_t crop;
483 memset(&crop, 0, sizeof(cam_rect_t));
484 main_stream->getCropInfo(crop);
485
486 cam_dimension_t src_dim, dst_dim;
487 memset(&src_dim, 0, sizeof(cam_dimension_t));
488 memset(&dst_dim, 0, sizeof(cam_dimension_t));
489 main_stream->getFrameDimension(src_dim);
490
491 bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
492 if (hdr_output_crop && crop.height) {
493 dst_dim.height = crop.height;
494 } else {
495 dst_dim.height = src_dim.height;
496 }
497 if (hdr_output_crop && crop.width) {
498 dst_dim.width = crop.width;
499 } else {
500 dst_dim.width = src_dim.width;
501 }
502
503 // set rotation only when no online rotation or offline pp rotation is done before
504 if (!m_parent->needRotationReprocess()) {
505 encode_parm.rotation = m_parent->mParameters.getJpegRotation();
506 }
507
508 encode_parm.main_dim.src_dim = src_dim;
509 encode_parm.main_dim.dst_dim = dst_dim;
510
511 m_dst_dim = dst_dim;
512
513 encode_parm.jpeg_cb = mJpegCB;
514 encode_parm.userdata = mJpegUserData;
515
516 m_bThumbnailNeeded = TRUE; // thumbnail encoding is needed by default
517 // System property to disable thumbnail encoding in order to reduce power.
518 // Thumbnail encoding defaults to TRUE; set this property explicitly to
519 // disable it.
520 property_get("persist.camera.tn.disable", prop, "0");
521 if (atoi(prop) == 1) {
522 m_bThumbnailNeeded = FALSE;
523 LOGH("m_bThumbnailNeeded is %d", m_bThumbnailNeeded);
524 }
525 cam_dimension_t thumbnailSize;
526 memset(&thumbnailSize, 0, sizeof(cam_dimension_t));
527 m_parent->getThumbnailSize(thumbnailSize);
528 if (thumbnailSize.width == 0 || thumbnailSize.height == 0) {
529 // (0,0) means no thumbnail
530 m_bThumbnailNeeded = FALSE;
531 }
532 encode_parm.encode_thumbnail = m_bThumbnailNeeded;
533
534 // get color format
535 cam_format_t img_fmt = CAM_FORMAT_YUV_420_NV12;
536 main_stream->getFormat(img_fmt);
537 encode_parm.color_format = getColorfmtFromImgFmt(img_fmt);
538
539 // get jpeg quality
540 uint32_t val = m_parent->getJpegQuality();
541 if (0U < val) {
542 encode_parm.quality = val;
543 } else {
544 LOGH("Using default JPEG quality");
545 encode_parm.quality = 85;
546 }
547 cam_frame_len_offset_t main_offset;
548 memset(&main_offset, 0, sizeof(cam_frame_len_offset_t));
549 main_stream->getFrameOffset(main_offset);
550
551 // src buf config
552 QCameraMemory *pStreamMem = main_stream->getStreamBufs();
553 if (pStreamMem == NULL) {
554 LOGE("cannot get stream bufs from main stream");
555 ret = BAD_VALUE;
556 goto on_error;
557 }
558 encode_parm.num_src_bufs = pStreamMem->getCnt();
559 for (uint32_t i = 0; i < encode_parm.num_src_bufs; i++) {
560 camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
561 if (stream_mem != NULL) {
562 encode_parm.src_main_buf[i].index = i;
563 encode_parm.src_main_buf[i].buf_size = stream_mem->size;
564 encode_parm.src_main_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
565 encode_parm.src_main_buf[i].fd = pStreamMem->getFd(i);
566 encode_parm.src_main_buf[i].format = MM_JPEG_FMT_YUV;
567 encode_parm.src_main_buf[i].offset = main_offset;
568 }
569 }
570 LOGI("Src Buffer cnt = %d, res = %dX%d len = %d rot = %d "
571 "src_dim = %dX%d dst_dim = %dX%d",
572 encode_parm.num_src_bufs,
573 main_offset.mp[0].width, main_offset.mp[0].height,
574 main_offset.frame_len, encode_parm.rotation,
575 src_dim.width, src_dim.height,
576 dst_dim.width, dst_dim.height);
577
578 if (m_bThumbnailNeeded == TRUE) {
579 m_parent->getThumbnailSize(encode_parm.thumb_dim.dst_dim);
580
581 if (thumb_stream == NULL) {
582 thumb_stream = main_stream;
583 }
584 if (((90 == m_parent->mParameters.getJpegRotation())
585 || (270 == m_parent->mParameters.getJpegRotation()))
586 && (m_parent->needRotationReprocess())) {
587 // swap thumbnail dimensions
588 cam_dimension_t tmp_dim = encode_parm.thumb_dim.dst_dim;
589 encode_parm.thumb_dim.dst_dim.width = tmp_dim.height;
590 encode_parm.thumb_dim.dst_dim.height = tmp_dim.width;
591 }
592 pStreamMem = thumb_stream->getStreamBufs();
593 if (pStreamMem == NULL) {
594 LOGE("cannot get stream bufs from thumb stream");
595 ret = BAD_VALUE;
596 goto on_error;
597 }
598 cam_frame_len_offset_t thumb_offset;
599 memset(&thumb_offset, 0, sizeof(cam_frame_len_offset_t));
600 thumb_stream->getFrameOffset(thumb_offset);
601 encode_parm.num_tmb_bufs = pStreamMem->getCnt();
602 for (uint32_t i = 0; i < pStreamMem->getCnt(); i++) {
603 camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
604 if (stream_mem != NULL) {
605 encode_parm.src_thumb_buf[i].index = i;
606 encode_parm.src_thumb_buf[i].buf_size = stream_mem->size;
607 encode_parm.src_thumb_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
608 encode_parm.src_thumb_buf[i].fd = pStreamMem->getFd(i);
609 encode_parm.src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
610 encode_parm.src_thumb_buf[i].offset = thumb_offset;
611 }
612 }
613 cam_format_t img_fmt_thumb = CAM_FORMAT_YUV_420_NV12;
614 thumb_stream->getFormat(img_fmt_thumb);
615 encode_parm.thumb_color_format = getColorfmtFromImgFmt(img_fmt_thumb);
616
617 // crop is the same if frame is the same
618 if (thumb_stream != main_stream) {
619 memset(&crop, 0, sizeof(cam_rect_t));
620 thumb_stream->getCropInfo(crop);
621 }
622
623 memset(&src_dim, 0, sizeof(cam_dimension_t));
624 thumb_stream->getFrameDimension(src_dim);
625 encode_parm.thumb_dim.src_dim = src_dim;
626
627 if (!m_parent->needRotationReprocess()) {
628 encode_parm.thumb_rotation = m_parent->mParameters.getJpegRotation();
629 }
630 encode_parm.thumb_dim.crop = crop;
631 encode_parm.thumb_from_postview =
632 !m_parent->mParameters.generateThumbFromMain() &&
633 (img_fmt_thumb != CAM_FORMAT_YUV_420_NV12_UBWC) &&
634 (m_parent->mParameters.useJpegExifRotation() ||
635 m_parent->mParameters.getJpegRotation() == 0);
636
637 if (encode_parm.thumb_from_postview &&
638 m_parent->mParameters.useJpegExifRotation()){
639 encode_parm.thumb_rotation =
640 m_parent->mParameters.getJpegExifRotation();
641 }
642
643 LOGI("Src THUMB buf_cnt = %d, res = %dX%d len = %d rot = %d "
644 "src_dim = %dX%d, dst_dim = %dX%d",
645 encode_parm.num_tmb_bufs,
646 thumb_offset.mp[0].width, thumb_offset.mp[0].height,
647 thumb_offset.frame_len, encode_parm.thumb_rotation,
648 encode_parm.thumb_dim.src_dim.width,
649 encode_parm.thumb_dim.src_dim.height,
650 encode_parm.thumb_dim.dst_dim.width,
651 encode_parm.thumb_dim.dst_dim.height);
652 }
653
654 encode_parm.num_dst_bufs = 1;
655 if (mUseJpegBurst) {
656 encode_parm.num_dst_bufs = MAX_JPEG_BURST;
657 }
658 encode_parm.get_memory = NULL;
659 out_size = main_offset.frame_len;
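// With JPEG memory optimization enabled, output memory is allocated on demand
// through the getJpegMemory/releaseJpegMemory callbacks; each destination
// buffer then only holds an omx_jpeg_ouput_buf_t descriptor instead of a
// full frame-sized allocation.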
660 if (mJpegMemOpt) {
661 encode_parm.get_memory = getJpegMemory;
662 encode_parm.put_memory = releaseJpegMemory;
663 out_size = sizeof(omx_jpeg_ouput_buf_t);
664 encode_parm.num_dst_bufs = encode_parm.num_src_bufs;
665 }
666 m_JpegOutputMemCount = (uint32_t)encode_parm.num_dst_bufs;
667 for (uint32_t i = 0; i < m_JpegOutputMemCount; i++) {
668 if (m_pJpegOutputMem[i] != NULL)
669 free(m_pJpegOutputMem[i]);
670 omx_jpeg_ouput_buf_t omx_out_buf;
671 memset(&omx_out_buf, 0, sizeof(omx_jpeg_ouput_buf_t));
672 omx_out_buf.handle = this;
673 // allocate output buf for jpeg encoding
674 m_pJpegOutputMem[i] = malloc(out_size);
675
676 if (NULL == m_pJpegOutputMem[i]) {
677 ret = NO_MEMORY;
678 LOGE("initHeapMem for jpeg, ret = NO_MEMORY");
679 goto on_error;
680 }
681
682 if (mJpegMemOpt) {
683 memcpy(m_pJpegOutputMem[i], &omx_out_buf, sizeof(omx_out_buf));
684 }
685
686 encode_parm.dest_buf[i].index = i;
687 encode_parm.dest_buf[i].buf_size = main_offset.frame_len;
688 encode_parm.dest_buf[i].buf_vaddr = (uint8_t *)m_pJpegOutputMem[i];
689 encode_parm.dest_buf[i].fd = -1;
690 encode_parm.dest_buf[i].format = MM_JPEG_FMT_YUV;
691 encode_parm.dest_buf[i].offset = main_offset;
692 }
693
694 LOGD("X");
695 return NO_ERROR;
696
697 on_error:
698 FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem, m_JpegOutputMemCount);
699
700 LOGD("X with error %d", ret);
701 return ret;
702 }
703
704 /*===========================================================================
705 * FUNCTION : sendEvtNotify
706 *
707 * DESCRIPTION: send event notify through notify callback registered by upper layer
708 *
709 * PARAMETERS :
710 * @msg_type: msg type of notify
711 * @ext1 : extension
712 * @ext2 : extension
713 *
714 * RETURN : int32_t type of status
715 * NO_ERROR -- success
716 * non-zero failure code
717 *==========================================================================*/
718 int32_t QCameraPostProcessor::sendEvtNotify(int32_t msg_type,
719 int32_t ext1,
720 int32_t ext2)
721 {
722 return m_parent->sendEvtNotify(msg_type, ext1, ext2);
723 }
724
725 /*===========================================================================
726 * FUNCTION : sendDataNotify
727 *
728 * DESCRIPTION: enqueue data into dataNotify thread
729 *
730 * PARAMETERS :
731 * @msg_type: data callback msg type
732 * @data : ptr to data memory struct
733 * @index : index to data buffer
734 * @metadata: ptr to meta data buffer if there is any
735 * @release_data : ptr to struct indicating if data need to be released
736 * after notify
737 * @super_buf_frame_idx : super buffer frame index
738 *
739 * RETURN : int32_t type of status
740 * NO_ERROR -- success
741 * non-zero failure code
742 *==========================================================================*/
743 int32_t QCameraPostProcessor::sendDataNotify(int32_t msg_type,
744 camera_memory_t *data,
745 uint8_t index,
746 camera_frame_metadata_t *metadata,
747 qcamera_release_data_t *release_data,
748 uint32_t super_buf_frame_idx)
749 {
750 qcamera_data_argm_t *data_cb = (qcamera_data_argm_t *)malloc(sizeof(qcamera_data_argm_t));
751 if (NULL == data_cb) {
752 LOGE("no mem for acamera_data_argm_t");
753 return NO_MEMORY;
754 }
755 memset(data_cb, 0, sizeof(qcamera_data_argm_t));
756 data_cb->msg_type = msg_type;
757 data_cb->data = data;
758 data_cb->index = index;
759 data_cb->metadata = metadata;
760 if (release_data != NULL) {
761 data_cb->release_data = *release_data;
762 }
763
764 qcamera_callback_argm_t cbArg;
765 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
766 cbArg.cb_type = QCAMERA_DATA_SNAPSHOT_CALLBACK;
767 cbArg.msg_type = msg_type;
768 cbArg.data = data;
769 cbArg.metadata = metadata;
770 cbArg.user_data = data_cb;
771 cbArg.cookie = this;
772 cbArg.release_cb = releaseNotifyData;
773 cbArg.frame_index = super_buf_frame_idx;
774 int rc = m_parent->m_cbNotifier.notifyCallback(cbArg);
775 if ( NO_ERROR != rc ) {
776 LOGE("Error enqueuing jpeg data into notify queue");
777 releaseNotifyData(data_cb, this, UNKNOWN_ERROR);
778 return UNKNOWN_ERROR;
779 }
780
781 return rc;
782 }
783
784 /*===========================================================================
785 * FUNCTION : validatePostProcess
786 *
787 * DESCRIPTION: Verify output buffer count of pp module
788 *
789 * PARAMETERS :
790 * @frame : process frame received from mm-camera-interface
791 *
792 * RETURN : bool type of status
793 * TRUE -- success
794 * FALSE -- failure
795 *==========================================================================*/
796 bool QCameraPostProcessor::validatePostProcess(mm_camera_super_buf_t *frame)
797 {
798 bool status = TRUE;
799 QCameraChannel *pChannel = NULL;
800 QCameraReprocessChannel *m_pReprocChannel = NULL;
801
802 if (frame == NULL) {
803 return status;
804 }
805
806 pChannel = m_parent->getChannelByHandle(frame->ch_id);
807 for (int8_t i = 0; i < mPPChannelCount; i++) {
808 if (pChannel == mPPChannels[i]->getSrcChannel()) {
809 m_pReprocChannel = mPPChannels[i];
810 break;
811 }
812 }
813
814 if ((m_pReprocChannel != NULL) && (pChannel == m_pReprocChannel->getSrcChannel())) {
815 QCameraStream *pStream = NULL;
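// Reject the new frame if post-processing buffers are exhausted, i.e. the
// input PP queue is non-empty and the ongoing PP jobs already hold at least
// as many buffers as the reprocess stream has queued.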
816 for (uint8_t i = 0; i < m_pReprocChannel->getNumOfStreams(); i++) {
817 pStream = m_pReprocChannel->getStreamByIndex(i);
818 if (pStream && (m_inputPPQ.getCurrentSize() > 0) &&
819 (m_ongoingPPQ.getCurrentSize() >= pStream->getNumQueuedBuf())) {
820 LOGW("Out of PP Buffer PPQ = %d ongoingQ = %d Jpeg = %d onJpeg = %d",
821 m_inputPPQ.getCurrentSize(), m_ongoingPPQ.getCurrentSize(),
822 m_inputJpegQ.getCurrentSize(), m_ongoingJpegQ.getCurrentSize());
823 status = FALSE;
824 break;
825 }
826 }
827 }
828 return status;
829 }
830
831 /*===========================================================================
832 * FUNCTION : getOfflinePPInputBuffer
833 *
834 * DESCRIPTION: Function to generate offline post proc buffer
835 *
836 * PARAMETERS :
837 * @src_frame : process frame received from mm-camera-interface
838 *
839 * RETURN : Buffer pointer if successful
840 * : NULL in case of failures
841 *==========================================================================*/
842 mm_camera_buf_def_t *QCameraPostProcessor::getOfflinePPInputBuffer(
843 mm_camera_super_buf_t *src_frame)
844 {
845 mm_camera_buf_def_t *mBufDefs = NULL;
846 QCameraChannel *pChannel = NULL;
847 QCameraStream *src_pStream = NULL;
848 mm_camera_buf_def_t *data_frame = NULL;
849 mm_camera_buf_def_t *meta_frame = NULL;
850
851 if (mOfflineDataBufs == NULL) {
852 LOGE("Offline Buffer not allocated");
853 return NULL;
854 }
855
856 uint32_t num_bufs = mOfflineDataBufs->getCnt();
857 size_t bufDefsSize = num_bufs * sizeof(mm_camera_buf_def_t);
858 mBufDefs = (mm_camera_buf_def_t *)malloc(bufDefsSize);
859 if (mBufDefs == NULL) {
860 LOGE("No memory");
861 return NULL;
862 }
863 memset(mBufDefs, 0, bufDefsSize);
864
865 pChannel = m_parent->getChannelByHandle(src_frame->ch_id);
866 for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
867 src_pStream = pChannel->getStreamByHandle(
868 src_frame->bufs[i]->stream_id);
869 if (src_pStream != NULL) {
870 if (src_pStream->getMyType() == CAM_STREAM_TYPE_RAW) {
871 LOGH("Found RAW input stream");
872 data_frame = src_frame->bufs[i];
873 } else if (src_pStream->getMyType() == CAM_STREAM_TYPE_METADATA){
874 LOGH("Found Metada input stream");
875 meta_frame = src_frame->bufs[i];
876 }
877 }
878 }
879
880 if ((src_pStream != NULL) && (data_frame != NULL)) {
881 cam_frame_len_offset_t offset;
882 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
883 src_pStream->getFrameOffset(offset);
884 for (uint32_t i = 0; i < num_bufs; i++) {
885 mBufDefs[i] = *data_frame;
886 mOfflineDataBufs->getBufDef(offset, mBufDefs[i], i);
887
888 LOGD("Dumping RAW data on offline buffer");
889 /*Actual data memcpy just for verification*/
890 memcpy(mBufDefs[i].buffer, data_frame->buffer,
891 mBufDefs[i].frame_len);
892 }
893 releaseSuperBuf(src_frame, CAM_STREAM_TYPE_RAW);
894 } else {
895 free(mBufDefs);
896 mBufDefs = NULL;
897 }
898
899 LOGH("mBufDefs = %p", mBufDefs);
900 return mBufDefs;
901 }
902
903 /*===========================================================================
904 * FUNCTION : processData
905 *
906 * DESCRIPTION: enqueue data into dataProc thread
907 *
908 * PARAMETERS :
909 * @frame : process frame received from mm-camera-interface
910 *
911 * RETURN : int32_t type of status
912 * NO_ERROR -- success
913 * non-zero failure code
914 *
915 * NOTE : depending on whether offline reprocess is needed, the received frame
916 * will be sent either to the postprocess input queue or to jpeg encoding
917 *==========================================================================*/
918 int32_t QCameraPostProcessor::processData(mm_camera_super_buf_t *frame)
919 {
920 if (m_bInited == FALSE) {
921 LOGE("postproc not initialized yet");
922 return UNKNOWN_ERROR;
923 }
924
925 if (frame == NULL) {
926 LOGE("Invalid parameter");
927 return UNKNOWN_ERROR;
928 }
929
930 mm_camera_buf_def_t *meta_frame = NULL;
931 for (uint32_t i = 0; i < frame->num_bufs; i++) {
932 // look through input superbuf
933 if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
934 meta_frame = frame->bufs[i];
935 break;
936 }
937 }
938 if (meta_frame != NULL) {
939 // Update metadata for frame-based parameters
940 m_parent->updateMetadata((metadata_buffer_t *)meta_frame->buffer);
941 }
942
943 if (m_parent->needReprocess()) {
944 if ((!m_parent->isLongshotEnabled() &&
945 !m_parent->m_stateMachine.isNonZSLCaptureRunning()) ||
946 (m_parent->isLongshotEnabled() &&
947 m_parent->isCaptureShutterEnabled())) {
948 //play shutter sound
949 m_parent->playShutter();
950 }
951
952 ATRACE_INT("Camera:Reprocess", 1);
953 LOGH("need reprocess");
954
955 // enqueue to post proc input queue
956 qcamera_pp_data_t *pp_request_job =
957 (qcamera_pp_data_t *)malloc(sizeof(qcamera_pp_data_t));
958 if (pp_request_job == NULL) {
959 LOGE("No memory for pproc job");
960 return NO_MEMORY;
961 }
962 memset(pp_request_job, 0, sizeof(qcamera_pp_data_t));
963 pp_request_job->src_frame = frame;
964 pp_request_job->src_reproc_frame = frame;
965 pp_request_job->reprocCount = 0;
966 pp_request_job->ppChannelIndex = 0;
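// At this point src_frame and src_reproc_frame both reference the incoming
// superbuf; ownership of the job passes to the PP input queue on enqueue.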
967
968 if ((NULL != frame) &&
969 (0 < frame->num_bufs)
970 && (m_parent->isRegularCapture())) {
971 /*Regular capture. Source stream will be deleted*/
972 mm_camera_buf_def_t *bufs = NULL;
973 uint32_t num_bufs = frame->num_bufs;
974 bufs = new mm_camera_buf_def_t[num_bufs];
975 if (NULL == bufs) {
976 LOGE("Unable to allocate cached buffers");
977 return NO_MEMORY;
978 }
979
980 for (uint32_t i = 0; i < num_bufs; i++) {
981 bufs[i] = *frame->bufs[i];
982 frame->bufs[i] = &bufs[i];
983 }
984 pp_request_job->src_reproc_bufs = bufs;
985
986 // Don't release source frame after encoding
987 // at this point the source channel will not exist.
988 pp_request_job->reproc_frame_release = true;
989 }
990
991 if (mOfflineDataBufs != NULL) {
992 pp_request_job->offline_reproc_buf =
993 getOfflinePPInputBuffer(frame);
994 if (pp_request_job->offline_reproc_buf != NULL) {
995 pp_request_job->offline_buffer = true;
996 }
997 }
998
999 if (false == m_inputPPQ.enqueue((void *)pp_request_job)) {
1000 LOGW("Input PP Q is not active!!!");
1001 releaseSuperBuf(frame);
1002 free(frame);
1003 free(pp_request_job);
1004 frame = NULL;
1005 pp_request_job = NULL;
1006 return NO_ERROR;
1007 }
1008 if (m_parent->mParameters.isAdvCamFeaturesEnabled()
1009 && (meta_frame != NULL)) {
1010 m_InputMetadata.add(meta_frame);
1011 }
1012 } else if (m_parent->mParameters.isNV16PictureFormat() ||
1013 m_parent->mParameters.isNV21PictureFormat()) {
1014 //check if raw frame information is needed.
1015 if(m_parent->mParameters.isYUVFrameInfoNeeded())
1016 setYUVFrameInfo(frame);
1017
1018 processRawData(frame);
1019 } else {
1020 //play shutter sound
1021 if(!m_parent->m_stateMachine.isNonZSLCaptureRunning() &&
1022 !m_parent->mLongshotEnabled)
1023 m_parent->playShutter();
1024
1025 LOGH("no need offline reprocess, sending to jpeg encoding");
1026 qcamera_jpeg_data_t *jpeg_job =
1027 (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
1028 if (jpeg_job == NULL) {
1029 LOGE("No memory for jpeg job");
1030 return NO_MEMORY;
1031 }
1032
1033 memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
1034 jpeg_job->src_frame = frame;
1035
1036 if (meta_frame != NULL) {
1037 // fill in meta data frame ptr
1038 jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
1039 }
1040
1041 // enqueue to jpeg input queue
1042 if (!m_inputJpegQ.enqueue((void *)jpeg_job)) {
1043 LOGW("Input Jpeg Q is not active!!!");
1044 releaseJpegJobData(jpeg_job);
1045 free(jpeg_job);
1046 jpeg_job = NULL;
1047 return NO_ERROR;
1048 }
1049 }
1050
1051 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1052 return NO_ERROR;
1053 }
1054
1055 /*===========================================================================
1056 * FUNCTION : processRawData
1057 *
1058 * DESCRIPTION: enqueue raw data into dataProc thread
1059 *
1060 * PARAMETERS :
1061 * @frame : process frame received from mm-camera-interface
1062 *
1063 * RETURN : int32_t type of status
1064 * NO_ERROR -- success
1065 * non-zero failure code
1066 *==========================================================================*/
1067 int32_t QCameraPostProcessor::processRawData(mm_camera_super_buf_t *frame)
1068 {
1069 if (m_bInited == FALSE) {
1070 LOGE("postproc not initialized yet");
1071 return UNKNOWN_ERROR;
1072 }
1073
1074 // enqueue to raw input queue
1075 if (m_inputRawQ.enqueue((void *)frame)) {
1076 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1077 } else {
1078 LOGW("m_inputRawQ is not active!!!");
1079 releaseSuperBuf(frame);
1080 free(frame);
1081 frame = NULL;
1082 }
1083 return NO_ERROR;
1084 }
1085
1086 /*===========================================================================
1087 * FUNCTION : processJpegEvt
1088 *
1089 * DESCRIPTION: process jpeg event from mm-jpeg-interface.
1090 *
1091 * PARAMETERS :
1092 * @evt : payload of jpeg event, including information about jpeg encoding
1093 * status, jpeg size and so on.
1094 *
1095 * RETURN : int32_t type of status
1096 * NO_ERROR -- success
1097 * non-zero failure code
1098 *
1099 * NOTE : This event will also trigger DataProc thread to move to next job
1100 * processing (i.e., send a new jpeg encoding job to mm-jpeg-interface
1101 * if there is any pending job in jpeg input queue)
1102 *==========================================================================*/
1103 int32_t QCameraPostProcessor::processJpegEvt(qcamera_jpeg_evt_payload_t *evt)
1104 {
1105 if (m_bInited == FALSE) {
1106 LOGE("postproc not initialized yet");
1107 return UNKNOWN_ERROR;
1108 }
1109
1110 int32_t rc = NO_ERROR;
1111 camera_memory_t *jpeg_mem = NULL;
1112 omx_jpeg_ouput_buf_t *jpeg_out = NULL;
1113 void *jpegData = NULL;
1114 if (mUseSaveProc && m_parent->isLongshotEnabled()) {
1115 qcamera_jpeg_evt_payload_t *saveData = ( qcamera_jpeg_evt_payload_t * ) malloc(sizeof(qcamera_jpeg_evt_payload_t));
1116 if ( NULL == saveData ) {
1117 LOGE("Can not allocate save data message!");
1118 return NO_MEMORY;
1119 }
1120 *saveData = *evt;
1121 if (m_inputSaveQ.enqueue((void *) saveData)) {
1122 m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1123 } else {
1124 LOGD("m_inputSaveQ PP Q is not active!!!");
1125 free(saveData);
1126 saveData = NULL;
1127 return rc;
1128 }
1129 } else {
1130 /* To be removed later when ISP Frame sync feature is available
1131 qcamera_jpeg_data_t *jpeg_job =
1132 (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue(matchJobId,
1133 (void*)&evt->jobId);
1134 uint32_t frame_idx = jpeg_job->src_frame->bufs[0]->frame_idx;*/
1135 uint32_t frame_idx = 75;
1136 LOGH("FRAME INDEX %d", frame_idx);
1137 // Release jpeg job data
1138 m_ongoingJpegQ.flushNodes(matchJobId, (void*)&evt->jobId);
1139
1140 if (m_inputPPQ.getCurrentSize() > 0) {
1141 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1142 }
1143 LOGH("[KPI Perf] : jpeg job %d", evt->jobId);
1144
1145 if ((false == m_parent->m_bIntJpegEvtPending) &&
1146 (m_parent->mDataCb == NULL ||
1147 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) == 0 )) {
1148 LOGW("No dataCB or CAMERA_MSG_COMPRESSED_IMAGE not enabled");
1149 rc = NO_ERROR;
1150 goto end;
1151 }
1152
1153 if(evt->status == JPEG_JOB_STATUS_ERROR) {
1154 LOGE("Error event handled from jpeg, status = %d",
1155 evt->status);
1156 rc = FAILED_TRANSACTION;
1157 goto end;
1158 }
1159 if (!mJpegMemOpt) {
1160 jpegData = evt->out_data.buf_vaddr;
1161 }
1162 else {
1163 jpeg_out = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
1164 if (jpeg_out != NULL) {
1165 jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
1166 if (jpeg_mem != NULL) {
1167 jpegData = jpeg_mem->data;
1168 }
1169 }
1170 }
1171 m_parent->dumpJpegToFile(jpegData,
1172 evt->out_data.buf_filled_len,
1173 evt->jobId);
1174 LOGH("Dump jpeg_size=%d", evt->out_data.buf_filled_len);
1175 if(true == m_parent->m_bIntJpegEvtPending) {
1176 //Sending JPEG snapshot taken notification to HAL
1177 pthread_mutex_lock(&m_parent->m_int_lock);
1178 pthread_cond_signal(&m_parent->m_int_cond);
1179 pthread_mutex_unlock(&m_parent->m_int_lock);
1180 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1181 return rc;
1182 }
1183 if (!mJpegMemOpt) {
1184 // alloc jpeg memory to pass to upper layer
1185 jpeg_mem = m_parent->mGetMemory(-1, evt->out_data.buf_filled_len,
1186 1, m_parent->mCallbackCookie);
1187 if (NULL == jpeg_mem) {
1188 rc = NO_MEMORY;
1189 LOGE("getMemory for jpeg, ret = NO_MEMORY");
1190 goto end;
1191 }
1192 memcpy(jpeg_mem->data, evt->out_data.buf_vaddr, evt->out_data.buf_filled_len);
1193 }
1194 LOGH("Calling upperlayer callback to store JPEG image");
1195 qcamera_release_data_t release_data;
1196 memset(&release_data, 0, sizeof(qcamera_release_data_t));
1197 release_data.data = jpeg_mem;
1198 LOGI("[KPI Perf]: PROFILE_JPEG_CB");
1199 rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
1200 jpeg_mem,
1201 0,
1202 NULL,
1203 &release_data,
1204 frame_idx);
1205 m_parent->setOutputImageCount(m_parent->getOutputImageCount() + 1);
1206
1207 end:
1208 if (rc != NO_ERROR) {
1209 // send error msg to upper layer
1210 LOGE("Jpeg Encoding failed. Notify Application");
1211 sendEvtNotify(CAMERA_MSG_ERROR,
1212 UNKNOWN_ERROR,
1213 0);
1214
1215 if (NULL != jpeg_mem) {
1216 jpeg_mem->release(jpeg_mem);
1217 jpeg_mem = NULL;
1218 }
1219 }
1220
1221 /* check whether to send callback for depth map */
1222 if (m_parent->mParameters.isUbiRefocus() &&
1223 (m_parent->getOutputImageCount() + 1 ==
1224 m_parent->mParameters.getRefocusOutputCount())) {
1225 m_parent->setOutputImageCount(m_parent->getOutputImageCount() + 1);
1226
1227 jpeg_mem = m_DataMem;
1228 release_data.data = jpeg_mem;
1229 m_DataMem = NULL;
1230 LOGH("[KPI Perf]: send jpeg callback for depthmap ");
1231 rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
1232 jpeg_mem,
1233 0,
1234 NULL,
1235 &release_data,
1236 frame_idx);
1237 if (rc != NO_ERROR) {
1238 // send error msg to upper layer
1239 sendEvtNotify(CAMERA_MSG_ERROR,
1240 UNKNOWN_ERROR,
1241 0);
1242 if (NULL != jpeg_mem) {
1243 jpeg_mem->release(jpeg_mem);
1244 jpeg_mem = NULL;
1245 }
1246 }
1247 m_DataMem = NULL;
1248 }
1249 }
1250
1251 // wake up data proc thread to do next job,
1252 // if previous request is blocked due to ongoing jpeg job
1253 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1254
1255 m_parent->m_perfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
1256
1257 return rc;
1258 }
1259
1260 /*===========================================================================
1261 * FUNCTION : processPPData
1262 *
1263 * DESCRIPTION: process received frame after reprocess.
1264 *
1265 * PARAMETERS :
1266 * @frame : received frame from reprocess channel.
1267 *
1268 * RETURN : int32_t type of status
1269 * NO_ERROR -- success
1270 * non-zero failure code
1271 *
1272 * NOTE : The frame after reprocess needs to be sent to jpeg encoding.
1273 *==========================================================================*/
1274 int32_t QCameraPostProcessor::processPPData(mm_camera_super_buf_t *frame)
1275 {
1276 bool triggerEvent = TRUE;
1277
1278 LOGD("QCameraPostProcessor::processPPData");
1279 bool needSuperBufMatch = m_parent->mParameters.generateThumbFromMain();
1280 if (m_bInited == FALSE) {
1281 LOGE("postproc not initialized yet");
1282 return UNKNOWN_ERROR;
1283 }
1284
1285 qcamera_pp_data_t *job = (qcamera_pp_data_t *)m_ongoingPPQ.dequeue();
1286 if (NULL == job) {
1287 LOGE("Cannot find reprocess job");
1288 return BAD_VALUE;
1289 }
1290
1291 if (!needSuperBufMatch && (job->src_frame == NULL
1292 || job->src_reproc_frame == NULL) ) {
1293 LOGE("Invalid reprocess job");
1294 return BAD_VALUE;
1295 }
1296
1297 if (!needSuperBufMatch && (m_parent->mParameters.isNV16PictureFormat() ||
1298 m_parent->mParameters.isNV21PictureFormat())) {
1299 releaseOngoingPPData(job, this);
1300 free(job);
1301
1302 if(m_parent->mParameters.isYUVFrameInfoNeeded())
1303 setYUVFrameInfo(frame);
1304 return processRawData(frame);
1305 }
1306 #ifdef TARGET_TS_MAKEUP
1307 // find snapshot frame
1308 mm_camera_buf_def_t *pReprocFrame = NULL;
1309 QCameraStream * pSnapshotStream = NULL;
1310 QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
1311 if (pChannel == NULL) {
1312 for (int8_t i = 0; i < mPPChannelCount; i++) {
1313 if ((mPPChannels[i] != NULL) &&
1314 (validate_handle(mPPChannels[i]->getMyHandle(), frame->ch_id))) {
1315 pChannel = mPPChannels[i];
1316 break;
1317 }
1318 }
1319 }
1320 if (pChannel == NULL) {
1321 LOGE("No corresponding channel (ch_id = %d) exist, return here",
1322 frame->ch_id);
1323 return BAD_VALUE;
1324 }
1325
1326 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1327 pSnapshotStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
1328 if (pSnapshotStream != NULL) {
1329 if (pSnapshotStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
1330 pReprocFrame = frame->bufs[i];
1331 break;
1332 }
1333 }
1334 }
1335 if (pReprocFrame != NULL && m_parent->mParameters.isFaceDetectionEnabled()) {
1336 m_parent->TsMakeupProcess_Snapshot(pReprocFrame,pSnapshotStream);
1337 } else {
1338 LOGH("pReprocFrame == NULL || isFaceDetectionEnabled = %d",
1339 m_parent->mParameters.isFaceDetectionEnabled());
1340 }
1341 #endif
1342 if ((m_parent->isLongshotEnabled())
1343 && (!m_parent->isCaptureShutterEnabled())
1344 && (!m_parent->mCACDoneReceived)) {
1345 // play shutter sound for longshot
1346 // after reprocess is done
1347 m_parent->playShutter();
1348 }
1349 m_parent->mCACDoneReceived = FALSE;
1350
1351 int8_t mCurReprocCount = job->reprocCount;
1352 int8_t mCurChannelIndex = job->ppChannelIndex;
1353 if ( mCurReprocCount > 1 ) {
1354 //In case of pp 2nd pass, we can release input of 2nd pass
1355 releaseSuperBuf(job->src_frame);
1356 free(job->src_frame);
1357 job->src_frame = NULL;
1358 }
1359
1360 LOGD("mCurReprocCount = %d mCurChannelIndex = %d mTotalNumReproc = %d",
1361 mCurReprocCount, mCurChannelIndex,
1362 m_parent->mParameters.getReprocCount());
1363 if (mCurReprocCount < m_parent->mParameters.getReprocCount()) {
1364 // More pp passes needed. Push frame back to pp queue.
1365 qcamera_pp_data_t *pp_request_job = job;
1366 pp_request_job->src_frame = frame;
1367
1368 if ((mPPChannels[mCurChannelIndex]->getReprocCount()
1369 == mCurReprocCount) &&
1370 (mPPChannels[mCurChannelIndex + 1] != NULL)) {
1371 pp_request_job->ppChannelIndex++;
1372 }
1373
1374 // enqueue to post proc input queue
1375 if (false == m_inputPPQ.enqueue((void *)pp_request_job)) {
1376 LOGW("m_input PP Q is not active!!!");
1377 releaseOngoingPPData(pp_request_job,this);
1378 free(pp_request_job);
1379 pp_request_job = NULL;
1380 triggerEvent = FALSE;
1381 }
1382 } else {
1383 //Done with post processing. Send frame to Jpeg
1384 qcamera_jpeg_data_t *jpeg_job =
1385 (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
1386 if (jpeg_job == NULL) {
1387 LOGE("No memory for jpeg job");
1388 return NO_MEMORY;
1389 }
1390
1391 memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
1392 jpeg_job->src_frame = frame;
1393 jpeg_job->src_reproc_frame = job ? job->src_reproc_frame : NULL;
1394 jpeg_job->src_reproc_bufs = job ? job->src_reproc_bufs : NULL;
1395 jpeg_job->reproc_frame_release = job ? job->reproc_frame_release : false;
1396 jpeg_job->offline_reproc_buf = job ? job->offline_reproc_buf : NULL;
1397 jpeg_job->offline_buffer = job ? job->offline_buffer : false;
1398
1399 // find meta data frame
1400 mm_camera_buf_def_t *meta_frame = NULL;
1401 if (m_parent->mParameters.isAdvCamFeaturesEnabled()) {
1402 size_t meta_idx = m_parent->mParameters.getExifBufIndex(m_PPindex);
1403 if (m_InputMetadata.size() >= (meta_idx + 1)) {
1404 meta_frame = m_InputMetadata.itemAt(meta_idx);
1405 } else {
1406 LOGW("Input metadata vector contains %d entries, index required %d",
1407 m_InputMetadata.size(), meta_idx);
1408 }
1409 m_PPindex++;
1410 } else {
1411 for (uint32_t i = 0; job && job->src_reproc_frame &&
1412 (i < job->src_reproc_frame->num_bufs); i++) {
1413 // look through input superbuf
1414 if (job->src_reproc_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
1415 meta_frame = job->src_reproc_frame->bufs[i];
1416 break;
1417 }
1418 }
1419
1420 if (meta_frame == NULL) {
1421 // look through reprocess superbuf
1422 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1423 if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
1424 meta_frame = frame->bufs[i];
1425 break;
1426 }
1427 }
1428 }
1429 }
1430 if (meta_frame != NULL) {
1431 // fill in meta data frame ptr
1432 jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
1433 }
1434
1435 if (m_parent->mParameters.getQuadraCfa()) {
1436 // find offline metadata frame for quadra CFA
1437 mm_camera_buf_def_t *pOfflineMetaFrame = NULL;
1438 QCameraStream * pOfflineMetadataStream = NULL;
1439 QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
1440 if (pChannel == NULL) {
1441 for (int8_t i = 0; i < mPPChannelCount; i++) {
1442 if ((mPPChannels[i] != NULL) &&
1443 (mPPChannels[i]->getMyHandle() == frame->ch_id)) {
1444 pChannel = mPPChannels[i];
1445 break;
1446 }
1447 }
1448 }
1449 if (pChannel == NULL) {
1450 LOGE("No corresponding channel (ch_id = %d) exist, return here",
1451 frame->ch_id);
1452 return BAD_VALUE;
1453 }
1454
1455 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1456 pOfflineMetadataStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
1457 if (pOfflineMetadataStream != NULL) {
1458 if (pOfflineMetadataStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)) {
1459 pOfflineMetaFrame = frame->bufs[i];
1460 break;
1461 }
1462 }
1463 }
1464 if (pOfflineMetaFrame != NULL) {
1465 // fill in meta data frame ptr
1466 jpeg_job->metadata = (metadata_buffer_t *)pOfflineMetaFrame->buffer;
1467
1468 // Dump offline metadata for Tuning
1469 char value[PROPERTY_VALUE_MAX];
1470 property_get("persist.camera.dumpmetadata", value, "0");
1471 int32_t enabled = atoi(value);
1472 if (enabled && jpeg_job->metadata->is_tuning_params_valid) {
1473 m_parent->dumpMetadataToFile(pOfflineMetadataStream,pOfflineMetaFrame,
1474 (char *)"Offline_isp_meta");
1475 }
1476 }
1477 }
1478
1479 // enqueue reprocessed frame to jpeg input queue
1480 if (false == m_inputJpegQ.enqueue((void *)jpeg_job)) {
1481 LOGW("Input Jpeg Q is not active!!!");
1482 releaseJpegJobData(jpeg_job);
1483 free(jpeg_job);
1484 jpeg_job = NULL;
1485 triggerEvent = FALSE;
1486 }
1487
1488 // free pp job buf
1489 pthread_mutex_lock(&m_reprocess_lock);
1490 if (job) {
1491 free(job);
1492 }
1493 pthread_mutex_unlock(&m_reprocess_lock);
1494 }
1495
1496 LOGD("");
1497 // wake up data proc thread
1498
1499 if (triggerEvent) {
1500 m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
1501 }
1502
1503 return NO_ERROR;
1504 }
1505
1506 /*===========================================================================
1507 * FUNCTION : findJpegJobByJobId
1508 *
1509 * DESCRIPTION: find a jpeg job from ongoing Jpeg queue by its job ID
1510 *
1511 * PARAMETERS :
1512 * @jobId : job Id of the job
1513 *
1514 * RETURN : ptr to a jpeg job struct. NULL if not found.
1515 *
1516 * NOTE : Currently only one job is sent to mm-jpeg-interface for jpeg
1517 * encoding at a time. Therefore simply dequeuing from the ongoing Jpeg Queue
1518 * is enough to find the jpeg job.
1519 *==========================================================================*/
1520 qcamera_jpeg_data_t *QCameraPostProcessor::findJpegJobByJobId(uint32_t jobId)
1521 {
1522 qcamera_jpeg_data_t * job = NULL;
1523 if (jobId == 0) {
1524 LOGE("not a valid jpeg jobId");
1525 return NULL;
1526 }
1527
1528 // currently only one jpeg job is ongoing, so simply dequeue the head
1529 job = (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue();
1530 return job;
1531 }
1532
1533 /*===========================================================================
1534 * FUNCTION : releasePPInputData
1535 *
1536 * DESCRIPTION: callback function to release post process input data node
1537 *
1538 * PARAMETERS :
1539 * @data : ptr to post process input data
1540 * @user_data : user data ptr (QCameraPostProcessor)
1541 *
1542 * RETURN : None
1543 *==========================================================================*/
1544 void QCameraPostProcessor::releasePPInputData(void *data, void *user_data)
1545 {
1546 QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
1547 if (NULL != pme) {
1548 qcamera_pp_request_t *pp_job = (qcamera_pp_request_t *)data;
1549 if (NULL != pp_job->src_frame) {
1550 pme->releaseSuperBuf(pp_job->src_frame);
1551 if (pp_job->src_frame == pp_job->src_reproc_frame)
1552 pp_job->src_reproc_frame = NULL;
1553 free(pp_job->src_frame);
1554 pp_job->src_frame = NULL;
1555 }
1556 if (NULL != pp_job->src_reproc_frame) {
1557 pme->releaseSuperBuf(pp_job->src_reproc_frame);
1558 free(pp_job->src_reproc_frame);
1559 pp_job->src_reproc_frame = NULL;
1560 }
1561 pp_job->reprocCount = 0;
1562 }
1563 }
1564
1565 /*===========================================================================
1566 * FUNCTION : releaseJpegData
1567 *
1568 * DESCRIPTION: callback function to release jpeg job node
1569 *
1570 * PARAMETERS :
1571 * @data : ptr to ongoing jpeg job data
1572 * @user_data : user data ptr (QCameraPostProcessor)
1573 *
1574 * RETURN : None
1575 *==========================================================================*/
1576 void QCameraPostProcessor::releaseJpegData(void *data, void *user_data)
1577 {
1578 QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
1579 if (NULL != pme) {
1580 pme->releaseJpegJobData((qcamera_jpeg_data_t *)data);
1581 LOGH("Rleased job ID %u",
1582 ((qcamera_jpeg_data_t *)data)->jobId);
1583 }
1584 }
1585
1586 /*===========================================================================
1587 * FUNCTION : releaseOngoingPPData
1588 *
1589 * DESCRIPTION: callback function to release ongoing postprocess job node
1590 *
1591 * PARAMETERS :
1592 * @data : ptr to ongoing postprocess job
1593 * @user_data : user data ptr (QCameraPostProcessor)
1594 *
1595 * RETURN : None
1596 *==========================================================================*/
1597 void QCameraPostProcessor::releaseOngoingPPData(void *data, void *user_data)
1598 {
1599 QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
1600 if (NULL != pme) {
1601 qcamera_pp_data_t *pp_job = (qcamera_pp_data_t *)data;
1602 if (NULL != pp_job->src_frame) {
1603 if (!pp_job->reproc_frame_release) {
1604 pme->releaseSuperBuf(pp_job->src_frame);
1605 }
1606 if (pp_job->src_frame == pp_job->src_reproc_frame)
1607 pp_job->src_reproc_frame = NULL;
1608
1609 free(pp_job->src_frame);
1610 pp_job->src_frame = NULL;
1611 }
1612 if (NULL != pp_job->src_reproc_frame) {
1613 pme->releaseSuperBuf(pp_job->src_reproc_frame);
1614 free(pp_job->src_reproc_frame);
1615 pp_job->src_reproc_frame = NULL;
1616 }
1617 if ((pp_job->offline_reproc_buf != NULL)
1618 && (pp_job->offline_buffer)) {
1619 free(pp_job->offline_reproc_buf);
1620 pp_job->offline_buffer = false;
1621 }
1622 pp_job->reprocCount = 0;
1623 }
1624 }
1625
1626 /*===========================================================================
1627 * FUNCTION : releaseNotifyData
1628 *
1629 * DESCRIPTION: function to release internal resources in notify data struct
1630 *
1631 * PARAMETERS :
1632 * @user_data : ptr user data
1633 * @cookie : callback cookie
1634 * @cb_status : callback status
1635 *
1636 * RETURN : None
1637 *
1638 * NOTE : deallocate jpeg heap memory if it's not NULL
1639 *==========================================================================*/
1640 void QCameraPostProcessor::releaseNotifyData(void *user_data,
1641 void *cookie,
1642 int32_t cb_status)
1643 {
1644 LOGD("releaseNotifyData release_data %p", user_data);
1645
1646 qcamera_data_argm_t *app_cb = ( qcamera_data_argm_t * ) user_data;
1647 QCameraPostProcessor *postProc = ( QCameraPostProcessor * ) cookie;
1648 if ( ( NULL != app_cb ) && ( NULL != postProc ) ) {
1649
1650 if ( postProc->mUseSaveProc &&
1651 app_cb->release_data.unlinkFile &&
1652 ( NO_ERROR != cb_status ) ) {
1653
1654 String8 unlinkPath((const char *) app_cb->release_data.data->data,
1655 app_cb->release_data.data->size);
1656 int rc = unlink(unlinkPath.string());
1657 LOGH("Unlinking stored file rc = %d",
1658 rc);
1659 }
1660
1661 if (app_cb && NULL != app_cb->release_data.data) {
1662 app_cb->release_data.data->release(app_cb->release_data.data);
1663 app_cb->release_data.data = NULL;
1664 }
1665 if (app_cb && NULL != app_cb->release_data.frame) {
1666 postProc->releaseSuperBuf(app_cb->release_data.frame);
1667 free(app_cb->release_data.frame);
1668 app_cb->release_data.frame = NULL;
1669 }
1670 if (app_cb && NULL != app_cb->release_data.streamBufs) {
1671 app_cb->release_data.streamBufs->deallocate();
1672 delete app_cb->release_data.streamBufs;
1673 app_cb->release_data.streamBufs = NULL;
1674 }
1675 free(app_cb);
1676 }
1677 }
1678
1679 /*===========================================================================
1680 * FUNCTION : releaseSuperBuf
1681 *
1682 * DESCRIPTION: function to release a superbuf frame by returning it back to the kernel
1683 *
1684 * PARAMETERS :
1685 * @super_buf : ptr to the superbuf frame
1686 *
1687 * RETURN : None
1688 *==========================================================================*/
1689 void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf)
1690 {
1691 QCameraChannel *pChannel = NULL;
1692
1693 if (NULL != super_buf) {
1694 pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
1695
1696 if ( NULL == pChannel ) {
1697 for (int8_t i = 0; i < mPPChannelCount; i++) {
1698 if ((mPPChannels[i] != NULL) &&
1699 (validate_handle(mPPChannels[i]->getMyHandle(),
1700 super_buf->ch_id))) {
1701 pChannel = mPPChannels[i];
1702 break;
1703 }
1704 }
1705 }
1706
1707 if (pChannel != NULL) {
1708 pChannel->bufDone(super_buf);
1709 } else {
1710 LOGE("Channel id %d not found!!",
1711 super_buf->ch_id);
1712 }
1713 }
1714 }
1715
1716 /*===========================================================================
1717 * FUNCTION : releaseSuperBuf
1718 *
1719 * DESCRIPTION: function to release a superbuf frame by returning it back to the kernel
1720 *
1721 * PARAMETERS :
1722 * @super_buf : ptr to the superbuf frame
1723 * @stream_type: Type of stream to be released
1724 *
1725 * RETURN : None
1726 *==========================================================================*/
1727 void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf,
1728 cam_stream_type_t stream_type)
1729 {
1730 QCameraChannel *pChannel = NULL;
1731
1732 if (NULL != super_buf) {
1733 pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
1734 if (pChannel == NULL) {
1735 for (int8_t i = 0; i < mPPChannelCount; i++) {
1736 if ((mPPChannels[i] != NULL) &&
1737 (validate_handle(mPPChannels[i]->getMyHandle(),
1738 super_buf->ch_id))) {
1739 pChannel = mPPChannels[i];
1740 break;
1741 }
1742 }
1743 }
1744
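        // return only the buffer belonging to the requested stream type;
        // the remaining buffers of the super buffer are left untouched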
1745 if (pChannel != NULL) {
1746 for (uint32_t i = 0; i < super_buf->num_bufs; i++) {
1747 if (super_buf->bufs[i] != NULL) {
1748 QCameraStream *pStream =
1749 pChannel->getStreamByHandle(super_buf->bufs[i]->stream_id);
1750 if ((pStream != NULL) && ((pStream->getMyType() == stream_type)
1751 || (pStream->getMyOriginalType() == stream_type))) {
1752 pChannel->bufDone(super_buf, super_buf->bufs[i]->stream_id);
1753 break;
1754 }
1755 }
1756 }
1757 } else {
1758 LOGE("Channel id %d not found!!",
1759 super_buf->ch_id);
1760 }
1761 }
1762 }
1763
1764 /*===========================================================================
1765 * FUNCTION : releaseJpegJobData
1766 *
1767 * DESCRIPTION: function to release internal resources in jpeg job struct
1768 *
1769 * PARAMETERS :
1770 * @job : ptr to jpeg job struct
1771 *
1772 * RETURN : None
1773 *
1774 * NOTE : original source frame needs to be queued back to kernel for
1775 * future use. Output buf of jpeg job needs to be released since
1776 * it's allocated for each job. Exif object needs to be deleted.
1777 *==========================================================================*/
1778 void QCameraPostProcessor::releaseJpegJobData(qcamera_jpeg_data_t *job)
1779 {
1780 LOGD("E");
1781 if (NULL != job) {
1782 if (NULL != job->src_reproc_frame) {
1783 if (!job->reproc_frame_release) {
1784 releaseSuperBuf(job->src_reproc_frame);
1785 }
1786 free(job->src_reproc_frame);
1787 job->src_reproc_frame = NULL;
1788 }
1789
1790 if (NULL != job->src_frame) {
1791 releaseSuperBuf(job->src_frame);
1792 free(job->src_frame);
1793 job->src_frame = NULL;
1794 }
1795
1796 if (NULL != job->pJpegExifObj) {
1797 delete job->pJpegExifObj;
1798 job->pJpegExifObj = NULL;
1799 }
1800
1801 if (NULL != job->src_reproc_bufs) {
1802 delete [] job->src_reproc_bufs;
1803 }
1804
1805 if ((job->offline_reproc_buf != NULL)
1806 && (job->offline_buffer)) {
1807 free(job->offline_reproc_buf);
1808 job->offline_buffer = false;
1809 }
1810 }
1811 LOGD("X");
1812 }
1813
1814 /*===========================================================================
1815 * FUNCTION : releaseSaveJobData
1816 *
1817 * DESCRIPTION: function to release internal resources of a completed save job
1818 *
1819 * PARAMETERS :
1820 * @data : ptr to jpeg event payload of the save job
 * @user_data : user data ptr (QCameraPostProcessor)
1821 *
1822 * RETURN : None
1823 *
1824 *==========================================================================*/
1825 void QCameraPostProcessor::releaseSaveJobData(void *data, void *user_data)
1826 {
1827 LOGD("E");
1828
1829 QCameraPostProcessor *pme = (QCameraPostProcessor *) user_data;
1830 if (NULL == pme) {
1831 LOGE("Invalid postproc handle");
1832 return;
1833 }
1834
1835 qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) data;
1836 if (job_data == NULL) {
1837 LOGE("Invalid jpeg event data");
1838 return;
1839 }
1840
1841 // find job by jobId
1842 qcamera_jpeg_data_t *job = pme->findJpegJobByJobId(job_data->jobId);
1843
1844 if (NULL != job) {
1845 pme->releaseJpegJobData(job);
1846 free(job);
1847 } else {
1848 LOGE("Invalid jpeg job");
1849 }
1850
1851 LOGD("X");
1852 }
1853
1854 /*===========================================================================
1855 * FUNCTION : releaseRawData
1856 *
1857 * DESCRIPTION: function to release a raw frame super buffer queued for processing
1858 *
1859 * PARAMETERS :
1860 * @data : ptr to raw frame super buffer
 * @user_data : user data ptr (QCameraPostProcessor)
1861 *
1862 * RETURN : None
1863 *
1864 *==========================================================================*/
1865 void QCameraPostProcessor::releaseRawData(void *data, void *user_data)
1866 {
1867 LOGD("E");
1868
1869 QCameraPostProcessor *pme = (QCameraPostProcessor *) user_data;
1870 if (NULL == pme) {
1871 LOGE("Invalid postproc handle");
1872 return;
1873 }
1874 mm_camera_super_buf_t *super_buf = (mm_camera_super_buf_t *) data;
1875 pme->releaseSuperBuf(super_buf);
1876
1877 LOGD("X");
1878 }
1879
1880
1881 /*===========================================================================
1882 * FUNCTION : getColorfmtFromImgFmt
1883 *
1884 * DESCRIPTION: function to return jpeg color format based on its image format
1885 *
1886 * PARAMETERS :
1887 * @img_fmt : image format
1888 *
1889 * RETURN : jpeg color format that can be understood by the omx lib
1890 *==========================================================================*/
1891 mm_jpeg_color_format QCameraPostProcessor::getColorfmtFromImgFmt(cam_format_t img_fmt)
1892 {
1893 switch (img_fmt) {
1894 case CAM_FORMAT_YUV_420_NV21:
1895 case CAM_FORMAT_YUV_420_NV21_VENUS:
1896 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
1897 case CAM_FORMAT_YUV_420_NV21_ADRENO:
1898 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
1899 case CAM_FORMAT_YUV_420_NV12:
1900 case CAM_FORMAT_YUV_420_NV12_VENUS:
1901 return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
1902 case CAM_FORMAT_YUV_420_YV12:
1903 return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
1904 case CAM_FORMAT_YUV_422_NV61:
1905 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1;
1906 case CAM_FORMAT_YUV_422_NV16:
1907 return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1;
1908 default:
1909 return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
1910 }
1911 }
1912
1913 /*===========================================================================
1914 * FUNCTION : getJpegImgTypeFromImgFmt
1915 *
1916 * DESCRIPTION: function to return jpeg encode image type based on its image format
1917 *
1918 * PARAMETERS :
1919 * @img_fmt : image format
1920 *
1921 * RETURN : return jpeg source image format (YUV or Bitstream)
1922 *==========================================================================*/
1923 mm_jpeg_format_t QCameraPostProcessor::getJpegImgTypeFromImgFmt(cam_format_t img_fmt)
1924 {
1925 switch (img_fmt) {
1926 case CAM_FORMAT_YUV_420_NV21:
1927 case CAM_FORMAT_YUV_420_NV21_ADRENO:
1928 case CAM_FORMAT_YUV_420_NV12:
1929 case CAM_FORMAT_YUV_420_NV12_VENUS:
1930 case CAM_FORMAT_YUV_420_NV21_VENUS:
1931 case CAM_FORMAT_YUV_420_YV12:
1932 case CAM_FORMAT_YUV_422_NV61:
1933 case CAM_FORMAT_YUV_422_NV16:
1934 return MM_JPEG_FMT_YUV;
1935 default:
1936 return MM_JPEG_FMT_YUV;
1937 }
1938 }
1939
1940 /*===========================================================================
1941 * FUNCTION : queryStreams
1942 *
1943 * DESCRIPTION: utility method for retrieving main, thumbnail and reprocess
1944 * streams and frame from bundled super buffer
1945 *
1946 * PARAMETERS :
1947 * @main : ptr to main stream if present
1948 * @thumb : ptr to thumbnail stream if present
1949 * @reproc : ptr to reprocess stream if present
1950 * @main_image : ptr to main image if present
1951 * @thumb_image: ptr to thumbnail image if present
1952 * @frame : bundled super buffer
1953 * @reproc_frame : bundled source frame buffer
1954 *
1955 * RETURN : int32_t type of status
1956 * NO_ERROR -- success
1957 * non-zero failure code
1958 *==========================================================================*/
1959 int32_t QCameraPostProcessor::queryStreams(QCameraStream **main,
1960 QCameraStream **thumb,
1961 QCameraStream **reproc,
1962 mm_camera_buf_def_t **main_image,
1963 mm_camera_buf_def_t **thumb_image,
1964 mm_camera_super_buf_t *frame,
1965 mm_camera_super_buf_t *reproc_frame)
1966 {
1967 if (NULL == frame) {
1968 return NO_INIT;
1969 }
1970
1971 QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
1972 // check reprocess channel if not found
1973 if (pChannel == NULL) {
1974 for (int8_t i = 0; i < mPPChannelCount; i++) {
1975 if ((mPPChannels[i] != NULL) &&
1976 validate_handle(mPPChannels[i]->getMyHandle(), frame->ch_id)) {
1977 pChannel = mPPChannels[i];
1978 break;
1979 }
1980 }
1981 }
1982 if (pChannel == NULL) {
1983 LOGD("No corresponding channel (ch_id = %d) exist, return here",
1984 frame->ch_id);
1985 return BAD_VALUE;
1986 }
1987
1988 // Use snapshot stream to create thumbnail if snapshot and preview
1989 // flip settings doesn't match in ZSL mode.
1990 bool thumb_stream_needed = ((!m_parent->isZSLMode() ||
1991 (m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) ==
1992 m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_PREVIEW))) &&
1993 !m_parent->mParameters.generateThumbFromMain());
1994
1995 *main = *thumb = *reproc = NULL;
1996 *main_image = *thumb_image = NULL;
1997 // find snapshot frame and thumbnail frame
1998 for (uint32_t i = 0; i < frame->num_bufs; i++) {
1999 QCameraStream *pStream =
2000 pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
2001 if (pStream != NULL) {
2002 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
2003 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
2004 pStream->isTypeOf(CAM_STREAM_TYPE_VIDEO) ||
2005 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_VIDEO) ||
2006 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)) {
2007 *main= pStream;
2008 *main_image = frame->bufs[i];
2009 } else if (thumb_stream_needed &&
2010 (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
2011 pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
2012 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
2013 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
2014 *thumb = pStream;
2015 *thumb_image = frame->bufs[i];
2016 }
2017 if (pStream->isTypeOf(CAM_STREAM_TYPE_OFFLINE_PROC) ) {
2018 *reproc = pStream;
2019 }
2020 }
2021 }
2022
2023 if (thumb_stream_needed && *thumb_image == NULL && reproc_frame != NULL) {
2024 QCameraChannel *pSrcReprocChannel = NULL;
2025 pSrcReprocChannel = m_parent->getChannelByHandle(reproc_frame->ch_id);
2026 if (pSrcReprocChannel != NULL) {
2027 // find thumbnail frame
2028 for (uint32_t i = 0; i < reproc_frame->num_bufs; i++) {
2029 QCameraStream *pStream =
2030 pSrcReprocChannel->getStreamByHandle(
2031 reproc_frame->bufs[i]->stream_id);
2032 if (pStream != NULL) {
2033 if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
2034 pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
2035 *thumb = pStream;
2036 *thumb_image = reproc_frame->bufs[i];
2037 }
2038 }
2039 }
2040 }
2041 }
2042
2043 return NO_ERROR;
2044 }
2045
2046 /*===========================================================================
2047 * FUNCTION : syncStreamParams
2048 *
2049 * DESCRIPTION: Query the runtime parameters of all streams included
2050 * in the main and reprocessed frames
2051 *
2052 * PARAMETERS :
2053 * @frame : Main image super buffer
2054 * @reproc_frame : Image super buffer that got processed
2055 *
2056 * RETURN : int32_t type of status
2057 * NO_ERROR -- success
2058 * non-zero failure code
2059 *==========================================================================*/
2060 int32_t QCameraPostProcessor::syncStreamParams(mm_camera_super_buf_t *frame,
2061 mm_camera_super_buf_t *reproc_frame)
2062 {
2063 QCameraStream *reproc_stream = NULL;
2064 QCameraStream *main_stream = NULL;
2065 QCameraStream *thumb_stream = NULL;
2066 mm_camera_buf_def_t *main_frame = NULL;
2067 mm_camera_buf_def_t *thumb_frame = NULL;
2068 int32_t ret = NO_ERROR;
2069
2070 ret = queryStreams(&main_stream,
2071 &thumb_stream,
2072 &reproc_stream,
2073 &main_frame,
2074 &thumb_frame,
2075 frame,
2076 reproc_frame);
2077 if (NO_ERROR != ret) {
2078 LOGE("Camera streams query from input frames failed %d",
2079 ret);
2080 return ret;
2081 }
2082
2083 if (NULL != main_stream) {
2084 ret = main_stream->syncRuntimeParams();
2085 if (NO_ERROR != ret) {
2086 LOGE("Syncing of main stream runtime parameters failed %d",
2087 ret);
2088 return ret;
2089 }
2090 }
2091
2092 if (NULL != thumb_stream) {
2093 ret = thumb_stream->syncRuntimeParams();
2094 if (NO_ERROR != ret) {
2095 LOGE("Syncing of thumb stream runtime parameters failed %d",
2096 ret);
2097 return ret;
2098 }
2099 }
2100
2101 if ((NULL != reproc_stream) && (reproc_stream != main_stream)) {
2102 ret = reproc_stream->syncRuntimeParams();
2103 if (NO_ERROR != ret) {
2104 LOGE("Syncing of reproc stream runtime parameters failed %d",
2105 ret);
2106 return ret;
2107 }
2108 }
2109
2110 return ret;
2111 }
2112
2113 /*===========================================================================
2114 * FUNCTION : encodeData
2115 *
2116 * DESCRIPTION: function to prepare encoding job information and send to
2117 * mm-jpeg-interface to do the encoding job
2118 *
2119 * PARAMETERS :
2120 * @jpeg_job_data : ptr to a struct saving job related information
2121 * @needNewSess : flag to indicate if a new jpeg encoding session needs
2122 * to be created. After creation, this flag will be toggled
2123 *
2124 * RETURN : int32_t type of status
2125 * NO_ERROR -- success
2126 * non-zero failure code
2127 *==========================================================================*/
2128 int32_t QCameraPostProcessor::encodeData(qcamera_jpeg_data_t *jpeg_job_data,
2129 uint8_t &needNewSess)
2130 {
2131 LOGD("E");
2132 int32_t ret = NO_ERROR;
2133 mm_jpeg_job_t jpg_job;
2134 uint32_t jobId = 0;
2135 QCameraStream *reproc_stream = NULL;
2136 QCameraStream *main_stream = NULL;
2137 mm_camera_buf_def_t *main_frame = NULL;
2138 QCameraStream *thumb_stream = NULL;
2139 mm_camera_buf_def_t *thumb_frame = NULL;
2140 mm_camera_super_buf_t *recvd_frame = jpeg_job_data->src_frame;
2141 cam_rect_t crop;
2142 cam_stream_parm_buffer_t param;
2143 cam_stream_img_prop_t imgProp;
2144
2145 // find channel
2146 QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
2147 // check reprocess channel if not found
2148 if (pChannel == NULL) {
2149 for (int8_t i = 0; i < mPPChannelCount; i++) {
2150 if ((mPPChannels[i] != NULL) &&
2151 (validate_handle(mPPChannels[i]->getMyHandle(), recvd_frame->ch_id))) {
2152 pChannel = mPPChannels[i];
2153 break;
2154 }
2155 }
2156 }
2157
2158 if (pChannel == NULL) {
2159 LOGE("No corresponding channel (ch_id = %d) exist, return here",
2160 recvd_frame->ch_id);
2161 return BAD_VALUE;
2162 }
2163
2164 const uint32_t jpeg_rotation = m_parent->mParameters.getJpegRotation();
2165
2166 ret = queryStreams(&main_stream,
2167 &thumb_stream,
2168 &reproc_stream,
2169 &main_frame,
2170 &thumb_frame,
2171 recvd_frame,
2172 jpeg_job_data->src_reproc_frame);
2173 if (NO_ERROR != ret) {
2174 return ret;
2175 }
2176
2177 if(NULL == main_frame){
2178 LOGE("Main frame is NULL");
2179 return BAD_VALUE;
2180 }
2181
2182 if(NULL == thumb_frame){
2183 LOGD("Thumbnail frame does not exist");
2184 }
2185
2186 QCameraMemory *memObj = (QCameraMemory *)main_frame->mem_info;
2187 if (NULL == memObj) {
2188 LOGE("Memeory Obj of main frame is NULL");
2189 return NO_MEMORY;
2190 }
2191
2192 // dump snapshot frame if enabled
2193 m_parent->dumpFrameToFile(main_stream, main_frame,
2194 QCAMERA_DUMP_FRM_INPUT_JPEG, (char *)"CPP");
2195
2196 // send upperlayer callback for raw image
2197 camera_memory_t *mem = memObj->getMemory(main_frame->buf_idx, false);
2198 if (NULL != m_parent->mDataCb &&
2199 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
2200 qcamera_callback_argm_t cbArg;
2201 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2202 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
2203 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
2204 cbArg.data = mem;
2205 cbArg.index = 0;
2206 // Data callback, set read/write flags
2207 main_frame->cache_flags |= CPU_HAS_READ;
2208 m_parent->m_cbNotifier.notifyCallback(cbArg);
2209 }
2210 if (NULL != m_parent->mNotifyCb &&
2211 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
2212 qcamera_callback_argm_t cbArg;
2213 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2214 cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
2215 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
2216 cbArg.ext1 = 0;
2217 cbArg.ext2 = 0;
2218 m_parent->m_cbNotifier.notifyCallback(cbArg);
2219 }
2220
2221 if (mJpegClientHandle <= 0) {
2222 LOGE("Error: bug here, mJpegClientHandle is 0");
2223 return UNKNOWN_ERROR;
2224 }
2225
2226 if (needNewSess) {
2227 // create jpeg encoding session
2228 mm_jpeg_encode_params_t encodeParam;
2229 memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
2230 ret = getJpegEncodingConfig(encodeParam, main_stream, thumb_stream);
2231 if (ret != NO_ERROR) {
2232 LOGE("error getting encoding config");
2233 return ret;
2234 }
2235 LOGH("[KPI Perf] : call jpeg create_session");
2236 ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
2237 if (ret != NO_ERROR) {
2238 LOGE("error creating a new jpeg encoding session");
2239 return ret;
2240 }
2241 needNewSess = FALSE;
2242 }
2243 // Fill in new job
2244 memset(&jpg_job, 0, sizeof(mm_jpeg_job_t));
2245 jpg_job.job_type = JPEG_JOB_TYPE_ENCODE;
2246 jpg_job.encode_job.session_id = mJpegSessionId;
2247 jpg_job.encode_job.src_index = (int32_t)main_frame->buf_idx;
2248 jpg_job.encode_job.dst_index = 0;
2249
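    // with jpeg memory optimization the encoder writes its output back into the
    // source buffer, so reuse the source index; in burst mode (-1) the jpeg
    // interface picks the destination buffer itself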
2250 if (mJpegMemOpt) {
2251 jpg_job.encode_job.dst_index = jpg_job.encode_job.src_index;
2252 } else if (mUseJpegBurst) {
2253 jpg_job.encode_job.dst_index = -1;
2254 }
2255
2256 // use src to reproc frame as work buffer; if src buf is not available
2257 // jpeg interface will allocate work buffer
2258 if (jpeg_job_data->src_reproc_frame != NULL) {
2259 int32_t ret = NO_ERROR;
2260 QCameraStream *main_stream = NULL;
2261 mm_camera_buf_def_t *main_frame = NULL;
2262 QCameraStream *thumb_stream = NULL;
2263 mm_camera_buf_def_t *thumb_frame = NULL;
2264 QCameraStream *reproc_stream = NULL;
2265 mm_camera_buf_def_t *workBuf = NULL;
2266 // Call queryStreams to fetch source of reproc frame
2267 ret = queryStreams(&main_stream,
2268 &thumb_stream,
2269 &reproc_stream,
2270 &main_frame,
2271 &thumb_frame,
2272 jpeg_job_data->src_reproc_frame,
2273 NULL);
2274
2275 if ((NO_ERROR == ret) && ((workBuf = main_frame) != NULL)
2276 && !m_parent->isLowPowerMode()) {
2277 camera_memory_t *camWorkMem = NULL;
2278 int workBufIndex = workBuf->buf_idx;
2279 QCameraMemory *workMem = (QCameraMemory *)workBuf->mem_info;
2280 if (workMem != NULL) {
2281 camWorkMem = workMem->getMemory(workBufIndex, false);
2282 }
2283 if (camWorkMem != NULL && workMem != NULL) {
2284 jpg_job.encode_job.work_buf.buf_size = camWorkMem->size;
2285 jpg_job.encode_job.work_buf.buf_vaddr = (uint8_t *)camWorkMem->data;
2286 jpg_job.encode_job.work_buf.fd = workMem->getFd(workBufIndex);
2287 workMem->invalidateCache(workBufIndex);
2288 }
2289 }
2290 }
2291
2292 cam_dimension_t src_dim;
2293 memset(&src_dim, 0, sizeof(cam_dimension_t));
2294 main_stream->getFrameDimension(src_dim);
2295
2296 bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
2297 bool img_feature_enabled =
2298 m_parent->mParameters.isUbiFocusEnabled() ||
2299 m_parent->mParameters.isUbiRefocus() ||
2300 m_parent->mParameters.isChromaFlashEnabled() ||
2301 m_parent->mParameters.isOptiZoomEnabled() ||
2302 m_parent->mParameters.isStillMoreEnabled();
2303
2304 LOGH("Crop needed %d", img_feature_enabled);
2305 crop.left = 0;
2306 crop.top = 0;
2307 crop.height = src_dim.height;
2308 crop.width = src_dim.width;
2309
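    // start with a full-frame crop, then override it with the crop info the
    // main stream has published for its server stream id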
2310 param = main_stream->getOutputCrop();
2311 for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
2312 if (param.outputCrop.crop_info[i].stream_id
2313 == main_stream->getMyServerID()) {
2314 crop = param.outputCrop.crop_info[i].crop;
2315 main_stream->setCropInfo(crop);
2316 }
2317 }
2318 if (img_feature_enabled) {
2319 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
2320
2321 param = main_stream->getImgProp();
2322 imgProp = param.imgProp;
2323 main_stream->setCropInfo(imgProp.crop);
2324 crop = imgProp.crop;
2325 thumb_stream = NULL; /* use thumbnail from main image */
2326
2327 if ((reproc_stream != NULL) && (m_DataMem == NULL) &&
2328 m_parent->mParameters.isUbiRefocus()) {
2329
2330 QCameraHeapMemory* miscBufHandler = reproc_stream->getMiscBuf();
2331 cam_misc_buf_t* refocusResult =
2332 reinterpret_cast<cam_misc_buf_t *>(miscBufHandler->getPtr(0));
2333 uint32_t resultSize = refocusResult->header_size +
2334 refocusResult->width * refocusResult->height;
2335 camera_memory_t *dataMem = m_parent->mGetMemory(-1, resultSize,
2336 1, m_parent->mCallbackCookie);
2337
2338 LOGH("Refocus result header %u dims %dx%d",
2339 resultSize, refocusResult->width, refocusResult->height);
2340
2341 if (dataMem && dataMem->data) {
2342 memcpy(dataMem->data, refocusResult->data, resultSize);
2343 //save mem pointer for depth map
2344 m_DataMem = dataMem;
2345 }
2346 }
2347 } else if ((reproc_stream != NULL) && (m_parent->mParameters.isTruePortraitEnabled())) {
2348
2349 QCameraHeapMemory* miscBufHandler = reproc_stream->getMiscBuf();
2350 cam_misc_buf_t* tpResult =
2351 reinterpret_cast<cam_misc_buf_t *>(miscBufHandler->getPtr(0));
2352 uint32_t tpMetaSize = tpResult->header_size + tpResult->width * tpResult->height;
2353
2354 LOGH("True portrait result header %d% dims dx%d",
2355 tpMetaSize, tpResult->width, tpResult->height);
2356
2357 CAM_DUMP_TO_FILE(QCAMERA_DUMP_FRM_LOCATION"tp", "bm", -1, "y",
2358 &tpResult->data, tpMetaSize);
2359 }
2360
2361 cam_dimension_t dst_dim;
2362
2363 if (hdr_output_crop && crop.height) {
2364 dst_dim.height = crop.height;
2365 } else {
2366 dst_dim.height = src_dim.height;
2367 }
2368 if (hdr_output_crop && crop.width) {
2369 dst_dim.width = crop.width;
2370 } else {
2371 dst_dim.width = src_dim.width;
2372 }
2373
2374 // main dim
2375 jpg_job.encode_job.main_dim.src_dim = src_dim;
2376 jpg_job.encode_job.main_dim.dst_dim = dst_dim;
2377 jpg_job.encode_job.main_dim.crop = crop;
2378
2379 // get 3a sw version info
2380 cam_q3a_version_t sw_version =
2381 m_parent->getCamHalCapabilities()->q3a_version;
2382
2383 // get exif data
2384 QCameraExif *pJpegExifObj = m_parent->getExifData();
2385 jpeg_job_data->pJpegExifObj = pJpegExifObj;
2386 if (pJpegExifObj != NULL) {
2387 jpg_job.encode_job.exif_info.exif_data = pJpegExifObj->getEntries();
2388 jpg_job.encode_job.exif_info.numOfEntries =
2389 pJpegExifObj->getNumOfEntries();
2390 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[0] =
2391 sw_version.major_version;
2392 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[1] =
2393 sw_version.minor_version;
2394 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[2] =
2395 sw_version.patch_version;
2396 jpg_job.encode_job.exif_info.debug_data.sw_3a_version[3] =
2397 sw_version.new_feature_des;
2398 }
2399
2400 // set rotation only when no online rotation or offline pp rotation is done before
2401 if (!m_parent->needRotationReprocess()) {
2402 jpg_job.encode_job.rotation = jpeg_rotation;
2403 }
2404 LOGH("jpeg rotation is set to %d", jpg_job.encode_job.rotation);
2405
2406 // thumbnail dim
2407 if (m_bThumbnailNeeded == TRUE) {
2408 m_parent->getThumbnailSize(jpg_job.encode_job.thumb_dim.dst_dim);
2409
2410 if (thumb_stream == NULL) {
2411 // need jpeg thumbnail, but no postview/preview stream exists
2412 // we use the main stream/frame to encode thumbnail
2413 thumb_stream = main_stream;
2414 thumb_frame = main_frame;
2415 }
2416 if (m_parent->needRotationReprocess() &&
2417 ((90 == jpeg_rotation) || (270 == jpeg_rotation))) {
2418 // swap thumbnail dimensions
2419 cam_dimension_t tmp_dim = jpg_job.encode_job.thumb_dim.dst_dim;
2420 jpg_job.encode_job.thumb_dim.dst_dim.width = tmp_dim.height;
2421 jpg_job.encode_job.thumb_dim.dst_dim.height = tmp_dim.width;
2422 }
2423
2424 memset(&src_dim, 0, sizeof(cam_dimension_t));
2425 thumb_stream->getFrameDimension(src_dim);
2426 jpg_job.encode_job.thumb_dim.src_dim = src_dim;
2427
2428 // crop is the same if frame is the same
2429 if (thumb_frame != main_frame) {
2430 crop.left = 0;
2431 crop.top = 0;
2432 crop.height = src_dim.height;
2433 crop.width = src_dim.width;
2434
2435 param = thumb_stream->getOutputCrop();
2436 for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
2437 if (param.outputCrop.crop_info[i].stream_id
2438 == thumb_stream->getMyServerID()) {
2439 crop = param.outputCrop.crop_info[i].crop;
2440 thumb_stream->setCropInfo(crop);
2441 }
2442 }
2443 }
2444
2445
2446 jpg_job.encode_job.thumb_dim.crop = crop;
2447 if (thumb_frame != NULL) {
2448 jpg_job.encode_job.thumb_index = thumb_frame->buf_idx;
2449 }
2450 LOGI("Thumbnail idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
2451 jpg_job.encode_job.thumb_index,
2452 jpg_job.encode_job.thumb_dim.src_dim.width,
2453 jpg_job.encode_job.thumb_dim.src_dim.height,
2454 jpg_job.encode_job.thumb_dim.dst_dim.width,
2455 jpg_job.encode_job.thumb_dim.dst_dim.height);
2456 }
2457
2458 LOGI("Main image idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
2459 jpg_job.encode_job.src_index,
2460 jpg_job.encode_job.main_dim.src_dim.width,
2461 jpg_job.encode_job.main_dim.src_dim.height,
2462 jpg_job.encode_job.main_dim.dst_dim.width,
2463 jpg_job.encode_job.main_dim.dst_dim.height);
2464
2465 if (thumb_frame != NULL) {
2466 // dump thumbnail frame if enabled
2467 m_parent->dumpFrameToFile(thumb_stream, thumb_frame, QCAMERA_DUMP_FRM_THUMBNAIL);
2468 }
2469
2470 if (jpeg_job_data->metadata != NULL) {
2471 // fill in meta data frame ptr
2472 jpg_job.encode_job.p_metadata = jpeg_job_data->metadata;
2473 }
2474
2475 jpg_job.encode_job.hal_version = CAM_HAL_V1;
2476 m_parent->mExifParams.sensor_params.sens_type = m_parent->getSensorType();
2477 jpg_job.encode_job.cam_exif_params = m_parent->mExifParams;
2478 jpg_job.encode_job.cam_exif_params.debug_params =
2479 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
2480 if (!jpg_job.encode_job.cam_exif_params.debug_params) {
2481 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
2482 return NO_MEMORY;
2483 }
2484
2485 jpg_job.encode_job.mobicat_mask = m_parent->mParameters.getMobicatMask();
2486
2487
2488 if (NULL != jpg_job.encode_job.p_metadata && (jpg_job.encode_job.mobicat_mask > 0)) {
2489
2490 if (m_parent->mExifParams.debug_params) {
2491 memcpy(jpg_job.encode_job.cam_exif_params.debug_params,
2492 m_parent->mExifParams.debug_params, (sizeof(mm_jpeg_debug_exif_params_t)));
2493
2494 /* Save a copy of mobicat params */
2495 jpg_job.encode_job.p_metadata->is_mobicat_aec_params_valid =
2496 jpg_job.encode_job.cam_exif_params.cam_3a_params_valid;
2497
2498 if (jpg_job.encode_job.cam_exif_params.cam_3a_params_valid) {
2499 jpg_job.encode_job.p_metadata->mobicat_aec_params =
2500 jpg_job.encode_job.cam_exif_params.cam_3a_params;
2501 }
2502
2503 /* Save a copy of 3A debug params */
2504 jpg_job.encode_job.p_metadata->is_statsdebug_ae_params_valid =
2505 jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid;
2506 jpg_job.encode_job.p_metadata->is_statsdebug_awb_params_valid =
2507 jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid;
2508 jpg_job.encode_job.p_metadata->is_statsdebug_af_params_valid =
2509 jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid;
2510 jpg_job.encode_job.p_metadata->is_statsdebug_asd_params_valid =
2511 jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid;
2512 jpg_job.encode_job.p_metadata->is_statsdebug_stats_params_valid =
2513 jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid;
2514 jpg_job.encode_job.p_metadata->is_statsdebug_bestats_params_valid =
2515 jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid;
2516 jpg_job.encode_job.p_metadata->is_statsdebug_bhist_params_valid =
2517 jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid;
2518 jpg_job.encode_job.p_metadata->is_statsdebug_3a_tuning_params_valid =
2519 jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid;
2520
2521 if (jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid) {
2522 jpg_job.encode_job.p_metadata->statsdebug_ae_data =
2523 jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params;
2524 }
2525 if (jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid) {
2526 jpg_job.encode_job.p_metadata->statsdebug_awb_data =
2527 jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params;
2528 }
2529 if (jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid) {
2530 jpg_job.encode_job.p_metadata->statsdebug_af_data =
2531 jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params;
2532 }
2533 if (jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid) {
2534 jpg_job.encode_job.p_metadata->statsdebug_asd_data =
2535 jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params;
2536 }
2537 if (jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid) {
2538 jpg_job.encode_job.p_metadata->statsdebug_stats_buffer_data =
2539 jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params;
2540 }
2541 if (jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid) {
2542 jpg_job.encode_job.p_metadata->statsdebug_bestats_buffer_data =
2543 jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params;
2544 }
2545 if (jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid) {
2546 jpg_job.encode_job.p_metadata->statsdebug_bhist_data =
2547 jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params;
2548 }
2549 if (jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid) {
2550 jpg_job.encode_job.p_metadata->statsdebug_3a_tuning_data =
2551 jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params;
2552 }
2553 }
2554
2555 }
2556
2557 /* Init the QTable */
2558 for (int i = 0; i < QTABLE_MAX; i++) {
2559 jpg_job.encode_job.qtable_set[i] = 0;
2560 }
2561
2562 const cam_sync_related_sensors_event_info_t* related_cam_info =
2563 m_parent->getRelatedCamSyncInfo();
2564 if (related_cam_info->sync_control == CAM_SYNC_RELATED_SENSORS_ON &&
2565 m_parent->getMpoComposition()) {
2566 jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_MPO;
2567 if (related_cam_info->type == CAM_TYPE_MAIN ) {
2568 jpg_job.encode_job.multi_image_info.is_primary = TRUE;
2569 LOGD("Encoding MPO Primary JPEG");
2570 } else {
2571 jpg_job.encode_job.multi_image_info.is_primary = FALSE;
2572 LOGD("Encoding MPO Aux JPEG");
2573 }
2574 jpg_job.encode_job.multi_image_info.num_of_images = 2;
2575 } else {
2576 LOGD("Encoding Single JPEG");
2577 jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_JPEG;
2578 jpg_job.encode_job.multi_image_info.is_primary = FALSE;
2579 jpg_job.encode_job.multi_image_info.num_of_images = 1;
2580 }
2581 //Do Cache ops before sending to encode
2582 if (main_frame != NULL) {
2583 main_stream->handleCacheOps(main_frame);
2584 }
2585 if ((thumb_stream != NULL) && (thumb_frame != NULL)) {
2586 thumb_stream->handleCacheOps(thumb_frame);
2587 }
2588
2589 LOGI("[KPI Perf] : PROFILE_JPEG_JOB_START");
2590 ret = mJpegHandle.start_job(&jpg_job, &jobId);
2591 if (jpg_job.encode_job.cam_exif_params.debug_params) {
2592 free(jpg_job.encode_job.cam_exif_params.debug_params);
2593 }
2594 if (ret == NO_ERROR) {
2595 // remember job info
2596 jpeg_job_data->jobId = jobId;
2597 }
2598
2599 return ret;
2600 }
2601
2602 /*===========================================================================
2603 * FUNCTION : processRawImageImpl
2604 *
2605 * DESCRIPTION: function to send raw image to upper layer
2606 *
2607 * PARAMETERS :
2608 * @recvd_frame : frame to be encoded
2609 *
2610 * RETURN : int32_t type of status
2611 * NO_ERROR -- success
2612 * non-zero failure code
2613 *==========================================================================*/
2614 int32_t QCameraPostProcessor::processRawImageImpl(mm_camera_super_buf_t *recvd_frame)
2615 {
2616 int32_t rc = NO_ERROR;
2617
2618 QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
2619 QCameraStream *pStream = NULL;
2620 mm_camera_buf_def_t *frame = NULL;
2621 // check reprocess channel if not found
2622 if (pChannel == NULL) {
2623 for (int8_t i = 0; i < mPPChannelCount; i++) {
2624 if ((mPPChannels[i] != NULL) &&
2625 (validate_handle(mPPChannels[i]->getMyHandle(), recvd_frame->ch_id))) {
2626 pChannel = mPPChannels[i];
2627 break;
2628 }
2629 }
2630 }
2631 if (pChannel == NULL) {
2632 LOGE("No corresponding channel (ch_id = %d) exist, return here",
2633 recvd_frame->ch_id);
2634 return BAD_VALUE;
2635 }
2636
2637 // find snapshot frame
2638 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
2639 QCameraStream *pCurStream =
2640 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
2641 if (pCurStream != NULL) {
2642 if (pCurStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
2643 pCurStream->isTypeOf(CAM_STREAM_TYPE_RAW) ||
2644 pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
2645 pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)) {
2646 pStream = pCurStream;
2647 frame = recvd_frame->bufs[i];
2648 break;
2649 }
2650 }
2651 }
2652
2653 if ( NULL == frame ) {
2654 LOGE("No valid raw buffer");
2655 return BAD_VALUE;
2656 }
2657
2658 QCameraMemory *rawMemObj = (QCameraMemory *)frame->mem_info;
2659 bool zslChannelUsed = m_parent->isZSLMode() &&
2660 ( pChannel != mPPChannels[0] );
2661 camera_memory_t *raw_mem = NULL;
2662
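    // for ZSL captures the existing stream buffer can be handed to the callback
    // directly; otherwise allocate callback memory and copy the raw frame into it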
2663 if (rawMemObj != NULL) {
2664 if (zslChannelUsed) {
2665 raw_mem = rawMemObj->getMemory(frame->buf_idx, false);
2666 } else {
2667 raw_mem = m_parent->mGetMemory(-1,
2668 frame->frame_len,
2669 1,
2670 m_parent->mCallbackCookie);
2671 if (NULL == raw_mem) {
2672 LOGE("Not enough memory for RAW cb ");
2673 return NO_MEMORY;
2674 }
2675 memcpy(raw_mem->data, frame->buffer, frame->frame_len);
2676 }
2677 }
2678
2679 if (NULL != rawMemObj && NULL != raw_mem) {
2680 // dump frame into file
2681 if (frame->stream_type == CAM_STREAM_TYPE_SNAPSHOT ||
2682 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
2683 // for YUV422 NV16 case
2684 m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_INPUT_JPEG);
2685 } else {
2686 //Received RAW snapshot taken notification
2687 m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_RAW);
2688
2689 if(true == m_parent->m_bIntRawEvtPending) {
2690 //Sending RAW snapshot taken notification to HAL
2691 memset(&m_dst_dim, 0, sizeof(m_dst_dim));
2692 pStream->getFrameDimension(m_dst_dim);
2693 pthread_mutex_lock(&m_parent->m_int_lock);
2694 pthread_cond_signal(&m_parent->m_int_cond);
2695 pthread_mutex_unlock(&m_parent->m_int_lock);
2696 raw_mem->release(raw_mem);
2697 return rc;
2698 }
2699 }
2700
2701 // send data callback / notify for RAW_IMAGE
2702 if (NULL != m_parent->mDataCb &&
2703 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
2704 qcamera_callback_argm_t cbArg;
2705 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2706 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
2707 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
2708 cbArg.data = raw_mem;
2709 cbArg.index = 0;
2710 frame->cache_flags |= CPU_HAS_READ;
2711 m_parent->m_cbNotifier.notifyCallback(cbArg);
2712 }
2713 if (NULL != m_parent->mNotifyCb &&
2714 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
2715 qcamera_callback_argm_t cbArg;
2716 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
2717 cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
2718 cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
2719 cbArg.ext1 = 0;
2720 cbArg.ext2 = 0;
2721 frame->cache_flags |= CPU_HAS_READ;
2722 m_parent->m_cbNotifier.notifyCallback(cbArg);
2723 }
2724
2725 if ((m_parent->mDataCb != NULL) &&
2726 m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) > 0) {
2727 qcamera_release_data_t release_data;
2728 memset(&release_data, 0, sizeof(qcamera_release_data_t));
2729 if ( zslChannelUsed ) {
2730 release_data.frame = recvd_frame;
2731 } else {
2732 release_data.data = raw_mem;
2733 }
2734 rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
2735 raw_mem,
2736 0,
2737 NULL,
2738 &release_data);
2739 } else {
2740 raw_mem->release(raw_mem);
2741 }
2742 } else {
2743 LOGE("Cannot get raw mem");
2744 rc = UNKNOWN_ERROR;
2745 }
2746
2747 return rc;
2748 }
2749
2750 /*===========================================================================
2751 * FUNCTION : dataSaveRoutine
2752 *
2753 * DESCRIPTION: data saving routine
2754 *
2755 * PARAMETERS :
2756 * @data : user data ptr (QCameraPostProcessor)
2757 *
2758 * RETURN : None
2759 *==========================================================================*/
2760 void *QCameraPostProcessor::dataSaveRoutine(void *data)
2761 {
2762 int running = 1;
2763 int ret;
2764 uint8_t is_active = FALSE;
2765 QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
2766 QCameraCmdThread *cmdThread = &pme->m_saveProcTh;
2767 cmdThread->setName("CAM_JpegSave");
2768 char saveName[PROPERTY_VALUE_MAX];
2769
2770 LOGH("E");
2771 do {
2772 do {
2773 ret = cam_sem_wait(&cmdThread->cmd_sem);
2774 if (ret != 0 && errno != EINVAL) {
2775 LOGE("cam_sem_wait error (%s)",
2776 strerror(errno));
2777 return NULL;
2778 }
2779 } while (ret != 0);
2780
2781 // we got notified about new cmd avail in cmd queue
2782 camera_cmd_type_t cmd = cmdThread->getCmd();
2783 switch (cmd) {
2784 case CAMERA_CMD_TYPE_START_DATA_PROC:
2785 LOGH("start data proc");
2786 is_active = TRUE;
2787 pme->m_inputSaveQ.init();
2788 break;
2789 case CAMERA_CMD_TYPE_STOP_DATA_PROC:
2790 {
2791 LOGH("stop data proc");
2792 is_active = FALSE;
2793
2794 // flush input save Queue
2795 pme->m_inputSaveQ.flush();
2796
2797 // signal cmd is completed
2798 cam_sem_post(&cmdThread->sync_sem);
2799 }
2800 break;
2801 case CAMERA_CMD_TYPE_DO_NEXT_JOB:
2802 {
2803 LOGH("Do next job, active is %d", is_active);
2804
2805 qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) pme->m_inputSaveQ.dequeue();
2806 if (job_data == NULL) {
2807 LOGE("Invalid jpeg event data");
2808 continue;
2809 }
2810 //qcamera_jpeg_data_t *jpeg_job =
2811 // (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue(false);
2812 //uint32_t frame_idx = jpeg_job->src_frame->bufs[0]->frame_idx;
2813 uint32_t frame_idx = 75;
2814
2815 pme->m_ongoingJpegQ.flushNodes(matchJobId, (void*)&job_data->jobId);
2816
2817 LOGH("[KPI Perf] : jpeg job %d", job_data->jobId);
2818
2819 if (is_active == TRUE) {
2820 memset(saveName, '\0', sizeof(saveName));
2821 snprintf(saveName,
2822 sizeof(saveName),
2823 QCameraPostProcessor::STORE_LOCATION,
2824 pme->mSaveFrmCnt);
2825
2826 int file_fd = open(saveName, O_RDWR | O_CREAT, 0655);
2827 if (file_fd >= 0) {
2828 ssize_t written_len = write(file_fd, job_data->out_data.buf_vaddr,
2829 job_data->out_data.buf_filled_len);
2830 if ((ssize_t)job_data->out_data.buf_filled_len != written_len) {
2831 LOGE("Failed save complete data %d bytes "
2832 "written instead of %d bytes!",
2833 written_len,
2834 job_data->out_data.buf_filled_len);
2835 } else {
2836 LOGH("written number of bytes %d\n",
2837 written_len);
2838 }
2839
2840 close(file_fd);
2841 } else {
2842 LOGE("fail t open file for saving");
2843 }
2844 pme->mSaveFrmCnt++;
2845
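                // the compressed-image callback carries the stored file path rather than
                // the encoded bits; unlinkFile lets releaseNotifyData delete the file
                // if delivery fails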
2846 camera_memory_t* jpeg_mem = pme->m_parent->mGetMemory(-1,
2847 strlen(saveName),
2848 1,
2849 pme->m_parent->mCallbackCookie);
2850 if (NULL == jpeg_mem) {
2851 ret = NO_MEMORY;
2852 LOGE("getMemory for jpeg, ret = NO_MEMORY");
2853 goto end;
2854 }
2855 memcpy(jpeg_mem->data, saveName, strlen(saveName));
2856
2857 LOGH("Calling upperlayer callback to store JPEG image");
2858 qcamera_release_data_t release_data;
2859 memset(&release_data, 0, sizeof(qcamera_release_data_t));
2860 release_data.data = jpeg_mem;
2861 release_data.unlinkFile = true;
2862 LOGI("[KPI Perf]: PROFILE_JPEG_CB ");
2863 ret = pme->sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
2864 jpeg_mem,
2865 0,
2866 NULL,
2867 &release_data,
2868 frame_idx);
2869 }
2870
2871 end:
2872 free(job_data);
2873 }
2874 break;
2875 case CAMERA_CMD_TYPE_EXIT:
2876 LOGH("save thread exit");
2877 running = 0;
2878 break;
2879 default:
2880 break;
2881 }
2882 } while (running);
2883 LOGH("X");
2884 return NULL;
2885 }
2886
2887 /*===========================================================================
2888 * FUNCTION : dataProcessRoutine
2889 *
2890 * DESCRIPTION: data process routine that handles input data either from input
2891 * Jpeg Queue to do jpeg encoding, or from input PP Queue to do
2892 * reprocess.
2893 *
2894 * PARAMETERS :
2895 * @data : user data ptr (QCameraPostProcessor)
2896 *
2897 * RETURN : None
2898 *==========================================================================*/
2899 void *QCameraPostProcessor::dataProcessRoutine(void *data)
2900 {
2901 int running = 1;
2902 int ret;
2903 uint8_t is_active = FALSE;
2904 QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
2905 QCameraCmdThread *cmdThread = &pme->m_dataProcTh;
2906 cmdThread->setName("CAM_DataProc");
2907
2908 LOGH("E");
2909 do {
2910 do {
2911 ret = cam_sem_wait(&cmdThread->cmd_sem);
2912 if (ret != 0 && errno != EINVAL) {
2913 LOGE("cam_sem_wait error (%s)",
2914 strerror(errno));
2915 return NULL;
2916 }
2917 } while (ret != 0);
2918
2919 // we got notified about new cmd avail in cmd queue
2920 camera_cmd_type_t cmd = cmdThread->getCmd();
2921 switch (cmd) {
2922 case CAMERA_CMD_TYPE_START_DATA_PROC:
2923 LOGH("start data proc");
2924 is_active = TRUE;
2925
2926 pme->m_ongoingPPQ.init();
2927 pme->m_inputJpegQ.init();
2928 pme->m_inputPPQ.init();
2929 pme->m_inputRawQ.init();
2930
2931 pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC,
2932 FALSE,
2933 FALSE);
2934
2935 // signal cmd is completed
2936 cam_sem_post(&cmdThread->sync_sem);
2937
2938 break;
2939 case CAMERA_CMD_TYPE_STOP_DATA_PROC:
2940 {
2941 LOGH("stop data proc");
2942 is_active = FALSE;
2943
2944 pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC,
2945 TRUE,
2946 TRUE);
2947 // cancel all ongoing jpeg jobs
2948 qcamera_jpeg_data_t *jpeg_job =
2949 (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
2950 while (jpeg_job != NULL) {
2951 pme->mJpegHandle.abort_job(jpeg_job->jobId);
2952
2953 pme->releaseJpegJobData(jpeg_job);
2954 free(jpeg_job);
2955
2956 jpeg_job = (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
2957 }
2958
2959 // destroy jpeg encoding session
2960 if ( 0 < pme->mJpegSessionId ) {
2961 pme->mJpegHandle.destroy_session(pme->mJpegSessionId);
2962 pme->mJpegSessionId = 0;
2963 }
2964
2965 // free jpeg out buf and exif obj
2966 FREE_JPEG_OUTPUT_BUFFER(pme->m_pJpegOutputMem,
2967 pme->m_JpegOutputMemCount);
2968
2969 if (pme->m_pJpegExifObj != NULL) {
2970 delete pme->m_pJpegExifObj;
2971 pme->m_pJpegExifObj = NULL;
2972 }
2973
2974 // flush ongoing postproc Queue
2975 pme->m_ongoingPPQ.flush();
2976
2977 // flush input jpeg Queue
2978 pme->m_inputJpegQ.flush();
2979
2980 // flush input Postproc Queue
2981 pme->m_inputPPQ.flush();
2982
2983 // flush input raw Queue
2984 pme->m_inputRawQ.flush();
2985
2986 // signal cmd is completed
2987 cam_sem_post(&cmdThread->sync_sem);
2988
2989 pme->mNewJpegSessionNeeded = true;
2990 }
2991 break;
2992 case CAMERA_CMD_TYPE_DO_NEXT_JOB:
2993 {
2994 LOGH("Do next job, active is %d", is_active);
2995 if (is_active == TRUE) {
2996 qcamera_jpeg_data_t *jpeg_job =
2997 (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
2998
2999 if (NULL != jpeg_job) {
3000 // To avoid any race conditions,
3001 // sync any stream specific parameters here.
3002 if (pme->m_parent->mParameters.isAdvCamFeaturesEnabled()) {
3003 // Sync stream params, only if advanced features configured
3004 // Reduces the latency for normal snapshot.
3005 pme->syncStreamParams(jpeg_job->src_frame, NULL);
3006 }
3007
3008 // add into ongoing jpeg job Q
3009 if (pme->m_ongoingJpegQ.enqueue((void *)jpeg_job)) {
3010 ret = pme->encodeData(jpeg_job,
3011 pme->mNewJpegSessionNeeded);
3012 if (NO_ERROR != ret) {
3013 // dequeue the last one
3014 pme->m_ongoingJpegQ.dequeue(false);
3015 pme->releaseJpegJobData(jpeg_job);
3016 free(jpeg_job);
3017 jpeg_job = NULL;
3018 pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
3019 }
3020 } else {
3021 LOGW("m_ongoingJpegQ is not active!!!");
3022 pme->releaseJpegJobData(jpeg_job);
3023 free(jpeg_job);
3024 jpeg_job = NULL;
3025 }
3026 }
3027
3028
3029 // process raw data if any
3030 mm_camera_super_buf_t *super_buf =
3031 (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
3032
3033 if (NULL != super_buf) {
3034 //play shutter sound
3035 pme->m_parent->playShutter();
3036 ret = pme->processRawImageImpl(super_buf);
3037 if (NO_ERROR != ret) {
3038 pme->releaseSuperBuf(super_buf);
3039 free(super_buf);
3040 pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
3041 }
3042 }
3043
3044 ret = pme->doReprocess();
3045 if (NO_ERROR != ret) {
3046 pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
3047 } else {
3048 ret = pme->stopCapture();
3049 }
3050
3051 } else {
3052 // not active, simply return buf and do no op
3053 qcamera_jpeg_data_t *jpeg_data =
3054 (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
3055 if (NULL != jpeg_data) {
3056 pme->releaseJpegJobData(jpeg_data);
3057 free(jpeg_data);
3058 }
3059 mm_camera_super_buf_t *super_buf =
3060 (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
3061 if (NULL != super_buf) {
3062 pme->releaseSuperBuf(super_buf);
3063 free(super_buf);
3064 }
3065
3066 // flush input Postproc Queue
3067 pme->m_inputPPQ.flush();
3068 }
3069 }
3070 break;
3071 case CAMERA_CMD_TYPE_EXIT:
3072 running = 0;
3073 break;
3074 default:
3075 break;
3076 }
3077 } while (running);
3078 LOGH("X");
3079 return NULL;
3080 }
3081
3082 /*===========================================================================
3083 * FUNCTION : doReprocess
3084 *
3085 * DESCRIPTION: Trigger channel reprocessing
3086 *
3087 * PARAMETERS :None
3088 *
3089 * RETURN : int32_t type of status
3090 * NO_ERROR -- success
3091 * non-zero failure code
3092 *==========================================================================*/
3093 int32_t QCameraPostProcessor::doReprocess()
3094 {
3095 int32_t ret = NO_ERROR;
3096 QCameraChannel *m_pSrcChannel = NULL;
3097 QCameraStream *pMetaStream = NULL;
3098 uint8_t meta_buf_index = 0;
3099 mm_camera_buf_def_t *meta_buf = NULL;
3100 mm_camera_super_buf_t *ppInputFrame = NULL;
3101
3102 qcamera_pp_data_t *ppreq_job = (qcamera_pp_data_t *)m_inputPPQ.peek();
3103 if ((ppreq_job == NULL) || (ppreq_job->src_frame == NULL)) {
3104 return ret;
3105 }
3106
3107 if (!validatePostProcess(ppreq_job->src_frame)) {
3108 return ret;
3109 }
3110
3111 ppreq_job = (qcamera_pp_data_t *)m_inputPPQ.dequeue();
3112 if (ppreq_job == NULL || ppreq_job->src_frame == NULL ||
3113 ppreq_job->src_reproc_frame == NULL) {
3114 return ret;
3115 }
3116
3117 mm_camera_super_buf_t *src_frame = ppreq_job->src_frame;
3118 mm_camera_super_buf_t *src_reproc_frame = ppreq_job->src_reproc_frame;
3119 int8_t mCurReprocCount = ppreq_job->reprocCount;
3120 int8_t mCurChannelIdx = ppreq_job->ppChannelIndex;
3121
3122 LOGD("frame = %p src_frame = %p mCurReprocCount = %d mCurChannelIdx = %d",
3123 src_frame,src_reproc_frame,mCurReprocCount, mCurChannelIdx);
3124
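    // in manual capture mode (type 3 and above) the first reprocess pass (channel
    // index 0) works on the original source frame; otherwise feed the current frame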
3125 if ((m_parent->mParameters.getManualCaptureMode() >=
3126 CAM_MANUAL_CAPTURE_TYPE_3) && (mCurChannelIdx == 0)) {
3127 ppInputFrame = src_reproc_frame;
3128 } else {
3129 ppInputFrame = src_frame;
3130 }
3131
3132 if (mPPChannelCount >= CAM_PP_CHANNEL_MAX) {
3133 LOGE("invalid channel count");
3134 return UNKNOWN_ERROR;
3135 }
3136
3137 // find meta data stream and index of meta data frame in the superbuf
3138 for (int8_t j = 0; j < mPPChannelCount; j++) {
3139 /*First search in src buffer for any offline metadata */
3140 for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
3141 QCameraStream *pStream = mPPChannels[j]->getStreamByHandle(
3142 src_frame->bufs[i]->stream_id);
3143 if (pStream != NULL && pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)) {
3144 meta_buf_index = (uint8_t) src_frame->bufs[i]->buf_idx;
3145 pMetaStream = pStream;
3146 meta_buf = src_frame->bufs[i];
3147 break;
3148 }
3149 }
3150
3151 if ((pMetaStream != NULL) && (meta_buf != NULL)) {
3152 LOGD("Found Offline stream metadata = %d",
3153 (int)meta_buf_index);
3154 break;
3155 }
3156 }
3157
3158 if ((pMetaStream == NULL) && (meta_buf == NULL)) {
3159 for (int8_t j = 0; j < mPPChannelCount; j++) {
3160 m_pSrcChannel = mPPChannels[j]->getSrcChannel();
3161 if (m_pSrcChannel == NULL)
3162 continue;
3163 for (uint32_t i = 0; i < src_reproc_frame->num_bufs; i++) {
3164 QCameraStream *pStream =
3165 m_pSrcChannel->getStreamByHandle(
3166 src_reproc_frame->bufs[i]->stream_id);
3167 if (pStream != NULL && pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
3168 meta_buf_index = (uint8_t) src_reproc_frame->bufs[i]->buf_idx;
3169 pMetaStream = pStream;
3170 meta_buf = src_reproc_frame->bufs[i];
3171 break;
3172 }
3173 }
3174 if ((pMetaStream != NULL) && (meta_buf != NULL)) {
3175 LOGD("Found Meta data info for reprocessing index = %d",
3176 (int)meta_buf_index);
3177 break;
3178 }
3179 }
3180 }
3181
3182 if (m_parent->mParameters.isAdvCamFeaturesEnabled()) {
3183 // Sync stream params only when advanced camera features are configured.
3184 // Skipping this for normal snapshot reduces latency.
3185 syncStreamParams(src_frame, src_reproc_frame);
3186 }
3187
3188 if (mPPChannels[mCurChannelIdx] != NULL) {
3189 // add into ongoing PP job Q
3190 ppreq_job->reprocCount = (int8_t) (mCurReprocCount + 1);
3191
3192 if ((m_parent->needOfflineReprocessing()) || (ppreq_job->offline_buffer)) {
3193 m_bufCountPPQ++;
3194 if (m_ongoingPPQ.enqueue((void *)ppreq_job)) {
3195 pthread_mutex_lock(&m_reprocess_lock);
3196 ret = mPPChannels[mCurChannelIdx]->doReprocessOffline(ppInputFrame,
3197 meta_buf, m_parent->mParameters);
3198 if (ret != NO_ERROR) {
3199 pthread_mutex_unlock(&m_reprocess_lock);
3200 goto end;
3201 }
3202
3203 if ((ppreq_job->offline_buffer) &&
3204 (ppreq_job->offline_reproc_buf)) {
3205 mPPChannels[mCurChannelIdx]->doReprocessOffline(
3206 ppreq_job->offline_reproc_buf, meta_buf);
3207 }
3208 pthread_mutex_unlock(&m_reprocess_lock);
3209 } else {
3210 LOGW("m_ongoingPPQ is not active!!!");
3211 ret = UNKNOWN_ERROR;
3212 goto end;
3213 }
3214 } else {
3215 m_bufCountPPQ++;
3216 if (!m_ongoingPPQ.enqueue((void *)ppreq_job)) {
3217 LOGW("m_ongoingPPQ is not active!!!");
3218 ret = UNKNOWN_ERROR;
3219 goto end;
3220 }
3221
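            // when a single shot produces more output than input buffers, enqueue
            // placeholder jobs so the ongoing PP queue has an entry for every
            // reprocessed frame expected back from the channel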
3222 int32_t numRequiredPPQBufsForSingleOutput = (int32_t)
3223 m_parent->mParameters.getNumberInBufsForSingleShot();
3224
3225 if (m_bufCountPPQ % numRequiredPPQBufsForSingleOutput == 0) {
3226 int32_t extra_pp_job_count =
3227 m_parent->mParameters.getNumberOutBufsForSingleShot() -
3228 m_parent->mParameters.getNumberInBufsForSingleShot();
3229
3230 for (int32_t i = 0; i < extra_pp_job_count; i++) {
3231 qcamera_pp_data_t *extra_pp_job =
3232 (qcamera_pp_data_t *)calloc(1, sizeof(qcamera_pp_data_t));
3233 if (!extra_pp_job) {
3234 LOGE("no mem for qcamera_pp_data_t");
3235 ret = NO_MEMORY;
3236 break;
3237 }
3238 extra_pp_job->reprocCount = ppreq_job->reprocCount;
3239 if (!m_ongoingPPQ.enqueue((void *)extra_pp_job)) {
3240 LOGW("m_ongoingPPQ is not active!!!");
3241 releaseOngoingPPData(extra_pp_job, this);
3242 free(extra_pp_job);
3243 extra_pp_job = NULL;
3244 goto end;
3245 }
3246 }
3247 }
3248
3249 ret = mPPChannels[mCurChannelIdx]->doReprocess(ppInputFrame,
3250 m_parent->mParameters, pMetaStream, meta_buf_index);
3251 }
3252 } else {
3253 LOGE("Reprocess channel is NULL");
3254 ret = UNKNOWN_ERROR;
3255 }
3256
3257 end:
3258 if (ret != NO_ERROR) {
3259 releaseOngoingPPData(ppreq_job, this);
3260 if (ppreq_job != NULL) {
3261 free(ppreq_job);
3262 ppreq_job = NULL;
3263 }
3264 }
3265 return ret;
3266 }
3267
3268 /*===========================================================================
3269 * FUNCTION : getReprocChannel
3270 *
3271 * DESCRIPTION: Returns reprocessing channel handle
3272 *
3273 * PARAMETERS : index for reprocessing array
3274 *
3275 * RETURN : QCameraReprocessChannel * type of pointer
3276 * NULL if no reprocessing channel
3277 *==========================================================================*/
3278 QCameraReprocessChannel * QCameraPostProcessor::getReprocChannel(uint8_t index)
3279 {
3280 if (index >= mPPChannelCount) {
3281 LOGE("Invalid index value");
3282 return NULL;
3283 }
3284 return mPPChannels[index];
3285 }
3286
3287 /*===========================================================================
3288 * FUNCTION : stopCapture
3289 *
3290 * DESCRIPTION: Trigger image capture stop
3291 *
3292 * PARAMETERS :
3293 * None
3294 *
3295 * RETURN : int32_t type of status
3296 * NO_ERROR -- success
3297 * non-zero failure code
3298 *==========================================================================*/
3299 int32_t QCameraPostProcessor::stopCapture()
3300 {
3301 int rc = NO_ERROR;
3302
3303 if (m_parent->isRegularCapture()) {
3304 rc = m_parent->processAPI(
3305 QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL,
3306 NULL);
3307 }
3308 return rc;
3309 }
3310
3311 /*===========================================================================
3312 * FUNCTION : getJpegPaddingReq
3313 *
3314 * DESCRIPTION: function to query jpeg specific padding requirements
3315 *
3316 * PARAMETERS :
3317 * @padding_info : jpeg specific padding requirement
3318 *
3319 * RETURN : int32_t type of status
3320 * NO_ERROR -- success
3321 * non-zero failure code
3322 *==========================================================================*/
3323 int32_t QCameraPostProcessor::getJpegPaddingReq(cam_padding_info_t &padding_info)
3324 {
3325 // TODO: hardcode for now, needs to query from mm-jpeg-interface
3326 padding_info.width_padding = CAM_PAD_NONE;
3327 padding_info.height_padding = CAM_PAD_TO_16;
3328 padding_info.plane_padding = CAM_PAD_TO_WORD;
3329 padding_info.offset_info.offset_x = 0;
3330 padding_info.offset_info.offset_y = 0;
3331 return NO_ERROR;
3332 }
3333
3334 /*===========================================================================
3335 * FUNCTION : setYUVFrameInfo
3336 *
3337 * DESCRIPTION: set Raw YUV frame data info for upper layer
3338 *
3339 * PARAMETERS :
3340 * @frame : process frame received from mm-camera-interface
3341 *
3342 * RETURN : int32_t type of status
3343 * NO_ERROR -- success
3344  *              non-zero failure code
3345 *
3346 * NOTE : currently we return frame len, y offset, cbcr offset and frame format
3347 *==========================================================================*/
3348 int32_t QCameraPostProcessor::setYUVFrameInfo(mm_camera_super_buf_t *recvd_frame)
3349 {
3350 QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
3351 // check reprocess channel if not found
3352 if (pChannel == NULL) {
3353 for (int8_t i = 0; i < mPPChannelCount; i++) {
3354 if ((mPPChannels[i] != NULL) &&
3355 (validate_handle(mPPChannels[i]->getMyHandle(), recvd_frame->ch_id))) {
3356 pChannel = mPPChannels[i];
3357 break;
3358 }
3359 }
3360 }
3361
3362 if (pChannel == NULL) {
3363         LOGE("No corresponding channel (ch_id = %d) exists, return here",
3364 recvd_frame->ch_id);
3365 return BAD_VALUE;
3366 }
3367
3368 // find snapshot frame
3369 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
3370 QCameraStream *pStream =
3371 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
3372 if (pStream != NULL) {
3373 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
3374 pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
3375 //get the main frame, use stream info
3376 cam_frame_len_offset_t frame_offset;
3377 cam_dimension_t frame_dim;
3378 cam_format_t frame_fmt;
3379 const char *fmt_string;
3380 pStream->getFrameDimension(frame_dim);
3381 pStream->getFrameOffset(frame_offset);
3382 pStream->getFormat(frame_fmt);
3383 fmt_string = m_parent->mParameters.getFrameFmtString(frame_fmt);
3384
3385 int cbcr_offset = (int32_t)frame_offset.mp[0].len -
3386 frame_dim.width * frame_dim.height;
3387
3388 LOGH("frame width=%d, height=%d, yoff=%d, cbcroff=%d, fmt_string=%s",
3389 frame_dim.width, frame_dim.height, frame_offset.mp[0].offset, cbcr_offset, fmt_string);
3390 return NO_ERROR;
3391 }
3392 }
3393 }
3394
3395 return BAD_VALUE;
3396 }
3397
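/*===========================================================================
 * FUNCTION   : matchJobId
 *
 * DESCRIPTION: matcher function used when searching a job queue by job ID
 *
 * PARAMETERS :
 *   @data       : qcamera_jpeg_data_t entry stored in the queue
 *   @match_data : ptr to the job ID being searched for
 *
 * RETURN     : true if the entry's jobId equals the requested job ID
 *==========================================================================*/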
3398 bool QCameraPostProcessor::matchJobId(void *data, void *, void *match_data)
3399 {
3400 qcamera_jpeg_data_t * job = (qcamera_jpeg_data_t *) data;
3401 uint32_t job_id = *((uint32_t *) match_data);
3402 return job->jobId == job_id;
3403 }
3404
3405 /*===========================================================================
3406 * FUNCTION : getJpegMemory
3407 *
3408 * DESCRIPTION: buffer allocation function
3409 * to pass to jpeg interface
3410 *
3411 * PARAMETERS :
3412 * @out_buf : buffer descriptor struct
3413 *
3414 * RETURN : int32_t type of status
3415 * NO_ERROR -- success
3416  *              non-zero failure code
3417 *==========================================================================*/
3418 int QCameraPostProcessor::getJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
3419 {
3420 LOGH("Allocating jpeg out buffer of size: %d", out_buf->size);
3421 QCameraPostProcessor *procInst = (QCameraPostProcessor *) out_buf->handle;
3422 camera_memory_t *cam_mem = procInst->m_parent->mGetMemory(out_buf->fd, out_buf->size, 1U,
3423 procInst->m_parent->mCallbackCookie);
    if (cam_mem == NULL) {
        LOGE("Failed to allocate jpeg output buffer");
        return NO_MEMORY;
    }
3424     out_buf->mem_hdl = cam_mem;
3425     out_buf->vaddr = cam_mem->data;
3426
3427 return 0;
3428 }
3429
3430 /*===========================================================================
3431 * FUNCTION : releaseJpegMemory
3432 *
3433 * DESCRIPTION: release jpeg memory function
3434 * to pass to jpeg interface, in case of abort
3435 *
3436 * PARAMETERS :
3437 * @out_buf : buffer descriptor struct
3438 *
3439 * RETURN : int32_t type of status
3440 * NO_ERROR -- success
3441  *              non-zero failure code
3442 *==========================================================================*/
3443 int QCameraPostProcessor::releaseJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
3444 {
3445 if (out_buf && out_buf->mem_hdl) {
3446 LOGD("releasing jpeg out buffer of size: %d", out_buf->size);
3447 camera_memory_t *cam_mem = (camera_memory_t*)out_buf->mem_hdl;
3448 cam_mem->release(cam_mem);
3449 out_buf->mem_hdl = NULL;
3450 out_buf->vaddr = NULL;
3451 return NO_ERROR;
3452 }
3453 return -1;
3454 }
3455
3456 /*===========================================================================
3457 * FUNCTION : QCameraExif
3458 *
3459 * DESCRIPTION: constructor of QCameraExif
3460 *
3461 * PARAMETERS : None
3462 *
3463 * RETURN : None
3464 *==========================================================================*/
3465 QCameraExif::QCameraExif()
3466 : m_nNumEntries(0)
3467 {
3468 memset(m_Entries, 0, sizeof(m_Entries));
3469 }
3470
3471 /*===========================================================================
3472 * FUNCTION : ~QCameraExif
3473 *
3474  * DESCRIPTION: destructor of QCameraExif. Releases any internally allocated tag data.
3475 *
3476 * PARAMETERS : None
3477 *
3478 * RETURN : None
3479 *==========================================================================*/
3480 QCameraExif::~QCameraExif()
3481 {
3482 for (uint32_t i = 0; i < m_nNumEntries; i++) {
3483 switch (m_Entries[i].tag_entry.type) {
3484 case EXIF_BYTE:
3485 {
3486 if (m_Entries[i].tag_entry.count > 1 &&
3487 m_Entries[i].tag_entry.data._bytes != NULL) {
3488 free(m_Entries[i].tag_entry.data._bytes);
3489 m_Entries[i].tag_entry.data._bytes = NULL;
3490 }
3491 }
3492 break;
3493 case EXIF_ASCII:
3494 {
3495 if (m_Entries[i].tag_entry.data._ascii != NULL) {
3496 free(m_Entries[i].tag_entry.data._ascii);
3497 m_Entries[i].tag_entry.data._ascii = NULL;
3498 }
3499 }
3500 break;
3501 case EXIF_SHORT:
3502 {
3503 if (m_Entries[i].tag_entry.count > 1 &&
3504 m_Entries[i].tag_entry.data._shorts != NULL) {
3505 free(m_Entries[i].tag_entry.data._shorts);
3506 m_Entries[i].tag_entry.data._shorts = NULL;
3507 }
3508 }
3509 break;
3510 case EXIF_LONG:
3511 {
3512 if (m_Entries[i].tag_entry.count > 1 &&
3513 m_Entries[i].tag_entry.data._longs != NULL) {
3514 free(m_Entries[i].tag_entry.data._longs);
3515 m_Entries[i].tag_entry.data._longs = NULL;
3516 }
3517 }
3518 break;
3519 case EXIF_RATIONAL:
3520 {
3521 if (m_Entries[i].tag_entry.count > 1 &&
3522 m_Entries[i].tag_entry.data._rats != NULL) {
3523 free(m_Entries[i].tag_entry.data._rats);
3524 m_Entries[i].tag_entry.data._rats = NULL;
3525 }
3526 }
3527 break;
3528 case EXIF_UNDEFINED:
3529 {
3530 if (m_Entries[i].tag_entry.data._undefined != NULL) {
3531 free(m_Entries[i].tag_entry.data._undefined);
3532 m_Entries[i].tag_entry.data._undefined = NULL;
3533 }
3534 }
3535 break;
3536 case EXIF_SLONG:
3537 {
3538 if (m_Entries[i].tag_entry.count > 1 &&
3539 m_Entries[i].tag_entry.data._slongs != NULL) {
3540 free(m_Entries[i].tag_entry.data._slongs);
3541 m_Entries[i].tag_entry.data._slongs = NULL;
3542 }
3543 }
3544 break;
3545 case EXIF_SRATIONAL:
3546 {
3547 if (m_Entries[i].tag_entry.count > 1 &&
3548 m_Entries[i].tag_entry.data._srats != NULL) {
3549 free(m_Entries[i].tag_entry.data._srats);
3550 m_Entries[i].tag_entry.data._srats = NULL;
3551 }
3552 }
3553 break;
3554 }
3555 }
3556 }
3557
3558 /*===========================================================================
3559 * FUNCTION : addEntry
3560 *
3561 * DESCRIPTION: function to add an entry to exif data
3562 *
3563 * PARAMETERS :
3564 * @tagid : exif tag ID
3565 * @type : data type
3566  *   @count   : number of data elements, in units of the given type
3567 * @data : input data ptr
3568 *
3569 * RETURN : int32_t type of status
3570 * NO_ERROR -- success
3571  *              non-zero failure code
3572 *==========================================================================*/
3573 int32_t QCameraExif::addEntry(exif_tag_id_t tagid,
3574 exif_tag_type_t type,
3575 uint32_t count,
3576 void *data)
3577 {
3578 int32_t rc = NO_ERROR;
3579     if (m_nNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
3580 LOGE("Number of entries exceeded limit");
3581 return NO_MEMORY;
3582 }
3583
3584 m_Entries[m_nNumEntries].tag_id = tagid;
3585 m_Entries[m_nNumEntries].tag_entry.type = type;
3586 m_Entries[m_nNumEntries].tag_entry.count = count;
3587 m_Entries[m_nNumEntries].tag_entry.copy = 1;
3588 switch (type) {
3589 case EXIF_BYTE:
3590 {
3591 if (count > 1) {
3592 uint8_t *values = (uint8_t *)malloc(count);
3593 if (values == NULL) {
3594 LOGE("No memory for byte array");
3595 rc = NO_MEMORY;
3596 } else {
3597 memcpy(values, data, count);
3598 m_Entries[m_nNumEntries].tag_entry.data._bytes = values;
3599 }
3600 } else {
3601 m_Entries[m_nNumEntries].tag_entry.data._byte = *(uint8_t *)data;
3602 }
3603 }
3604 break;
3605 case EXIF_ASCII:
3606 {
3607 char *str = NULL;
3608 str = (char *)malloc(count + 1);
3609 if (str == NULL) {
3610 LOGE("No memory for ascii string");
3611 rc = NO_MEMORY;
3612 } else {
3613 memset(str, 0, count + 1);
3614 memcpy(str, data, count);
3615 m_Entries[m_nNumEntries].tag_entry.data._ascii = str;
3616 }
3617 }
3618 break;
3619 case EXIF_SHORT:
3620 {
3621 uint16_t *exif_data = (uint16_t *)data;
3622 if (count > 1) {
3623 uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
3624 if (values == NULL) {
3625 LOGE("No memory for short array");
3626 rc = NO_MEMORY;
3627 } else {
3628 memcpy(values, exif_data, count * sizeof(uint16_t));
3629 m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
3630 }
3631 } else {
3632 m_Entries[m_nNumEntries].tag_entry.data._short = *(uint16_t *)data;
3633 }
3634 }
3635 break;
3636 case EXIF_LONG:
3637 {
3638 uint32_t *exif_data = (uint32_t *)data;
3639 if (count > 1) {
3640 uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
3641 if (values == NULL) {
3642 LOGE("No memory for long array");
3643 rc = NO_MEMORY;
3644 } else {
3645 memcpy(values, exif_data, count * sizeof(uint32_t));
3646 m_Entries[m_nNumEntries].tag_entry.data._longs = values;
3647 }
3648 } else {
3649 m_Entries[m_nNumEntries].tag_entry.data._long = *(uint32_t *)data;
3650 }
3651 }
3652 break;
3653 case EXIF_RATIONAL:
3654 {
3655 rat_t *exif_data = (rat_t *)data;
3656 if (count > 1) {
3657 rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
3658 if (values == NULL) {
3659 LOGE("No memory for rational array");
3660 rc = NO_MEMORY;
3661 } else {
3662 memcpy(values, exif_data, count * sizeof(rat_t));
3663 m_Entries[m_nNumEntries].tag_entry.data._rats = values;
3664 }
3665 } else {
3666 m_Entries[m_nNumEntries].tag_entry.data._rat = *(rat_t *)data;
3667 }
3668 }
3669 break;
3670 case EXIF_UNDEFINED:
3671 {
3672 uint8_t *values = (uint8_t *)malloc(count);
3673 if (values == NULL) {
3674 LOGE("No memory for undefined array");
3675 rc = NO_MEMORY;
3676 } else {
3677 memcpy(values, data, count);
3678 m_Entries[m_nNumEntries].tag_entry.data._undefined = values;
3679 }
3680 }
3681 break;
3682 case EXIF_SLONG:
3683 {
3684             int32_t *exif_data = (int32_t *)data;
3685 if (count > 1) {
3686 int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
3687 if (values == NULL) {
3688 LOGE("No memory for signed long array");
3689 rc = NO_MEMORY;
3690 } else {
3691 memcpy(values, exif_data, count * sizeof(int32_t));
3692 m_Entries[m_nNumEntries].tag_entry.data._slongs = values;
3693 }
3694 } else {
3695 m_Entries[m_nNumEntries].tag_entry.data._slong = *(int32_t *)data;
3696 }
3697 }
3698 break;
3699 case EXIF_SRATIONAL:
3700 {
3701 srat_t *exif_data = (srat_t *)data;
3702 if (count > 1) {
3703 srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
3704 if (values == NULL) {
3705 LOGE("No memory for signed rational array");
3706 rc = NO_MEMORY;
3707 } else {
3708 memcpy(values, exif_data, count * sizeof(srat_t));
3709 m_Entries[m_nNumEntries].tag_entry.data._srats = values;
3710 }
3711 } else {
3712 m_Entries[m_nNumEntries].tag_entry.data._srat = *(srat_t *)data;
3713 }
3714 }
3715 break;
3716 }
3717
3718     // Increase number of entries only if the tag data was stored successfully
3719     if (rc == NO_ERROR) m_nNumEntries++;
3720 return rc;
3721 }
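
/* Illustrative usage sketch, not called anywhere in this file: shows how a
 * caller might populate a QCameraExif object via addEntry(). The tag IDs and
 * literal values below are assumptions made purely for the example; the real
 * tag IDs come from the exif interface headers used elsewhere in the HAL.
 * Marked unused because it exists only as a sketch. */
__attribute__((unused)) static void exampleFillExif(QCameraExif &exif)
{
    // Single SHORT entry: the value is copied into the table by value.
    uint16_t orientation = 1;
    exif.addEntry(EXIFTAGID_ORIENTATION, EXIF_SHORT, 1, &orientation);

    // ASCII entry: addEntry() copies the string into freshly allocated
    // memory, so the caller keeps ownership of the original buffer.
    char datetime[] = "2016:01:01 00:00:00";
    exif.addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
            sizeof(datetime), datetime);
}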
3722
3723 }; // namespace qcamera
3724