1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30 #define LOG_TAG "QCamera2HWI"
31
32 // System dependencies
33 #include <fcntl.h>
34 #include <stdio.h>
35 #include <stdlib.h>
36 #define STAT_H <SYSTEM_HEADER_PREFIX/stat.h>
37 #include STAT_H
38 #include <utils/Errors.h>
39
40 // Camera dependencies
41 #include "QCamera2HWI.h"
42 #include "QCameraTrace.h"
43
44 extern "C" {
45 #include "mm_camera_dbg.h"
46 }
47
48 namespace qcamera {
49
50 /*===========================================================================
51 * FUNCTION : zsl_channel_cb
52 *
53 * DESCRIPTION: helper function to handle ZSL superbuf callback directly from
54 * mm-camera-interface
55 *
56 * PARAMETERS :
57 * @recvd_frame : received super buffer
58 * @userdata : user data ptr
59 *
60 * RETURN : None
61 *
62 * NOTE : recvd_frame will be released by the caller after this call returns, so if
63 * asynchronous processing of recvd_frame is needed, it is our responsibility
64 * to save a copy of it for later use.
65 *==========================================================================*/
66 void QCamera2HardwareInterface::zsl_channel_cb(mm_camera_super_buf_t *recvd_frame,
67 void *userdata)
68 {
69 ATRACE_CALL();
70 LOGH("[KPI Perf]: E");
71 char value[PROPERTY_VALUE_MAX];
72 bool dump_raw = false;
73 bool log_matching = false;
74 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
75 if (pme == NULL ||
76 pme->mCameraHandle == NULL ||
77 pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
78 LOGE("camera obj not valid");
79 return;
80 }
81
82 QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_ZSL];
83 if (pChannel == NULL ||
84 pChannel->getMyHandle() != recvd_frame->ch_id) {
85 LOGE("ZSL channel doesn't exist, return here");
86 return;
87 }
88
89 if(pme->mParameters.isSceneSelectionEnabled() &&
90 !pme->m_stateMachine.isCaptureRunning()) {
91 pme->selectScene(pChannel, recvd_frame);
92 pChannel->bufDone(recvd_frame);
93 return;
94 }
95
96 LOGD("Frame CB Unlock : %d, is AEC Locked: %d",
97 recvd_frame->bUnlockAEC, pme->m_bLedAfAecLock);
98 if(recvd_frame->bUnlockAEC && pme->m_bLedAfAecLock) {
99 qcamera_sm_internal_evt_payload_t *payload =
100 (qcamera_sm_internal_evt_payload_t *)malloc(
101 sizeof(qcamera_sm_internal_evt_payload_t));
102 if (NULL != payload) {
103 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
104 payload->evt_type = QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK;
105 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
106 if (rc != NO_ERROR) {
107 LOGE("processEvt for retro AEC unlock failed");
108 free(payload);
109 payload = NULL;
110 }
111 } else {
112 LOGE("No memory for retro AEC event");
113 }
114 }
115
116 // Check if retro-active frames are completed and camera is
117 // ready to go ahead with LED estimation for regular frames
118 if (recvd_frame->bReadyForPrepareSnapshot) {
119 // Send an event
120 LOGD("Ready for Prepare Snapshot, signal ");
121 qcamera_sm_internal_evt_payload_t *payload =
122 (qcamera_sm_internal_evt_payload_t *)malloc(
123 sizeof(qcamera_sm_internal_evt_payload_t));
124 if (NULL != payload) {
125 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
126 payload->evt_type = QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT;
127 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
128 if (rc != NO_ERROR) {
129 LOGW("processEvt Ready for Snapshot failed");
130 free(payload);
131 payload = NULL;
132 }
133 } else {
134 LOGE("No memory for prepare signal event detect"
135 " qcamera_sm_internal_evt_payload_t");
136 }
137 }
138
139 /* indicate to the parent that capture is done */
140 pme->captureDone();
141
142 // save a copy for the superbuf
143 mm_camera_super_buf_t* frame =
144 (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
145 if (frame == NULL) {
146 LOGE("Error allocating memory to save received_frame structure.");
147 pChannel->bufDone(recvd_frame);
148 return;
149 }
150 *frame = *recvd_frame;
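// Note: this is a shallow copy of the superbuf descriptor; frame->bufs[] still points at
// channel-owned buffers, which must eventually be returned to the channel via bufDone().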
151
152 if (recvd_frame->num_bufs > 0) {
153 LOGI("[KPI Perf]: superbuf frame_idx %d",
154 recvd_frame->bufs[0]->frame_idx);
155 }
156
157 // DUMP RAW if available
158 property_get("persist.camera.zsl_raw", value, "0");
159 dump_raw = atoi(value) > 0 ? true : false;
160 if (dump_raw) {
161 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
162 if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
163 mm_camera_buf_def_t * raw_frame = recvd_frame->bufs[i];
164 QCameraStream *pStream = pChannel->getStreamByHandle(raw_frame->stream_id);
165 if (NULL != pStream) {
166 pme->dumpFrameToFile(pStream, raw_frame, QCAMERA_DUMP_FRM_RAW);
167 }
168 break;
169 }
170 }
171 }
172
173 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
174 if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
175 mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
176 QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
177 if (NULL != pStream) {
178 pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
179 }
180 break;
181 }
182 }
183 //
184 // Check whether FD metadata is needed along with the snapshot frame in ZSL mode
185 if(pme->needFDMetadata(QCAMERA_CH_TYPE_ZSL)){
186 //Need Face Detection result for snapshot frames
187 //Get the Meta Data frames
188 mm_camera_buf_def_t *pMetaFrame = NULL;
189 for (uint32_t i = 0; i < frame->num_bufs; i++) {
190 QCameraStream *pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
191 if (pStream != NULL) {
192 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
193 pMetaFrame = frame->bufs[i]; //find the metadata
194 break;
195 }
196 }
197 }
198
199 if(pMetaFrame != NULL){
200 metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
201 //send the face detection info
202 cam_faces_data_t faces_data;
203 pme->fillFacesData(faces_data, pMetaData);
204 //HARD CODE here before MCT can support
205 faces_data.detection_data.fd_type = QCAMERA_FD_SNAPSHOT;
206
207 qcamera_sm_internal_evt_payload_t *payload =
208 (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
209 if (NULL != payload) {
210 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
211 payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
212 payload->faces_data = faces_data;
213 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
214 if (rc != NO_ERROR) {
215 LOGW("processEvt face_detection_result failed");
216 free(payload);
217 payload = NULL;
218 }
219 } else {
220 LOGE("No memory for face_detection_result qcamera_sm_internal_evt_payload_t");
221 }
222 }
223 }
224
225 property_get("persist.camera.dumpmetadata", value, "0");
226 int32_t enabled = atoi(value);
227 if (enabled) {
228 mm_camera_buf_def_t *pMetaFrame = NULL;
229 QCameraStream *pStream = NULL;
230 for (uint32_t i = 0; i < frame->num_bufs; i++) {
231 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
232 if (pStream != NULL) {
233 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
234 pMetaFrame = frame->bufs[i];
235 if (pMetaFrame != NULL &&
236 ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
237 pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "ZSL_Snapshot");
238 }
239 break;
240 }
241 }
242 }
243 }
244
245 property_get("persist.camera.zsl_matching", value, "0");
246 log_matching = atoi(value) > 0 ? true : false;
247 if (log_matching) {
248 LOGH("ZSL super buffer contains:");
249 QCameraStream *pStream = NULL;
250 for (uint32_t i = 0; i < frame->num_bufs; i++) {
251 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
252 if (pStream != NULL ) {
253 LOGH("Buffer with V4L index %d frame index %d of type %d Timestamp: %ld %ld ",
254 frame->bufs[i]->buf_idx,
255 frame->bufs[i]->frame_idx,
256 pStream->getMyType(),
257 frame->bufs[i]->ts.tv_sec,
258 frame->bufs[i]->ts.tv_nsec);
259 }
260 }
261 }
262
263 // Wait on Postproc initialization if needed
264 // then send to postprocessor
265 if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
266 (NO_ERROR != pme->m_postprocessor.processData(frame))) {
267 LOGE("Failed to trigger process data");
268 pChannel->bufDone(recvd_frame);
269 free(frame);
270 frame = NULL;
271 return;
272 }
273
274 LOGH("[KPI Perf]: X");
275 }
276
277 /*===========================================================================
278 * FUNCTION : selectScene
279 *
280 * DESCRIPTION: send a preview callback when a specific selected scene is applied
281 *
282 * PARAMETERS :
283 * @pChannel: Camera channel
284 * @frame : Bundled super buffer
285 *
286 * RETURN : int32_t type of status
287 * NO_ERROR -- success
288 * non-zero failure code
289 *==========================================================================*/
290 int32_t QCamera2HardwareInterface::selectScene(QCameraChannel *pChannel,
291 mm_camera_super_buf_t *frame)
292 {
293 mm_camera_buf_def_t *pMetaFrame = NULL;
294 QCameraStream *pStream = NULL;
295 int32_t rc = NO_ERROR;
296
297 if ((NULL == frame) || (NULL == pChannel)) {
298 LOGE("Invalid scene select input");
299 return BAD_VALUE;
300 }
301
302 cam_scene_mode_type selectedScene = mParameters.getSelectedScene();
303 if (CAM_SCENE_MODE_MAX == selectedScene) {
304 LOGL("No selected scene");
305 return NO_ERROR;
306 }
307
308 for (uint32_t i = 0; i < frame->num_bufs; i++) {
309 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
310 if (pStream != NULL) {
311 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
312 pMetaFrame = frame->bufs[i];
313 break;
314 }
315 }
316 }
317
318 if (NULL == pMetaFrame) {
319 LOGE("No metadata buffer found in scene select super buffer");
320 return NO_INIT;
321 }
322
323 metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
324
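// IF_META_AVAILABLE enters the block (and declares 'scene') only when
// CAM_INTF_META_CURRENT_SCENE is present and valid in the metadata buffer.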
325 IF_META_AVAILABLE(cam_scene_mode_type, scene, CAM_INTF_META_CURRENT_SCENE, pMetaData) {
326 if ((*scene == selectedScene) &&
327 (mDataCb != NULL) &&
328 (msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0)) {
329 mm_camera_buf_def_t *preview_frame = NULL;
330 for (uint32_t i = 0; i < frame->num_bufs; i++) {
331 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
332 if (pStream != NULL) {
333 if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
334 preview_frame = frame->bufs[i];
335 break;
336 }
337 }
338 }
339 if (preview_frame) {
340 QCameraGrallocMemory *memory = (QCameraGrallocMemory *)preview_frame->mem_info;
341 uint32_t idx = preview_frame->buf_idx;
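// The preview callback will read this buffer on the CPU, so flag it for cache
// maintenance when the buffer is returned.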
342 preview_frame->cache_flags |= CPU_HAS_READ;
343 rc = sendPreviewCallback(pStream, memory, idx);
344 if (NO_ERROR != rc) {
345 LOGE("Error triggering scene select preview callback");
346 } else {
347 mParameters.setSelectedScene(CAM_SCENE_MODE_MAX);
348 }
349 } else {
350 LOGE("No preview buffer found in scene select super buffer");
351 return NO_INIT;
352 }
353 }
354 } else {
355 LOGE("No current scene metadata!");
356 rc = NO_INIT;
357 }
358
359 return rc;
360 }
361
362 /*===========================================================================
363 * FUNCTION : capture_channel_cb_routine
364 *
365 * DESCRIPTION: helper function to handle snapshot superbuf callback directly from
366 * mm-camera-interface
367 *
368 * PARAMETERS :
369 * @recvd_frame : received super buffer
370 * @userdata : user data ptr
371 *
372 * RETURN : None
373 *
374 * NOTE : recvd_frame will be released by the caller after this call returns, so if
375 * asynchronous processing of recvd_frame is needed, it is our responsibility
376 * to save a copy of it for later use.
377 *==========================================================================*/
378 void QCamera2HardwareInterface::capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
379 void *userdata)
380 {
381 KPI_ATRACE_CALL();
382 char value[PROPERTY_VALUE_MAX];
383 LOGH("[KPI Perf]: E PROFILE_YUV_CB_TO_HAL");
384 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
385 if (pme == NULL ||
386 pme->mCameraHandle == NULL ||
387 pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
388 LOGE("camera obj not valid");
389 return;
390 }
391
392 QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_CAPTURE];
393 if (pChannel == NULL ||
394 pChannel->getMyHandle() != recvd_frame->ch_id) {
395 LOGE("Capture channel doesn't exist, return here");
396 return;
397 }
398
399 // save a copy for the superbuf
400 mm_camera_super_buf_t* frame =
401 (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
402 if (frame == NULL) {
403 LOGE("Error allocating memory to save received_frame structure.");
404 pChannel->bufDone(recvd_frame);
405 return;
406 }
407 *frame = *recvd_frame;
408
409 if (recvd_frame->num_bufs > 0) {
410 LOGI("[KPI Perf]: superbuf frame_idx %d",
411 recvd_frame->bufs[0]->frame_idx);
412 }
413
414 for ( uint32_t i= 0 ; i < recvd_frame->num_bufs ; i++ ) {
415 if ( recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT ) {
416 mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
417 QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
418 if ( NULL != pStream ) {
419 pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
420 }
421 break;
422 }
423 }
424
425 property_get("persist.camera.dumpmetadata", value, "0");
426 int32_t enabled = atoi(value);
427 if (enabled) {
428 mm_camera_buf_def_t *pMetaFrame = NULL;
429 QCameraStream *pStream = NULL;
430 for (uint32_t i = 0; i < frame->num_bufs; i++) {
431 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
432 if (pStream != NULL) {
433 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
434 pMetaFrame = frame->bufs[i]; //find the metadata
435 if (pMetaFrame != NULL &&
436 ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
437 pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
438 }
439 break;
440 }
441 }
442 }
443 }
444
445 // Wait on Postproc initialization if needed
446 // then send to postprocessor
447 if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
448 (NO_ERROR != pme->m_postprocessor.processData(frame))) {
449 LOGE("Failed to trigger process data");
450 pChannel->bufDone(recvd_frame);
451 free(frame);
452 frame = NULL;
453 return;
454 }
455
456 /* START of test register face image for face authentication */
457 #ifdef QCOM_TEST_FACE_REGISTER_FACE
458 static uint8_t bRunFaceReg = 1;
459
460 if (bRunFaceReg > 0) {
461 // find snapshot frame
462 QCameraStream *main_stream = NULL;
463 mm_camera_buf_def_t *main_frame = NULL;
464 for (int i = 0; i < recvd_frame->num_bufs; i++) {
465 QCameraStream *pStream =
466 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
467 if (pStream != NULL) {
468 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
469 main_stream = pStream;
470 main_frame = recvd_frame->bufs[i];
471 break;
472 }
473 }
474 }
475 if (main_stream != NULL && main_frame != NULL) {
476 int32_t faceId = -1;
477 cam_pp_offline_src_config_t config;
478 memset(&config, 0, sizeof(cam_pp_offline_src_config_t));
479 config.num_of_bufs = 1;
480 main_stream->getFormat(config.input_fmt);
481 main_stream->getFrameDimension(config.input_dim);
482 main_stream->getFrameOffset(config.input_buf_planes.plane_info);
483 LOGH("DEBUG: registerFaceImage E");
484 int32_t rc = pme->registerFaceImage(main_frame->buffer, &config, faceId);
485 LOGH("DEBUG: registerFaceImage X, ret=%d, faceId=%d", rc, faceId);
486 bRunFaceReg = 0;
487 }
488 }
489
490 #endif
491 /* END of test register face image for face authentication */
492
493 LOGH("[KPI Perf]: X");
494 }
495 #ifdef TARGET_TS_MAKEUP
496 bool QCamera2HardwareInterface::TsMakeupProcess_Preview(mm_camera_buf_def_t *pFrame,
497 QCameraStream * pStream) {
498 LOGD("begin");
499 bool bRet = false;
500 if (pStream == NULL || pFrame == NULL) {
501 bRet = false;
502 LOGH("pStream == NULL || pFrame == NULL");
503 } else {
504 bRet = TsMakeupProcess(pFrame, pStream, mFaceRect);
505 }
506 LOGD("end bRet = %d ",bRet);
507 return bRet;
508 }
509
510 bool QCamera2HardwareInterface::TsMakeupProcess_Snapshot(mm_camera_buf_def_t *pFrame,
511 QCameraStream * pStream) {
512 LOGD("begin");
513 bool bRet = false;
514 if (pStream == NULL || pFrame == NULL) {
515 bRet = false;
516 LOGH("pStream == NULL || pFrame == NULL");
517 } else {
518 cam_frame_len_offset_t offset;
519 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
520 pStream->getFrameOffset(offset);
521
522 cam_dimension_t dim;
523 pStream->getFrameDimension(dim);
524
525 unsigned char *yBuf = (unsigned char*)pFrame->buffer;
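// Semi-planar YUV (NV21): the interleaved VU plane starts right after the Y plane,
// i.e. offset.mp[0].len bytes into the buffer.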
526 unsigned char *uvBuf = yBuf + offset.mp[0].len;
527 TSMakeupDataEx inMakeupData;
528 inMakeupData.frameWidth = dim.width;
529 inMakeupData.frameHeight = dim.height;
530 inMakeupData.yBuf = yBuf;
531 inMakeupData.uvBuf = uvBuf;
532 inMakeupData.yStride = offset.mp[0].stride;
533 inMakeupData.uvStride = offset.mp[1].stride;
534 LOGD("detect begin");
535 TSHandle fd_handle = ts_detectface_create_context();
536 if (fd_handle != NULL) {
537 cam_format_t fmt;
538 pStream->getFormat(fmt);
539 int iret = ts_detectface_detectEx(fd_handle, &inMakeupData);
540 LOGD("ts_detectface_detect iret = %d",iret);
541 if (iret <= 0) {
542 bRet = false;
543 } else {
544 TSRect faceRect;
545 memset(&faceRect,-1,sizeof(TSRect));
546 iret = ts_detectface_get_face_info(fd_handle, 0, &faceRect, NULL,NULL,NULL);
547 LOGD("ts_detectface_get_face_info iret=%d,faceRect.left=%ld,"
548 "faceRect.top=%ld,faceRect.right=%ld,faceRect.bottom=%ld"
549 ,iret,faceRect.left,faceRect.top,faceRect.right,faceRect.bottom);
550 bRet = TsMakeupProcess(pFrame,pStream,faceRect);
551 }
552 ts_detectface_destroy_context(&fd_handle);
553 fd_handle = NULL;
554 } else {
555 LOGH("fd_handle == NULL");
556 }
557 LOGD("detect end");
558 }
559 LOGD("end bRet = %d ",bRet);
560 return bRet;
561 }
562
563 bool QCamera2HardwareInterface::TsMakeupProcess(mm_camera_buf_def_t *pFrame,
564 QCameraStream * pStream,TSRect& faceRect) {
565 bool bRet = false;
566 LOGD("begin");
567 if (pStream == NULL || pFrame == NULL) {
568 LOGH("pStream == NULL || pFrame == NULL ");
569 return false;
570 }
571
572 int whiteLevel, cleanLevel;
573 bool enableMakeup = (faceRect.left > -1) &&
574 (mParameters.getTsMakeupInfo(whiteLevel, cleanLevel));
575 if (enableMakeup) {
576 cam_dimension_t dim;
577 cam_frame_len_offset_t offset;
578 pStream->getFrameDimension(dim);
579 pStream->getFrameOffset(offset);
580 unsigned char *tempOriBuf = NULL;
581
582 tempOriBuf = (unsigned char*)pFrame->buffer;
583 unsigned char *yBuf = tempOriBuf;
584 unsigned char *uvBuf = tempOriBuf + offset.mp[0].len;
585 unsigned char *tmpBuf = new unsigned char[offset.frame_len];
586 if (tmpBuf == NULL) {
587 LOGH("tmpBuf == NULL ");
588 return false;
589 }
590 TSMakeupDataEx inMakeupData, outMakeupData;
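// Clamp the requested white/clean levels to the [0, 100] range before passing them
// to the makeup library.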
591 whiteLevel = whiteLevel <= 0 ? 0 : (whiteLevel >= 100 ? 100 : whiteLevel);
592 cleanLevel = cleanLevel <= 0 ? 0 : (cleanLevel >= 100 ? 100 : cleanLevel);
593 inMakeupData.frameWidth = dim.width; // NV21 Frame width > 0
594 inMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
595 inMakeupData.yBuf = yBuf; // Y buffer pointer
596 inMakeupData.uvBuf = uvBuf; // VU buffer pointer
597 inMakeupData.yStride = offset.mp[0].stride;
598 inMakeupData.uvStride = offset.mp[1].stride;
599 outMakeupData.frameWidth = dim.width; // NV21 Frame width > 0
600 outMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
601 outMakeupData.yBuf = tmpBuf; // Y buffer pointer
602 outMakeupData.uvBuf = tmpBuf + offset.mp[0].len; // VU buffer pointer
603 outMakeupData.yStride = offset.mp[0].stride;
604 outMakeupData.uvStride = offset.mp[1].stride;
605 LOGD("faceRect:left 2:%ld,,right:%ld,,top:%ld,,bottom:%ld,,Level:%dx%d",
606 faceRect.left,faceRect.right,faceRect.top,faceRect.bottom,cleanLevel,whiteLevel);
607 ts_makeup_skin_beautyEx(&inMakeupData, &outMakeupData, &(faceRect),cleanLevel,whiteLevel);
608 memcpy((unsigned char*)pFrame->buffer, tmpBuf, offset.frame_len);
609 QCameraMemory *memory = (QCameraMemory *)pFrame->mem_info;
610 memory->cleanCache(pFrame->buf_idx);
611 if (tmpBuf != NULL) {
612 delete[] tmpBuf;
613 tmpBuf = NULL;
614 }
615 }
616 LOGD("end bRet = %d ",bRet);
617 return bRet;
618 }
619 #endif
620 /*===========================================================================
621 * FUNCTION : postproc_channel_cb_routine
622 *
623 * DESCRIPTION: helper function to handle postprocess superbuf callback directly from
624 * mm-camera-interface
625 *
626 * PARAMETERS :
627 * @recvd_frame : received super buffer
628 * @userdata : user data ptr
629 *
630 * RETURN : None
631 *
632 * NOTE : recvd_frame will be released by the caller after this call returns, so if
633 * asynchronous processing of recvd_frame is needed, it is our responsibility
634 * to save a copy of it for later use.
635 *==========================================================================*/
636 void QCamera2HardwareInterface::postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
637 void *userdata)
638 {
639 ATRACE_CALL();
640 LOGH("[KPI Perf]: E");
641 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
642 if (pme == NULL ||
643 pme->mCameraHandle == NULL ||
644 pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
645 LOGE("camera obj not valid");
646 return;
647 }
648
649 // save a copy for the superbuf
650 mm_camera_super_buf_t* frame =
651 (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
652 if (frame == NULL) {
653 LOGE("Error allocating memory to save received_frame structure.");
654 return;
655 }
656 *frame = *recvd_frame;
657
658 if (recvd_frame->num_bufs > 0) {
659 LOGI("[KPI Perf]: frame_idx %d", recvd_frame->bufs[0]->frame_idx);
660 }
661 // Wait on JPEG create session
662 pme->waitDeferredWork(pme->mJpegJob);
663
664 // send to postprocessor
665 pme->m_postprocessor.processPPData(frame);
666
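// Clear the Camera:Reprocess systrace counter now that the reprocessed data has been handed off.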
667 ATRACE_INT("Camera:Reprocess", 0);
668 LOGH("[KPI Perf]: X");
669 }
670
671 /*===========================================================================
672 * FUNCTION : synchronous_stream_cb_routine
673 *
674 * DESCRIPTION: Function to handle STREAM SYNC CALLBACKS
675 *
676 * PARAMETERS :
677 * @super_frame : received super buffer
678 * @stream : stream object
679 * @userdata : user data ptr
680 *
681 * RETURN : None
682 *
683 * NOTE : This function is executed in mm-interface context.
684 * Avoid adding latency on this thread.
685 *==========================================================================*/
686 void QCamera2HardwareInterface::synchronous_stream_cb_routine(
687 mm_camera_super_buf_t *super_frame, QCameraStream * stream,
688 void *userdata)
689 {
690 nsecs_t frameTime = 0, mPreviewTimestamp = 0;
691 int err = NO_ERROR;
692
693 ATRACE_CALL();
694 LOGH("[KPI Perf] : BEGIN");
695 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
696 QCameraGrallocMemory *memory = NULL;
697
698 if (pme == NULL) {
699 LOGE("Invalid hardware object");
700 return;
701 }
702 if (super_frame == NULL) {
703 LOGE("Invalid super buffer");
704 return;
705 }
706 mm_camera_buf_def_t *frame = super_frame->bufs[0];
707 if (NULL == frame) {
708 LOGE("Frame is NULL");
709 return;
710 }
711
712 if (stream->getMyType() != CAM_STREAM_TYPE_PREVIEW) {
713 LOGE("This is only for PREVIEW stream for now");
714 return;
715 }
716
717 if(pme->m_bPreviewStarted) {
718 LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
719 pme->m_bPreviewStarted = false;
720 }
721
722 if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
723 pthread_mutex_lock(&pme->mGrallocLock);
724 pme->mLastPreviewFrameID = frame->frame_idx;
725 pthread_mutex_unlock(&pme->mGrallocLock);
726 LOGH("preview is not running, no need to process");
727 return;
728 }
729
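// Convert the buffer timespec into a single nanosecond timestamp used for display timing.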
730 frameTime = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
731 // Calculate the future presentation time stamp for displaying frames at regular interval
732 #if 0 // Temporary removing the dependency on libgui
733 mPreviewTimestamp = pme->mCameraDisplay.computePresentationTimeStamp(frameTime);
734 #endif
735 stream->mStreamTimestamp = frameTime;
736 memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
737
738 #ifdef TARGET_TS_MAKEUP
739 pme->TsMakeupProcess_Preview(frame,stream);
740 #endif
741
742 // Enqueue buffer to gralloc.
743 uint32_t idx = frame->buf_idx;
744 LOGD("%p Enqueue Buffer to display %d frame Time = %lld Display Time = %lld",
745 pme, idx, frameTime, mPreviewTimestamp);
746 err = memory->enqueueBuffer(idx, mPreviewTimestamp);
747
748 if (err == NO_ERROR) {
749 pthread_mutex_lock(&pme->mGrallocLock);
750 pme->mLastPreviewFrameID = frame->frame_idx;
751 pme->mEnqueuedBuffers++;
752 pthread_mutex_unlock(&pme->mGrallocLock);
753 } else {
754 LOGE("Enqueue Buffer failed");
755 }
756
757 LOGH("[KPI Perf] : END");
758 return;
759 }
760
761 /*===========================================================================
762 * FUNCTION : preview_stream_cb_routine
763 *
764 * DESCRIPTION: helper function to handle preview frame from preview stream in
765 * normal case with display.
766 *
767 * PARAMETERS :
768 * @super_frame : received super buffer
769 * @stream : stream object
770 * @userdata : user data ptr
771 *
772 * RETURN : None
773 *
774 * NOTE : caller passes the ownership of super_frame, it's our
775 * responsibility to free super_frame once it's done. The new
776 * preview frame will be sent to display, and an older frame
777 * will be dequeued from display and needs to be returned back
778 * to kernel for future use.
779 *==========================================================================*/
780 void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
781 QCameraStream * stream,
782 void *userdata)
783 {
784 KPI_ATRACE_CALL();
785 LOGH("[KPI Perf] : BEGIN");
786 int err = NO_ERROR;
787 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
788 QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
789 uint8_t dequeueCnt = 0;
790
791 if (pme == NULL) {
792 LOGE("Invalid hardware object");
793 free(super_frame);
794 return;
795 }
796 if (memory == NULL) {
797 LOGE("Invalid memory object");
798 free(super_frame);
799 return;
800 }
801
802 mm_camera_buf_def_t *frame = super_frame->bufs[0];
803 if (NULL == frame) {
804 LOGE("preview frame is NULL");
805 free(super_frame);
806 return;
807 }
808
809 // For instant capture and for instant AEC, keep track of the frame counter.
810 // This count will be used to check against the corresponding bound values.
811 if (pme->mParameters.isInstantAECEnabled() ||
812 pme->mParameters.isInstantCaptureEnabled()) {
813 pme->mInstantAecFrameCount++;
814 }
815
816 pthread_mutex_lock(&pme->mGrallocLock);
817 if (!stream->isSyncCBEnabled()) {
818 pme->mLastPreviewFrameID = frame->frame_idx;
819 }
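// Skip the frame if preview processing is not needed (non-sync CB path), or if the buffer
// is still camera-owned and was never queued to the display (sync CB path).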
820 if (((!stream->isSyncCBEnabled()) &&
821 (!pme->needProcessPreviewFrame(frame->frame_idx))) ||
822 ((stream->isSyncCBEnabled()) &&
823 (memory->isBufOwnedByCamera(frame->buf_idx)))) {
824 //If buffer owned by camera, then it is not enqueued to display.
825 // bufDone it back to backend.
826 pthread_mutex_unlock(&pme->mGrallocLock);
827 LOGH("preview is not running, no need to process");
828 stream->bufDone(frame->buf_idx);
829 free(super_frame);
830 return;
831 } else {
832 pthread_mutex_unlock(&pme->mGrallocLock);
833 }
834
835 if (pme->needDebugFps()) {
836 pme->debugShowPreviewFPS();
837 }
838
839 uint32_t idx = frame->buf_idx;
840
841 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
842
843 if(pme->m_bPreviewStarted) {
844 LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
845 pme->m_bPreviewStarted = false ;
846 }
847
848 if (!stream->isSyncCBEnabled()) {
849 LOGD("Enqueue Buffer to display %d", idx);
850 #ifdef TARGET_TS_MAKEUP
851 pme->TsMakeupProcess_Preview(frame,stream);
852 #endif
853 err = memory->enqueueBuffer(idx);
854
855 if (err == NO_ERROR) {
856 pthread_mutex_lock(&pme->mGrallocLock);
857 pme->mEnqueuedBuffers++;
858 dequeueCnt = pme->mEnqueuedBuffers;
859 pthread_mutex_unlock(&pme->mGrallocLock);
860 } else {
861 LOGE("Enqueue Buffer failed");
862 }
863 } else {
864 pthread_mutex_lock(&pme->mGrallocLock);
865 dequeueCnt = pme->mEnqueuedBuffers;
866 pthread_mutex_unlock(&pme->mGrallocLock);
867 }
868
869 // Display the buffer.
870 LOGD("%p displayBuffer %d E", pme, idx);
871 uint8_t numMapped = memory->getMappable();
872
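// Dequeue one displayed buffer for every buffer enqueued so far; an index at or beyond
// numMapped is a new gralloc buffer that still has to be mapped to the backend stream.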
873 for (uint8_t i = 0; i < dequeueCnt; i++) {
874 int dequeuedIdx = memory->dequeueBuffer();
875 if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) {
876 LOGE("Invalid dequeued buffer index %d from display",
877 dequeuedIdx);
878 break;
879 } else {
880 pthread_mutex_lock(&pme->mGrallocLock);
881 pme->mEnqueuedBuffers--;
882 pthread_mutex_unlock(&pme->mGrallocLock);
883 if (dequeuedIdx >= numMapped) {
884 // This buffer has not yet been mapped to the backend
885 err = stream->mapNewBuffer((uint32_t)dequeuedIdx);
886 if (memory->checkIfAllBuffersMapped()) {
887 // check if mapping is done for all the buffers
888 // Signal the condition for create jpeg session
889 Mutex::Autolock l(pme->mMapLock);
890 pme->mMapCond.signal();
891 LOGH("Mapping done for all bufs");
892 } else {
893 LOGH("All buffers are not yet mapped");
894 }
895 }
896 }
897
898 if (err < 0) {
899 LOGE("buffer mapping failed %d", err);
900 } else {
901 // Return dequeued buffer back to driver
902 err = stream->bufDone((uint32_t)dequeuedIdx);
903 if ( err < 0) {
904 LOGW("stream bufDone failed %d", err);
905 }
906 }
907 }
908
909 // Handle preview data callback
910 if (pme->m_channels[QCAMERA_CH_TYPE_CALLBACK] == NULL) {
911 if (pme->needSendPreviewCallback() &&
912 (!pme->mParameters.isSceneSelectionEnabled())) {
913 frame->cache_flags |= CPU_HAS_READ;
914 int32_t rc = pme->sendPreviewCallback(stream, memory, idx);
915 if (NO_ERROR != rc) {
916 LOGW("Preview callback was not sent successfully");
917 }
918 }
919 }
920
921 free(super_frame);
922 LOGH("[KPI Perf] : END");
923 return;
924 }
925
926 /*===========================================================================
927 * FUNCTION : sendPreviewCallback
928 *
929 * DESCRIPTION: helper function for triggering preview callbacks
930 *
931 * PARAMETERS :
932 * @stream : stream object
933 * @memory : Stream memory allocator
934 * @idx : buffer index
935 *
936 * RETURN : int32_t type of status
937 * NO_ERROR -- success
938 * non-zero failure code
939 *==========================================================================*/
940 int32_t QCamera2HardwareInterface::sendPreviewCallback(QCameraStream *stream,
941 QCameraMemory *memory, uint32_t idx)
942 {
943 camera_memory_t *previewMem = NULL;
944 camera_memory_t *data = NULL;
945 camera_memory_t *dataToApp = NULL;
946 size_t previewBufSize = 0;
947 size_t previewBufSizeFromCallback = 0;
948 cam_dimension_t preview_dim;
949 cam_format_t previewFmt;
950 int32_t rc = NO_ERROR;
951 int32_t yStride = 0;
952 int32_t yScanline = 0;
953 int32_t uvStride = 0;
954 int32_t uvScanline = 0;
955 int32_t uStride = 0;
956 int32_t uScanline = 0;
957 int32_t vStride = 0;
958 int32_t vScanline = 0;
959 int32_t yStrideToApp = 0;
960 int32_t uvStrideToApp = 0;
961 int32_t yScanlineToApp = 0;
962 int32_t uvScanlineToApp = 0;
963 int32_t srcOffset = 0;
964 int32_t dstOffset = 0;
965 int32_t srcBaseOffset = 0;
966 int32_t dstBaseOffset = 0;
967 int i;
968
969 if ((NULL == stream) || (NULL == memory)) {
970 LOGE("Invalid preview callback input");
971 return BAD_VALUE;
972 }
973
974 cam_stream_info_t *streamInfo =
975 reinterpret_cast<cam_stream_info_t *>(stream->getStreamInfoBuf()->getPtr(0));
976 if (NULL == streamInfo) {
977 LOGE("Invalid streamInfo");
978 return BAD_VALUE;
979 }
980
981 stream->getFrameDimension(preview_dim);
982 stream->getFormat(previewFmt);
983
984 yStrideToApp = preview_dim.width;
985 yScanlineToApp = preview_dim.height;
986 uvStrideToApp = yStrideToApp;
987 uvScanlineToApp = yScanlineToApp / 2;
988
989 /* The preview buffer size in the callback should be
990 * (width * height * bytes_per_pixel). Since all the preview formats we support
991 * use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
992 * A check needs to be added if other formats are supported in the future. */
993 if ((previewFmt == CAM_FORMAT_YUV_420_NV21) ||
994 (previewFmt == CAM_FORMAT_YUV_420_NV12) ||
995 (previewFmt == CAM_FORMAT_YUV_420_YV12) ||
996 (previewFmt == CAM_FORMAT_YUV_420_NV12_VENUS) ||
997 (previewFmt == CAM_FORMAT_YUV_420_NV21_VENUS) ||
998 (previewFmt == CAM_FORMAT_YUV_420_NV21_ADRENO)) {
999 if(previewFmt == CAM_FORMAT_YUV_420_YV12) {
1000 yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
1001 yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
1002 uStride = streamInfo->buf_planes.plane_info.mp[1].stride;
1003 uScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
1004 vStride = streamInfo->buf_planes.plane_info.mp[2].stride;
1005 vScanline = streamInfo->buf_planes.plane_info.mp[2].scanline;
1006
1007 previewBufSize = (size_t)
1008 (yStride * yScanline + uStride * uScanline + vStride * vScanline);
1009 previewBufSizeFromCallback = previewBufSize;
1010 } else {
1011 yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
1012 yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
1013 uvStride = streamInfo->buf_planes.plane_info.mp[1].stride;
1014 uvScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
1015
1016 previewBufSize = (size_t)
1017 ((yStrideToApp * yScanlineToApp) + (uvStrideToApp * uvScanlineToApp));
1018
1019 previewBufSizeFromCallback = (size_t)
1020 ((yStride * yScanline) + (uvStride * uvScanline));
1021 }
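// If the stream's padded buffer size matches the size the app expects, wrap the existing
// buffer FD and avoid a copy; otherwise copy plane by plane below, dropping the
// stride/scanline padding.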
1022 if(previewBufSize == previewBufSizeFromCallback) {
1023 previewMem = mGetMemory(memory->getFd(idx),
1024 previewBufSize, 1, mCallbackCookie);
1025 if (!previewMem || !previewMem->data) {
1026 LOGE("mGetMemory failed.\n");
1027 return NO_MEMORY;
1028 } else {
1029 data = previewMem;
1030 }
1031 } else {
1032 data = memory->getMemory(idx, false);
1033 dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
1034 if (!dataToApp || !dataToApp->data) {
1035 LOGE("mGetMemory failed.\n");
1036 return NO_MEMORY;
1037 }
1038
1039 for (i = 0; i < preview_dim.height; i++) {
1040 srcOffset = i * yStride;
1041 dstOffset = i * yStrideToApp;
1042
1043 memcpy((unsigned char *) dataToApp->data + dstOffset,
1044 (unsigned char *) data->data + srcOffset,
1045 (size_t)yStrideToApp);
1046 }
1047
1048 srcBaseOffset = yStride * yScanline;
1049 dstBaseOffset = yStrideToApp * yScanlineToApp;
1050
1051 for (i = 0; i < preview_dim.height/2; i++) {
1052 srcOffset = i * uvStride + srcBaseOffset;
1053 dstOffset = i * uvStrideToApp + dstBaseOffset;
1054
1055 memcpy((unsigned char *) dataToApp->data + dstOffset,
1056 (unsigned char *) data->data + srcOffset,
1057 (size_t)yStrideToApp);
1058 }
1059 }
1060 } else {
1061 /* Invalid buffer content, but it can still be used as a first-preview-frame trigger in
1062 the framework/app */
1063 previewBufSize = (size_t)
1064 ((yStrideToApp * yScanlineToApp) +
1065 (uvStrideToApp * uvScanlineToApp));
1066 previewBufSizeFromCallback = 0;
1067 LOGW("Invalid preview format. Buffer content cannot be processed size = %zu",
1068 previewBufSize);
1069 dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
1070 if (!dataToApp || !dataToApp->data) {
1071 LOGE("mGetMemory failed.\n");
1072 return NO_MEMORY;
1073 }
1074 }
1075 qcamera_callback_argm_t cbArg;
1076 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1077 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
1078 cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1079 if (previewBufSize != 0 && previewBufSizeFromCallback != 0 &&
1080 previewBufSize == previewBufSizeFromCallback) {
1081 cbArg.data = data;
1082 } else {
1083 cbArg.data = dataToApp;
1084 }
1085 if ( previewMem ) {
1086 cbArg.user_data = previewMem;
1087 cbArg.release_cb = releaseCameraMemory;
1088 } else if (dataToApp) {
1089 cbArg.user_data = dataToApp;
1090 cbArg.release_cb = releaseCameraMemory;
1091 }
1092 cbArg.cookie = this;
1093 rc = m_cbNotifier.notifyCallback(cbArg);
1094 if (rc != NO_ERROR) {
1095 LOGW("fail sending notification");
1096 if (previewMem) {
1097 previewMem->release(previewMem);
1098 } else if (dataToApp) {
1099 dataToApp->release(dataToApp);
1100 }
1101 }
1102
1103 return rc;
1104 }
1105
1106 /*===========================================================================
1107 * FUNCTION : nodisplay_preview_stream_cb_routine
1108 *
1109 * DESCRIPTION: helper function to handle preview frame from preview stream in
1110 * no-display case
1111 *
1112 * PARAMETERS :
1113 * @super_frame : received super buffer
1114 * @stream : stream object
1115 * @userdata : user data ptr
1116 *
1117 * RETURN : None
1118 *
1119 * NOTE : caller passes the ownership of super_frame, it's our
1120 * responsibility to free super_frame once it's done.
1121 *==========================================================================*/
1122 void QCamera2HardwareInterface::nodisplay_preview_stream_cb_routine(
1123 mm_camera_super_buf_t *super_frame,
1124 QCameraStream *stream,
1125 void * userdata)
1126 {
1127 ATRACE_CALL();
1128 LOGH("[KPI Perf] E");
1129 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1130 if (pme == NULL ||
1131 pme->mCameraHandle == NULL ||
1132 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1133 LOGE("camera obj not valid");
1134 // simply free super frame
1135 free(super_frame);
1136 return;
1137 }
1138 mm_camera_buf_def_t *frame = super_frame->bufs[0];
1139 if (NULL == frame) {
1140 LOGE("preview frame is NULL");
1141 free(super_frame);
1142 return;
1143 }
1144
1145 if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
1146 LOGH("preview is not running, no need to process");
1147 stream->bufDone(frame->buf_idx);
1148 free(super_frame);
1149 return;
1150 }
1151
1152 if (pme->needDebugFps()) {
1153 pme->debugShowPreviewFPS();
1154 }
1155
1156 QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
1157 camera_memory_t *preview_mem = NULL;
1158 if (previewMemObj != NULL) {
1159 preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
1160 }
1161 if (NULL != previewMemObj && NULL != preview_mem) {
1162 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
1163
1164 if ((pme->needProcessPreviewFrame(frame->frame_idx)) &&
1165 pme->needSendPreviewCallback() &&
1166 (pme->getRelatedCamSyncInfo()->mode != CAM_MODE_SECONDARY)) {
1167 qcamera_callback_argm_t cbArg;
1168 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1169 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
1170 cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1171 cbArg.data = preview_mem;
1172 cbArg.user_data = (void *) &frame->buf_idx;
1173 cbArg.cookie = stream;
1174 cbArg.release_cb = returnStreamBuffer;
1175 // Preset cache flags to be handled when the buffer comes back
1176 frame->cache_flags |= CPU_HAS_READ;
1177 int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
1178 if (rc != NO_ERROR) {
1179 LOGE ("fail sending data notify");
1180 stream->bufDone(frame->buf_idx);
1181 }
1182 } else {
1183 stream->bufDone(frame->buf_idx);
1184 }
1185 }
1186 free(super_frame);
1187 LOGH("[KPI Perf] X");
1188 }
1189
1190 /*===========================================================================
1191 * FUNCTION : rdi_mode_stream_cb_routine
1192 *
1193 * DESCRIPTION: helper function to handle RDI frame from preview stream in
1194 * rdi mode case
1195 *
1196 * PARAMETERS :
1197 * @super_frame : received super buffer
1198 * @stream : stream object
1199 * @userdata : user data ptr
1200 *
1201 * RETURN : None
1202 *
1203 * NOTE : caller passes the ownership of super_frame, it's our
1204 * responsibility to free super_frame once it's done.
1205 *==========================================================================*/
1206 void QCamera2HardwareInterface::rdi_mode_stream_cb_routine(
1207 mm_camera_super_buf_t *super_frame,
1208 QCameraStream *stream,
1209 void * userdata)
1210 {
1211 ATRACE_CALL();
1212 LOGH("RDI_DEBUG Enter");
1213 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1214 if (pme == NULL ||
1215 pme->mCameraHandle == NULL ||
1216 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1217 LOGE("camera obj not valid");
1218 free(super_frame);
1219 return;
1220 }
1221 mm_camera_buf_def_t *frame = super_frame->bufs[0];
1222 if (NULL == frame) {
1223 LOGE("preview frame is NULL");
1224 goto end;
1225 }
1226 if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
1227 LOGH("preview is not running, no need to process");
1228 stream->bufDone(frame->buf_idx);
1229 goto end;
1230 }
1231 if (pme->needDebugFps()) {
1232 pme->debugShowPreviewFPS();
1233 }
1234 // Non-secure Mode
1235 if (!pme->isSecureMode()) {
1236 QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
1237 if (NULL == previewMemObj) {
1238 LOGE("previewMemObj is NULL");
1239 stream->bufDone(frame->buf_idx);
1240 goto end;
1241 }
1242
1243 camera_memory_t *preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
1244 if (NULL != preview_mem) {
1245 previewMemObj->cleanCache(frame->buf_idx);
1246 // Dump RAW frame
1247 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_RAW);
1248 // Notify Preview callback frame
1249 if (pme->needProcessPreviewFrame(frame->frame_idx) &&
1250 pme->mDataCb != NULL &&
1251 pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
1252 qcamera_callback_argm_t cbArg;
1253 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1254 cbArg.cb_type = QCAMERA_DATA_CALLBACK;
1255 cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1256 cbArg.data = preview_mem;
1257 cbArg.user_data = (void *) &frame->buf_idx;
1258 cbArg.cookie = stream;
1259 cbArg.release_cb = returnStreamBuffer;
1260 // Preset cache flags to be handled when the buffer comes back
1261 frame->cache_flags |= CPU_HAS_READ;
1262 pme->m_cbNotifier.notifyCallback(cbArg);
1263 } else {
1264 LOGH("No need to send preview callback, return buffer");
1265 stream->bufDone(frame->buf_idx);
1266 }
1267 }
1268 else {
1269 LOGE("preview_mem is NULL");
1270 stream->bufDone(frame->buf_idx);
1271 }
1272 } else {
1273 // Secure Mode
1274 // We will do QCAMERA_NOTIFY_CALLBACK and share FD in case of secure mode
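// Only the buffer FD is passed up to the client via a notify callback; no pixel data
// is copied in this path.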
1275 QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
1276 if (NULL == previewMemObj) {
1277 LOGE("previewMemObj is NULL");
1278 stream->bufDone(frame->buf_idx);
1279 goto end;
1280 }
1281
1282 int fd = previewMemObj->getFd(frame->buf_idx);
1283 LOGD("Preview frame fd =%d for index = %d ", fd, frame->buf_idx);
1284 if (pme->needProcessPreviewFrame(frame->frame_idx) &&
1285 pme->mDataCb != NULL &&
1286 pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
1287 // Prepare Callback structure
1288 qcamera_callback_argm_t cbArg;
1289 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1290 cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
1291 cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
1292 #ifndef VANILLA_HAL
1293 cbArg.ext1 = CAMERA_FRAME_DATA_FD;
1294 cbArg.ext2 = fd;
1295 #endif
1296 cbArg.user_data = (void *) &frame->buf_idx;
1297 cbArg.cookie = stream;
1298 cbArg.release_cb = returnStreamBuffer;
1299 // Preset cache flags to be handled when the buffer comes back
1300 frame->cache_flags |= CPU_HAS_READ;
1301 pme->m_cbNotifier.notifyCallback(cbArg);
1302 } else {
1303 LOGH("No need to process preview frame, return buffer");
1304 stream->bufDone(frame->buf_idx);
1305 }
1306 }
1307 end:
1308 free(super_frame);
1309 LOGH("RDI_DEBUG Exit");
1310 return;
1311 }
1312
1313 /*===========================================================================
1314 * FUNCTION : postview_stream_cb_routine
1315 *
1316 * DESCRIPTION: helper function to handle post frame from postview stream
1317 *
1318 * PARAMETERS :
1319 * @super_frame : received super buffer
1320 * @stream : stream object
1321 * @userdata : user data ptr
1322 *
1323 * RETURN : None
1324 *
1325 * NOTE : caller passes the ownership of super_frame, it's our
1326 * responsibility to free super_frame once it's done.
1327 *==========================================================================*/
1328 void QCamera2HardwareInterface::postview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
1329 QCameraStream *stream,
1330 void *userdata)
1331 {
1332 ATRACE_CALL();
1333 int err = NO_ERROR;
1334 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1335 QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
1336
1337 if (pme == NULL) {
1338 LOGE("Invalid hardware object");
1339 free(super_frame);
1340 return;
1341 }
1342 if (memory == NULL) {
1343 LOGE("Invalid memory object");
1344 free(super_frame);
1345 return;
1346 }
1347
1348 LOGH("[KPI Perf] : BEGIN");
1349
1350 mm_camera_buf_def_t *frame = super_frame->bufs[0];
1351 if (NULL == frame) {
1352 LOGE("preview frame is NULL");
1353 free(super_frame);
1354 return;
1355 }
1356
1357 QCameraMemory *memObj = (QCameraMemory *)frame->mem_info;
1358 if (NULL != memObj) {
1359 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_THUMBNAIL);
1360 }
1361
1362 // Return buffer back to driver
1363 err = stream->bufDone(frame->buf_idx);
1364 if ( err < 0) {
1365 LOGE("stream bufDone failed %d", err);
1366 }
1367
1368 free(super_frame);
1369 LOGH("[KPI Perf] : END");
1370 return;
1371 }
1372
1373 /*===========================================================================
1374 * FUNCTION : video_stream_cb_routine
1375 *
1376 * DESCRIPTION: helper function to handle video frame from video stream
1377 *
1378 * PARAMETERS :
1379 * @super_frame : received super buffer
1380 * @stream : stream object
1381 * @userdata : user data ptr
1382 *
1383 * RETURN : None
1384 *
1385 * NOTE : caller passes the ownership of super_frame, it's our
1386 * responsibility to free super_frame once it's done. video
1387 * frame will be sent to video encoder. Once video encoder is
1388 * done with the video frame, it will call another API
1389 * (release_recording_frame) to return the frame back
1390 *==========================================================================*/
1391 void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
1392 QCameraStream *stream,
1393 void *userdata)
1394 {
1395 ATRACE_CALL();
1396 QCameraVideoMemory *videoMemObj = NULL;
1397 camera_memory_t *video_mem = NULL;
1398 nsecs_t timeStamp = 0;
1399 bool triggerTCB = FALSE;
1400
1401 LOGH("[KPI Perf] : BEGIN");
1402 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1403 if (pme == NULL ||
1404 pme->mCameraHandle == NULL ||
1405 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1406 LOGE("camera obj not valid");
1407 // simply free super frame
1408 free(super_frame);
1409 return;
1410 }
1411
1412 mm_camera_buf_def_t *frame = super_frame->bufs[0];
1413
1414 if (pme->needDebugFps()) {
1415 pme->debugShowVideoFPS();
1416 }
1417 if(pme->m_bRecordStarted) {
1418 LOGI("[KPI Perf] : PROFILE_FIRST_RECORD_FRAME");
1419 pme->m_bRecordStarted = false ;
1420 }
1421 LOGD("Stream(%d), Timestamp: %ld %ld",
1422 frame->stream_id,
1423 frame->ts.tv_sec,
1424 frame->ts.tv_nsec);
1425
1426 if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
1427 if (pme->mParameters.getVideoBatchSize() == 0) {
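// Non-batched video: wrap each frame individually and hand it to the encoder through
// the data-timestamp callback below.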
1428 timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1429 + frame->ts.tv_nsec;
1430 LOGD("Video frame to encoder TimeStamp : %lld batch = 0",
1431 timeStamp);
1432 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
1433 videoMemObj = (QCameraVideoMemory *)frame->mem_info;
1434 video_mem = NULL;
1435 if (NULL != videoMemObj) {
1436 video_mem = videoMemObj->getMemory(frame->buf_idx,
1437 (pme->mStoreMetaDataInFrame > 0)? true : false);
1438 videoMemObj->updateNativeHandle(frame->buf_idx);
1439 triggerTCB = TRUE;
1440 }
1441 } else {
1442 //Handle video batch callback
1443 native_handle_t *nh = NULL;
1444 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
1445 QCameraVideoMemory *videoMemObj = (QCameraVideoMemory *)frame->mem_info;
1446 if ((stream->mCurMetaMemory == NULL)
1447 || (stream->mCurBufIndex == -1)) {
1448 //get Free metadata available
1449 for (int i = 0; i < CAMERA_MIN_VIDEO_BATCH_BUFFERS; i++) {
1450 if (stream->mStreamMetaMemory[i].consumerOwned == 0) {
1451 stream->mCurMetaMemory = videoMemObj->getMemory(i,true);
1452 stream->mCurBufIndex = 0;
1453 stream->mCurMetaIndex = i;
1454 stream->mStreamMetaMemory[i].numBuffers = 0;
1455 break;
1456 }
1457 }
1458 }
1459 video_mem = stream->mCurMetaMemory;
1460 nh = videoMemObj->updateNativeHandle(stream->mCurMetaIndex);
1461 if (video_mem == NULL || nh == NULL) {
1462 LOGE("No Free metadata. Drop this frame");
1463 stream->mCurBufIndex = -1;
1464 stream->bufDone(frame->buf_idx);
1465 free(super_frame);
1466 return;
1467 }
1468
1469 int index = stream->mCurBufIndex;
1470 int fd_cnt = pme->mParameters.getVideoBatchSize();
1471 nsecs_t frame_ts = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1472 + frame->ts.tv_nsec;
1473 if (index == 0) {
1474 stream->mFirstTimeStamp = frame_ts;
1475 }
1476
1477 stream->mStreamMetaMemory[stream->mCurMetaIndex].buf_index[index]
1478 = (uint8_t)frame->buf_idx;
1479 stream->mStreamMetaMemory[stream->mCurMetaIndex].numBuffers++;
1480 stream->mStreamMetaMemory[stream->mCurMetaIndex].consumerOwned
1481 = TRUE;
1482 /*
1483 * data[0] => FD
1484 * data[mNumFDs + 1] => OFFSET
1485 * data[mNumFDs + 2] => SIZE
1486 * data[mNumFDs + 3] => Usage Flag (Color format/Compression)
1487 * data[mNumFDs + 4] => TIMESTAMP
1488 * data[mNumFDs + 5] => FORMAT
1489 */
1490 nh->data[index] = videoMemObj->getFd(frame->buf_idx);
1491 nh->data[index + fd_cnt] = 0;
1492 nh->data[index + (fd_cnt * 2)] = (int)videoMemObj->getSize(frame->buf_idx);
1493 nh->data[index + (fd_cnt * 3)] = videoMemObj->getUsage();
1494 nh->data[index + (fd_cnt * 4)] = (int)(frame_ts - stream->mFirstTimeStamp);
1495 nh->data[index + (fd_cnt * 5)] = videoMemObj->getFormat();
1496 stream->mCurBufIndex++;
1497 if (stream->mCurBufIndex == fd_cnt) {
1498 timeStamp = stream->mFirstTimeStamp;
1499 LOGD("Video frame to encoder TimeStamp : %lld batch = %d",
1500 timeStamp, fd_cnt);
1501 stream->mCurBufIndex = -1;
1502 stream->mCurMetaIndex = -1;
1503 stream->mCurMetaMemory = NULL;
1504 triggerTCB = TRUE;
1505 }
1506 }
1507 } else {
1508 videoMemObj = (QCameraVideoMemory *)frame->mem_info;
1509 video_mem = NULL;
1510 native_handle_t *nh = NULL;
1511 int fd_cnt = frame->user_buf.bufs_used;
1512 if (NULL != videoMemObj) {
1513 video_mem = videoMemObj->getMemory(frame->buf_idx, true);
1514 nh = videoMemObj->updateNativeHandle(frame->buf_idx);
1515 } else {
1516 LOGE("videoMemObj NULL");
1517 }
1518
1519 if (nh != NULL) {
1520 timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
1521 + frame->ts.tv_nsec;
1522 LOGD("Batch buffer TimeStamp : %lld FD = %d index = %d fd_cnt = %d",
1523 timeStamp, frame->fd, frame->buf_idx, fd_cnt);
1524
1525 for (int i = 0; i < fd_cnt; i++) {
1526 if (frame->user_buf.buf_idx[i] >= 0) {
1527 mm_camera_buf_def_t *plane_frame =
1528 &frame->user_buf.plane_buf[frame->user_buf.buf_idx[i]];
1529 QCameraVideoMemory *frameobj =
1530 (QCameraVideoMemory *)plane_frame->mem_info;
1531 int usage = frameobj->getUsage();
1532 nsecs_t frame_ts = nsecs_t(plane_frame->ts.tv_sec) * 1000000000LL
1533 + plane_frame->ts.tv_nsec;
1534 /*
1535 data[0] => FD
1536 data[mNumFDs + 1] => OFFSET
1537 data[mNumFDs + 2] => SIZE
1538 data[mNumFDs + 3] => Usage Flag (Color format/Compression)
1539 data[mNumFDs + 4] => TIMESTAMP
1540 data[mNumFDs + 5] => FORMAT
1541 */
1542 nh->data[i] = frameobj->getFd(plane_frame->buf_idx);
1543 nh->data[fd_cnt + i] = 0;
1544 nh->data[(2 * fd_cnt) + i] = (int)frameobj->getSize(plane_frame->buf_idx);
1545 nh->data[(3 * fd_cnt) + i] = usage;
1546 nh->data[(4 * fd_cnt) + i] = (int)(frame_ts - timeStamp);
1547 nh->data[(5 * fd_cnt) + i] = frameobj->getFormat();
1548 LOGD("Send Video frames to services/encoder delta : %lld FD = %d index = %d",
1549 (frame_ts - timeStamp), plane_frame->fd, plane_frame->buf_idx);
1550 pme->dumpFrameToFile(stream, plane_frame, QCAMERA_DUMP_FRM_VIDEO);
1551 }
1552 }
1553 triggerTCB = TRUE;
1554 } else {
1555 LOGE("No Video Meta Available. Return Buffer");
1556 stream->bufDone(super_frame->bufs[0]->buf_idx);
1557 }
1558 }
1559
1560 if ((NULL != video_mem) && (triggerTCB == TRUE)) {
1561 if ((pme->mDataCbTimestamp != NULL) &&
1562 pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
1563 qcamera_callback_argm_t cbArg;
1564 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
1565 cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
1566 cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
1567 cbArg.data = video_mem;
1568 cbArg.timestamp = timeStamp;
1569 int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
1570 if (rc != NO_ERROR) {
1571 LOGE("fail sending data notify");
1572 stream->bufDone(frame->buf_idx);
1573 }
1574 }
1575 }
1576
1577 free(super_frame);
1578 LOGH("[KPI Perf] : END");
1579 }
1580
1581 /*===========================================================================
1582 * FUNCTION : snapshot_channel_cb_routine
1583 *
1584 * DESCRIPTION: helper function to handle snapshot frame from snapshot channel
1585 *
1586 * PARAMETERS :
1587 * @super_frame : received super buffer
1588 * @userdata : user data ptr
1589 *
1590 * RETURN : None
1591 *
1592 * NOTE : recvd_frame will be released by the caller after this call returns, so if
1593 * asynchronous processing of recvd_frame is needed, it is our responsibility
1594 * to save a copy of it for later use.
1595 *==========================================================================*/
1596 void QCamera2HardwareInterface::snapshot_channel_cb_routine(mm_camera_super_buf_t *super_frame,
1597 void *userdata)
1598 {
1599 ATRACE_CALL();
1600 char value[PROPERTY_VALUE_MAX];
1601 QCameraChannel *pChannel = NULL;
1602
1603 LOGH("[KPI Perf]: E");
1604 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1605 if (pme == NULL ||
1606 pme->mCameraHandle == NULL ||
1607 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1608 LOGE("camera obj not valid");
1609 // simply free super frame
1610 free(super_frame);
1611 return;
1612 }
1613
1614 if (pme->isLowPowerMode()) {
1615 pChannel = pme->m_channels[QCAMERA_CH_TYPE_VIDEO];
1616 } else {
1617 pChannel = pme->m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
1618 }
1619
1620 if ((pChannel == NULL) || (pChannel->getMyHandle() != super_frame->ch_id)) {
1621 LOGE("Snapshot channel doesn't exist, return here");
1622 return;
1623 }
1624
1625 property_get("persist.camera.dumpmetadata", value, "0");
1626 int32_t enabled = atoi(value);
1627 if (enabled) {
1628 if (pChannel == NULL ||
1629 pChannel->getMyHandle() != super_frame->ch_id) {
1630 LOGE("Capture channel doesn't exist, return here");
1631 return;
1632 }
1633 mm_camera_buf_def_t *pMetaFrame = NULL;
1634 QCameraStream *pStream = NULL;
1635 for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1636 pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
1637 if (pStream != NULL) {
1638 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
1639 pMetaFrame = super_frame->bufs[i]; //find the metadata
1640 if (pMetaFrame != NULL &&
1641 ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
1642 pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
1643 }
1644 break;
1645 }
1646 }
1647 }
1648 }
1649
1650 // save a copy for the superbuf
1651 mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1652 if (frame == NULL) {
1653 LOGE("Error allocating memory to save received_frame structure.");
1654 pChannel->bufDone(super_frame);
1655 return;
1656 }
1657 *frame = *super_frame;
1658
1659 if (frame->num_bufs > 0) {
1660 LOGI("[KPI Perf]: superbuf frame_idx %d",
1661 frame->bufs[0]->frame_idx);
1662 }
1663
1664 if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
1665 (NO_ERROR != pme->m_postprocessor.processData(frame))) {
1666 LOGE("Failed to trigger process data");
1667 pChannel->bufDone(super_frame);
1668 free(frame);
1669 frame = NULL;
1670 return;
1671 }
1672
1673 LOGH("[KPI Perf]: X");
1674 }
1675
1676 /*===========================================================================
1677 * FUNCTION : raw_stream_cb_routine
1678 *
1679 * DESCRIPTION: helper function to handle raw dump frame from raw stream
1680 *
1681 * PARAMETERS :
1682 * @super_frame : received super buffer
1683 * @stream : stream object
1684 * @userdata : user data ptr
1685 *
1686 * RETURN : None
1687 *
1688 * NOTE : caller passes the ownership of super_frame, it's our
1689 * responsibility to free super_frame once it's done. For raw
1690 * frame, there is no need to send to postprocessor for jpeg
1691 * encoding. this function will play shutter and send the data
1692 * callback to upper layer. Raw frame buffer will be returned
1693 * back to kernel, and frame will be free after use.
1694 *==========================================================================*/
1695 void QCamera2HardwareInterface::raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1696 QCameraStream * /*stream*/,
1697 void * userdata)
1698 {
1699 ATRACE_CALL();
1700 LOGH("[KPI Perf] : BEGIN");
1701 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1702 if (pme == NULL ||
1703 pme->mCameraHandle == NULL ||
1704 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1705 LOGE("camera obj not valid");
1706 // simply free super frame
1707 free(super_frame);
1708 return;
1709 }
1710
1711 pme->m_postprocessor.processRawData(super_frame);
1712 LOGH("[KPI Perf] : END");
1713 }
1714
1715 /*===========================================================================
1716 * FUNCTION : raw_channel_cb_routine
1717 *
1718 * DESCRIPTION: helper function to handle RAW superbuf callback directly from
1719 * mm-camera-interface
1720 *
1721 * PARAMETERS :
1722 * @super_frame : received super buffer
1723 * @userdata : user data ptr
1724 *
1725 * RETURN : None
1726 *
1727 * NOTE : recvd_frame will be released after this call by caller, so if
1728 * async operation needed for recvd_frame, it's our responsibility
1729 * to save a copy for this variable to be used later.
1730 *==========================================================================*/
1731 void QCamera2HardwareInterface::raw_channel_cb_routine(mm_camera_super_buf_t *super_frame,
1732 void *userdata)
1733
1734 {
1735 ATRACE_CALL();
1736 char value[PROPERTY_VALUE_MAX];
1737
1738 LOGH("[KPI Perf]: E");
1739 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1740 if (pme == NULL ||
1741 pme->mCameraHandle == NULL ||
1742 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1743 LOGE("camera obj not valid");
1744 // simply free super frame
1745 free(super_frame);
1746 return;
1747 }
1748
1749 QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_RAW];
1750 if (pChannel == NULL) {
1751 LOGE("RAW channel doesn't exist, return here");
1752 return;
1753 }
1754
1755 if (pChannel->getMyHandle() != super_frame->ch_id) {
1756 LOGE("Invalid Input super buffer");
1757 pChannel->bufDone(super_frame);
1758 return;
1759 }
1760
1761 property_get("persist.camera.dumpmetadata", value, "0");
1762 int32_t enabled = atoi(value);
1763 if (enabled) {
1764 mm_camera_buf_def_t *pMetaFrame = NULL;
1765 QCameraStream *pStream = NULL;
1766 for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1767 pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
1768 if (pStream != NULL) {
1769 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
1770 pMetaFrame = super_frame->bufs[i]; //find the metadata
1771 if (pMetaFrame != NULL &&
1772 ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
1773 pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "raw");
1774 }
1775 break;
1776 }
1777 }
1778 }
1779 }
1780
1781 // save a copy for the superbuf
1782 mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
1783 if (frame == NULL) {
1784 LOGE("Error allocating memory to save received_frame structure.");
1785 pChannel->bufDone(super_frame);
1786 return;
1787 }
1788 *frame = *super_frame;
1789
1790 if (frame->num_bufs > 0) {
1791 LOGI("[KPI Perf]: superbuf frame_idx %d",
1792 frame->bufs[0]->frame_idx);
1793 }
1794
1795 // Wait on Postproc initialization if needed
1796 // then send to postprocessor
1797 if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
1798 (NO_ERROR != pme->m_postprocessor.processData(frame))) {
1799 LOGE("Failed to trigger process data");
1800 pChannel->bufDone(super_frame);
1801 free(frame);
1802 frame = NULL;
1803 return;
1804 }
1805
1806 LOGH("[KPI Perf]: X");
1807
1808 }
1809
1810 /*===========================================================================
1811 * FUNCTION : preview_raw_stream_cb_routine
1812 *
1813 * DESCRIPTION: helper function to handle raw frame during standard preview
1814 *
1815 * PARAMETERS :
1816 * @super_frame : received super buffer
1817 * @stream : stream object
1818 * @userdata : user data ptr
1819 *
1820 * RETURN : None
1821 *
1822 * NOTE : caller passes the ownership of super_frame, it's our
1823 * responsibility to free super_frame once it's done.
1824 *==========================================================================*/
1825 void QCamera2HardwareInterface::preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1826 QCameraStream * stream,
1827 void * userdata)
1828 {
1829 ATRACE_CALL();
1830 LOGH("[KPI Perf] : BEGIN");
1831 char value[PROPERTY_VALUE_MAX];
1832 bool dump_preview_raw = false, dump_video_raw = false;
1833
1834 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1835 if (pme == NULL ||
1836 pme->mCameraHandle == NULL ||
1837 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1838 LOGE("camera obj not valid");
1839 // simply free super frame
1840 free(super_frame);
1841 return;
1842 }
1843
1844 mm_camera_buf_def_t *raw_frame = super_frame->bufs[0];
1845
1846 if (raw_frame != NULL) {
1847 property_get("persist.camera.preview_raw", value, "0");
1848 dump_preview_raw = atoi(value) > 0 ? true : false;
1849 property_get("persist.camera.video_raw", value, "0");
1850 dump_video_raw = atoi(value) > 0 ? true : false;
1851 if (dump_preview_raw || (pme->mParameters.getRecordingHintValue()
1852 && dump_video_raw)) {
1853 pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
1854 }
1855 stream->bufDone(raw_frame->buf_idx);
1856 }
1857 free(super_frame);
1858
1859 LOGH("[KPI Perf] : END");
1860 }
1861
1862 /*===========================================================================
1863 * FUNCTION : snapshot_raw_stream_cb_routine
1864 *
1865 * DESCRIPTION: helper function to handle raw frame during standard capture
1866 *
1867 * PARAMETERS :
1868 * @super_frame : received super buffer
1869 * @stream : stream object
1870 * @userdata : user data ptr
1871 *
1872 * RETURN : None
1873 *
1874 * NOTE : caller passes the ownership of super_frame, it's our
1875 * responsibility to free super_frame once it's done.
1876 *==========================================================================*/
1877 void QCamera2HardwareInterface::snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
1878 QCameraStream * stream,
1879 void * userdata)
1880 {
1881 ATRACE_CALL();
1882 LOGH("[KPI Perf] : BEGIN");
1883 char value[PROPERTY_VALUE_MAX];
1884 bool dump_raw = false;
1885
1886 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
1887 if (pme == NULL ||
1888 pme->mCameraHandle == NULL ||
1889 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
1890 LOGE("camera obj not valid");
1891 // simply free super frame
1892 free(super_frame);
1893 return;
1894 }
1895
1896 property_get("persist.camera.snapshot_raw", value, "0");
1897 dump_raw = atoi(value) > 0 ? true : false;
1898
1899 for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
1900 if (super_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
1901 mm_camera_buf_def_t * raw_frame = super_frame->bufs[i];
1902 if (NULL != stream) {
1903 if (dump_raw) {
1904 pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
1905 }
1906 stream->bufDone(super_frame->bufs[i]->buf_idx);
1907 }
1908 break;
1909 }
1910 }
1911
1912 free(super_frame);
1913
1914 LOGH("[KPI Perf] : END");
1915 }
1916
1917 /*===========================================================================
1918 * FUNCTION : updateMetadata
1919 *
1920 * DESCRIPTION: Frame related parameter can be updated here
1921 *
1922 * PARAMETERS :
1923 * @pMetaData : pointer to metadata buffer
1924 *
1925 * RETURN : int32_t type of status
1926 * NO_ERROR -- success
1927 * non-zero failure code
1928 *==========================================================================*/
1929 int32_t QCamera2HardwareInterface::updateMetadata(metadata_buffer_t *pMetaData)
1930 {
1931 int32_t rc = NO_ERROR;
1932
1933 if (pMetaData == NULL) {
1934 LOGE("Null Metadata buffer");
1935 return rc;
1936 }
1937
1938 // Sharpness
1939 cam_edge_application_t edge_application;
1940 memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
1941 edge_application.sharpness = mParameters.getSharpness();
1942 if (edge_application.sharpness != 0) {
1943 edge_application.edge_mode = CAM_EDGE_MODE_FAST;
1944 } else {
1945 edge_application.edge_mode = CAM_EDGE_MODE_OFF;
1946 }
1947 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
1948 CAM_INTF_META_EDGE_MODE, edge_application);
1949
1950 //Effect
1951 int32_t prmEffect = mParameters.getEffect();
1952 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_EFFECT, prmEffect);
1953
1954 //flip
1955 int32_t prmFlip = mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
1956 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_FLIP, prmFlip);
1957
1958 //denoise
1959 uint8_t prmDenoise = (uint8_t)mParameters.isWNREnabled();
1960 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
1961 CAM_INTF_META_NOISE_REDUCTION_MODE, prmDenoise);
1962
1963 //rotation & device rotation
1964 uint32_t prmRotation = mParameters.getJpegRotation();
1965 cam_rotation_info_t rotation_info;
1966 memset(&rotation_info, 0, sizeof(cam_rotation_info_t));
1967 if (prmRotation == 0) {
1968 rotation_info.rotation = ROTATE_0;
1969 } else if (prmRotation == 90) {
1970 rotation_info.rotation = ROTATE_90;
1971 } else if (prmRotation == 180) {
1972 rotation_info.rotation = ROTATE_180;
1973 } else if (prmRotation == 270) {
1974 rotation_info.rotation = ROTATE_270;
1975 }
1976
1977 uint32_t device_rotation = mParameters.getDeviceRotation();
1978 if (device_rotation == 0) {
1979 rotation_info.device_rotation = ROTATE_0;
1980 } else if (device_rotation == 90) {
1981 rotation_info.device_rotation = ROTATE_90;
1982 } else if (device_rotation == 180) {
1983 rotation_info.device_rotation = ROTATE_180;
1984 } else if (device_rotation == 270) {
1985 rotation_info.device_rotation = ROTATE_270;
1986 } else {
1987 rotation_info.device_rotation = ROTATE_0;
1988 }
1989
1990 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_ROTATION, rotation_info);
1991
1992 // Imglib Dynamic Scene Data
1993 cam_dyn_img_data_t dyn_img_data = mParameters.getDynamicImgData();
1994 if (mParameters.isStillMoreEnabled()) {
1995 cam_still_more_t stillmore_cap = mParameters.getStillMoreSettings();
1996 dyn_img_data.input_count = stillmore_cap.burst_count;
1997 }
1998 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
1999 CAM_INTF_META_IMG_DYN_FEAT, dyn_img_data);
2000
2001 //CPP CDS
2002 int32_t prmCDSMode = mParameters.getCDSMode();
2003 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
2004 CAM_INTF_PARM_CDS_MODE, prmCDSMode);
2005
2006 return rc;
2007 }
2008
2009 /*===========================================================================
2010 * FUNCTION : metadata_stream_cb_routine
2011 *
2012 * DESCRIPTION: helper function to handle metadata frame from metadata stream
2013 *
2014 * PARAMETERS :
2015 * @super_frame : received super buffer
2016 * @stream : stream object
2017 * @userdata : user data ptr
2018 *
2019 * RETURN : None
2020 *
2021 * NOTE : caller passes the ownership of super_frame, it's our
2022 * responsibility to free super_frame once it's done. Metadata
2023 * could have valid entries for face detection result or
2024 * histogram statistics information.
2025 *==========================================================================*/
2026 void QCamera2HardwareInterface::metadata_stream_cb_routine(mm_camera_super_buf_t * super_frame,
2027 QCameraStream * stream,
2028 void * userdata)
2029 {
2030 ATRACE_CALL();
2031 LOGD("[KPI Perf] : BEGIN");
2032 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2033 if (pme == NULL ||
2034 pme->mCameraHandle == NULL ||
2035 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
2036 LOGE("camera obj not valid");
2037 // simply free super frame
2038 free(super_frame);
2039 return;
2040 }
2041
2042 mm_camera_buf_def_t *frame = super_frame->bufs[0];
2043 metadata_buffer_t *pMetaData = (metadata_buffer_t *)frame->buffer;
2044 if(pme->m_stateMachine.isNonZSLCaptureRunning()&&
2045 !pme->mLongshotEnabled) {
2046 //Make shutter call back in non ZSL mode once raw frame is received from VFE.
2047 pme->playShutter();
2048 }
2049
2050 if (pMetaData->is_tuning_params_valid && pme->mParameters.getRecordingHintValue() == true) {
2051 //Dump Tuning data for video
2052 pme->dumpMetadataToFile(stream,frame,(char *)"Video");
2053 }
2054
2055 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, pMetaData) {
2056 // process histogram statistics info
2057 qcamera_sm_internal_evt_payload_t *payload =
2058 (qcamera_sm_internal_evt_payload_t *)
2059 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2060 if (NULL != payload) {
2061 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2062 payload->evt_type = QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS;
2063 payload->stats_data = *stats_data;
2064 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2065 if (rc != NO_ERROR) {
2066 LOGW("processEvt histogram failed");
2067 free(payload);
2068 payload = NULL;
2069
2070 }
2071 } else {
2072 LOGE("No memory for histogram qcamera_sm_internal_evt_payload_t");
2073 }
2074 }
2075
2076 IF_META_AVAILABLE(cam_face_detection_data_t, detection_data,
2077 CAM_INTF_META_FACE_DETECTION, pMetaData) {
2078
2079 cam_faces_data_t faces_data;
2080 pme->fillFacesData(faces_data, pMetaData);
2081 faces_data.detection_data.fd_type = QCAMERA_FD_PREVIEW; //HARD CODE here before MCT can support
2082
2083 qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
2084 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2085 if (NULL != payload) {
2086 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2087 payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
2088 payload->faces_data = faces_data;
2089 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2090 if (rc != NO_ERROR) {
2091 LOGW("processEvt face detection failed");
2092 free(payload);
2093 payload = NULL;
2094 }
2095 } else {
2096 LOGE("No memory for face detect qcamera_sm_internal_evt_payload_t");
2097 }
2098 }
2099
2100 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMetaData) {
2101 uint8_t forceAFUpdate = FALSE;
2102 //1. Earlier HAL used to rely on AF done flags set in metadata to generate callbacks to
2103 //upper layers. But in scenarios where metadata drops especially which contain important
2104 //AF information, APP will wait indefinitely for focus result resulting in capture hang.
2105 //2. HAL can check for AF state transitions to generate AF state callbacks to upper layers.
2106 //This will help overcome metadata drop issue with the earlier approach.
2107 //3. But sometimes AF state transitions can happen so fast within same metadata due to
2108 //which HAL will receive only the final AF state. HAL may perceive this as no change in AF
2109 //state depending on the state transitions happened (for example state A -> B -> A).
2110 //4. To overcome the drawbacks of both the approaches, we go for a hybrid model in which
2111 //we check state transition at both HAL level and AF module level. We rely on
2112 //'state transition' meta field set by AF module for the state transition detected by it.
2113 IF_META_AVAILABLE(uint8_t, stateChange, CAM_INTF_AF_STATE_TRANSITION, pMetaData) {
2114 forceAFUpdate = *stateChange;
2115 }
2116 //This is a special scenario in which when scene modes like landscape are selected, AF mode
2117 //gets changed to INFINITY at backend, but HAL will not be aware of it. Also, AF state in
2118 //such cases will be set to CAM_AF_STATE_INACTIVE by backend. So, detect the AF mode
2119 //change here and trigger AF callback @ processAutoFocusEvent().
2120 IF_META_AVAILABLE(uint32_t, afFocusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
2121 if (((cam_focus_mode_type)(*afFocusMode) == CAM_FOCUS_MODE_INFINITY) &&
2122 pme->mActiveAF){
2123 forceAFUpdate = TRUE;
2124 }
2125 }
2126 if ((pme->m_currentFocusState != (*afState)) || forceAFUpdate) {
2127 cam_af_state_t prevFocusState = pme->m_currentFocusState;
2128 pme->m_currentFocusState = (cam_af_state_t)(*afState);
2129 qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
2130 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2131 if (NULL != payload) {
2132 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2133 payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
2134 payload->focus_data.focus_state = (cam_af_state_t)(*afState);
2135 //Need to flush ZSL Q only if we are transitioning from scanning state
2136 //to focused/not focused state.
2137 payload->focus_data.flush_info.needFlush =
2138 ((prevFocusState == CAM_AF_STATE_PASSIVE_SCAN) ||
2139 (prevFocusState == CAM_AF_STATE_ACTIVE_SCAN)) &&
2140 ((pme->m_currentFocusState == CAM_AF_STATE_FOCUSED_LOCKED) ||
2141 (pme->m_currentFocusState == CAM_AF_STATE_NOT_FOCUSED_LOCKED));
2142 payload->focus_data.flush_info.focused_frame_idx = frame->frame_idx;
2143
2144 IF_META_AVAILABLE(float, focusDistance,
2145 CAM_INTF_META_LENS_FOCUS_DISTANCE, pMetaData) {
2146 payload->focus_data.focus_dist.
2147 focus_distance[CAM_FOCUS_DISTANCE_OPTIMAL_INDEX] = *focusDistance;
2148 }
2149 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, pMetaData) {
2150 payload->focus_data.focus_dist.
2151 focus_distance[CAM_FOCUS_DISTANCE_NEAR_INDEX] = focusRange[0];
2152 payload->focus_data.focus_dist.
2153 focus_distance[CAM_FOCUS_DISTANCE_FAR_INDEX] = focusRange[1];
2154 }
2155 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
2156 payload->focus_data.focus_mode = (cam_focus_mode_type)(*focusMode);
2157 }
2158 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2159 if (rc != NO_ERROR) {
2160 LOGW("processEvt focus failed");
2161 free(payload);
2162 payload = NULL;
2163 }
2164 } else {
2165 LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
2166 }
2167 }
2168 }
2169
2170 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, pMetaData) {
2171 if (crop_data->num_of_streams > MAX_NUM_STREAMS) {
2172 LOGE("Invalid num_of_streams %d in crop_data",
2173 crop_data->num_of_streams);
2174 } else {
2175 qcamera_sm_internal_evt_payload_t *payload =
2176 (qcamera_sm_internal_evt_payload_t *)
2177 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2178 if (NULL != payload) {
2179 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2180 payload->evt_type = QCAMERA_INTERNAL_EVT_CROP_INFO;
2181 payload->crop_data = *crop_data;
2182 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2183 if (rc != NO_ERROR) {
2184 LOGE("processEvt crop info failed");
2185 free(payload);
2186 payload = NULL;
2187 }
2188 } else {
2189 LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
2190 }
2191 }
2192 }
2193
2194 IF_META_AVAILABLE(int32_t, prep_snapshot_done_state,
2195 CAM_INTF_META_PREP_SNAPSHOT_DONE, pMetaData) {
2196 qcamera_sm_internal_evt_payload_t *payload =
2197 (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2198 if (NULL != payload) {
2199 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2200 payload->evt_type = QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE;
2201 payload->prep_snapshot_state = (cam_prep_snapshot_state_t)*prep_snapshot_done_state;
2202 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2203 if (rc != NO_ERROR) {
2204 LOGW("processEvt prep_snapshot failed");
2205 free(payload);
2206 payload = NULL;
2207 }
2208 } else {
2209 LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
2210 }
2211 }
2212
2213 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
2214 CAM_INTF_META_ASD_HDR_SCENE_DATA, pMetaData) {
2215 LOGH("hdr_scene_data: %d %f\n",
2216 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
2217 //Handle this HDR meta data only if capture is not in process
2218 if (!pme->m_stateMachine.isCaptureRunning()) {
2219 qcamera_sm_internal_evt_payload_t *payload =
2220 (qcamera_sm_internal_evt_payload_t *)
2221 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2222 if (NULL != payload) {
2223 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2224 payload->evt_type = QCAMERA_INTERNAL_EVT_HDR_UPDATE;
2225 payload->hdr_data = *hdr_scene_data;
2226 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2227 if (rc != NO_ERROR) {
2228 LOGW("processEvt hdr update failed");
2229 free(payload);
2230 payload = NULL;
2231 }
2232 } else {
2233 LOGE("No memory for hdr update qcamera_sm_internal_evt_payload_t");
2234 }
2235 }
2236 }
2237
2238 IF_META_AVAILABLE(cam_asd_decision_t, cam_asd_info,
2239 CAM_INTF_META_ASD_SCENE_INFO, pMetaData) {
2240 qcamera_sm_internal_evt_payload_t *payload =
2241 (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2242 if (NULL != payload) {
2243 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2244 payload->evt_type = QCAMERA_INTERNAL_EVT_ASD_UPDATE;
2245 payload->asd_data = (cam_asd_decision_t)*cam_asd_info;
2246 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2247 if (rc != NO_ERROR) {
2248 LOGW("processEvt asd_update failed");
2249 free(payload);
2250 payload = NULL;
2251 }
2252 } else {
2253 LOGE("No memory for asd_update qcamera_sm_internal_evt_payload_t");
2254 }
2255 }
2256
2257 IF_META_AVAILABLE(cam_awb_params_t, awb_params, CAM_INTF_META_AWB_INFO, pMetaData) {
2258 LOGH("metadata for awb params");
2259 qcamera_sm_internal_evt_payload_t *payload =
2260 (qcamera_sm_internal_evt_payload_t *)
2261 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2262 if (NULL != payload) {
2263 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2264 payload->evt_type = QCAMERA_INTERNAL_EVT_AWB_UPDATE;
2265 payload->awb_data = *awb_params;
2266 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2267 if (rc != NO_ERROR) {
2268 LOGW("processEvt awb_update failed");
2269 free(payload);
2270 payload = NULL;
2271 }
2272 } else {
2273 LOGE("No memory for awb_update qcamera_sm_internal_evt_payload_t");
2274 }
2275 }
2276
2277 IF_META_AVAILABLE(uint32_t, flash_mode, CAM_INTF_META_FLASH_MODE, pMetaData) {
2278 pme->mExifParams.sensor_params.flash_mode = (cam_flash_mode_t)*flash_mode;
2279 }
2280
2281 IF_META_AVAILABLE(int32_t, flash_state, CAM_INTF_META_FLASH_STATE, pMetaData) {
2282 pme->mExifParams.sensor_params.flash_state = (cam_flash_state_t) *flash_state;
2283 }
2284
2285 IF_META_AVAILABLE(float, aperture_value, CAM_INTF_META_LENS_APERTURE, pMetaData) {
2286 pme->mExifParams.sensor_params.aperture_value = *aperture_value;
2287 }
2288
2289 IF_META_AVAILABLE(cam_3a_params_t, ae_params, CAM_INTF_META_AEC_INFO, pMetaData) {
2290 pme->mExifParams.cam_3a_params = *ae_params;
2291 pme->mExifParams.cam_3a_params_valid = TRUE;
2292 pme->mFlashNeeded = ae_params->flash_needed;
2293 pme->mExifParams.cam_3a_params.brightness = (float) pme->mParameters.getBrightness();
2294 qcamera_sm_internal_evt_payload_t *payload =
2295 (qcamera_sm_internal_evt_payload_t *)
2296 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2297 if (NULL != payload) {
2298 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2299 payload->evt_type = QCAMERA_INTERNAL_EVT_AE_UPDATE;
2300 payload->ae_data = *ae_params;
2301 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2302 if (rc != NO_ERROR) {
2303 LOGW("processEvt ae_update failed");
2304 free(payload);
2305 payload = NULL;
2306 }
2307 } else {
2308 LOGE("No memory for ae_update qcamera_sm_internal_evt_payload_t");
2309 }
2310 }
2311
2312 IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, pMetaData) {
2313 pme->mExifParams.cam_3a_params.wb_mode = (cam_wb_mode_type) *wb_mode;
2314 }
2315
2316 IF_META_AVAILABLE(cam_sensor_params_t, sensor_params, CAM_INTF_META_SENSOR_INFO, pMetaData) {
2317 pme->mExifParams.sensor_params = *sensor_params;
2318 }
2319
2320 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
2321 CAM_INTF_META_EXIF_DEBUG_AE, pMetaData) {
2322 if (pme->mExifParams.debug_params) {
2323 pme->mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
2324 pme->mExifParams.debug_params->ae_debug_params_valid = TRUE;
2325 }
2326 }
2327
2328 IF_META_AVAILABLE(cam_awb_exif_debug_t, awb_exif_debug_params,
2329 CAM_INTF_META_EXIF_DEBUG_AWB, pMetaData) {
2330 if (pme->mExifParams.debug_params) {
2331 pme->mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
2332 pme->mExifParams.debug_params->awb_debug_params_valid = TRUE;
2333 }
2334 }
2335
2336 IF_META_AVAILABLE(cam_af_exif_debug_t, af_exif_debug_params,
2337 CAM_INTF_META_EXIF_DEBUG_AF, pMetaData) {
2338 if (pme->mExifParams.debug_params) {
2339 pme->mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
2340 pme->mExifParams.debug_params->af_debug_params_valid = TRUE;
2341 }
2342 }
2343
2344 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
2345 CAM_INTF_META_EXIF_DEBUG_ASD, pMetaData) {
2346 if (pme->mExifParams.debug_params) {
2347 pme->mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
2348 pme->mExifParams.debug_params->asd_debug_params_valid = TRUE;
2349 }
2350 }
2351
2352 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t, stats_exif_debug_params,
2353 CAM_INTF_META_EXIF_DEBUG_STATS, pMetaData) {
2354 if (pme->mExifParams.debug_params) {
2355 pme->mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
2356 pme->mExifParams.debug_params->stats_debug_params_valid = TRUE;
2357 }
2358 }
2359
2360 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t, bestats_exif_debug_params,
2361 CAM_INTF_META_EXIF_DEBUG_BESTATS, pMetaData) {
2362 if (pme->mExifParams.debug_params) {
2363 pme->mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
2364 pme->mExifParams.debug_params->bestats_debug_params_valid = TRUE;
2365 }
2366 }
2367
2368 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
2369 CAM_INTF_META_EXIF_DEBUG_BHIST, pMetaData) {
2370 if (pme->mExifParams.debug_params) {
2371 pme->mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
2372 pme->mExifParams.debug_params->bhist_debug_params_valid = TRUE;
2373 }
2374 }
2375
2376 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
2377 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, pMetaData) {
2378 if (pme->mExifParams.debug_params) {
2379 pme->mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
2380 pme->mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
2381 }
2382 }
2383
2384 IF_META_AVAILABLE(uint32_t, led_mode, CAM_INTF_META_LED_MODE_OVERRIDE, pMetaData) {
2385 qcamera_sm_internal_evt_payload_t *payload =
2386 (qcamera_sm_internal_evt_payload_t *)
2387 malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2388 if (NULL != payload) {
2389 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2390 payload->evt_type = QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE;
2391 payload->led_data = (cam_flash_mode_t)*led_mode;
2392 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2393 if (rc != NO_ERROR) {
2394 LOGW("processEvt led mode override failed");
2395 free(payload);
2396 payload = NULL;
2397 }
2398 } else {
2399 LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
2400 }
2401 }
2402
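// Update the edge mode in this metadata buffer based on the current sharpness
// setting, mirroring the logic in updateMetadata() above.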
2403 cam_edge_application_t edge_application;
2404 memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
2405 edge_application.sharpness = pme->mParameters.getSharpness();
2406 if (edge_application.sharpness != 0) {
2407 edge_application.edge_mode = CAM_EDGE_MODE_FAST;
2408 } else {
2409 edge_application.edge_mode = CAM_EDGE_MODE_OFF;
2410 }
2411 ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_META_EDGE_MODE, edge_application);
2412
2413 IF_META_AVAILABLE(cam_focus_pos_info_t, cur_pos_info,
2414 CAM_INTF_META_FOCUS_POSITION, pMetaData) {
2415 qcamera_sm_internal_evt_payload_t *payload =
2416 (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
2417 if (NULL != payload) {
2418 memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
2419 payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE;
2420 payload->focus_pos = *cur_pos_info;
2421 int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
2422 if (rc != NO_ERROR) {
2423 LOGW("processEvt focus_pos_update failed");
2424 free(payload);
2425 payload = NULL;
2426 }
2427 } else {
2428 LOGE("No memory for focus_pos_update qcamera_sm_internal_evt_payload_t");
2429 }
2430 }
2431
2432 if (pme->mParameters.getLowLightCapture()) {
2433 IF_META_AVAILABLE(cam_low_light_mode_t, low_light_level,
2434 CAM_INTF_META_LOW_LIGHT, pMetaData) {
2435 pme->mParameters.setLowLightLevel(*low_light_level);
2436 }
2437 }
2438
2439 IF_META_AVAILABLE(cam_dyn_img_data_t, dyn_img_data,
2440 CAM_INTF_META_IMG_DYN_FEAT, pMetaData) {
2441 pme->mParameters.setDynamicImgData(*dyn_img_data);
2442 }
2443
2444 IF_META_AVAILABLE(int32_t, touch_ae_status, CAM_INTF_META_TOUCH_AE_RESULT, pMetaData) {
2445 LOGD("touch_ae_status: %d", *touch_ae_status);
2446 }
2447
2448 stream->bufDone(frame->buf_idx);
2449 free(super_frame);
2450
2451 LOGD("[KPI Perf] : END");
2452 }
2453
2454 /*===========================================================================
2455 * FUNCTION : reprocess_stream_cb_routine
2456 *
2457 * DESCRIPTION: helper function to handle reprocess frame from reprocess stream
2458 * (after reprocess, e.g., ZSL snapshot frame after WNR if
2459 * WNR is enabled)
2460 *
2461 * PARAMETERS :
2462 * @super_frame : received super buffer
2463 * @stream : stream object
2464 * @userdata : user data ptr
2465 *
2466 * RETURN : None
2467 *
2468 * NOTE : caller passes the ownership of super_frame, it's our
2469 * responsibility to free super_frame once it's done. In this
2470 * case, reprocessed frame need to be passed to postprocessor
2471 * for jpeg encoding.
2472 *==========================================================================*/
2473 void QCamera2HardwareInterface::reprocess_stream_cb_routine(mm_camera_super_buf_t * super_frame,
2474 QCameraStream * /*stream*/,
2475 void * userdata)
2476 {
2477 ATRACE_CALL();
2478 LOGH("[KPI Perf]: E");
2479 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2480 if (pme == NULL ||
2481 pme->mCameraHandle == NULL ||
2482 pme->mCameraHandle->camera_handle != super_frame->camera_handle){
2483 LOGE("camera obj not valid");
2484 // simply free super frame
2485 free(super_frame);
2486 return;
2487 }
2488
2489 pme->m_postprocessor.processPPData(super_frame);
2490
2491 LOGH("[KPI Perf]: X");
2492 }
2493
2494 /*===========================================================================
2495 * FUNCTION : callback_stream_cb_routine
2496 *
2497 * DESCRIPTION: function to process CALLBACK stream data.
2498 *              Frame will be processed and sent to the framework.
2499 *
2500 * PARAMETERS :
2501 * @super_frame : received super buffer
2502 * @stream : stream object
2503 * @userdata : user data ptr
2504 *
2505 * RETURN : None
2506 *==========================================================================*/
2507 void QCamera2HardwareInterface::callback_stream_cb_routine(mm_camera_super_buf_t *super_frame,
2508 QCameraStream *stream, void *userdata)
2509 {
2510 ATRACE_CALL();
2511 LOGH("[KPI Perf]: E");
2512 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
2513
2514 if (pme == NULL ||
2515 pme->mCameraHandle == NULL ||
2516 pme->mCameraHandle->camera_handle != super_frame->camera_handle) {
2517 LOGE("camera obj not valid");
2518 // simply free super frame
2519 free(super_frame);
2520 return;
2521 }
2522
2523 mm_camera_buf_def_t *frame = super_frame->bufs[0];
2524 if (NULL == frame) {
2525 LOGE("preview callback frame is NULL");
2526 free(super_frame);
2527 return;
2528 }
2529
2530 if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
2531 LOGH("preview is not running, no need to process");
2532 stream->bufDone(frame->buf_idx);
2533 free(super_frame);
2534 return;
2535 }
2536
2537 QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
2538 // Handle preview data callback
2539 if (pme->mDataCb != NULL &&
2540 (pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) &&
2541 (!pme->mParameters.isSceneSelectionEnabled())) {
2542 // Preset cache flags to be handled when the buffer comes back
2543 frame->cache_flags |= CPU_HAS_READ;
2544 int32_t rc = pme->sendPreviewCallback(stream, previewMemObj, frame->buf_idx);
2545 if (NO_ERROR != rc) {
2546 LOGE("Preview callback was not sent successfully");
2547 }
2548 }
2549 stream->bufDone(frame->buf_idx);
2550 free(super_frame);
2551 LOGH("[KPI Perf]: X");
2552 }
2553
2554 /*===========================================================================
2555 * FUNCTION : dumpJpegToFile
2556 *
2557 * DESCRIPTION: helper function to dump jpeg into file for debug purpose.
2558 *
2559 * PARAMETERS :
2560 * @data : data ptr
2561 * @size : length of data buffer
2562 * @index : identifier for data
2563 *
2564 * RETURN : None
2565 *==========================================================================*/
2566 void QCamera2HardwareInterface::dumpJpegToFile(const void *data,
2567 size_t size, uint32_t index)
2568 {
2569 char value[PROPERTY_VALUE_MAX];
2570 property_get("persist.camera.dumpimg", value, "0");
2571 uint32_t enabled = (uint32_t) atoi(value);
2572 uint32_t frm_num = 0;
2573 uint32_t skip_mode = 0;
2574
2575 char buf[32];
2576 cam_dimension_t dim;
2577 memset(buf, 0, sizeof(buf));
2578 memset(&dim, 0, sizeof(dim));
2579
2580 if(((enabled & QCAMERA_DUMP_FRM_JPEG) && data) ||
2581 ((true == m_bIntJpegEvtPending) && data)) {
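// persist.camera.dumpimg bit layout used below: bits[31:16] = number of
// frames to dump, bits[15:8] = skip interval, bits[7:0] = dump-type mask.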
2582 frm_num = ((enabled & 0xffff0000) >> 16);
2583 if(frm_num == 0) {
2584 frm_num = 10; //default 10 frames
2585 }
2586 if(frm_num > 256) {
2587 frm_num = 256; //256 buffers cycle around
2588 }
2589 skip_mode = ((enabled & 0x0000ff00) >> 8);
2590 if(skip_mode == 0) {
2591 skip_mode = 1; //no-skip
2592 }
2593
2594 if( mDumpSkipCnt % skip_mode == 0) {
2595 if((frm_num == 256) && (mDumpFrmCnt >= frm_num)) {
2596 // reset frame count if cycling
2597 mDumpFrmCnt = 0;
2598 }
2599 if (mDumpFrmCnt <= frm_num) {
2600 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION "%d_%d.jpg",
2601 mDumpFrmCnt, index);
2602 if (true == m_bIntJpegEvtPending) {
2603 strlcpy(m_BackendFileName, buf, QCAMERA_MAX_FILEPATH_LENGTH);
2604 mBackendFileSize = size;
2605 }
2606
2607 int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
2608 if (file_fd >= 0) {
2609 ssize_t written_len = write(file_fd, data, size);
2610 fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
2611 LOGH("written number of bytes %zd\n",
2612 written_len);
2613 close(file_fd);
2614 } else {
2615 LOGE("fail to open file for image dumping");
2616 }
2617 if (false == m_bIntJpegEvtPending) {
2618 mDumpFrmCnt++;
2619 }
2620 }
2621 }
2622 mDumpSkipCnt++;
2623 }
2624 }
2625
2626
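/*===========================================================================
 * FUNCTION   : dumpMetadataToFile
 *
 * DESCRIPTION: helper function to dump tuning metadata into a file for debug
 *              purpose, controlled by the persist.camera.dumpmetadata property
 *
 * PARAMETERS :
 *   @stream : stream object the metadata frame belongs to
 *   @frame  : metadata frame to be dumped
 *   @type   : string label used in the dump file name
 *
 * RETURN     : None
 *==========================================================================*/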
2627 void QCamera2HardwareInterface::dumpMetadataToFile(QCameraStream *stream,
2628 mm_camera_buf_def_t *frame,char *type)
2629 {
2630 char value[PROPERTY_VALUE_MAX];
2631 uint32_t frm_num = 0;
2632 metadata_buffer_t *metadata = (metadata_buffer_t *)frame->buffer;
2633 property_get("persist.camera.dumpmetadata", value, "0");
2634 uint32_t enabled = (uint32_t) atoi(value);
2635 if (stream == NULL) {
2636 LOGH("No op");
2637 return;
2638 }
2639
2640 uint32_t dumpFrmCnt = stream->mDumpMetaFrame;
2641 if(enabled){
2642 frm_num = ((enabled & 0xffff0000) >> 16);
2643 if (frm_num == 0) {
2644 frm_num = 10; //default 10 frames
2645 }
2646 if (frm_num > 256) {
2647 frm_num = 256; //256 buffers cycle around
2648 }
2649 if ((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
2650 // reset frame count if cycling
2651 dumpFrmCnt = 0;
2652 }
2653 LOGH("dumpFrmCnt= %u, frm_num = %u", dumpFrmCnt, frm_num);
2654 if (dumpFrmCnt < frm_num) {
2655 char timeBuf[128];
2656 char buf[32];
2657 memset(buf, 0, sizeof(buf));
2658 memset(timeBuf, 0, sizeof(timeBuf));
2659 time_t current_time;
2660 struct tm * timeinfo;
2661 time(&current_time);
2662 timeinfo = localtime(&current_time);
2663 if (NULL != timeinfo) {
2664 strftime(timeBuf, sizeof(timeBuf),
2665 QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
2666 }
2667 String8 filePath(timeBuf);
2668 snprintf(buf, sizeof(buf), "%um_%s_%d.bin", dumpFrmCnt, type, frame->frame_idx);
2669 filePath.append(buf);
2670 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2671 if (file_fd >= 0) {
2672 ssize_t written_len = 0;
2673 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
2674 void *data = (void *)((uint8_t *)&metadata->tuning_params.tuning_data_version);
2675 written_len += write(file_fd, data, sizeof(uint32_t));
2676 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size);
2677 LOGH("tuning_sensor_data_size %d",(int)(*(int *)data));
2678 written_len += write(file_fd, data, sizeof(uint32_t));
2679 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size);
2680 LOGH("tuning_vfe_data_size %d",(int)(*(int *)data));
2681 written_len += write(file_fd, data, sizeof(uint32_t));
2682 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size);
2683 LOGH("tuning_cpp_data_size %d",(int)(*(int *)data));
2684 written_len += write(file_fd, data, sizeof(uint32_t));
2685 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size);
2686 LOGH("tuning_cac_data_size %d",(int)(*(int *)data));
2687 written_len += write(file_fd, data, sizeof(uint32_t));
2688 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size2);
2689 LOGH("tuning_cac_data_size2 %d",(int)(*(int *)data));
2690 written_len += write(file_fd, data, sizeof(uint32_t));
2691 size_t total_size = metadata->tuning_params.tuning_sensor_data_size;
2692 data = (void *)((uint8_t *)&metadata->tuning_params.data);
2693 written_len += write(file_fd, data, total_size);
2694 total_size = metadata->tuning_params.tuning_vfe_data_size;
2695 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]);
2696 written_len += write(file_fd, data, total_size);
2697 total_size = metadata->tuning_params.tuning_cpp_data_size;
2698 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]);
2699 written_len += write(file_fd, data, total_size);
2700 total_size = metadata->tuning_params.tuning_cac_data_size;
2701 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]);
2702 written_len += write(file_fd, data, total_size);
2703 close(file_fd);
2704 }else {
2705 LOGE("fail to open file for image dumping");
2706 }
2707 dumpFrmCnt++;
2708 }
2709 }
2710 stream->mDumpMetaFrame = dumpFrmCnt;
2711 }
2712 /*===========================================================================
2713 * FUNCTION : dumpFrameToFile
2714 *
2715 * DESCRIPTION: helper function to dump frame into file for debug purpose.
2716 *
2717 * PARAMETERS :
2718 * @stream : stream object
2719 * @frame : frame buffer to be dumped
2720 * @dump_type : type of the frame to be dumped. Only when such
2721 * dump type is enabled, the frame will be
2722 * dumped into a file.
2723 * @misc : optional string appended to the dump file name
2724 *
2725 * RETURN : None
2726 *==========================================================================*/
2727 void QCamera2HardwareInterface::dumpFrameToFile(QCameraStream *stream,
2728 mm_camera_buf_def_t *frame, uint32_t dump_type, const char *misc)
2729 {
2730 char value[PROPERTY_VALUE_MAX];
2731 property_get("persist.camera.dumpimg", value, "0");
2732 uint32_t enabled = (uint32_t) atoi(value);
2733 uint32_t frm_num = 0;
2734 uint32_t skip_mode = 0;
2735
2736 if (NULL == stream) {
2737 LOGE("stream object is null");
2738 return;
2739 }
2740
2741 uint32_t dumpFrmCnt = stream->mDumpFrame;
2742
2743 if (true == m_bIntRawEvtPending) {
2744 enabled = QCAMERA_DUMP_FRM_RAW;
2745 }
2746
2747 if((enabled & QCAMERA_DUMP_FRM_MASK_ALL)) {
2748 if((enabled & dump_type) && stream && frame) {
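// persist.camera.dumpimg uses the same bit layout as in dumpJpegToFile:
// bits[31:16] = frame count, bits[15:8] = skip interval, bits[7:0] = dump-type mask.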
2749 frm_num = ((enabled & 0xffff0000) >> 16);
2750 if(frm_num == 0) {
2751 frm_num = 10; //default 10 frames
2752 }
2753 if(frm_num > 256) {
2754 frm_num = 256; //256 buffers cycle around
2755 }
2756 skip_mode = ((enabled & 0x0000ff00) >> 8);
2757 if(skip_mode == 0) {
2758 skip_mode = 1; //no-skip
2759 }
2760 if(stream->mDumpSkipCnt == 0)
2761 stream->mDumpSkipCnt = 1;
2762
2763 if( stream->mDumpSkipCnt % skip_mode == 0) {
2764 if((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
2765 // reset frame count if cycling
2766 dumpFrmCnt = 0;
2767 }
2768 if (dumpFrmCnt <= frm_num) {
2769 char buf[32];
2770 char timeBuf[128];
2771 time_t current_time;
2772 struct tm * timeinfo;
2773
2774 memset(timeBuf, 0, sizeof(timeBuf));
2775
2776 time(&current_time);
2777 timeinfo = localtime(&current_time);
2778 memset(buf, 0, sizeof(buf));
2779
2780 cam_dimension_t dim;
2781 memset(&dim, 0, sizeof(dim));
2782 stream->getFrameDimension(dim);
2783
2784 cam_frame_len_offset_t offset;
2785 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2786 stream->getFrameOffset(offset);
2787
2788 if (NULL != timeinfo) {
2789 strftime(timeBuf, sizeof(timeBuf),
2790 QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
2791 }
2792 String8 filePath(timeBuf);
2793 switch (dump_type) {
2794 case QCAMERA_DUMP_FRM_PREVIEW:
2795 {
2796 snprintf(buf, sizeof(buf), "%dp_%dx%d_%d.yuv",
2797 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2798 }
2799 break;
2800 case QCAMERA_DUMP_FRM_THUMBNAIL:
2801 {
2802 snprintf(buf, sizeof(buf), "%dt_%dx%d_%d.yuv",
2803 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2804 }
2805 break;
2806 case QCAMERA_DUMP_FRM_SNAPSHOT:
2807 {
2808 if (!mParameters.isPostProcScaling()) {
2809 mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
2810 } else {
2811 stream->getFrameDimension(dim);
2812 }
2813 if (misc != NULL) {
2814 snprintf(buf, sizeof(buf), "%ds_%dx%d_%d_%s.yuv",
2815 dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
2816 } else {
2817 snprintf(buf, sizeof(buf), "%ds_%dx%d_%d.yuv",
2818 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2819 }
2820 }
2821 break;
2822 case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
2823 {
2824 stream->getFrameDimension(dim);
2825 if (misc != NULL) {
2826 snprintf(buf, sizeof(buf), "%dir_%dx%d_%d_%s.yuv",
2827 dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
2828 } else {
2829 snprintf(buf, sizeof(buf), "%dir_%dx%d_%d.yuv",
2830 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2831 }
2832 }
2833 break;
2834 case QCAMERA_DUMP_FRM_VIDEO:
2835 {
2836 snprintf(buf, sizeof(buf), "%dv_%dx%d_%d.yuv",
2837 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2838 }
2839 break;
2840 case QCAMERA_DUMP_FRM_RAW:
2841 {
2842 mParameters.getStreamDimension(CAM_STREAM_TYPE_RAW, dim);
2843 snprintf(buf, sizeof(buf), "%dr_%dx%d_%d.raw",
2844 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2845 }
2846 break;
2847 case QCAMERA_DUMP_FRM_JPEG:
2848 {
2849 mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
2850 snprintf(buf, sizeof(buf), "%dj_%dx%d_%d.yuv",
2851 dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
2852 }
2853 break;
2854 default:
2855 LOGE("Not supported for dumping stream type %d",
2856 dump_type);
2857 return;
2858 }
2859
2860 filePath.append(buf);
2861 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
2862 ssize_t written_len = 0;
2863 if (file_fd >= 0) {
2864 void *data = NULL;
2865
2866 fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
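// Write each plane: the meta bytes (if any) first, then the payload
// row by row, skipping the per-row stride padding.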
2867 for (uint32_t i = 0; i < offset.num_planes; i++) {
2868 uint32_t index = offset.mp[i].offset;
2869 if (i > 0) {
2870 index += offset.mp[i-1].len;
2871 }
2872
2873 if (offset.mp[i].meta_len != 0) {
2874 data = (void *)((uint8_t *)frame->buffer + index);
2875 written_len += write(file_fd, data,
2876 (size_t)offset.mp[i].meta_len);
2877 index += (uint32_t)offset.mp[i].meta_len;
2878 }
2879
2880 for (int j = 0; j < offset.mp[i].height; j++) {
2881 data = (void *)((uint8_t *)frame->buffer + index);
2882 written_len += write(file_fd, data,
2883 (size_t)offset.mp[i].width);
2884 index += (uint32_t)offset.mp[i].stride;
2885 }
2886 }
2887
2888 LOGH("written number of bytes %zd\n",
2889 written_len);
2890 close(file_fd);
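// CPU has read this buffer for the dump; flag it so the cache state is
// handled when the buffer comes back.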
2891 frame->cache_flags |= CPU_HAS_READ;
2892 } else {
2893 LOGE("fail to open file for image dumping");
2894 }
2895 if (true == m_bIntRawEvtPending) {
2896 strlcpy(m_BackendFileName, filePath.string(), QCAMERA_MAX_FILEPATH_LENGTH);
2897 mBackendFileSize = (size_t)written_len;
2898 } else {
2899 dumpFrmCnt++;
2900 }
2901 }
2902 }
2903 stream->mDumpSkipCnt++;
2904 }
2905 } else {
2906 dumpFrmCnt = 0;
2907 }
2908 stream->mDumpFrame = dumpFrmCnt;
2909 }
2910
2911 /*===========================================================================
2912 * FUNCTION : debugShowVideoFPS
2913 *
2914 * DESCRIPTION: helper function to log video frame FPS for debug purpose.
2915 *
2916 * PARAMETERS : None
2917 *
2918 * RETURN : None
2919 *==========================================================================*/
2920 void QCamera2HardwareInterface::debugShowVideoFPS()
2921 {
2922 mVFrameCount++;
2923 nsecs_t now = systemTime();
2924 nsecs_t diff = now - mVLastFpsTime;
2925 if (diff > ms2ns(250)) {
2926 mVFps = (((double)(mVFrameCount - mVLastFrameCount)) *
2927 (double)(s2ns(1))) / (double)diff;
2928 LOGI("[KPI Perf]: PROFILE_VIDEO_FRAMES_PER_SECOND: %.4f Cam ID = %d",
2929 mVFps, mCameraId);
2930 mVLastFpsTime = now;
2931 mVLastFrameCount = mVFrameCount;
2932 }
2933 }
2934
2935 /*===========================================================================
2936 * FUNCTION : debugShowPreviewFPS
2937 *
2938 * DESCRIPTION: helper function to log preview frame FPS for debug purpose.
2939 *
2940 * PARAMETERS : None
2941 *
2942 * RETURN : None
2943 *==========================================================================*/
2944 void QCamera2HardwareInterface::debugShowPreviewFPS()
2945 {
2946 mPFrameCount++;
2947 nsecs_t now = systemTime();
2948 nsecs_t diff = now - mPLastFpsTime;
2949 if (diff > ms2ns(250)) {
2950 mPFps = (((double)(mPFrameCount - mPLastFrameCount)) *
2951 (double)(s2ns(1))) / (double)diff;
2952 LOGI("[KPI Perf]: PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f Cam ID = %d",
2953 mPFps, mCameraId);
2954 mPLastFpsTime = now;
2955 mPLastFrameCount = mPFrameCount;
2956 }
2957 }
2958
2959 /*===========================================================================
2960 * FUNCTION : fillFacesData
2961 *
2962 * DESCRIPTION: helper function to fill in face related metadata into a struct.
2963 *
2964 * PARAMETERS :
2965 * @faces_data : face features data to be filled
2966 * @metadata : metadata structure to read face features from
2967 *
2968 * RETURN : None
2969 *==========================================================================*/
2970 void QCamera2HardwareInterface::fillFacesData(cam_faces_data_t &faces_data,
2971 metadata_buffer_t *metadata)
2972 {
2973 memset(&faces_data, 0, sizeof(cam_faces_data_t));
2974
2975 IF_META_AVAILABLE(cam_face_detection_data_t, p_detection_data,
2976 CAM_INTF_META_FACE_DETECTION, metadata) {
2977 faces_data.detection_data = *p_detection_data;
2978 if (faces_data.detection_data.num_faces_detected > MAX_ROI) {
2979 faces_data.detection_data.num_faces_detected = MAX_ROI;
2980 }
2981
2982 LOGH("[KPI Perf] PROFILE_NUMBER_OF_FACES_DETECTED %d",
2983 faces_data.detection_data.num_faces_detected);
2984
2985 IF_META_AVAILABLE(cam_face_recog_data_t, p_recog_data,
2986 CAM_INTF_META_FACE_RECOG, metadata) {
2987 faces_data.recog_valid = true;
2988 faces_data.recog_data = *p_recog_data;
2989 }
2990
2991 IF_META_AVAILABLE(cam_face_blink_data_t, p_blink_data,
2992 CAM_INTF_META_FACE_BLINK, metadata) {
2993 faces_data.blink_valid = true;
2994 faces_data.blink_data = *p_blink_data;
2995 }
2996
2997 IF_META_AVAILABLE(cam_face_gaze_data_t, p_gaze_data,
2998 CAM_INTF_META_FACE_GAZE, metadata) {
2999 faces_data.gaze_valid = true;
3000 faces_data.gaze_data = *p_gaze_data;
3001 }
3002
3003 IF_META_AVAILABLE(cam_face_smile_data_t, p_smile_data,
3004 CAM_INTF_META_FACE_SMILE, metadata) {
3005 faces_data.smile_valid = true;
3006 faces_data.smile_data = *p_smile_data;
3007 }
3008
3009 IF_META_AVAILABLE(cam_face_landmarks_data_t, p_landmarks,
3010 CAM_INTF_META_FACE_LANDMARK, metadata) {
3011 faces_data.landmark_valid = true;
3012 faces_data.landmark_data = *p_landmarks;
3013 }
3014
3015 IF_META_AVAILABLE(cam_face_contour_data_t, p_contour,
3016 CAM_INTF_META_FACE_CONTOUR, metadata) {
3017 faces_data.contour_valid = true;
3018 faces_data.contour_data = *p_contour;
3019 }
3020 }
3021 }
3022
3023 /*===========================================================================
3024 * FUNCTION : ~QCameraCbNotifier
3025 *
3026 * DESCRIPTION: Destructor for exiting the callback context.
3027 *
3028 * PARAMETERS : None
3029 *
3030 * RETURN : None
3031 *==========================================================================*/
3032 QCameraCbNotifier::~QCameraCbNotifier()
3033 {
3034 }
3035
3036 /*===========================================================================
3037 * FUNCTION : exit
3038 *
3039 * DESCRIPTION: exit notify thread.
3040 *
3041 * PARAMETERS : None
3042 *
3043 * RETURN : None
3044 *==========================================================================*/
3045 void QCameraCbNotifier::exit()
3046 {
3047 mActive = false;
3048 mProcTh.exit();
3049 }
3050
3051 /*===========================================================================
3052 * FUNCTION : releaseNotifications
3053 *
3054 * DESCRIPTION: callback for releasing data stored in the callback queue.
3055 *
3056 * PARAMETERS :
3057 * @data : data to be released
3058 * @user_data : context data
3059 *
3060 * RETURN : None
3061 *==========================================================================*/
3062 void QCameraCbNotifier::releaseNotifications(void *data, void *user_data)
3063 {
3064 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3065
3066 if ( ( NULL != arg ) && ( NULL != user_data ) ) {
3067 if ( arg->release_cb ) {
3068 arg->release_cb(arg->user_data, arg->cookie, FAILED_TRANSACTION);
3069 }
3070 }
3071 }
3072
3073 /*===========================================================================
3074 * FUNCTION : matchSnapshotNotifications
3075 *
3076 * DESCRIPTION: matches snapshot data callbacks
3077 *
3078 * PARAMETERS :
3079 * @data : data to match
3080 * @user_data : context data
3081 *
3082 * RETURN : bool match
3083 * true - match found
3084 * false- match not found
3085 *==========================================================================*/
3086 bool QCameraCbNotifier::matchSnapshotNotifications(void *data,
3087 void */*user_data*/)
3088 {
3089 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3090 if ( NULL != arg ) {
3091 if ( QCAMERA_DATA_SNAPSHOT_CALLBACK == arg->cb_type ) {
3092 return true;
3093 }
3094 }
3095
3096 return false;
3097 }
3098
3099 /*===========================================================================
3100 * FUNCTION : matchPreviewNotifications
3101 *
3102 * DESCRIPTION: matches preview data callbacks
3103 *
3104 * PARAMETERS :
3105 * @data : data to match
3106 * @user_data : context data
3107 *
3108 * RETURN : bool match
3109 * true - match found
3110 * false- match not found
3111 *==========================================================================*/
3112 bool QCameraCbNotifier::matchPreviewNotifications(void *data,
3113 void */*user_data*/)
3114 {
3115 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3116 if (NULL != arg) {
3117 if ((QCAMERA_DATA_CALLBACK == arg->cb_type) &&
3118 (CAMERA_MSG_PREVIEW_FRAME == arg->msg_type)) {
3119 return true;
3120 }
3121 }
3122
3123 return false;
3124 }
3125
3126 /*===========================================================================
3127 * FUNCTION : matchTimestampNotifications
3128 *
3129 * DESCRIPTION: matches timestamp data callbacks
3130 *
3131 * PARAMETERS :
3132 * @data : data to match
3133 * @user_data : context data
3134 *
3135 * RETURN : bool match
3136 * true - match found
3137 * false- match not found
3138 *==========================================================================*/
3139 bool QCameraCbNotifier::matchTimestampNotifications(void *data,
3140 void */*user_data*/)
3141 {
3142 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
3143 if (NULL != arg) {
3144 if ((QCAMERA_DATA_TIMESTAMP_CALLBACK == arg->cb_type) &&
3145 (CAMERA_MSG_VIDEO_FRAME == arg->msg_type)) {
3146 return true;
3147 }
3148 }
3149
3150 return false;
3151 }
3152
3153 /*===========================================================================
3154 * FUNCTION : cbNotifyRoutine
3155 *
3156 * DESCRIPTION: callback thread that delivers queued callback notifications
3157 * to the upper layers based on the commands it receives.
3158 *
3159 * PARAMETERS :
3160 * @data : context data
3161 *
3162 * RETURN : None
3163 *==========================================================================*/
3164 void * QCameraCbNotifier::cbNotifyRoutine(void * data)
3165 {
3166 int running = 1;
3167 int ret;
3168 QCameraCbNotifier *pme = (QCameraCbNotifier *)data;
3169 QCameraCmdThread *cmdThread = &pme->mProcTh;
3170 cmdThread->setName("CAM_cbNotify");
3171 uint8_t isSnapshotActive = FALSE;
3172 bool longShotEnabled = false;
3173 uint32_t numOfSnapshotExpected = 0;
3174 uint32_t numOfSnapshotRcvd = 0;
3175 int32_t cbStatus = NO_ERROR;
3176
3177 LOGD("E");
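// Outer loop runs until CAMERA_CMD_TYPE_EXIT is received; the inner loop
// blocks on the command semaphore and retries if the wait is interrupted.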
3178 do {
3179 do {
3180 ret = cam_sem_wait(&cmdThread->cmd_sem);
3181 if (ret != 0 && errno != EINVAL) {
3182 LOGD("cam_sem_wait error (%s)",
3183 strerror(errno));
3184 return NULL;
3185 }
3186 } while (ret != 0);
3187
3188 camera_cmd_type_t cmd = cmdThread->getCmd();
3189 LOGD("get cmd %d", cmd);
3190 switch (cmd) {
3191 case CAMERA_CMD_TYPE_START_DATA_PROC:
3192 {
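// Latch the expected snapshot count and longshot state for this capture;
// in longshot mode individual snapshots are not counted (see the
// QCAMERA_DATA_SNAPSHOT_CALLBACK case below).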
3193 isSnapshotActive = TRUE;
3194 numOfSnapshotExpected = pme->mParent->numOfSnapshotsExpected();
3195 longShotEnabled = pme->mParent->isLongshotEnabled();
3196 LOGD("Num Snapshots Expected = %d",
3197 numOfSnapshotExpected);
3198 numOfSnapshotRcvd = 0;
3199 }
3200 break;
3201 case CAMERA_CMD_TYPE_STOP_DATA_PROC:
3202 {
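// Drop any snapshot callbacks still pending in the queue and reset the
// snapshot bookkeeping.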
3203 pme->mDataQ.flushNodes(matchSnapshotNotifications);
3204 isSnapshotActive = FALSE;
3205
3206 numOfSnapshotExpected = 0;
3207 numOfSnapshotRcvd = 0;
3208 }
3209 break;
3210 case CAMERA_CMD_TYPE_DO_NEXT_JOB:
3211 {
3212 qcamera_callback_argm_t *cb =
3213 (qcamera_callback_argm_t *)pme->mDataQ.dequeue();
3214 cbStatus = NO_ERROR;
3215 if (NULL != cb) {
3216 LOGD("cb type %d received",
3217 cb->cb_type);
3218
3219 if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)) {
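// Deliver the callback only if its message type is currently enabled
// by the upper layer; otherwise the data is released below undelivered.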
3220 switch (cb->cb_type) {
3221 case QCAMERA_NOTIFY_CALLBACK:
3222 {
3223 if (cb->msg_type == CAMERA_MSG_FOCUS) {
3224 KPI_ATRACE_INT("Camera:AutoFocus", 0);
3225 LOGH("[KPI Perf] : PROFILE_SENDING_FOCUS_EVT_TO_APP");
3226 }
3227 if (pme->mNotifyCb) {
3228 pme->mNotifyCb(cb->msg_type,
3229 cb->ext1,
3230 cb->ext2,
3231 pme->mCallbackCookie);
3232 } else {
3233 LOGW("notify callback not set!");
3234 }
3235 if (cb->release_cb) {
3236 cb->release_cb(cb->user_data, cb->cookie,
3237 cbStatus);
3238 }
3239 }
3240 break;
3241 case QCAMERA_DATA_CALLBACK:
3242 {
3243 if (pme->mDataCb) {
3244 pme->mDataCb(cb->msg_type,
3245 cb->data,
3246 cb->index,
3247 cb->metadata,
3248 pme->mCallbackCookie);
3249 } else {
3250 LOGW("data callback not set!");
3251 }
3252 if (cb->release_cb) {
3253 cb->release_cb(cb->user_data, cb->cookie,
3254 cbStatus);
3255 }
3256 }
3257 break;
3258 case QCAMERA_DATA_TIMESTAMP_CALLBACK:
3259 {
3260 if(pme->mDataCbTimestamp) {
3261 pme->mDataCbTimestamp(cb->timestamp,
3262 cb->msg_type,
3263 cb->data,
3264 cb->index,
3265 pme->mCallbackCookie);
3266 } else {
3267 LOGE("Timestamp data callback not set!");
3268 }
3269 if (cb->release_cb) {
3270 cb->release_cb(cb->user_data, cb->cookie,
3271 cbStatus);
3272 }
3273 }
3274 break;
3275 case QCAMERA_DATA_SNAPSHOT_CALLBACK:
3276 {
3277 if (TRUE == isSnapshotActive && pme->mDataCb ) {
3278 if (!longShotEnabled) {
3279 numOfSnapshotRcvd++;
3280 LOGI("Num Snapshots Received = %d Expected = %d",
3281 numOfSnapshotRcvd, numOfSnapshotExpected);
3282 if (numOfSnapshotExpected > 0 &&
3283 (numOfSnapshotExpected == numOfSnapshotRcvd)) {
3284 LOGI("Received all snapshots");
3285 // notify HWI that snapshot is done
3286 pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE,
3287 NULL);
3288 }
3289 }
3290 if (pme->mJpegCb) {
3291 LOGI("Calling JPEG Callback!! for camera %d "
3292 "release_data %p "
3293 "frame_idx %d",
3294 pme->mParent->getCameraId(),
3295 cb->user_data,
3296 cb->frame_index);
3297 pme->mJpegCb(cb->msg_type, cb->data,
3298 cb->index, cb->metadata,
3299 pme->mJpegCallbackCookie,
3300 cb->frame_index, cb->release_cb,
3301 cb->cookie, cb->user_data);
3302 // In case of a non-NULL JPEG cb we transfer
3303 // ownership of the buffer to the muxer; hence
3304 // release_cb should not be called here. The
3305 // muxer will release the buffer after it is
3306 // done processing it.
3307 } else if(pme->mDataCb){
3308 pme->mDataCb(cb->msg_type, cb->data, cb->index,
3309 cb->metadata, pme->mCallbackCookie);
3310 if (cb->release_cb) {
3311 cb->release_cb(cb->user_data, cb->cookie,
3312 cbStatus);
3313 }
3314 }
3315 }
3316 }
3317 break;
3318 default:
3319 {
3320 LOGE("invalid cb type %d",
3321 cb->cb_type);
3322 cbStatus = BAD_VALUE;
3323 if (cb->release_cb) {
3324 cb->release_cb(cb->user_data, cb->cookie,
3325 cbStatus);
3326 }
3327 }
3328 break;
3329 };
3330 } else {
3331 LOGW("cb message type %d not enabled!",
3332 cb->msg_type);
3333 cbStatus = INVALID_OPERATION;
3334 if (cb->release_cb) {
3335 cb->release_cb(cb->user_data, cb->cookie, cbStatus);
3336 }
3337 }
3338 delete cb;
3339 } else {
3340 LOGW("invalid cb type passed");
3341 }
3342 }
3343 break;
3344 case CAMERA_CMD_TYPE_EXIT:
3345 {
3346 running = 0;
3347 pme->mDataQ.flush();
3348 }
3349 break;
3350 default:
3351 break;
3352 }
3353 } while (running);
3354 LOGD("X");
3355
3356 return NULL;
3357 }
3358
3359 /*===========================================================================
3360 * FUNCTION : notifyCallback
3361 *
3362 * DESCRIPTION: Enqueues pending callback notifications for the upper layers.
3363 *
3364 * PARAMETERS :
3365 * @cbArgs : callback arguments
3366 *
3367 * RETURN : int32_t type of status
3368 * NO_ERROR -- success
3369 * non-zero failure code
3370 *==========================================================================*/
3371 int32_t QCameraCbNotifier::notifyCallback(qcamera_callback_argm_t &cbArgs)
3372 {
3373 if (!mActive) {
3374 LOGE("notify thread is not active");
3375 return UNKNOWN_ERROR;
3376 }
3377
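// Make a heap copy of the caller's argument struct; it is consumed
// asynchronously by the callback thread and deleted there after delivery.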
3378 qcamera_callback_argm_t *cbArg = new qcamera_callback_argm_t();
3379 if (NULL == cbArg) {
3380 LOGE("no mem for qcamera_callback_argm_t");
3381 return NO_MEMORY;
3382 }
3383 memset(cbArg, 0, sizeof(qcamera_callback_argm_t));
3384 *cbArg = cbArgs;
3385
3386 if (mDataQ.enqueue((void *)cbArg)) {
3387 return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
3388 } else {
3389 LOGE("Error adding cb data into queue");
3390 delete cbArg;
3391 return UNKNOWN_ERROR;
3392 }
3393 }
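/*
 * Illustrative usage (a sketch only; the field names follow
 * qcamera_callback_argm_t as referenced in cbNotifyRoutine above, while the
 * notifier instance name is assumed for the example):
 *
 *   qcamera_callback_argm_t cbArg;
 *   memset(&cbArg, 0, sizeof(cbArg));
 *   cbArg.cb_type  = QCAMERA_NOTIFY_CALLBACK;
 *   cbArg.msg_type = CAMERA_MSG_FOCUS;
 *   cbArg.ext1     = 1;   // event-specific argument forwarded to mNotifyCb
 *   cbArg.ext2     = 0;
 *   m_cbNotifier.notifyCallback(cbArg);
 */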
3394
3395 /*===========================================================================
3396 * FUNCTION : setCallbacks
3397 *
3398 * DESCRIPTION: Initializes the callback functions used for communication
3399 * with the upper layers and launches the callback thread in
3400 * which the callbacks will be delivered.
3401 *
3402 * PARAMETERS :
3403 * @notifyCb : notification callback
3404 * @dataCb : data callback
3405 * @dataCbTimestamp : data with timestamp callback
3406 * @callbackCookie : callback context data
3407 *
3408 * RETURN : None
3409 *==========================================================================*/
3410 void QCameraCbNotifier::setCallbacks(camera_notify_callback notifyCb,
3411 camera_data_callback dataCb,
3412 camera_data_timestamp_callback dataCbTimestamp,
3413 void *callbackCookie)
3414 {
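// Callbacks can be set only once per notifier instance; the callback
// thread is launched on first initialization.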
3415 if ( ( NULL == mNotifyCb ) &&
3416 ( NULL == mDataCb ) &&
3417 ( NULL == mDataCbTimestamp ) &&
3418 ( NULL == mCallbackCookie ) ) {
3419 mNotifyCb = notifyCb;
3420 mDataCb = dataCb;
3421 mDataCbTimestamp = dataCbTimestamp;
3422 mCallbackCookie = callbackCookie;
3423 mActive = true;
3424 mProcTh.launch(cbNotifyRoutine, this);
3425 } else {
3426 LOGE("Camera callback notifier already initialized!");
3427 }
3428 }
3429
3430 /*===========================================================================
3431 * FUNCTION : setJpegCallBacks
3432 *
3433 * DESCRIPTION: Initializes the JPEG callback function used for communication
3434 * with the upper layers. JPEG callbacks are delivered from the
3435 * notifier's callback thread.
3436 *
3437 * PARAMETERS :
3438 * @jpegCb : JPEG data callback
3439 * @callbackCookie : callback context data
3440 *
3441 * RETURN : None
3442 *==========================================================================*/
3443 void QCameraCbNotifier::setJpegCallBacks(
3444 jpeg_data_callback jpegCb, void *callbackCookie)
3445 {
3446 LOGH("Setting JPEG Callback notifier");
3447 mJpegCb = jpegCb;
3448 mJpegCallbackCookie = callbackCookie;
3449 }
3450
3451 /*===========================================================================
3452 * FUNCTION : flushPreviewNotifications
3453 *
3454 * DESCRIPTION: flush all pending preview notifications
3455 * from the notifier queue
3456 *
3457 * PARAMETERS : None
3458 *
3459 * RETURN : int32_t type of status
3460 * NO_ERROR -- success
3461 * non-zero failure code
3462 *==========================================================================*/
3463 int32_t QCameraCbNotifier::flushPreviewNotifications()
3464 {
3465 if (!mActive) {
3466 LOGE("notify thread is not active");
3467 return UNKNOWN_ERROR;
3468 }
3469 mDataQ.flushNodes(matchPreviewNotifications);
3470 return NO_ERROR;
3471 }
3472
3473 /*===========================================================================
3474 * FUNCTION : flushVideoNotifications
3475 *
3476 * DESCRIPTION: flush all pending video notifications
3477 * from the notifier queue
3478 *
3479 * PARAMETERS : None
3480 *
3481 * RETURN : int32_t type of status
3482 * NO_ERROR -- success
3483 * non-zero failure code
3484 *==========================================================================*/
3485 int32_t QCameraCbNotifier::flushVideoNotifications()
3486 {
3487 if (!mActive) {
3488 LOGE("notify thread is not active");
3489 return UNKNOWN_ERROR;
3490 }
3491 mDataQ.flushNodes(matchTimestampNotifications);
3492 return NO_ERROR;
3493 }
3494
3495 /*===========================================================================
3496 * FUNCTION : startSnapshots
3497 *
3498 * DESCRIPTION: Enables snapshot mode
3499 *
3500 * PARAMETERS : None
3501 *
3502 * RETURN : int32_t type of status
3503 * NO_ERROR -- success
3504 * non-zero failure code
3505 *==========================================================================*/
3506 int32_t QCameraCbNotifier::startSnapshots()
3507 {
3508 return mProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, TRUE);
3509 }
3510
3511 /*===========================================================================
3512 * FUNCTION : stopSnapshots
3513 *
3514 * DESCRIPTION: Disables snapshot processing mode
3515 *
3516 * PARAMETERS : None
3517 *
3518 * RETURN : None
3519 *==========================================================================*/
3520 void QCameraCbNotifier::stopSnapshots()
3521 {
3522 mProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, TRUE);
3523 }
3524
3525 }; // namespace qcamera
3526