1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30
31 #define LOG_TAG "QCamera3Channel"
32
33 // To remove
34 #include <cutils/properties.h>
35
36 // System dependencies
37 #include <fcntl.h>
38 #include <stdio.h>
39 #include <stdlib.h>
40 #include "hardware/gralloc.h"
41 #include <utils/Timers.h>
42 #include <sys/stat.h>
43
44 // Camera dependencies
45 #include "QCamera3Channel.h"
46 #include "QCamera3HWI.h"
47 #include "QCameraTrace.h"
48 #include "QCameraFormat.h"
49 extern "C" {
50 #include "mm_camera_dbg.h"
51 }
52
53 using namespace android;
54
55 namespace qcamera {
56 #define IS_BUFFER_ERROR(x) (((x) & V4L2_BUF_FLAG_ERROR) == V4L2_BUF_FLAG_ERROR)
57
58 /*===========================================================================
59 * FUNCTION : QCamera3Channel
60 *
61 * DESCRIPTION: constructor of QCamera3Channel
62 *
63 * PARAMETERS :
64 * @cam_handle : camera handle
65 * @cam_ops : ptr to camera ops table
66 *
67 * RETURN : none
68 *==========================================================================*/
69 QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
70 uint32_t channel_handle,
71 mm_camera_ops_t *cam_ops,
72 channel_cb_routine cb_routine,
73 channel_cb_buffer_err cb_buffer_err,
74 cam_padding_info_t *paddingInfo,
75 cam_feature_mask_t postprocess_mask,
76 void *userData, uint32_t numBuffers)
77 {
78 m_camHandle = cam_handle;
79 m_handle = channel_handle;
80 m_camOps = cam_ops;
81 m_bIsActive = false;
82 m_bUBWCenable = true;
83
84 m_numStreams = 0;
85 memset(mStreams, 0, sizeof(mStreams));
86 mUserData = userData;
87
88 mStreamInfoBuf = NULL;
89 mChannelCB = cb_routine;
90 mChannelCbBufErr = cb_buffer_err;
91 mPaddingInfo = *paddingInfo;
92 mPaddingInfo.offset_info.offset_x = 0;
93 mPaddingInfo.offset_info.offset_y = 0;
94
95 mPostProcMask = postprocess_mask;
96
97 mIsType = IS_TYPE_NONE;
98 mNumBuffers = numBuffers;
99 mPerFrameMapUnmapEnable = true;
100 mDumpFrmCnt = 0;
101 mNRMode = 0;
102
103 mYUVDump = property_get_int32("persist.camera.dumpimg", 0);
104 mMapStreamBuffers = mYUVDump;
105 }
106
107 /*===========================================================================
108 * FUNCTION : ~QCamera3Channel
109 *
110 * DESCRIPTION: destructor of QCamera3Channel
111 *
112 * PARAMETERS : none
113 *
114 * RETURN : none
115 *==========================================================================*/
116 QCamera3Channel::~QCamera3Channel()
117 {
118 }
119
120 /*===========================================================================
121 * FUNCTION : destroy
122 *
123 * DESCRIPTION: internal destructor of QCamera3Channel, called by the subclasses.
124 * This destructor will call pure virtual functions: stop() will eventually call
125 * QCamera3Stream::putBufs, and putBufs in turn calls
126 * QCamera3Channel::putStreamBufs, which is pure virtual
127 *
128 * PARAMETERS : none
129 *
130 * RETURN : none
131 *==========================================================================*/
132 void QCamera3Channel::destroy()
133 {
134 if (m_bIsActive)
135 stop();
136
137 for (uint32_t i = 0; i < m_numStreams; i++) {
138 if (mStreams[i] != NULL) {
139 delete mStreams[i];
140 mStreams[i] = 0;
141 }
142 }
143 m_numStreams = 0;
144 }
145
146 /*===========================================================================
147 * FUNCTION : addStream
148 *
149 * DESCRIPTION: add a stream into channel
150 *
151 * PARAMETERS :
152 * @streamType : stream type
153 * @streamFormat : stream format
154 * @streamDim : stream dimension
155 * @streamRotation : rotation of the stream
156 * @minStreamBufNum : minimal buffer count for particular stream type
157 * @postprocessMask : post-process feature mask
158 * @isType : type of image stabilization required on the stream
159 *
160 * RETURN : int32_t type of status
161 * NO_ERROR -- success
162 * non-zero failure code
163 *==========================================================================*/
164 int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
165 cam_format_t streamFormat,
166 cam_dimension_t streamDim,
167 cam_rotation_t streamRotation,
168 uint8_t minStreamBufNum,
169 cam_feature_mask_t postprocessMask,
170 cam_is_type_t isType,
171 uint32_t batchSize)
172 {
173 int32_t rc = NO_ERROR;
174
175 if (m_numStreams >= 1) {
176 LOGE("Only one stream per channel supported in v3 Hal");
177 return BAD_VALUE;
178 }
179
180 if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
181 LOGE("stream number (%d) exceeds max limit (%d)",
182 m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
183 return BAD_VALUE;
184 }
185 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
186 m_handle,
187 m_camOps,
188 &mPaddingInfo,
189 this,
190 mMapStreamBuffers);
191 if (pStream == NULL) {
192 LOGE("No mem for Stream");
193 return NO_MEMORY;
194 }
195 LOGD("batch size is %d", batchSize);
196
197 rc = pStream->init(streamType, streamFormat, streamDim, streamRotation,
198 NULL, minStreamBufNum, postprocessMask, isType, batchSize,
199 streamCbRoutine, this);
200 if (rc == 0) {
201 mStreams[m_numStreams] = pStream;
202 m_numStreams++;
203 } else {
204 delete pStream;
205 }
206 return rc;
207 }
208
209 /*===========================================================================
210 * FUNCTION : start
211 *
212 * DESCRIPTION: start channel, which will start all streams belonging to this channel
213 *
214 * PARAMETERS :
215 *
216 * RETURN : int32_t type of status
217 * NO_ERROR -- success
218 * non-zero failure code
219 *==========================================================================*/
220 int32_t QCamera3Channel::start()
221 {
222 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_START);
223 int32_t rc = NO_ERROR;
224
225 if (m_numStreams > 1) {
226 LOGW("bundle not supported");
227 } else if (m_numStreams == 0) {
228 return NO_INIT;
229 }
230
231 if(m_bIsActive) {
232 LOGW("Attempt to start active channel");
233 return rc;
234 }
235
236 for (uint32_t i = 0; i < m_numStreams; i++) {
237 if (mStreams[i] != NULL) {
238 mStreams[i]->start();
239 }
240 }
241
242 m_bIsActive = true;
243
244 return rc;
245 }
246
247 /*===========================================================================
248 * FUNCTION : stop
249 *
250 * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
251 *
252 * PARAMETERS : none
253 *
254 * RETURN : int32_t type of status
255 * NO_ERROR -- success
256 * non-zero failure code
257 *==========================================================================*/
258 int32_t QCamera3Channel::stop()
259 {
260 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_STOP);
261 int32_t rc = NO_ERROR;
262 if(!m_bIsActive) {
263 LOGE("Attempt to stop inactive channel");
264 return rc;
265 }
266
267 for (uint32_t i = 0; i < m_numStreams; i++) {
268 if (mStreams[i] != NULL) {
269 mStreams[i]->stop();
270 }
271 }
272
273 m_bIsActive = false;
274 return rc;
275 }
276
277 /*===========================================================================
278 * FUNCTION : setBatchSize
279 *
280 * DESCRIPTION: Set batch size for the channel. This is a dummy implementation
281 * for the base class
282 *
283 * PARAMETERS :
284 * @batchSize : Number of image buffers in a batch
285 *
286 * RETURN : int32_t type of status
287 * NO_ERROR -- success always
288 * non-zero failure code
289 *==========================================================================*/
290 int32_t QCamera3Channel::setBatchSize(uint32_t batchSize)
291 {
292 LOGD("Dummy method. batchSize: %d unused ", batchSize);
293 return NO_ERROR;
294 }
295
296 /*===========================================================================
297 * FUNCTION : queueBatchBuf
298 *
299 * DESCRIPTION: This is a dummy implementation for the base class
300 *
301 * PARAMETERS :
302 *
303 * RETURN : int32_t type of status
304 * NO_ERROR -- success always
305 * non-zero failure code
306 *==========================================================================*/
307 int32_t QCamera3Channel::queueBatchBuf()
308 {
309 LOGD("Dummy method. Unused ");
310 return NO_ERROR;
311 }
312
313 /*===========================================================================
314 * FUNCTION : setPerFrameMapUnmap
315 *
316 * DESCRIPTION: Sets internal enable flag
317 *
318 * PARAMETERS :
319 * @enable : Bool value for the enable flag
320 *
321 * RETURN : int32_t type of status
322 * NO_ERROR -- success always
323 * non-zero failure code
324 *==========================================================================*/
325 int32_t QCamera3Channel::setPerFrameMapUnmap(bool enable)
326 {
327 mPerFrameMapUnmapEnable = enable;
328 return NO_ERROR;
329 }
330
331 /*===========================================================================
332 * FUNCTION : flush
333 *
334 * DESCRIPTION: flush a channel
335 *
336 * PARAMETERS : none
337 *
338 * RETURN : int32_t type of status
339 * NO_ERROR -- success
340 * non-zero failure code
341 *==========================================================================*/
342 int32_t QCamera3Channel::flush()
343 {
344 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_FLUSH);
345 return NO_ERROR;
346 }
347
348 /*===========================================================================
349 * FUNCTION : bufDone
350 *
351 * DESCRIPTION: return a stream buf back to kernel
352 *
353 * PARAMETERS :
354 * @recvd_frame : stream buf frame to be returned
355 *
356 * RETURN : int32_t type of status
357 * NO_ERROR -- success
358 * non-zero failure code
359 *==========================================================================*/
360 int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
361 {
362 int32_t rc = NO_ERROR;
363 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
364 if (recvd_frame->bufs[i] != NULL) {
365 for (uint32_t j = 0; j < m_numStreams; j++) {
366 if (mStreams[j] != NULL &&
367 mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
368 rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
369 break; // break loop j
370 }
371 }
372 }
373 }
374
375 return rc;
376 }
377
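/*===========================================================================
 * FUNCTION : setBundleInfo
 *
 * DESCRIPTION: pass the bundle configuration down to the first stream of this
 * channel via the CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO stream parameter
 *
 * PARAMETERS :
 * @bundleInfo : bundle configuration to be set
 *
 * RETURN : int32_t type of status
 * NO_ERROR -- success
 * non-zero failure code
 *==========================================================================*/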
378 int32_t QCamera3Channel::setBundleInfo(const cam_bundle_config_t &bundleInfo)
379 {
380 int32_t rc = NO_ERROR;
381 cam_stream_parm_buffer_t param;
382 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
383 param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
384 param.bundleInfo = bundleInfo;
385 if (m_numStreams > 0 && mStreams[0]) {
386 rc = mStreams[0]->setParameter(param);
387 if (rc != NO_ERROR) {
388 LOGE("stream setParameter for set bundle failed");
389 }
390 }
391 return rc;
392 }
393
394 /*===========================================================================
395 * FUNCTION : getStreamTypeMask
396 *
397 * DESCRIPTION: Get bit mask of all stream types in this channel
398 *
399 * PARAMETERS : None
400 *
401 * RETURN : Bit mask of all stream types in this channel
402 *==========================================================================*/
403 uint32_t QCamera3Channel::getStreamTypeMask()
404 {
405 uint32_t mask = 0;
406 for (uint32_t i = 0; i < m_numStreams; i++) {
407 mask |= (1U << mStreams[i]->getMyType());
408 }
409 return mask;
410 }
411
412 /*===========================================================================
413 * FUNCTION : getStreamID
414 *
415 * DESCRIPTION: Get StreamID of requested stream type
416 *
417 * PARAMETERS : streamMask
418 *
419 * RETURN : Stream ID
420 *==========================================================================*/
421 uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
422 {
423 uint32_t streamID = 0;
424 for (uint32_t i = 0; i < m_numStreams; i++) {
425 if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) {
426 streamID = mStreams[i]->getMyServerID();
427 break;
428 }
429 }
430 return streamID;
431 }
432
433 /*===========================================================================
434 * FUNCTION : getStreamByHandle
435 *
436 * DESCRIPTION: return stream object by stream handle
437 *
438 * PARAMETERS :
439 * @streamHandle : stream handle
440 *
441 * RETURN : stream object. NULL if not found
442 *==========================================================================*/
443 QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
444 {
445 for (uint32_t i = 0; i < m_numStreams; i++) {
446 if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
447 return mStreams[i];
448 }
449 }
450 return NULL;
451 }
452
453 /*===========================================================================
454 * FUNCTION : getStreamByIndex
455 *
456 * DESCRIPTION: return stream object by index
457 *
458 * PARAMETERS :
459 * @streamHandle : stream handle
460 *
461 * RETURN : stream object. NULL if not found
462 *==========================================================================*/
463 QCamera3Stream *QCamera3Channel::getStreamByIndex(uint32_t index)
464 {
465 if (index < m_numStreams) {
466 return mStreams[index];
467 }
468 return NULL;
469 }
470
471 /*===========================================================================
472 * FUNCTION : streamCbRoutine
473 *
474 * DESCRIPTION: static callback routine for the stream
475 * PARAMETERS :
476 * @super_frame : super frame with the filled buffer
477 * @stream : stream on which the buffer was requested and filled
478 * @userdata : pointer to the owning QCamera3Channel
479 * RETURN : none
480 *==========================================================================*/
481 void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
482 QCamera3Stream *stream, void *userdata)
483 {
484 QCamera3Channel *channel = (QCamera3Channel *)userdata;
485 if (channel == NULL) {
486 LOGE("invalid channel pointer");
487 return;
488 }
489 channel->streamCbRoutine(super_frame, stream);
490 }
491
492 /*===========================================================================
493 * FUNCTION : dumpYUV
494 *
495 * DESCRIPTION: function to dump the YUV data from ISP/pproc
496 *
497 * PARAMETERS :
498 * @frame : frame to be dumped
499 * @dim : dimension of the stream
500 * @offset : offset of the data
501 * @dump_type : dump point flag (preview, video, JPEG input/output, callback,
502 * reprocess input)
503 * RETURN : none
504 *==========================================================================*/
505 void QCamera3Channel::dumpYUV(mm_camera_buf_def_t *frame, cam_dimension_t dim,
506 cam_frame_len_offset_t offset, uint8_t dump_type)
507 {
508 char buf[FILENAME_MAX];
509 memset(buf, 0, sizeof(buf));
510 static int counter = 0;
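/* The persist.camera.dumpimg property packs three fields:
 * low bits     - bitmask of dump points (QCAMERA_DUMP_FRM_* flags)
 * bits [15:8]  - skip interval, i.e. dump every Nth matching frame
 * bits [31:16] - number of frames to dump (defaults to 10, capped at 256) */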
511 if (mYUVDump & dump_type) {
512 mFrmNum = ((mYUVDump & 0xffff0000) >> 16);
513 if (mFrmNum == 0) {
514 mFrmNum = 10;
515 }
516 if (mFrmNum > 256) {
517 mFrmNum = 256;
518 }
519 mSkipMode = ((mYUVDump & 0x0000ff00) >> 8);
520 if (mSkipMode == 0) {
521 mSkipMode = 1;
522 }
523 if (mDumpSkipCnt == 0) {
524 mDumpSkipCnt = 1;
525 }
526 if (mDumpSkipCnt % mSkipMode == 0) {
527 if (mDumpFrmCnt < mFrmNum) {
528 /* Note that the image dimension will be the unrotated stream dimension.
529 * If you feel that the image would have been rotated during reprocess
530 * then swap the dimensions while opening the file
531 * */
532 switch (dump_type) {
533 case QCAMERA_DUMP_FRM_PREVIEW:
534 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"p_%d_%d_%dx%d.yuv",
535 counter, frame->frame_idx, dim.width, dim.height);
536 break;
537 case QCAMERA_DUMP_FRM_VIDEO:
538 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"v_%d_%d_%dx%d.yuv",
539 counter, frame->frame_idx, dim.width, dim.height);
540 break;
541 case QCAMERA_DUMP_FRM_INPUT_JPEG:
542 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.yuv",
543 counter, frame->frame_idx, dim.width, dim.height);
544 break;
545 case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
546 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"ir_%d_%d_%dx%d.yuv",
547 counter, frame->frame_idx, dim.width, dim.height);
548 break;
549 case QCAMERA_DUMP_FRM_CALLBACK:
550 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"c_%d_%d_%dx%d.yuv",
551 counter, frame->frame_idx, dim.width, dim.height);
552 break;
553 case QCAMERA_DUMP_FRM_OUTPUT_JPEG:
554 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"j_%d_%d_%dx%d.jpg",
555 counter, frame->frame_idx, dim.width, dim.height);
556 break;
557 default :
558 LOGE("dumping not enabled for stream type %d",dump_type);
559 break;
560 }
561 counter++;
562 int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
563 ssize_t written_len = 0;
564 if (file_fd >= 0) {
565 void *data = NULL;
566 fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
567 if( dump_type == QCAMERA_DUMP_FRM_OUTPUT_JPEG ) {
568 written_len = write(file_fd, frame->buffer, frame->frame_len);
569 }
570 else {
571 for (uint32_t i = 0; i < offset.num_planes; i++) {
572 uint32_t index = offset.mp[i].offset;
573 if (i > 0) {
574 index += offset.mp[i-1].len;
575 }
576 for (int j = 0; j < offset.mp[i].height; j++) {
577 data = (void *)((uint8_t *)frame->buffer + index);
578 written_len += write(file_fd, data,
579 (size_t)offset.mp[i].width);
580 index += (uint32_t)offset.mp[i].stride;
581 }
582 }
583 }
584 LOGH("written number of bytes %ld\n", written_len);
585 mDumpFrmCnt++;
586 frame->cache_flags |= CPU_HAS_READ;
587 close(file_fd);
588 } else {
589 LOGE("failed to open file to dump image");
590 }
591 }
592 } else {
593 mDumpSkipCnt++;
594 }
595 }
596 }
597
598 /*===========================================================================
599 * FUNCTION : isUBWCEnabled
600 *
601 * DESCRIPTION: Function to get UBWC hardware support.
602 *
603 * PARAMETERS : None
604 *
605 * RETURN : TRUE -- UBWC format supported
606 * FALSE -- UBWC is not supported.
607 *==========================================================================*/
608 bool QCamera3Channel::isUBWCEnabled()
609 {
610 #ifdef UBWC_PRESENT
611 char value[PROPERTY_VALUE_MAX];
612 int prop_value = 0;
613 memset(value, 0, sizeof(value));
614 property_get("debug.gralloc.gfx_ubwc_disable", value, "0");
615 prop_value = atoi(value);
616 if (prop_value) {
617 return FALSE;
618 }
619
620 //Disable UBWC if Eztune is enabled
621 //EzTune process CPP output frame and cannot understand UBWC.
622 memset(value, 0, sizeof(value));
623 property_get("persist.camera.eztune.enable", value, "0");
624 prop_value = atoi(value);
625 if (prop_value) {
626 return FALSE;
627 }
628 return TRUE;
629 #else
630 return FALSE;
631 #endif
632 }
633
634 /*===========================================================================
635 * FUNCTION : setUBWCEnabled
636 *
637 * DESCRIPTION: set UBWC enable
638 *
639 * PARAMETERS : UBWC enable value
640 *
641 * RETURN : none
642 *
643 *==========================================================================*/
644 void QCamera3Channel::setUBWCEnabled(bool val)
645 {
646 m_bUBWCenable = val;
647 }
648
649 /*===========================================================================
650 * FUNCTION : getStreamDefaultFormat
651 *
652 * DESCRIPTION: return default buffer format for the stream
653 *
654 * PARAMETERS : type : Stream type
655 *
656 * RETURN : format for stream type
657 *
658 *==========================================================================*/
659 cam_format_t QCamera3Channel::getStreamDefaultFormat(cam_stream_type_t type,
660 uint32_t width, uint32_t height, bool forcePreviewUBWC, cam_is_type_t isType)
661 {
662 cam_format_t streamFormat;
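/* Format selection below depends on UBWC support (see isUBWCEnabled()) and, for
 * preview/video, on the persist.camera.preview.ubwc, persist.camera.gzoom.at and
 * persist.camera.gzoom.4k properties checked in each case. */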
663
664 switch (type) {
665 case CAM_STREAM_TYPE_PREVIEW:
666 if (isUBWCEnabled()) {
667
668 char prop[PROPERTY_VALUE_MAX];
669 int pFormat;
670 memset(prop, 0, sizeof(prop));
671 property_get("persist.camera.preview.ubwc", prop, "1");
672 pFormat = atoi(prop);
673
674 // When goog_zoom is linked to the preview stream, disable UBWC for the preview
675 property_get("persist.camera.gzoom.at", prop, "0");
676 bool is_goog_zoom_preview_enabled = ((atoi(prop) & 2) > 0) && isType == IS_TYPE_EIS_3_0;
677
678 if (pFormat == 1 && forcePreviewUBWC && !is_goog_zoom_preview_enabled) {
679 streamFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
680 } else {
681 /* Changed to macro to ensure format sent to gralloc for preview
682 is also changed if the preview format is changed at camera HAL */
683 streamFormat = PREVIEW_STREAM_FORMAT;
684 }
685 } else {
686 /* Changed to macro to ensure format sent to gralloc for preview
687 is also changed if the preview format is changed at camera HAL */
688 streamFormat = PREVIEW_STREAM_FORMAT;
689 }
690 break;
691 case CAM_STREAM_TYPE_VIDEO:
692 {
693 /* Disable UBWC for smaller video resolutions due to CPP downscale
694 limits. Refer cpp_hw_params.h::CPP_DOWNSCALE_LIMIT_UBWC */
695 if (isUBWCEnabled() && (width >= 640) && (height >= 480)) {
696 // When goog_zoom is linked to the video stream, disable UBWC for the video
697 char prop[PROPERTY_VALUE_MAX];
698 property_get("persist.camera.gzoom.at", prop, "0");
699 bool is_goog_zoom_video_enabled = ((atoi(prop) & 1) > 0) && isType == IS_TYPE_EIS_3_0;
700
701 property_get("persist.camera.gzoom.4k", prop, "0");
702 bool is_goog_zoom_4k_enabled = (atoi(prop) > 0);
703 bool is_4k_video = (width >= 3840 && height >= 2160);
704
705 if ((QCameraCommon::isVideoUBWCEnabled()) && (!is_goog_zoom_video_enabled
706 || (is_4k_video && !is_goog_zoom_4k_enabled))) {
707 streamFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
708 } else {
709 streamFormat = CAM_FORMAT_YUV_420_NV12_VENUS;
710 }
711 } else {
712 #if VENUS_PRESENT
713 streamFormat = CAM_FORMAT_YUV_420_NV12_VENUS;
714 #else
715 streamFormat = CAM_FORMAT_YUV_420_NV12;
716 #endif
717 }
718 break;
719 }
720 case CAM_STREAM_TYPE_SNAPSHOT:
721 streamFormat = CAM_FORMAT_YUV_420_NV21;
722 break;
723 case CAM_STREAM_TYPE_CALLBACK:
724 /* Changed to macro to ensure format sent to gralloc for callback
725 is also changed if the preview format is changed at camera HAL */
726 streamFormat = CALLBACK_STREAM_FORMAT;
727 break;
728 case CAM_STREAM_TYPE_RAW:
729 streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
730 break;
731 default:
732 streamFormat = CAM_FORMAT_YUV_420_NV21;
733 break;
734 }
735 return streamFormat;
736 }
737
738
739 /* QCamera3ProcessingChannel methods */
740
741 /*===========================================================================
742 * FUNCTION : QCamera3ProcessingChannel
743 *
744 * DESCRIPTION: constructor of QCamera3ProcessingChannel
745 *
746 * PARAMETERS :
747 * @cam_handle : camera handle
748 * @cam_ops : ptr to camera ops table
749 * @cb_routine : callback routine to frame aggregator
750 * @paddingInfo: stream padding info
751 * @userData : HWI handle
752 * @stream : camera3_stream_t structure
753 * @stream_type: Channel stream type
754 * @postprocess_mask: the postprocess mask for streams of this channel
755 * @metadataChannel: handle to the metadataChannel
756 * @numBuffers : number of max dequeued buffers
757 * RETURN : none
758 *==========================================================================*/
759 QCamera3ProcessingChannel::QCamera3ProcessingChannel(uint32_t cam_handle,
760 uint32_t channel_handle,
761 mm_camera_ops_t *cam_ops,
762 channel_cb_routine cb_routine,
763 channel_cb_buffer_err cb_buffer_err,
764 cam_padding_info_t *paddingInfo,
765 void *userData,
766 camera3_stream_t *stream,
767 cam_stream_type_t stream_type,
768 cam_feature_mask_t postprocess_mask,
769 QCamera3Channel *metadataChannel,
770 uint32_t numBuffers) :
771 QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine,
772 cb_buffer_err, paddingInfo, postprocess_mask, userData, numBuffers),
773 m_postprocessor(this),
774 mFrameCount(0),
775 mLastFrameCount(0),
776 mLastFpsTime(0),
777 mMemory(numBuffers),
778 mCamera3Stream(stream),
779 mNumBufs(CAM_MAX_NUM_BUFS_PER_STREAM),
780 mStreamType(stream_type),
781 mPostProcStarted(false),
782 mReprocessType(REPROCESS_TYPE_NONE),
783 mInputBufferConfig(false),
784 m_pMetaChannel(metadataChannel),
785 mMetaFrame(NULL),
786 mOfflineMemory(0),
787 mOfflineMetaMemory(numBuffers + (MAX_REPROCESS_PIPELINE_STAGES - 1))
788 {
789 char prop[PROPERTY_VALUE_MAX];
790 property_get("persist.debug.sf.showfps", prop, "0");
791 mDebugFPS = (uint8_t) atoi(prop);
792
793 int32_t rc = m_postprocessor.init(&mMemory);
794 if (rc != 0) {
795 LOGE("Init Postprocessor failed");
796 }
797 }
798
799 /*===========================================================================
800 * FUNCTION : ~QCamera3ProcessingChannel
801 *
802 * DESCRIPTION: destructor of QCamera3ProcessingChannel
803 *
804 * PARAMETERS : none
805 *
806 * RETURN : none
807 *==========================================================================*/
808 QCamera3ProcessingChannel::~QCamera3ProcessingChannel()
809 {
810 destroy();
811
812 int32_t rc = m_postprocessor.deinit();
813 if (rc != 0) {
814 LOGE("De-init Postprocessor failed");
815 }
816
817 if (0 < mOfflineMetaMemory.getCnt()) {
818 mOfflineMetaMemory.deallocate();
819 }
820 if (0 < mOfflineMemory.getCnt()) {
821 mOfflineMemory.unregisterBuffers();
822 }
823
824 }
825
826 /*===========================================================================
827 * FUNCTION : streamCbRoutine
828 *
829 * DESCRIPTION: stream callback routine; issues the framework buffer callback for the filled frame
830 *
831 * PARAMETERS :
832 * @super_frame : the super frame with filled buffer
833 * @stream : stream on which the buffer was requested and filled
834 *
835 * RETURN : none
836 *==========================================================================*/
837 void QCamera3ProcessingChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
838 QCamera3Stream *stream)
839 {
840 if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
841 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PREVIEW_STRM_CB);
842 } else {
843 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_STRM_CB);
844 }
845 //FIXME Q Buf back in case of error?
846 uint8_t frameIndex;
847 buffer_handle_t *resultBuffer;
848 int32_t resultFrameNumber;
849 camera3_stream_buffer_t result;
850 cam_dimension_t dim;
851 cam_frame_len_offset_t offset;
852
853 memset(&dim, 0, sizeof(dim));
854 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
855 if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
856 LOGE("Error with the stream callback");
857 return;
858 }
859
860 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
861 if(frameIndex >= mNumBufs) {
862 LOGE("Error, Invalid index for buffer");
863 stream->bufDone(frameIndex);
864 return;
865 }
866
867 if (mDebugFPS) {
868 showDebugFPS(stream->getMyType());
869 }
870 stream->getFrameDimension(dim);
871 stream->getFrameOffset(offset);
872 if (stream->getMyType() == CAM_STREAM_TYPE_PREVIEW) {
873 dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_PREVIEW);
874 } else if (stream->getMyType() == CAM_STREAM_TYPE_VIDEO) {
875 dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_VIDEO);
876 } else if (stream->getMyType() == CAM_STREAM_TYPE_CALLBACK) {
877 dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_CALLBACK);
878 }
879
880 do {
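/* Deliver the framework callback for this buffer. If an older frame is still
 * outstanding, park the current super buffer in mOutOfSequenceBuffers (kept
 * sorted by frame number) and cancel the oldest pending buffer; parked buffers
 * are drained through this same loop once they become the lowest outstanding
 * frame number. */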
881
882 //Use below data to issue framework callback
883 resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
884 resultFrameNumber = mMemory.getFrameNumber(frameIndex);
885 uint32_t oldestBufIndex;
886 int32_t lowestFrameNumber = mMemory.getOldestFrameNumber(oldestBufIndex);
887 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
888 if ((lowestFrameNumber != -1 ) && (lowestFrameNumber < resultFrameNumber) &&
889 hal_obj->mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) {
890 LOGE("Error buffer dropped for framenumber:%d with bufidx:%d",
891 lowestFrameNumber, oldestBufIndex);
892 if (mOutOfSequenceBuffers.empty()) {
893 stream->cancelBuffer(oldestBufIndex);
894 }
895
896 //push in order!
897 auto itr = mOutOfSequenceBuffers.begin();
898 for (; itr != mOutOfSequenceBuffers.end(); itr++) {
899 mm_camera_super_buf_t *super_buf = *itr;
900 uint32_t buf_idx = super_buf->bufs[0]->buf_idx;
901 int32_t frame_num = mMemory.getFrameNumber(buf_idx);
902 if (resultFrameNumber < frame_num) {
903 LOGE("Out of order frame!! set buffer status error flag!");
904 mOutOfSequenceBuffers.insert(itr, super_frame);
905 super_buf->bufs[0]->flags |= V4L2_BUF_FLAG_ERROR;
906 break;
907 }
908 }
909
910 if (itr == mOutOfSequenceBuffers.end()) {
911 LOGE("Add the frame to the end of mOutOfSequenceBuffers");
912 // add the buffer
913 mOutOfSequenceBuffers.push_back(super_frame);
914 }
915 return;
916 }
917
918 if(hal_obj->mStreamConfig == true) {
919 switch (stream->getMyType()) {
920 case CAM_STREAM_TYPE_PREVIEW:
921 LOGH("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
922 break;
923 case CAM_STREAM_TYPE_VIDEO:
924 LOGH("[KPI Perf] : PROFILE_FIRST_VIDEO_FRAME");
925 break;
926 default:
927 break;
928 }
929 hal_obj->mStreamConfig = false;
930 }
931
932 result.stream = mCamera3Stream;
933 result.buffer = resultBuffer;
934 if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
935 result.status = CAMERA3_BUFFER_STATUS_ERROR;
936 LOGW("CAMERA3_BUFFER_STATUS_ERROR for stream_type: %d",
937 mStreams[0]->getMyType());
938 mChannelCbBufErr(this, resultFrameNumber, CAMERA3_BUFFER_STATUS_ERROR, mUserData);
939 } else {
940 result.status = CAMERA3_BUFFER_STATUS_OK;
941 }
942 result.acquire_fence = -1;
943 result.release_fence = -1;
944 if(mPerFrameMapUnmapEnable) {
945 int32_t rc = stream->bufRelease(frameIndex);
946 if (NO_ERROR != rc) {
947 LOGE("Error %d releasing stream buffer %d",
948 rc, frameIndex);
949 }
950
951 rc = mMemory.unregisterBuffer(frameIndex);
952 if (NO_ERROR != rc) {
953 LOGE("Error %d unregistering stream buffer %d",
954 rc, frameIndex);
955 }
956 }
957
958 if (0 <= resultFrameNumber) {
959 if (mChannelCB) {
960 mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, false, mUserData);
961 }
962 } else {
963 LOGE("Bad frame number");
964 }
965 free(super_frame);
966 super_frame = NULL;
967 if (mOutOfSequenceBuffers.empty()) {
968 break;
969 } else {
970 auto itr = mOutOfSequenceBuffers.begin();
971 super_frame = *itr;
972 frameIndex = super_frame->bufs[0]->buf_idx;
973 resultFrameNumber = mMemory.getFrameNumber(frameIndex);
974 lowestFrameNumber = mMemory.getOldestFrameNumber(oldestBufIndex);
975 LOGE("Attempting to recover next frame: result Frame#: %d, resultIdx: %d, "
976 "Lowest Frame#: %d, oldestBufIndex: %d",
977 resultFrameNumber, frameIndex, lowestFrameNumber, oldestBufIndex);
978 if ((lowestFrameNumber != -1) && (lowestFrameNumber < resultFrameNumber)) {
979 LOGE("Multiple frame dropped requesting cancel for frame %d, idx:%d",
980 lowestFrameNumber, oldestBufIndex);
981 stream->cancelBuffer(oldestBufIndex);
982 return;
983 } else if (lowestFrameNumber == resultFrameNumber) {
984 LOGE("Time to flush out head of list continue loop with this new super frame");
985 itr = mOutOfSequenceBuffers.erase(itr);
986 } else {
987 LOGE("Unexpected condition head of list is not the lowest frame number");
988 itr = mOutOfSequenceBuffers.erase(itr);
989 }
990 }
991 } while (1);
992 return;
993 }
994
995 /*===========================================================================
996 * FUNCTION : putStreamBufs
997 *
998 * DESCRIPTION: release the buffers allocated to the stream
999 *
1000 * PARAMETERS : NONE
1001 *
1002 * RETURN : NONE
1003 *==========================================================================*/
1004 void QCamera3YUVChannel::putStreamBufs()
1005 {
1006 QCamera3ProcessingChannel::putStreamBufs();
1007
1008 // Free allocated heap buffer.
1009 mMemory.deallocate();
1010 // Clear free heap buffer list.
1011 mFreeHeapBufferList.clear();
1012 // Clear offlinePpInfoList
1013 mOfflinePpInfoList.clear();
1014 }
1015
1016 /*===========================================================================
1017 * FUNCTION : timeoutFrame
1018 *
1019 * DESCRIPTION: Method to indicate to the channel that a given frame has taken
1020 * too long to be generated
1021 *
1022 * PARAMETERS : @frameNumber : frame number of the buffer that timed out
1023 *
1024 * RETURN : int32_t type of status
1025 * NO_ERROR -- success
1026 * non-zero failure code
1027 *==========================================================================*/
1028 int32_t QCamera3ProcessingChannel::timeoutFrame(uint32_t frameNumber)
1029 {
1030 int32_t bufIdx;
1031
1032 bufIdx = mMemory.getBufferIndex(frameNumber);
1033
1034 if (bufIdx < 0) {
1035 LOGE("%s: Buffer not found for frame:%d", __func__, frameNumber);
1036 return -1;
1037 }
1038
1039 mStreams[0]->timeoutFrame(bufIdx);
1040 return NO_ERROR;
1041 }
1042
1043 /*===========================================================================
1044 * FUNCTION : postprocFail
1045 *
1046 * DESCRIPTION: notify clients about failing post-process requests.
1047 *
1048 * PARAMETERS :
1049 * @ppBuffer : pointer to the pp buffer.
1050 *
1051 * RETURN : 0 on success
1052 * -EINVAL on invalid input
1053 *==========================================================================*/
1054 int32_t QCamera3ProcessingChannel::postprocFail(qcamera_hal3_pp_buffer_t *ppBuffer) {
1055 if (ppBuffer == nullptr) {
1056 return BAD_VALUE;
1057 }
1058
1059 if (ppBuffer->output == nullptr) {
1060 return BAD_VALUE;
1061 }
1062
1063 camera3_stream_buffer_t result = {};
1064 result.buffer = ppBuffer->output;
1065
1066 LOGE("Input frame number: %d dropped!", ppBuffer->frameNumber);
1067 result.stream = mCamera3Stream;
1068 result.status = CAMERA3_BUFFER_STATUS_ERROR;
1069 result.acquire_fence = -1;
1070 result.release_fence = -1;
1071 if (mChannelCB) {
1072 mChannelCB(NULL, &result, ppBuffer->frameNumber, false, mUserData);
1073 }
1074
1075 return OK;
1076 }
1077
1078 /*===========================================================================
1079 * FUNCTION : request
1080 *
1081 * DESCRIPTION: handle the request - either with an input buffer or a direct
1082 * output request
1083 *
1084 * PARAMETERS :
1085 * @buffer : pointer to the output buffer
1086 * @frameNumber : frame number of the request
1087 * @pInputBuffer : pointer to input buffer if an input request
1088 * @metadata : parameters associated with the request
1089 * @internalreq : boolean to indicate if this is a purely internal request
1090 * needing internal buffer allocation
1091 * @meteringonly : boolean indicating a metering-only frame (a subset of the
1092 * internal requests) that is not consumed by the postprocessor
1093 *
1094 * RETURN : 0 on a successful start of capture
1095 * -EINVAL on invalid input
1096 * -ENODEV on serious error
1097 *==========================================================================*/
1098 int32_t QCamera3ProcessingChannel::request(buffer_handle_t *buffer,
1099 uint32_t frameNumber,
1100 camera3_stream_buffer_t* pInputBuffer,
1101 metadata_buffer_t* metadata,
1102 int &indexUsed,
1103 __unused bool internalRequest = false,
1104 __unused bool meteringOnly = false)
1105 {
1106 int32_t rc = NO_ERROR;
1107 int index;
1108
1109 if (NULL == buffer || NULL == metadata) {
1110 LOGE("Invalid buffer/metadata in channel request");
1111 return BAD_VALUE;
1112 }
1113
1114 if (pInputBuffer) {
1115 //need to send to reprocessing
1116 LOGD("Got a request with input buffer, output streamType = %d", mStreamType);
1117 reprocess_config_t reproc_cfg;
1118 cam_dimension_t dim;
1119 memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
1120 memset(&dim, 0, sizeof(dim));
1121 setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim);
1122 startPostProc(reproc_cfg);
1123
1124 qcamera_fwk_input_pp_data_t *src_frame = NULL;
1125 src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
1126 sizeof(qcamera_fwk_input_pp_data_t));
1127 if (src_frame == NULL) {
1128 LOGE("No memory for src frame");
1129 return NO_MEMORY;
1130 }
1131 rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata, buffer, frameNumber);
1132 if (NO_ERROR != rc) {
1133 LOGE("Error %d while setting framework input PP data", rc);
1134 free(src_frame);
1135 return rc;
1136 }
1137 LOGH("Post-process started");
1138 m_postprocessor.processData(src_frame);
1139 } else {
1140 index = mMemory.getMatchBufIndex((void*)buffer);
1141 if(index < 0) {
1142 rc = registerBuffer(buffer, mIsType);
1143 if (NO_ERROR != rc) {
1144 LOGE("On-the-fly buffer registration failed %d",
1145 rc);
1146 return rc;
1147 }
1148
1149 index = mMemory.getMatchBufIndex((void*)buffer);
1150 if (index < 0) {
1151 LOGE("Could not find object among registered buffers");
1152 return DEAD_OBJECT;
1153 }
1154 }
1155 rc = mMemory.markFrameNumber(index, frameNumber);
1156 if(rc != NO_ERROR) {
1157 LOGE("Error marking frame number:%d for index %d", frameNumber,
1158 index);
1159 return rc;
1160 }
1161 if (m_bIsActive) {
1162 rc = mStreams[0]->bufDone(index);
1163 if(rc != NO_ERROR) {
1164 LOGE("Failed to Q new buffer to stream");
1165 mMemory.markFrameNumber(index, -1);
1166 return rc;
1167 }
1168 }
1169 indexUsed = index;
1170 }
1171 return rc;
1172 }
1173
1174 /*===========================================================================
1175 * FUNCTION : initialize
1176 *
1177 * DESCRIPTION: initialize the channel; allocates the offline metadata buffers used for input reprocess requests
1178 *
1179 * PARAMETERS : isType : type of image stabilization on the buffer
1180 *
1181 * RETURN : int32_t type of status
1182 * NO_ERROR -- success
1183 * non-zero failure code
1184 *==========================================================================*/
1185 int32_t QCamera3ProcessingChannel::initialize(__unused cam_is_type_t isType)
1186 {
1187 int32_t rc = NO_ERROR;
1188 rc = mOfflineMetaMemory.allocateAll(sizeof(metadata_buffer_t));
1189 if (rc == NO_ERROR) {
1190 Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1191 mFreeOfflineMetaBuffersList.clear();
1192 for (uint32_t i = 0; i < mNumBuffers + (MAX_REPROCESS_PIPELINE_STAGES - 1);
1193 i++) {
1194 mFreeOfflineMetaBuffersList.push_back(i);
1195 }
1196 } else {
1197 LOGE("Could not allocate offline meta buffers for input reprocess");
1198 }
1199 mOutOfSequenceBuffers.clear();
1200 return rc;
1201 }
1202
1203 /*===========================================================================
1204 * FUNCTION : registerBuffer
1205 *
1206 * DESCRIPTION: register streaming buffer to the channel object
1207 *
1208 * PARAMETERS :
1209 * @buffer : buffer to be registered
1210 * @isType : image stabilization type on the stream
1211 *
1212 * RETURN : int32_t type of status
1213 * NO_ERROR -- success
1214 * non-zero failure code
1215 *==========================================================================*/
1216 int32_t QCamera3ProcessingChannel::registerBuffer(buffer_handle_t *buffer,
1217 cam_is_type_t isType)
1218 {
1219 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_REG_BUF);
1220 int rc = 0;
1221 mIsType = isType;
1222 cam_stream_type_t streamType;
1223
1224 if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
1225 LOGE("Trying to register more buffers than initially requested");
1226 return BAD_VALUE;
1227 }
1228
1229 if (0 == m_numStreams) {
1230 rc = initialize(mIsType);
1231 if (rc != NO_ERROR) {
1232 LOGE("Couldn't initialize camera stream %d", rc);
1233 return rc;
1234 }
1235 }
1236
1237 streamType = mStreams[0]->getMyType();
1238 rc = mMemory.registerBuffer(buffer, streamType);
1239 if (ALREADY_EXISTS == rc) {
1240 return NO_ERROR;
1241 } else if (NO_ERROR != rc) {
1242 LOGE("Buffer %p couldn't be registered %d", buffer, rc);
1243 return rc;
1244 }
1245
1246 return rc;
1247 }
1248
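/*===========================================================================
 * FUNCTION : registerBufferAndGetBufDef
 *
 * DESCRIPTION: register a framework buffer with the channel memory, if not
 * already registered, and fill out its mm_camera_buf_def_t
 *
 * PARAMETERS :
 * @buffer : framework buffer handle
 * @frame : buffer definition to be filled out
 *
 * RETURN : int32_t type of status
 * NO_ERROR -- success
 * non-zero failure code
 *==========================================================================*/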
1249 int32_t QCamera3ProcessingChannel::registerBufferAndGetBufDef(buffer_handle_t *buffer,
1250 mm_camera_buf_def_t *frame)
1251 {
1252 if (buffer == nullptr || frame == nullptr) {
1253 ALOGE("%s: buffer and frame cannot be nullptr.", __FUNCTION__);
1254 return BAD_VALUE;
1255 }
1256
1257 status_t rc;
1258
1259 // Get the buffer index.
1260 int index = mMemory.getMatchBufIndex((void*)buffer);
1261 if(index < 0) {
1262 // Register the buffer if it was not registered.
1263 rc = registerBuffer(buffer, mIsType);
1264 if (rc != OK) {
1265 ALOGE("%s: Registering buffer failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1266 return rc;
1267 }
1268
1269 index = mMemory.getMatchBufIndex((void*)buffer);
1270 if (index < 0) {
1271 ALOGE("%s: Could not find object among registered buffers", __FUNCTION__);
1272 return DEAD_OBJECT;
1273 }
1274 }
1275
1276 cam_frame_len_offset_t offset = {};
1277 mStreams[0]->getFrameOffset(offset);
1278
1279 // Get the buffer def.
1280 rc = mMemory.getBufDef(offset, *frame, index, mMapStreamBuffers);
1281 if (rc != 0) {
1282 ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc);
1283 return rc;
1284 }
1285
1286 // Set the frame's stream ID because it's not set in getBufDef.
1287 frame->stream_id = mStreams[0]->getMyHandle();
1288 return 0;
1289 }
1290
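/*===========================================================================
 * FUNCTION : unregisterBuffer
 *
 * DESCRIPTION: unregister the buffer backing the given frame from the channel
 * memory
 *
 * PARAMETERS :
 * @frame : buffer definition whose buffer index identifies the buffer
 *
 * RETURN : none
 *==========================================================================*/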
1291 void QCamera3ProcessingChannel::unregisterBuffer(mm_camera_buf_def_t *frame)
1292 {
1293 if (frame == nullptr) {
1294 ALOGE("%s: frame is nullptr", __FUNCTION__);
1295 return;
1296 }
1297
1298 mMemory.unregisterBuffer(frame->buf_idx);
1299 }
1300
1301 /*===========================================================================
1302 * FUNCTION : setFwkInputPPData
1303 *
1304 * DESCRIPTION: fill out the framework src frame information for reprocessing
1305 *
1306 * PARAMETERS :
1307 * @src_frame : input pp data to be filled out
1308 * @pInputBuffer : input buffer for reprocessing
1309 * @reproc_cfg : pointer to the reprocess config
1310 * @metadata : pointer to the metadata buffer
1311 * @output_buffer : output buffer for reprocessing; could be NULL if not
1312 * framework allocated
1313 * @frameNumber : frame number of the request
1314 *
1315 * RETURN : int32_t type of status
1316 * NO_ERROR -- success
1317 * non-zero failure code
1318 *==========================================================================*/
1319 int32_t QCamera3ProcessingChannel::setFwkInputPPData(qcamera_fwk_input_pp_data_t *src_frame,
1320 camera3_stream_buffer_t *pInputBuffer, reprocess_config_t *reproc_cfg,
1321 metadata_buffer_t *metadata, buffer_handle_t *output_buffer,
1322 uint32_t frameNumber)
1323 {
1324 int32_t rc = NO_ERROR;
1325 int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
1326 if(input_index < 0) {
1327 rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType);
1328 if (NO_ERROR != rc) {
1329 LOGE("On-the-fly input buffer registration failed %d",
1330 rc);
1331 return rc;
1332 }
1333 input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
1334 if (input_index < 0) {
1335 LOGE("Could not find object among registered buffers");
1336 return DEAD_OBJECT;
1337 }
1338 }
1339 mOfflineMemory.markFrameNumber(input_index, frameNumber);
1340
1341 src_frame->src_frame = *pInputBuffer;
1342 rc = mOfflineMemory.getBufDef(reproc_cfg->input_stream_plane_info.plane_info,
1343 src_frame->input_buffer, input_index, mMapStreamBuffers);
1344 if (rc != 0) {
1345 return rc;
1346 }
1347 dumpYUV(&src_frame->input_buffer, reproc_cfg->input_stream_dim,
1348 reproc_cfg->input_stream_plane_info.plane_info, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
1349 cam_dimension_t dim = {sizeof(metadata_buffer_t), 1};
1350 cam_stream_buf_plane_info_t meta_planes;
1351 rc = mm_stream_calc_offset_metadata(&dim, &mPaddingInfo, &meta_planes);
1352 if (rc != 0) {
1353 LOGE("Metadata stream plane info calculation failed!");
1354 return rc;
1355 }
1356 uint32_t metaBufIdx;
1357 {
1358 Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1359 if (mFreeOfflineMetaBuffersList.empty()) {
1360 LOGE("mFreeOfflineMetaBuffersList is null. Fatal");
1361 return BAD_VALUE;
1362 }
1363
1364 metaBufIdx = *(mFreeOfflineMetaBuffersList.begin());
1365 mFreeOfflineMetaBuffersList.erase(mFreeOfflineMetaBuffersList.begin());
1366 LOGD("erasing %d, mFreeOfflineMetaBuffersList.size %d", metaBufIdx,
1367 mFreeOfflineMetaBuffersList.size());
1368 }
1369
1370 mOfflineMetaMemory.markFrameNumber(metaBufIdx, frameNumber);
1371
1372 mm_camera_buf_def_t meta_buf;
1373 cam_frame_len_offset_t offset = meta_planes.plane_info;
1374 rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, metaBufIdx, true /*virtualAddr*/);
1375 if (NO_ERROR != rc) {
1376 return rc;
1377 }
1378 memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
1379 src_frame->metadata_buffer = meta_buf;
1380 src_frame->reproc_config = *reproc_cfg;
1381 src_frame->output_buffer = output_buffer;
1382 src_frame->frameNumber = frameNumber;
1383 return rc;
1384 }
1385
1386 /*===========================================================================
1387 * FUNCTION : checkStreamCbErrors
1388 *
1389 * DESCRIPTION: check the stream callback for errors
1390 *
1391 * PARAMETERS :
1392 * @super_frame : the super frame with filled buffer
1393 * @stream : stream on which the buffer was requested and filled
1394 *
1395 * RETURN : int32_t type of status
1396 * NO_ERROR -- success
1397 * non-zero failure code
1398 *==========================================================================*/
1399 int32_t QCamera3ProcessingChannel::checkStreamCbErrors(mm_camera_super_buf_t *super_frame,
1400 QCamera3Stream *stream)
1401 {
1402 if (NULL == stream) {
1403 LOGE("Invalid stream");
1404 return BAD_VALUE;
1405 }
1406
1407 if(NULL == super_frame) {
1408 LOGE("Invalid Super buffer");
1409 return BAD_VALUE;
1410 }
1411
1412 if(super_frame->num_bufs != 1) {
1413 LOGE("Multiple streams are not supported");
1414 return BAD_VALUE;
1415 }
1416 if(NULL == super_frame->bufs[0]) {
1417 LOGE("Error, Super buffer frame does not contain valid buffer");
1418 return BAD_VALUE;
1419 }
1420 return NO_ERROR;
1421 }
1422
1423 /*===========================================================================
1424 * FUNCTION : getStreamSize
1425 *
1426 * DESCRIPTION: get the size from the camera3_stream_t for the channel
1427 *
1428 * PARAMETERS :
1429 * @dim : Return the size of the stream
1430 *
1431 * RETURN : int32_t type of status
1432 * NO_ERROR -- success
1433 * non-zero failure code
1434 *==========================================================================*/
1435 int32_t QCamera3ProcessingChannel::getStreamSize(cam_dimension_t &dim)
1436 {
1437 if (mCamera3Stream) {
1438 dim.width = mCamera3Stream->width;
1439 dim.height = mCamera3Stream->height;
1440 return NO_ERROR;
1441 } else {
1442 return BAD_VALUE;
1443 }
1444 }
1445
1446 /*===========================================================================
1447 * FUNCTION : getStreamBufs
1448 *
1449 * DESCRIPTION: get the buffers allocated to the stream
1450 *
1451 * PARAMETERS :
1452 * @len : buffer length
1453 *
1454 * RETURN : int32_t type of status
1455 * NO_ERROR -- success
1456 * non-zero failure code
1457 *==========================================================================*/
1458 QCamera3StreamMem* QCamera3ProcessingChannel::getStreamBufs(uint32_t /*len*/)
1459 {
1460 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GETSTREAMBUFS);
1461 return &mMemory;
1462 }
1463
1464 /*===========================================================================
1465 * FUNCTION : putStreamBufs
1466 *
1467 * DESCRIPTION: release the buffers allocated to the stream
1468 *
1469 * PARAMETERS : NONE
1470 *
1471 * RETURN : NONE
1472 *==========================================================================*/
1473 void QCamera3ProcessingChannel::putStreamBufs()
1474 {
1475 mMemory.unregisterBuffers();
1476
1477 /* Reclaim all the offline metabuffers and push them to free list */
1478 {
1479 Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1480 mFreeOfflineMetaBuffersList.clear();
1481 for (uint32_t i = 0; i < mOfflineMetaMemory.getCnt(); i++) {
1482 mFreeOfflineMetaBuffersList.push_back(i);
1483 }
1484 }
1485 }
1486
1487
1488 /*===========================================================================
1489 * FUNCTION : stop
1490 *
1491 * DESCRIPTION: stop processing channel, which will stop all streams within,
1492 * including the reprocessing channel in postprocessor.
1493 *
1494 * PARAMETERS : none
1495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
1498 * non-zero failure code
1499 *==========================================================================*/
1500 int32_t QCamera3ProcessingChannel::stop()
1501 {
1502 if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
1503 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
1504 }
1505 int32_t rc = NO_ERROR;
1506 if(!m_bIsActive) {
1507 LOGE("Attempt to stop inactive channel");
1508 return rc;
1509 }
1510
1511 m_postprocessor.stop();
1512 mPostProcStarted = false;
1513 rc |= QCamera3Channel::stop();
1514 return rc;
1515 }
1516
1517 /*===========================================================================
1518 * FUNCTION : startPostProc
1519 *
1520 * DESCRIPTION: figure out if the postprocessor needs to be restarted and if yes
1521 * start it
1522 *
1523 * PARAMETERS :
1524 * @config : reprocessing configuration; if its reprocess type differs from
1525 * the one currently running, the postprocessor is restarted with
1526 * the new configuration
1527 *
1528 * RETURN : NONE
1529 *==========================================================================*/
1530 void QCamera3ProcessingChannel::startPostProc(const reprocess_config_t &config)
1531 {
1532 if (mPostProcStarted) {
1533 if (config.reprocess_type != mReprocessType) {
1534 // If the reprocess type doesn't match, stop and start with the new type
1535 m_postprocessor.stop();
1536 mPostProcStarted = false;
1537 } else {
1538 // Return if reprocess type is the same.
1539 return;
1540 }
1541 }
1542
1543 m_postprocessor.start(config);
1544 mPostProcStarted = true;
1545 mReprocessType = config.reprocess_type;
1546 }
1547
1548 /*===========================================================================
1549 * FUNCTION : queueReprocMetadata
1550 *
1551 * DESCRIPTION: queue the reprocess metadata to the postprocessor
1552 *
1553 * PARAMETERS : metadata : the metadata corresponding to the pp frame
1554 *
1555 * RETURN : int32_t type of status
1556 * NO_ERROR -- success
1557 * non-zero failure code
1558 *==========================================================================*/
1559 int32_t QCamera3ProcessingChannel::queueReprocMetadata(mm_camera_super_buf_t *metadata)
1560 {
1561 return m_postprocessor.processPPMetadata(metadata);
1562 }
1563
1564 /*===========================================================================
1565 * FUNCTION : metadataBufDone
1566 *
1567 * DESCRIPTION: Buffer done method for a metadata buffer
1568 *
1569 * PARAMETERS :
1570 * @recvd_frame : received metadata frame
1571 *
1572 * RETURN : int32_t type of status
1573 * NO_ERROR -- success
1574 * non-zero failure code
1575 *==========================================================================*/
1576 int32_t QCamera3ProcessingChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
1577 {
1578 int32_t rc = NO_ERROR;
1579 if ((NULL == m_pMetaChannel) || (NULL == recvd_frame)) {
1580 LOGE("Metadata channel or metadata buffer invalid");
1581 return BAD_VALUE;
1582 }
1583
1584 rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);
1585
1586 return rc;
1587 }
1588
1589 /*===========================================================================
1590 * FUNCTION : translateStreamTypeAndFormat
1591 *
1592 * DESCRIPTION: translates the framework stream format into HAL stream type
1593 * and format
1594 *
1595 * PARAMETERS :
1596 * @streamType : translated stream type
1597 * @streamFormat : translated stream format
1598 * @stream : fwk stream
1599 *
1600 * RETURN : int32_t type of status
1601 * NO_ERROR -- success
1602 * non-zero failure code
1603 *==========================================================================*/
1604 int32_t QCamera3ProcessingChannel::translateStreamTypeAndFormat(camera3_stream_t *stream,
1605 cam_stream_type_t &streamType, cam_format_t &streamFormat)
1606 {
1607 switch (stream->format) {
1608 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1609 if(stream->stream_type == CAMERA3_STREAM_INPUT){
1610 streamType = CAM_STREAM_TYPE_SNAPSHOT;
1611 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
1612 stream->width, stream->height, m_bUBWCenable, mIsType);
1613 } else {
1614 streamType = CAM_STREAM_TYPE_CALLBACK;
1615 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_CALLBACK,
1616 stream->width, stream->height, m_bUBWCenable, mIsType);
1617 }
1618 break;
1619 case HAL_PIXEL_FORMAT_Y8:
1620 streamType = CAM_STREAM_TYPE_CALLBACK;
1621 streamFormat = CAM_FORMAT_Y_ONLY;
1622 break;
1623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1624 if (stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
1625 streamType = CAM_STREAM_TYPE_VIDEO;
1626 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_VIDEO,
1627 stream->width, stream->height, m_bUBWCenable, mIsType);
1628 } else if(stream->stream_type == CAMERA3_STREAM_INPUT ||
1629 stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1630 IS_USAGE_ZSL(stream->usage)){
1631 streamType = CAM_STREAM_TYPE_SNAPSHOT;
1632 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
1633 stream->width, stream->height, m_bUBWCenable, mIsType);
1634 } else {
1635 streamType = CAM_STREAM_TYPE_PREVIEW;
1636 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_PREVIEW,
1637 stream->width, stream->height, m_bUBWCenable, mIsType);
1638 }
1639 break;
1640 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1641 case HAL_PIXEL_FORMAT_RAW16:
1642 case HAL_PIXEL_FORMAT_RAW10:
1643 streamType = CAM_STREAM_TYPE_RAW;
1644 if ((HAL_DATASPACE_DEPTH == stream->data_space) &&
1645 (HAL_PIXEL_FORMAT_RAW16 == stream->format)) {
1646 streamFormat = CAM_FORMAT_META_RAW_10BIT;
1647 } else {
1648 streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
1649 }
1650 break;
1651 default:
1652 return -EINVAL;
1653 }
1654 LOGD("fwk_format = %d, streamType = %d, streamFormat = %d",
1655 stream->format, streamType, streamFormat);
1656 return NO_ERROR;
1657 }
1658
1659 /*===========================================================================
1660 * FUNCTION : setReprocConfig
1661 *
1662 * DESCRIPTION: sets the reprocessing parameters for the input buffer
1663 *
1664 * PARAMETERS :
1665 * @reproc_cfg : the configuration to be set
1666 * @pInputBuffer : pointer to the input buffer
1667 * @metadata : pointer to the reprocessing metadata buffer
1668 * @streamFormat : format of the input stream
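 * @dim          : input stream dimension, used when pInputBuffer is NULL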
1669 *
1670 * RETURN : int32_t type of status
1671 * NO_ERROR -- success
1672 * non-zero failure code
1673 *==========================================================================*/
1674 int32_t QCamera3ProcessingChannel::setReprocConfig(reprocess_config_t &reproc_cfg,
1675 camera3_stream_buffer_t *pInputBuffer,
1676 __unused metadata_buffer_t *metadata,
1677 cam_format_t streamFormat, cam_dimension_t dim)
1678 {
1679 int32_t rc = 0;
1680 reproc_cfg.padding = &mPaddingInfo;
1681 cam_stream_info_t info = {.fmt = streamFormat}; // use the passed-in format; reproc_cfg.stream_format is only assigned below
1682 //to ensure a big enough buffer size set the height and width
1683 //padding to max(height padding, width padding)
1684 if (reproc_cfg.padding->height_padding > reproc_cfg.padding->width_padding) {
1685 reproc_cfg.padding->width_padding = reproc_cfg.padding->height_padding;
1686 } else {
1687 reproc_cfg.padding->height_padding = reproc_cfg.padding->width_padding;
1688 }
1689 if (NULL != pInputBuffer) {
1690 reproc_cfg.input_stream_dim.width = (int32_t)pInputBuffer->stream->width;
1691 reproc_cfg.input_stream_dim.height = (int32_t)pInputBuffer->stream->height;
1692 } else {
1693 reproc_cfg.input_stream_dim.width = (int32_t)dim.width;
1694 reproc_cfg.input_stream_dim.height = (int32_t)dim.height;
1695 }
1696 reproc_cfg.src_channel = this;
1697 reproc_cfg.output_stream_dim.width = mCamera3Stream->width;
1698 reproc_cfg.output_stream_dim.height = mCamera3Stream->height;
1699 reproc_cfg.reprocess_type = getReprocessType();
1700 reproc_cfg.stream_format = streamFormat;
1701
1702 //offset calculation
1703 if (NULL != pInputBuffer) {
1704 rc = translateStreamTypeAndFormat(pInputBuffer->stream,
1705 reproc_cfg.stream_type, reproc_cfg.input_stream_format);
1706 if (rc != NO_ERROR) {
1707 LOGE("Stream format %d is not supported",
1708 pInputBuffer->stream->format);
1709 return rc;
1710 }
1711 } else {
1712 reproc_cfg.stream_type = mStreamType;
1713 reproc_cfg.input_stream_format = streamFormat;
1714 }
1715
1716 switch (reproc_cfg.stream_type) {
1717 case CAM_STREAM_TYPE_PREVIEW:
1718 if (getStreamByIndex(0) == NULL) {
1719 LOGE("Could not find stream");
1720 rc = -1;
1721 break;
1722 }
1723 rc = mm_stream_calc_offset_preview(
1724 getStreamByIndex(0)->getStreamInfo(),
1725 &reproc_cfg.input_stream_dim,
1726 reproc_cfg.padding,
1727 &reproc_cfg.input_stream_plane_info);
1728 break;
1729 case CAM_STREAM_TYPE_VIDEO:
1730 rc = mm_stream_calc_offset_video(reproc_cfg.stream_format,
1731 &reproc_cfg.input_stream_dim,
1732 &reproc_cfg.input_stream_plane_info);
1733 break;
1734 case CAM_STREAM_TYPE_RAW:
1735 rc = mm_stream_calc_offset_raw(&info,
1736 &reproc_cfg.input_stream_dim,
1737 reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
1738 break;
1739 case CAM_STREAM_TYPE_SNAPSHOT:
1740 case CAM_STREAM_TYPE_CALLBACK:
1741 default:
1742 rc = mm_stream_calc_offset_snapshot(streamFormat, &reproc_cfg.input_stream_dim,
1743 reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
1744 break;
1745 }
1746 if (rc != 0) {
1747 LOGE("Stream %d plane info calculation failed!", mStreamType);
1748 return rc;
1749 }
1750
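// Propagate HAL HDR bracketing parameters, when present in the metadata, into
// the reprocess config.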
1751 IF_META_AVAILABLE(cam_hdr_param_t, hdr_info, CAM_INTF_PARM_HAL_BRACKETING_HDR, metadata) {
1752 reproc_cfg.hdr_param = *hdr_info;
1753 }
1754
1755 return rc;
1756 }
1757
1758 /*===========================================================================
1759 * FUNCTION : reprocessCbRoutine
1760 *
1761 * DESCRIPTION: callback function for the reprocessed frame. This frame now
1762 * should be returned to the framework
1763 *
1764 * PARAMETERS :
1765 * @resultBuffer : buffer containing the reprocessed data
1766 * @resultFrameNumber : frame number on which the buffer was requested
1767 *
1768 * RETURN : NONE
1769 *
1770 *==========================================================================*/
1771 void QCamera3ProcessingChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer,
1772 uint32_t resultFrameNumber)
1773 {
1774 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_REPROC_CB);
1775 int rc = NO_ERROR;
1776
1777 rc = releaseOfflineMemory(resultFrameNumber);
1778 if (NO_ERROR != rc) {
1779 LOGE("Error releasing offline memory %d", rc);
1780 }
1781 /* Since reprocessing is done, send the callback to release the input buffer */
1782 if (mChannelCB) {
1783 mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
1784 }
1785 issueChannelCb(resultBuffer, resultFrameNumber);
1786
1787 return;
1788 }
1789
1790 /*===========================================================================
1791 * FUNCTION : issueChannelCb
1792 *
1793 * DESCRIPTION: function to set the result and issue channel callback
1794 *
1795 * PARAMETERS :
1796 * @resultBuffer : buffer containing the data
1797 * @resultFrameNumber : frame number on which the buffer was requested
1798 *
1799 * RETURN : NONE
1800 *
1801 *
1802 *==========================================================================*/
1803 void QCamera3ProcessingChannel::issueChannelCb(buffer_handle_t *resultBuffer,
1804 uint32_t resultFrameNumber)
1805 {
1806 camera3_stream_buffer_t result;
1807 // Use the data below to issue the framework callback
1808 result.stream = mCamera3Stream;
1809 result.buffer = resultBuffer;
1810 result.status = CAMERA3_BUFFER_STATUS_OK;
1811 result.acquire_fence = -1;
1812 result.release_fence = -1;
1813
1814 if (mChannelCB) {
1815 mChannelCB(NULL, &result, resultFrameNumber, false, mUserData);
1816 }
1817 }
1818
1819 /*===========================================================================
1820 * FUNCTION : showDebugFPS
1821 *
1822 * DESCRIPTION: Function to log the fps for preview, video, callback and raw
1823 * streams
1824 *
1825 * PARAMETERS :
1826 * @streamType : type of stream for which fps is logged
1827 * RETURN : None
1828 *==========================================================================*/
1829 void QCamera3ProcessingChannel::showDebugFPS(int32_t streamType)
1830 {
1831 double fps = 0;
1832 mFrameCount++;
1833 nsecs_t now = systemTime();
1834 nsecs_t diff = now - mLastFpsTime;
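// Log at most once every 250 ms: fps = frames received since the last log
// divided by the elapsed time.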
1835 if (diff > ms2ns(250)) {
1836 fps = (((double)(mFrameCount - mLastFrameCount)) *
1837 (double)(s2ns(1))) / (double)diff;
1838 switch(streamType) {
1839 case CAM_STREAM_TYPE_PREVIEW:
1840 LOGH("PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f: mFrameCount=%d",
1841 fps, mFrameCount);
1842 break;
1843 case CAM_STREAM_TYPE_VIDEO:
1844 LOGH("PROFILE_VIDEO_FRAMES_PER_SECOND : %.4f",
1845 fps);
1846 break;
1847 case CAM_STREAM_TYPE_CALLBACK:
1848 LOGH("PROFILE_CALLBACK_FRAMES_PER_SECOND : %.4f",
1849 fps);
1850 break;
1851 case CAM_STREAM_TYPE_RAW:
1852 LOGH("PROFILE_RAW_FRAMES_PER_SECOND : %.4f",
1853 fps);
1854 break;
1855 default:
1856 LOGH("logging not supported for the stream");
1857 break;
1858 }
1859 mLastFpsTime = now;
1860 mLastFrameCount = mFrameCount;
1861 }
1862 }
1863
1864 /*===========================================================================
1865 * FUNCTION : releaseOfflineMemory
1866 *
1867 * DESCRIPTION: function to clean up the offline memory used for input reprocess
1868 *
1869 * PARAMETERS :
1870 * @resultFrameNumber : frame number on which the buffer was requested
1871 *
1872 * RETURN : int32_t type of status
1873 * NO_ERROR -- success
1874 * non-zero failure code
1875 *
1876 *
1877 *==========================================================================*/
1878 int32_t QCamera3ProcessingChannel::releaseOfflineMemory(uint32_t resultFrameNumber)
1879 {
1880 int32_t rc = NO_ERROR;
1881 int32_t inputBufIndex =
1882 mOfflineMemory.getGrallocBufferIndex(resultFrameNumber);
1883 if (0 <= inputBufIndex) {
1884 rc = mOfflineMemory.unregisterBuffer(inputBufIndex);
1885 } else {
1886 LOGW("Could not find offline input buffer, resultFrameNumber %d",
1887 resultFrameNumber);
1888 }
1889 if (rc != NO_ERROR) {
1890 LOGE("Failed to unregister offline input buffer");
1891 }
1892
1893 int32_t metaBufIndex =
1894 mOfflineMetaMemory.getHeapBufferIndex(resultFrameNumber);
1895 if (0 <= metaBufIndex) {
1896 Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1897 mFreeOfflineMetaBuffersList.push_back((uint32_t)metaBufIndex);
1898 } else {
1899 LOGW("Could not find offline meta buffer, resultFrameNumber %d",
1900 resultFrameNumber);
1901 }
1902
1903 return rc;
1904 }
1905
1906 /* Regular Channel methods */
1907 /*===========================================================================
1908 * FUNCTION : QCamera3RegularChannel
1909 *
1910 * DESCRIPTION: constructor of QCamera3RegularChannel
1911 *
1912 * PARAMETERS :
1913 * @cam_handle : camera handle
1914 * @cam_ops : ptr to camera ops table
1915 * @cb_routine : callback routine to frame aggregator
1916 * @stream : camera3_stream_t structure
1917 * @stream_type: Channel stream type
1918 * @postprocess_mask: feature mask for postprocessing
1919 * @metadataChannel : metadata channel for the session
1920 * @numBuffers : number of max dequeued buffers
1921 *
1922 * RETURN : none
1923 *==========================================================================*/
1924 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
1925 uint32_t channel_handle,
1926 mm_camera_ops_t *cam_ops,
1927 channel_cb_routine cb_routine,
1928 channel_cb_buffer_err cb_buffer_err,
1929 cam_padding_info_t *paddingInfo,
1930 void *userData,
1931 camera3_stream_t *stream,
1932 cam_stream_type_t stream_type,
1933 cam_feature_mask_t postprocess_mask,
1934 QCamera3Channel *metadataChannel,
1935 uint32_t numBuffers) :
1936 QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops,
1937 cb_routine, cb_buffer_err, paddingInfo, userData, stream, stream_type,
1938 postprocess_mask, metadataChannel, numBuffers),
1939 mBatchSize(0),
1940 mRotation(ROTATE_0)
1941 {
1942 }
1943
1944 /*===========================================================================
1945 * FUNCTION : ~QCamera3RegularChannel
1946 *
1947 * DESCRIPTION: destructor of QCamera3RegularChannel
1948 *
1949 * PARAMETERS : none
1950 *
1951 * RETURN : none
1952 *==========================================================================*/
1953 QCamera3RegularChannel::~QCamera3RegularChannel()
1954 {
1955 destroy();
1956 }
1957
1958 /*===========================================================================
1959 * FUNCTION : initialize
1960 *
1961 * DESCRIPTION: Initialize and add camera channel & stream
1962 *
1963 * PARAMETERS :
1964 * @isType : type of image stabilization required on this stream
1965 *
1966 * RETURN : int32_t type of status
1967 * NO_ERROR -- success
1968 * non-zero failure code
1969 *==========================================================================*/
1970
1971 int32_t QCamera3RegularChannel::initialize(cam_is_type_t isType)
1972 {
1973 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REG_CH_INIT);
1974 int32_t rc = NO_ERROR;
1975
1976 cam_dimension_t streamDim;
1977
1978 if (NULL == mCamera3Stream) {
1979 LOGE("Camera stream uninitialized");
1980 return NO_INIT;
1981 }
1982
1983 if (1 <= m_numStreams) {
1984 // Only one stream per channel supported in v3 HAL
1985 return NO_ERROR;
1986 }
1987
1988 mIsType = isType;
1989
1990 rc = translateStreamTypeAndFormat(mCamera3Stream, mStreamType,
1991 mStreamFormat);
1992 if (rc != NO_ERROR) {
1993 return -EINVAL;
1994 }
1995
1996
1997 if ((mStreamType == CAM_STREAM_TYPE_VIDEO) ||
1998 (mStreamType == CAM_STREAM_TYPE_PREVIEW)) {
1999 if ((mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) &&
2000 ((mPostProcMask & CAM_QCOM_FEATURE_ROTATION) == 0)) {
2001 LOGE("attempting rotation %d when rotation is disabled",
2002 mCamera3Stream->rotation);
2003 return -EINVAL;
2004 }
2005
2006 switch (mCamera3Stream->rotation) {
2007 case CAMERA3_STREAM_ROTATION_0:
2008 mRotation = ROTATE_0;
2009 break;
2010 case CAMERA3_STREAM_ROTATION_90: {
2011 mRotation = ROTATE_90;
2012 break;
2013 }
2014 case CAMERA3_STREAM_ROTATION_180:
2015 mRotation = ROTATE_180;
2016 break;
2017 case CAMERA3_STREAM_ROTATION_270: {
2018 mRotation = ROTATE_270;
2019 break;
2020 }
2021 default:
2022 LOGE("Unknown rotation: %d",
2023 mCamera3Stream->rotation);
2024 return -EINVAL;
2025 }
2026
2027 // The Camera3/HAL3 spec expects counter-clockwise rotation, but the CPP HW
2028 // does clockwise rotation, so swap 90 and 270 here.
2029 if (mRotation == ROTATE_90) {
2030 mRotation = ROTATE_270;
2031 } else if (mRotation == ROTATE_270) {
2032 mRotation = ROTATE_90;
2033 }
2034
2035 } else if (mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) {
2036 LOGE("Rotation %d is not supported by stream type %d",
2037 mCamera3Stream->rotation,
2038 mStreamType);
2039 return -EINVAL;
2040 }
2041
2042 streamDim.width = mCamera3Stream->width;
2043 streamDim.height = mCamera3Stream->height;
2044
2045 LOGD("batch size is %d", mBatchSize);
2046 rc = QCamera3Channel::addStream(mStreamType,
2047 mStreamFormat,
2048 streamDim,
2049 mRotation,
2050 mNumBufs,
2051 mPostProcMask,
2052 mIsType,
2053 mBatchSize);
2054
2055 return rc;
2056 }
2057
2058 /*===========================================================================
2059 * FUNCTION : setBatchSize
2060 *
2061 * DESCRIPTION: Set batch size for the channel.
2062 *
2063 * PARAMETERS :
2064 * @batchSize : Number of image buffers in a batch
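 *               (editorial note: values > 1 are typically used for HFR/batch mode)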
2065 *
2066 * RETURN : int32_t type of status
2067 * NO_ERROR -- success always
2068 * non-zero failure code
2069 *==========================================================================*/
2070 int32_t QCamera3RegularChannel::setBatchSize(uint32_t batchSize)
2071 {
2072 int32_t rc = NO_ERROR;
2073
2074 mBatchSize = batchSize;
2075 LOGD("Batch size set: %d", mBatchSize);
2076 return rc;
2077 }
2078
2079 /*===========================================================================
2080 * FUNCTION : getStreamTypeMask
2081 *
2082 * DESCRIPTION: Get bit mask of all stream types in this channel.
2083 * If stream is not initialized, then generate mask based on
2084 * local streamType
2085 *
2086 * PARAMETERS : None
2087 *
2088 * RETURN : Bit mask of all stream types in this channel
2089 *==========================================================================*/
2090 uint32_t QCamera3RegularChannel::getStreamTypeMask()
2091 {
2092 if (mStreams[0]) {
2093 return QCamera3Channel::getStreamTypeMask();
2094 } else {
2095 return (1U << mStreamType);
2096 }
2097 }
2098
2099 /*===========================================================================
2100 * FUNCTION : queueBatchBuf
2101 *
2102 * DESCRIPTION: queue batch container to downstream
2103 *
2104 * PARAMETERS :
2105 *
2106 * RETURN : int32_t type of status
2107 * NO_ERROR -- success always
2108 * non-zero failure code
2109 *==========================================================================*/
2110 int32_t QCamera3RegularChannel::queueBatchBuf()
2111 {
2112 int32_t rc = NO_ERROR;
2113
2114 if (mStreams[0]) {
2115 rc = mStreams[0]->queueBatchBuf();
2116 }
2117 if (rc != NO_ERROR) {
2118 LOGE("stream->queueBatchBuf failed");
2119 }
2120 return rc;
2121 }
2122
2123 /*===========================================================================
2124 * FUNCTION : request
2125 *
2126 * DESCRIPTION: process a request from camera service. Stream on if necessary.
2127 *
2128 * PARAMETERS :
2129 * @buffer : buffer to be filled for this request
2130 *
2131 * RETURN : 0 on a success start of capture
2132 * -EINVAL on invalid input
2133 * -ENODEV on serious error
2134 *==========================================================================*/
2135 int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber, int &indexUsed)
2136 {
2137 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REG_CH_REQ);
2138 //FIX ME: Return buffer back in case of failures below.
2139
2140 int32_t rc = NO_ERROR;
2141 int index;
2142
2143 if (NULL == buffer) {
2144 LOGE("Invalid buffer in channel request");
2145 return BAD_VALUE;
2146 }
2147
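// Framework gralloc buffers are registered with the stream lazily: if this
// buffer has not been seen before, register it now and look it up again.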
2148 index = mMemory.getMatchBufIndex((void*)buffer);
2149 if(index < 0) {
2150 rc = registerBuffer(buffer, mIsType);
2151 if (NO_ERROR != rc) {
2152 LOGE("On-the-fly buffer registration failed %d",
2153 rc);
2154 return rc;
2155 }
2156
2157 index = mMemory.getMatchBufIndex((void*)buffer);
2158 if (index < 0) {
2159 LOGE("Could not find object among registered buffers");
2160 return DEAD_OBJECT;
2161 }
2162 }
2163
2164 rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
2165 if(rc != NO_ERROR) {
2166 LOGE("Failed to mark FrameNumber:%d,idx:%d",frameNumber,index);
2167 return rc;
2168 }
2169 if (m_bIsActive) {
2170 rc = mStreams[0]->bufDone((uint32_t)index);
2171 if(rc != NO_ERROR) {
2172 LOGE("Failed to Q new buffer to stream");
2173 mMemory.markFrameNumber(index, -1);
2174 return rc;
2175 }
2176 }
2177
2178 indexUsed = index;
2179 return rc;
2180 }
2181
2182 /*===========================================================================
2183 * FUNCTION : getReprocessType
2184 *
2185 * DESCRIPTION: get the type of reprocess output supported by this channel
2186 *
2187 * PARAMETERS : NONE
2188 *
2189 * RETURN : reprocess_type_t : type of reprocess
2190 *==========================================================================*/
2191 reprocess_type_t QCamera3RegularChannel::getReprocessType()
2192 {
2193 return REPROCESS_TYPE_PRIVATE;
2194 }
2195
2196
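/* Metadata channel methods */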
2197 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
2198 uint32_t channel_handle,
2199 mm_camera_ops_t *cam_ops,
2200 channel_cb_routine cb_routine,
2201 channel_cb_buffer_err cb_buffer_err,
2202 cam_padding_info_t *paddingInfo,
2203 cam_feature_mask_t postprocess_mask,
2204 void *userData, uint32_t numBuffers) :
2205 QCamera3Channel(cam_handle, channel_handle, cam_ops,
2206 cb_routine, cb_buffer_err, paddingInfo, postprocess_mask,
2207 userData, numBuffers),
2208 mMemory(NULL), mDepthDataPresent(false)
2209 {
2210 mMapStreamBuffers = true;
2211 }
2212
2213 QCamera3MetadataChannel::~QCamera3MetadataChannel()
2214 {
2215 destroy();
2216
2217 if (mMemory) {
2218 mMemory->deallocate();
2219 delete mMemory;
2220 mMemory = NULL;
2221 }
2222 }
2223
2224 int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType)
2225 {
2226 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_METADATA_CH_INIT);
2227 int32_t rc;
2228 cam_dimension_t streamDim;
2229
2230 if (mMemory || m_numStreams > 0) {
2231 LOGE("metadata channel already initialized");
2232 return -EINVAL;
2233 }
2234
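// The metadata "stream" is modeled as a sizeof(metadata_buffer_t) x 1 byte
// buffer: one metadata_buffer_t per frame.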
2235 streamDim.width = (int32_t)sizeof(metadata_buffer_t);
2236 streamDim.height = 1;
2237
2238 mIsType = isType;
2239 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
2240 streamDim, ROTATE_0, (uint8_t)mNumBuffers, mPostProcMask, mIsType);
2241 if (rc < 0) {
2242 LOGE("addStream failed");
2243 }
2244 return rc;
2245 }
2246
2247 int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
2248 uint32_t /*frameNumber*/,
2249 int& /*indexUsed*/)
2250 {
2251 if (!m_bIsActive) {
2252 return start();
2253 }
2254 else
2255 return 0;
2256 }
2257
2258 void QCamera3MetadataChannel::streamCbRoutine(
2259 mm_camera_super_buf_t *super_frame,
2260 QCamera3Stream * /*stream*/)
2261 {
2262 ATRACE_NAME("metadata_stream_cb_routine");
2263 uint32_t requestNumber = 0;
2264 if (super_frame == NULL || super_frame->num_bufs != 1) {
2265 LOGE("super_frame is not valid");
2266 return;
2267 }
2268 if (mChannelCB) {
2269 mChannelCB(super_frame, NULL, requestNumber, false, mUserData);
2270 }
2271 }
2272
2273 QCamera3StreamMem* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
2274 {
2275 int rc;
2276 if (len < sizeof(metadata_buffer_t)) {
2277 LOGE("Metadata buffer size less than structure %u vs %zu",
2278 len,
2279 sizeof(metadata_buffer_t));
2280 return NULL;
2281 }
2282 mMemory = new QCamera3StreamMem(MIN_STREAMING_BUFFER_NUM);
2283 if (!mMemory) {
2284 LOGE("unable to create metadata memory");
2285 return NULL;
2286 }
2287 rc = mMemory->allocateAll(len);
2288 if (rc < 0) {
2289 LOGE("unable to allocate metadata memory");
2290 delete mMemory;
2291 mMemory = NULL;
2292 return NULL;
2293 }
2294 clear_metadata_buffer((metadata_buffer_t*)mMemory->getPtr(0));
2295
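// When depth data output is enabled, each metadata buffer also carries a
// separately allocated depth payload of PD_DATA_SIZE bytes (freed again in
// putStreamBufs).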
2296 for (uint32_t i = 0; i < mMemory->getCnt(); i++) {
2297 if (mMemory->valid(i)) {
2298 metadata_buffer_t *metaBuffer =
2299 static_cast<metadata_buffer_t *> (mMemory->getPtr(i));
2300 metaBuffer->depth_data.depth_data = nullptr;
2301 if (mDepthDataPresent) {
2302 metaBuffer->depth_data.depth_data =
2303 new uint8_t[PD_DATA_SIZE];
2304 }
2305 } else {
2306 LOGE("Invalid meta buffer at index: %d", i);
2307 }
2308 }
2309
2310 return mMemory;
2311 }
2312
2313 void QCamera3MetadataChannel::putStreamBufs()
2314 {
2315 for (uint32_t i = 0; i < mMemory->getCnt(); i++) {
2316 if (mMemory->valid(i)) {
2317 metadata_buffer_t *metaBuffer =
2318 static_cast<metadata_buffer_t *> (mMemory->getPtr(i));
2319 if (nullptr != metaBuffer->depth_data.depth_data) {
2320 delete [] metaBuffer->depth_data.depth_data;
2321 metaBuffer->depth_data.depth_data = nullptr;
2322 }
2323 } else {
2324 LOGE("Invalid meta buffer at index: %d", i);
2325 }
2326 }
2327
2328 mMemory->deallocate();
2329 delete mMemory;
2330 mMemory = NULL;
2331 }
2332 /*************************************************************************************/
2333 // RAW Channel related functions
2334 QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle,
2335 uint32_t channel_handle,
2336 mm_camera_ops_t *cam_ops,
2337 channel_cb_routine cb_routine,
2338 channel_cb_buffer_err cb_buffer_err,
2339 cam_padding_info_t *paddingInfo,
2340 void *userData,
2341 camera3_stream_t *stream,
2342 cam_feature_mask_t postprocess_mask,
2343 QCamera3Channel *metadataChannel,
2344 bool raw_16, uint32_t numBuffers) :
2345 QCamera3RegularChannel(cam_handle, channel_handle, cam_ops,
2346 cb_routine, cb_buffer_err, paddingInfo, userData, stream,
2347 CAM_STREAM_TYPE_RAW, postprocess_mask,
2348 metadataChannel, numBuffers),
2349 mIsRaw16(raw_16)
2350 {
2351 char prop[PROPERTY_VALUE_MAX];
2352 property_get("persist.camera.raw.debug.dump", prop, "0");
2353 mRawDump = atoi(prop);
2354 mMapStreamBuffers = (mRawDump || mIsRaw16);
2355 }
2356
2357 QCamera3RawChannel::~QCamera3RawChannel()
2358 {
2359 }
2360
2361 /*===========================================================================
2362 * FUNCTION : initialize
2363 *
2364 * DESCRIPTION: Initialize and add camera channel & stream
2365 *
2366 * PARAMETERS :
2367 * @isType : image stabilization type on the stream
2368 *
2369 * RETURN : int32_t type of status
2370 * NO_ERROR -- success
2371 * non-zero failure code
2372 *==========================================================================*/
2373
2374 int32_t QCamera3RawChannel::initialize(cam_is_type_t isType)
2375 {
2376 return QCamera3RegularChannel::initialize(isType);
2377 }
2378
2379 void QCamera3RawChannel::streamCbRoutine(
2380 mm_camera_super_buf_t *super_frame,
2381 QCamera3Stream * stream)
2382 {
2383 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_RAW_CH_STRM_CB);
2384 /* Move this back down once verified */
2385 if (mRawDump)
2386 dumpRawSnapshot(super_frame->bufs[0]);
2387
2388 if (mIsRaw16) {
2389 cam_format_t streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_RAW,
2390 mCamera3Stream->width, mCamera3Stream->height, m_bUBWCenable, mIsType);
2391 if (streamFormat == CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG)
2392 convertMipiToRaw16(super_frame->bufs[0]);
2393 else
2394 convertLegacyToRaw16(super_frame->bufs[0]);
2395
2396 //Make sure cache coherence because extra processing is done
2397 mMemory.cleanCache(super_frame->bufs[0]->buf_idx);
2398 }
2399
2400 QCamera3RegularChannel::streamCbRoutine(super_frame, stream);
2401 return;
2402 }
2403
2404 void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
2405 {
2406 QCamera3Stream *stream = getStreamByIndex(0);
2407 if (stream != NULL) {
2408 char buf[FILENAME_MAX];
2409 memset(buf, 0, sizeof(buf));
2410 cam_dimension_t dim;
2411 memset(&dim, 0, sizeof(dim));
2412 stream->getFrameDimension(dim);
2413
2414 cam_frame_len_offset_t offset;
2415 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2416 stream->getFrameOffset(offset);
2417 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"r_%d_%dx%d.raw",
2418 frame->frame_idx, offset.mp[0].stride, offset.mp[0].scanline);
2419
2420 int file_fd = open(buf, O_RDWR| O_CREAT, 0644);
2421 if (file_fd >= 0) {
2422 ssize_t written_len = write(file_fd, frame->buffer, frame->frame_len);
2423 LOGD("written number of bytes %zd", written_len);
2424 frame->cache_flags |= CPU_HAS_READ;
2425 close(file_fd);
2426 } else {
2427 LOGE("failed to open file to dump image");
2428 }
2429 } else {
2430 LOGE("Could not find stream");
2431 }
2432
2433 }
2434
2435 void QCamera3RawChannel::convertLegacyToRaw16(mm_camera_buf_def_t *frame)
2436 {
2437 // Convert image buffer from Opaque raw format to RAW16 format
2438 // 10bit Opaque raw is stored in the format of:
2439 // 0000 - p5 - p4 - p3 - p2 - p1 - p0
2440 // where p0 to p5 are 6 pixels (each is 10 bits) and the most significant
2441 // 4 bits are 0s. Each 64bit word contains 6 pixels.
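// Editorial note: pixel x of a row therefore lives in 64-bit word x/6 at bit
// offset 10*(x%6); masking with 0x3FF extracts the 10-bit value, which is
// exactly what the conversion loop below does.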
2442
2443 QCamera3Stream *stream = getStreamByIndex(0);
2444 if (stream != NULL) {
2445 cam_dimension_t dim;
2446 memset(&dim, 0, sizeof(dim));
2447 stream->getFrameDimension(dim);
2448
2449 cam_frame_len_offset_t offset;
2450 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2451 stream->getFrameOffset(offset);
2452
2453 uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U;
2454 uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
2455
2456 // In-place format conversion.
2457 // Raw16 format always occupies more memory than opaque raw10.
2458 // Convert to Raw16 by iterating through all pixels from bottom-right
2459 // to top-left of the image.
2460 // Two special notes:
2461 // 1. Cross-platform raw16's stride is 16 pixels.
2462 // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes.
2463 for (int32_t ys = dim.height - 1; ys >= 0; ys--) {
2464 uint32_t y = (uint32_t)ys;
2465 uint64_t* row_start = (uint64_t *)frame->buffer +
2466 y * (uint32_t)offset.mp[0].stride_in_bytes / 8;
2467 for (int32_t xs = dim.width - 1; xs >= 0; xs--) {
2468 uint32_t x = (uint32_t)xs;
2469 uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6)));
2470 raw16_buffer[y*raw16_stride+x] = raw16_pixel;
2471 }
2472 }
2473 } else {
2474 LOGE("Could not find stream");
2475 }
2476
2477 }
2478
2479 void QCamera3RawChannel::convertMipiToRaw16(mm_camera_buf_def_t *frame)
2480 {
2481 // Convert image buffer from mipi10 raw format to RAW16 format
2482 // mipi10 opaque raw is stored in the format of:
2483 // P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2)
2484 // 4 pixels occupy 5 bytes, no padding needed
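// Editorial note: with this packing, byte 5*(x/4) + x%4 holds bits 9:2 of
// pixel x and byte 5*(x/4) + 4 holds the low 2 bits of all four pixels in the
// group, which is how the conversion loop below unpacks them.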
2485
2486 QCamera3Stream *stream = getStreamByIndex(0);
2487 if (stream != NULL) {
2488 cam_dimension_t dim;
2489 memset(&dim, 0, sizeof(dim));
2490 stream->getFrameDimension(dim);
2491
2492 cam_frame_len_offset_t offset;
2493 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2494 stream->getFrameOffset(offset);
2495
2496 uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U;
2497 uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
2498 uint8_t first_quintuple[5];
2499 memcpy(first_quintuple, raw16_buffer, sizeof(first_quintuple));
2500
2501 // In-place format conversion.
2502 // Raw16 format always occupies more memory than opaque raw10.
2503 // Convert to Raw16 by iterating through all pixels from bottom-right
2504 // to top-left of the image.
2505 // Two special notes:
2506 // 1. Cross-platform raw16's stride is 16 pixels.
2507 // 2. mipi raw10's stride is 4 pixels, and aligned to 16 bytes.
2508 for (int32_t ys = dim.height - 1; ys >= 0; ys--) {
2509 uint32_t y = (uint32_t)ys;
2510 uint8_t* row_start = (uint8_t *)frame->buffer +
2511 y * (uint32_t)offset.mp[0].stride_in_bytes;
2512 for (int32_t xs = dim.width - 1; xs >= 0; xs--) {
2513 uint32_t x = (uint32_t)xs;
2514 uint8_t upper_8bit = row_start[5*(x/4)+x%4];
2515 uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> ((x%4) << 1)) & 0x3);
2516 uint16_t raw16_pixel =
2517 (uint16_t)(((uint16_t)upper_8bit)<<2 |
2518 (uint16_t)lower_2bit);
2519 raw16_buffer[y*raw16_stride+x] = raw16_pixel;
2520 }
2521 }
2522
2523 // Re-convert the first 2 pixels of the buffer because the loop above messes
2524 // them up by reading the first quintuple while modifying it.
2525 raw16_buffer[0] = ((uint16_t)first_quintuple[0]<<2) | (first_quintuple[4] & 0x3);
2526 raw16_buffer[1] = ((uint16_t)first_quintuple[1]<<2) | ((first_quintuple[4] >> 2) & 0x3);
2527
2528 } else {
2529 LOGE("Could not find stream");
2530 }
2531
2532 }
2533
2534 /*===========================================================================
2535 * FUNCTION : getReprocessType
2536 *
2537 * DESCRIPTION: get the type of reprocess output supported by this channel
2538 *
2539 * PARAMETERS : NONE
2540 *
2541 * RETURN : reprocess_type_t : type of reprocess
2542 *==========================================================================*/
2543 reprocess_type_t QCamera3RawChannel::getReprocessType()
2544 {
2545 return REPROCESS_TYPE_RAW;
2546 }
2547
2548
2549 /*************************************************************************************/
2550 // RAW Dump Channel related functions
2551
2552 /*===========================================================================
2553 * FUNCTION : QCamera3RawDumpChannel
2554 *
2555 * DESCRIPTION: Constructor for RawDumpChannel
2556 *
2557 * PARAMETERS :
2558 * @cam_handle : Handle for Camera
2559 * @cam_ops : Function pointer table
2560 * @rawDumpSize : Dimensions for the Raw stream
2561 * @paddinginfo : Padding information for stream
2562 * @userData : Cookie for parent
2563 * @postprocess_mask : PP feature mask for this stream
2564 * @numBuffers : number of max dequeued buffers
2565 *
2566 * RETURN : NA
2567 *==========================================================================*/
2568 QCamera3RawDumpChannel::QCamera3RawDumpChannel(uint32_t cam_handle,
2569 uint32_t channel_handle,
2570 mm_camera_ops_t *cam_ops,
2571 cam_dimension_t rawDumpSize,
2572 cam_padding_info_t *paddingInfo,
2573 void *userData,
2574 cam_feature_mask_t postprocess_mask, uint32_t numBuffers) :
2575 QCamera3Channel(cam_handle, channel_handle, cam_ops, NULL,
2576 NULL, paddingInfo, postprocess_mask,
2577 userData, numBuffers),
2578 mDim(rawDumpSize),
2579 mMemory(NULL)
2580 {
2581 char prop[PROPERTY_VALUE_MAX];
2582 property_get("persist.camera.raw.dump", prop, "0");
2583 mRawDump = atoi(prop);
2584 }
2585
2586 /*===========================================================================
2587 * FUNCTION : ~QCamera3RawDumpChannel
2588 *
2589 * DESCRIPTION: Destructor for RawDumpChannel
2590 *
2591 * PARAMETERS :
2592 *
2593 * RETURN : NA
2594 *==========================================================================*/
2595
2596 QCamera3RawDumpChannel::~QCamera3RawDumpChannel()
2597 {
2598 destroy();
2599 }
2600
2601 /*===========================================================================
2602 * FUNCTION : dumpRawSnapshot
2603 *
2604 * DESCRIPTION: Helper function to dump Raw frames
2605 *
2606 * PARAMETERS :
2607 * @frame : stream buf frame to be dumped
2608 *
2609 * RETURN : NA
2610 *==========================================================================*/
2611 void QCamera3RawDumpChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
2612 {
2613 QCamera3Stream *stream = getStreamByIndex(0);
2614 if (stream != NULL) {
2615 char buf[FILENAME_MAX];
2616 struct timeval tv;
2617 struct tm timeinfo_data;
2618 struct tm *timeinfo;
2619
2620 cam_dimension_t dim;
2621 memset(&dim, 0, sizeof(dim));
2622 stream->getFrameDimension(dim);
2623
2624 cam_frame_len_offset_t offset;
2625 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2626 stream->getFrameOffset(offset);
2627
2628 gettimeofday(&tv, NULL);
2629 timeinfo = localtime_r(&tv.tv_sec, &timeinfo_data);
2630
2631 if (NULL != timeinfo) {
2632 memset(buf, 0, sizeof(buf));
2633 snprintf(buf, sizeof(buf),
2634 QCAMERA_DUMP_FRM_LOCATION
2635 "%04d-%02d-%02d-%02d-%02d-%02d-%06ld_%d_%dx%d.raw",
2636 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
2637 timeinfo->tm_mday, timeinfo->tm_hour,
2638 timeinfo->tm_min, timeinfo->tm_sec,tv.tv_usec,
2639 frame->frame_idx, dim.width, dim.height);
2640
2641 int file_fd = open(buf, O_RDWR| O_CREAT, 0777);
2642 if (file_fd >= 0) {
2643 ssize_t written_len =
2644 write(file_fd, frame->buffer, offset.frame_len);
2645 LOGD("written number of bytes %zd", written_len);
2646 frame->cache_flags |= CPU_HAS_READ;
2647 close(file_fd);
2648 } else {
2649 LOGE("failed to open file to dump image");
2650 }
2651 } else {
2652 LOGE("localtime_r() error");
2653 }
2654 } else {
2655 LOGE("Could not find stream");
2656 }
2657
2658 }
2659
2660 /*===========================================================================
2661 * FUNCTION : streamCbRoutine
2662 *
2663 * DESCRIPTION: Callback routine invoked for each frame generated for
2664 * Rawdump channel
2665 *
2666 * PARAMETERS :
2667 * @super_frame : stream buf frame generated
2668 * @stream : Underlying Stream object cookie
2669 *
2670 * RETURN : NA
2671 *==========================================================================*/
2672 void QCamera3RawDumpChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
2673 __unused QCamera3Stream *stream)
2674 {
2675 LOGD("E");
2676 if (super_frame == NULL || super_frame->num_bufs != 1) {
2677 LOGE("super_frame is not valid");
2678 return;
2679 }
2680
2681 if (mRawDump)
2682 dumpRawSnapshot(super_frame->bufs[0]);
2683
2684 bufDone(super_frame);
2685 free(super_frame);
2686 }
2687
2688 /*===========================================================================
2689 * FUNCTION : getStreamBufs
2690 *
2691 * DESCRIPTION: Callback function provided to interface to get buffers.
2692 *
2693 * PARAMETERS :
2694 * @len : Length of each buffer to be allocated
2695 *
2696 * RETURN : NULL on buffer allocation failure
2697 * QCamera3StreamMem object on success
2698 *==========================================================================*/
2699 QCamera3StreamMem* QCamera3RawDumpChannel::getStreamBufs(uint32_t len)
2700 {
2701 int rc;
2702 mMemory = new QCamera3StreamMem(mNumBuffers);
2703
2704 if (!mMemory) {
2705 LOGE("unable to create heap memory");
2706 return NULL;
2707 }
2708 rc = mMemory->allocateAll((size_t)len);
2709 if (rc < 0) {
2710 LOGE("unable to allocate heap memory");
2711 delete mMemory;
2712 mMemory = NULL;
2713 return NULL;
2714 }
2715 return mMemory;
2716 }
2717
2718 /*===========================================================================
2719 * FUNCTION : putStreamBufs
2720 *
2721 * DESCRIPTION: Callback function provided to interface to return buffers.
2722 * Although no handles are actually returned, the implicit assumption is
2723 * that the interface will no longer use the buffers and that the channel
2724 * can deallocate them if necessary.
2725 *
2726 * PARAMETERS : NA
2727 *
2728 * RETURN : NA
2729 *==========================================================================*/
2730 void QCamera3RawDumpChannel::putStreamBufs()
2731 {
2732 mMemory->deallocate();
2733 delete mMemory;
2734 mMemory = NULL;
2735 }
2736
2737 /*===========================================================================
2738 * FUNCTION : request
2739 *
2740 * DESCRIPTION: Request function used as trigger
2741 *
2742 * PARAMETERS :
2743 * @buffer : output buffer; always NULL since this is an internal channel
2744 * @frameNumber : unused since this is an internal stream
2745 *
2746 * RETURN : int32_t type of status
2747 * NO_ERROR -- success
2748 * non-zero failure code
2749 *==========================================================================*/
2750 int32_t QCamera3RawDumpChannel::request(buffer_handle_t * /*buffer*/,
2751 uint32_t /*frameNumber*/,
2752 int & /*indexUsed*/)
2753 {
2754 if (!m_bIsActive) {
2755 return QCamera3Channel::start();
2756 }
2757 else
2758 return 0;
2759 }
2760
2761 /*===========================================================================
2762 * FUNCTION : initialize
2763 *
2764 * DESCRIPTION: Initializes channel params and creates underlying stream
2765 *
2766 * PARAMETERS :
2767 * @isType : type of image stabilization required on this stream
2768 *
2769 * RETURN : int32_t type of status
2770 * NO_ERROR -- success
2771 * non-zero failure code
2772 *==========================================================================*/
2773 int32_t QCamera3RawDumpChannel::initialize(cam_is_type_t isType)
2774 {
2775 int32_t rc;
2776
2777 mIsType = isType;
2778 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_RAW,
2779 CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG, mDim, ROTATE_0, (uint8_t)mNumBuffers,
2780 mPostProcMask, mIsType);
2781 if (rc < 0) {
2782 LOGE("addStream failed");
2783 }
2784 return rc;
2785 }
2786
2787 /*************************************************************************************/
2788 // HDR+ RAW Source Channel related functions
2789 QCamera3HdrPlusRawSrcChannel::QCamera3HdrPlusRawSrcChannel(uint32_t cam_handle,
2790 uint32_t channel_handle,
2791 mm_camera_ops_t *cam_ops,
2792 cam_dimension_t rawDumpSize,
2793 cam_padding_info_t *paddingInfo,
2794 void *userData,
2795 cam_feature_mask_t postprocess_mask,
2796 std::shared_ptr<HdrPlusClient> hdrPlusClient,
2797 uint32_t hdrPlusStreamId,
2798 uint32_t numBuffers) :
2799 QCamera3RawDumpChannel(cam_handle, channel_handle, cam_ops, rawDumpSize, paddingInfo, userData,
2800 postprocess_mask, numBuffers),
2801 mHdrPlusClient(hdrPlusClient),
2802 mHdrPlusStreamId(hdrPlusStreamId)
2803 {
2804
2805 }
2806
2807 QCamera3HdrPlusRawSrcChannel::~QCamera3HdrPlusRawSrcChannel()
2808 {
2809 }
2810
2811 void QCamera3HdrPlusRawSrcChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
2812 __unused QCamera3Stream *stream)
2813 {
2814 if (super_frame == NULL || super_frame->num_bufs != 1) {
2815 LOGE("super_frame is not valid");
2816 return;
2817 }
2818
2819 // Send RAW buffer to HDR+ service
2820 sendRawToHdrPlusService(super_frame->bufs[0]);
2821
2822 bufDone(super_frame);
2823 free(super_frame);
2824 }
2825
2826 void QCamera3HdrPlusRawSrcChannel::sendRawToHdrPlusService(mm_camera_buf_def_t *frame)
2827 {
2828 QCamera3Stream *stream = getStreamByIndex(0);
2829 if (stream == nullptr) {
2830 LOGE("%s: Could not find stream.", __FUNCTION__);
2831 return;
2832 }
2833
2834 cam_frame_len_offset_t offset = {};
2835 stream->getFrameOffset(offset);
2836
2837 pbcamera::StreamBuffer buffer;
2838 buffer.streamId = mHdrPlusStreamId;
2839 buffer.data = frame->buffer;
2840 buffer.dataSize = offset.frame_len;
2841
2842 // Use the frame timestamp as mock Easel timestamp.
2843 int64_t mockEaselTimestampNs = (int64_t)frame->ts.tv_sec * 1000000000 + frame->ts.tv_nsec;
2844 mHdrPlusClient->notifyInputBuffer(buffer, mockEaselTimestampNs);
2845 }
2846
2847 /*************************************************************************************/
2848
2849 /* QCamera3YUVChannel methods */
2850
2851 /*===========================================================================
2852 * FUNCTION : QCamera3YUVChannel
2853 *
2854 * DESCRIPTION: constructor of QCamera3YUVChannel
2855 *
2856 * PARAMETERS :
2857 * @cam_handle : camera handle
2858 * @cam_ops : ptr to camera ops table
2859 * @cb_routine : callback routine to frame aggregator
2860 * @paddingInfo : padding information for the stream
2861 * @stream : camera3_stream_t structure
2862 * @stream_type: Channel stream type
2863 * @postprocess_mask: the postprocess mask for streams of this channel
2864 * @metadataChannel: handle to the metadataChannel
2865 * RETURN : none
2866 *==========================================================================*/
2867 QCamera3YUVChannel::QCamera3YUVChannel(uint32_t cam_handle,
2868 uint32_t channel_handle,
2869 mm_camera_ops_t *cam_ops,
2870 channel_cb_routine cb_routine,
2871 channel_cb_buffer_err cb_buf_err,
2872 cam_padding_info_t *paddingInfo,
2873 void *userData,
2874 camera3_stream_t *stream,
2875 cam_stream_type_t stream_type,
2876 cam_feature_mask_t postprocess_mask,
2877 QCamera3Channel *metadataChannel) :
2878 QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops,
2879 cb_routine, cb_buf_err, paddingInfo, userData, stream, stream_type,
2880 postprocess_mask, metadataChannel)
2881 {
2882
2883 mBypass = (postprocess_mask == CAM_QCOM_FEATURE_NONE);
2884 mFrameLen = 0;
2885 mEdgeMode.edge_mode = CAM_EDGE_MODE_OFF;
2886 mEdgeMode.sharpness = 0;
2887 mNoiseRedMode = CAM_NOISE_REDUCTION_MODE_OFF;
2888 memset(&mCropRegion, 0, sizeof(mCropRegion));
2889 }
2890
2891 /*===========================================================================
2892 * FUNCTION : ~QCamera3YUVChannel
2893 *
2894 * DESCRIPTION: destructor of QCamera3YUVChannel
2895 *
2896 * PARAMETERS : none
2897 *
2898 *
2899 * RETURN : none
2900 *==========================================================================*/
2901 QCamera3YUVChannel::~QCamera3YUVChannel()
2902 {
2903 // Heap buffers allocated in mMemory are freed
2904 // automatically by its destructor
2905 }
2906
2907 /*===========================================================================
2908 * FUNCTION : initialize
2909 *
2910 * DESCRIPTION: Initialize and add camera channel & stream
2911 *
2912 * PARAMETERS :
2913 * @isType : the image stabilization type
2914 *
2915 * RETURN : int32_t type of status
2916 * NO_ERROR -- success
2917 * non-zero failure code
2918 *==========================================================================*/
2919 int32_t QCamera3YUVChannel::initialize(cam_is_type_t isType)
2920 {
2921 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_INIT);
2922 int32_t rc = NO_ERROR;
2923 cam_dimension_t streamDim;
2924
2925 if (NULL == mCamera3Stream) {
2926 LOGE("Camera stream uninitialized");
2927 return NO_INIT;
2928 }
2929
2930 if (1 <= m_numStreams) {
2931 // Only one stream per channel supported in v3 HAL
2932 return NO_ERROR;
2933 }
2934
2935 mIsType = isType;
2936 rc = translateStreamTypeAndFormat(mCamera3Stream, mStreamType,
2937 mStreamFormat);
2938 if (rc != NO_ERROR) {
2939 return -EINVAL;
2940 }
2941
2942 streamDim.width = mCamera3Stream->width;
2943 streamDim.height = mCamera3Stream->height;
2944
2945 rc = QCamera3Channel::addStream(mStreamType,
2946 mStreamFormat,
2947 streamDim,
2948 ROTATE_0,
2949 mNumBufs,
2950 mPostProcMask,
2951 mIsType);
2952 if (rc < 0) {
2953 LOGE("addStream failed");
2954 return rc;
2955 }
2956
2957 cam_stream_buf_plane_info_t buf_planes;
2958 cam_padding_info_t paddingInfo = mPaddingInfo;
2959
2960 memset(&buf_planes, 0, sizeof(buf_planes));
2961 //to ensure a big enough buffer size set the height and width
2962 //padding to max(height padding, width padding)
2963 paddingInfo.width_padding = MAX(paddingInfo.width_padding, paddingInfo.height_padding);
2964 paddingInfo.height_padding = paddingInfo.width_padding;
2965
2966 rc = mm_stream_calc_offset_snapshot(mStreamFormat, &streamDim, &paddingInfo,
2967 &buf_planes);
2968 if (rc < 0) {
2969 LOGE("mm_stream_calc_offset_snapshot failed");
2970 return rc;
2971 }
2972
2973 mFrameLen = buf_planes.plane_info.frame_len;
2974
2975 if (NO_ERROR != rc) {
2976 LOGE("Initialize failed, rc = %d", rc);
2977 return rc;
2978 }
2979
2980 /* initialize offline meta memory for input reprocess */
2981 rc = QCamera3ProcessingChannel::initialize(isType);
2982 if (NO_ERROR != rc) {
2983 LOGE("Processing Channel initialize failed, rc = %d",
2984 rc);
2985 }
2986
2987 return rc;
2988 }
2989
2990 /*===========================================================================
2991 * FUNCTION : request
2992 *
2993 * DESCRIPTION: entry function for a request on a YUV stream. This function
2994 * has the logic to service a request based on its type
2995 *
2996 * PARAMETERS :
2997 * @buffer : pointer to the output buffer
2998 * @frameNumber : frame number of the request
2999 * @pInputBuffer : pointer to input buffer if an input request
3000 * @metadata : parameters associated with the request
3001 * @internalRequest : boolean to indicate if this is a purely internal request
3002 * needing internal buffer allocation
3003 * @meteringOnly : boolean indicating a metering-only frame (a subset of the
3004 * internal requests) not consumed by the postprocessor
3005 *
3006 * RETURN : 0 on a success start of capture
3007 * -EINVAL on invalid input
3008 * -ENODEV on serious error
3009 *==========================================================================*/
3010 int32_t QCamera3YUVChannel::request(buffer_handle_t *buffer,
3011 uint32_t frameNumber,
3012 camera3_stream_buffer_t* pInputBuffer,
3013 metadata_buffer_t* metadata, bool &needMetadata,
3014 int &indexUsed,
3015 __unused bool internalRequest = false,
3016 __unused bool meteringOnly = false)
3017 {
3018 int32_t rc = NO_ERROR;
3019 Mutex::Autolock lock(mOfflinePpLock);
3020
3021 LOGD("pInputBuffer is %p frame number %d", pInputBuffer, frameNumber);
3022 if (NULL == buffer || NULL == metadata) {
3023 LOGE("Invalid buffer/metadata in channel request");
3024 return BAD_VALUE;
3025 }
3026
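// Editorial note: in bypass mode every non-input request is tracked in
// mOfflinePpInfoList, even when no offline postprocessing is needed, so that
// result buffers can be handed back to the framework in request order
// (see streamCbRoutine).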
3027 PpInfo ppInfo;
3028 memset(&ppInfo, 0, sizeof(ppInfo));
3029 ppInfo.frameNumber = frameNumber;
3030 ppInfo.offlinePpFlag = false;
3031 if (mBypass && !pInputBuffer ) {
3032 ppInfo.offlinePpFlag = needsFramePostprocessing(metadata);
3033 ppInfo.output = buffer;
3034 mOfflinePpInfoList.push_back(ppInfo);
3035 }
3036
3037 LOGD("offlinePpFlag is %d", ppInfo.offlinePpFlag);
3038 needMetadata = ppInfo.offlinePpFlag;
3039 if (!ppInfo.offlinePpFlag) {
3040 // regular request
3041 return QCamera3ProcessingChannel::request(buffer, frameNumber,
3042 pInputBuffer, metadata, indexUsed);
3043 } else {
3044
3045 //we need to send this frame through the CPP
3046 //Allocate heap memory, then buf done on the buffer
3047 uint32_t bufIdx;
3048 if (mFreeHeapBufferList.empty()) {
3049 rc = mMemory.allocateOne(mFrameLen);
3050 if (rc < 0) {
3051 LOGE("Failed allocating heap buffer. Fatal");
3052 return BAD_VALUE;
3053 } else {
3054 bufIdx = (uint32_t)rc;
3055 }
3056 } else {
3057 bufIdx = *(mFreeHeapBufferList.begin());
3058 mFreeHeapBufferList.erase(mFreeHeapBufferList.begin());
3059 }
3060
3061 /* Configure and start postproc if necessary */
3062 reprocess_config_t reproc_cfg;
3063 cam_dimension_t dim;
3064 memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
3065 memset(&dim, 0, sizeof(dim));
3066 mStreams[0]->getFrameDimension(dim);
3067 setReprocConfig(reproc_cfg, NULL, metadata, mStreamFormat, dim);
3068
3069 // Start postprocessor without input buffer
3070 startPostProc(reproc_cfg);
3071
3072 LOGD("using heap buffer %d for frame %d", bufIdx, frameNumber);
3073
3074 mMemory.markFrameNumber(bufIdx, frameNumber);
3075 indexUsed = bufIdx;
3076 if (m_bIsActive) {
3077 mStreams[0]->bufDone(bufIdx);
3078 }
3079
3080 }
3081 return rc;
3082 }
3083
3084 /*===========================================================================
3085 * FUNCTION : postprocFail
3086 *
3087 * DESCRIPTION: notify clients about failing post-process requests.
3088 *
3089 * PARAMETERS :
3090 * @ppBuffer : pointer to the pp buffer.
3091 *
3092 * RETURN : 0 on success
3093 * -EINVAL on invalid input
3094 *==========================================================================*/
3095 int32_t QCamera3YUVChannel::postprocFail(qcamera_hal3_pp_buffer_t *ppBuffer) {
3096 if (ppBuffer == nullptr) {
3097 return BAD_VALUE;
3098 }
3099
3100 {
3101 List<PpInfo>::iterator ppInfo;
3102
3103 Mutex::Autolock lock(mOfflinePpLock);
3104 for (ppInfo = mOfflinePpInfoList.begin();
3105 ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
3106 if (ppInfo->frameNumber == ppBuffer->frameNumber) {
3107 break;
3108 }
3109 }
3110
3111 if (ppInfo == mOfflinePpInfoList.end()) {
3112 LOGE("Offline reprocess info for frame number: %d not found!", ppBuffer->frameNumber);
3113 return BAD_VALUE;
3114 }
3115
3116 LOGE("Failed YUV post-process on frame number: %d, removing from offline queue!",
3117 ppBuffer->frameNumber);
3118 mOfflinePpInfoList.erase(ppInfo);
3119 }
3120
3121 int32_t bufferIndex = mMemory.getHeapBufferIndex(ppBuffer->frameNumber);
3122 if (bufferIndex < 0) {
3123 LOGE("Fatal %d: no buffer index for frame number %d", bufferIndex, ppBuffer->frameNumber);
3124 return BAD_VALUE;
3125 } else {
3126 mMemory.markFrameNumber(bufferIndex, -1);
3127 mFreeHeapBufferList.push_back(bufferIndex);
3128 }
3129
3130 return QCamera3ProcessingChannel::postprocFail(ppBuffer);
3131 }
3132
3133 /*===========================================================================
3134 * FUNCTION : streamCbRoutine
3135 *
3136 * DESCRIPTION: stream callback for a filled YUV buffer; handles bypass/offline postprocess bookkeeping before forwarding the frame
3137 *
3138 * PARAMETERS :
3139 * @super_frame : the super frame with filled buffer
3140 * @stream : stream on which the buffer was requested and filled
3141 *
3142 * RETURN : none
3143 *==========================================================================*/
3144 void QCamera3YUVChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
3145 QCamera3Stream *stream)
3146 {
3147 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_STRM_CB);
3148 uint8_t frameIndex;
3149 int32_t resultFrameNumber;
3150 bool droppedInputPPBuffer = false;
3151
3152 if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
3153 LOGE("Error with the stream callback");
3154 return;
3155 }
3156
3157 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
3158 if(frameIndex >= mNumBufs) {
3159 LOGE("Error, Invalid index for buffer");
3160 stream->bufDone(frameIndex);
3161 return;
3162 }
3163
3164 if (mBypass) {
3165 {
3166 List<PpInfo>::iterator ppInfo;
3167
3168 Mutex::Autolock lock(mOfflinePpLock);
3169 resultFrameNumber = mMemory.getFrameNumber(frameIndex);
3170 for (ppInfo = mOfflinePpInfoList.begin();
3171 ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
3172 if (ppInfo->frameNumber == (uint32_t)resultFrameNumber) {
3173 break;
3174 }
3175 }
3176 LOGD("frame index %d, frame number %d", frameIndex,
3177 resultFrameNumber);
3178 //check the reprocessing required flag against the frame number
3179 if (ppInfo == mOfflinePpInfoList.end()) {
3180 LOGE("Error, request for frame number is a reprocess.");
3181 stream->bufDone(frameIndex);
3182 return;
3183 }
3184
3185 if (ppInfo->offlinePpFlag) {
3186 if (ppInfo != mOfflinePpInfoList.begin() &&
3187 IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
3188 droppedInputPPBuffer = true;
3189 mOfflinePpInfoList.erase(ppInfo);
3190 } else {
3191 mm_camera_super_buf_t *frame =
3192 (mm_camera_super_buf_t *)malloc(sizeof(
3193 mm_camera_super_buf_t));
3194 if (frame == NULL) {
3195 LOGE("Error allocating memory to save received_frame structure.");
3196 if(stream) {
3197 stream->bufDone(frameIndex);
3198 }
3199 return;
3200 }
3201
3202 *frame = *super_frame;
3203 m_postprocessor.processData(frame, ppInfo->output,
3204 resultFrameNumber);
3205 free(super_frame);
3206 return;
3207 }
3208 } else {
3209 if (ppInfo != mOfflinePpInfoList.begin()) {
3210 // There is pending reprocess buffer, cache current buffer
3211 if (ppInfo->callback_buffer != NULL) {
3212 LOGE("Fatal: cached callback_buffer is already present");
3213 }
3214 ppInfo->callback_buffer = super_frame;
3215 return;
3216 } else {
3217 mOfflinePpInfoList.erase(ppInfo);
3218 }
3219 }
3220 }
3221
3222 if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
3223 mChannelCbBufErr(this, resultFrameNumber,
3224 CAMERA3_BUFFER_STATUS_ERROR, mUserData);
3225 if (droppedInputPPBuffer) {
3226 camera3_stream_buffer_t result = {};
3227 result.buffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
3228 int32_t bufferIndex =
3229 mMemory.getHeapBufferIndex(resultFrameNumber);
3230 if (bufferIndex < 0) {
3231 LOGE("Fatal %d: no buffer index for frame number %d",
3232 bufferIndex, resultFrameNumber);
3233 } else {
3234 mMemory.markFrameNumber(bufferIndex, -1);
3235 mFreeHeapBufferList.push_back(bufferIndex);
3236 }
3237
3238 LOGE("Input frame number: %d dropped!", resultFrameNumber);
3239 result.stream = mCamera3Stream;
3240 result.status = CAMERA3_BUFFER_STATUS_ERROR;
3241 result.acquire_fence = -1;
3242 result.release_fence = -1;
3243 if (mChannelCB) {
3244 mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, false, mUserData);
3245 }
3246 free(super_frame);
3247
3248 return;
3249 }
3250 }
3251 }
3252
3253 QCamera3ProcessingChannel::streamCbRoutine(super_frame, stream);
3254
3255 /* return any pending buffers that were received out of order earlier */
3256 while((super_frame = getNextPendingCbBuffer())) {
3257 QCamera3ProcessingChannel::streamCbRoutine(super_frame, stream);
3258 }
3259
3260 return;
3261 }
3262
3263 /*===========================================================================
3264 * FUNCTION : getNextPendingCbBuffer
3265 *
3266 * DESCRIPTION: Returns the cached callback_buffer from the head entry of
3267 * mOfflinePpInfoList, if any, and removes that entry from the list
3268 *
3269 * PARAMETERS : none
3270 *
3271 * RETURN : callback_buffer
3272 *==========================================================================*/
3273 mm_camera_super_buf_t* QCamera3YUVChannel::getNextPendingCbBuffer() {
3274 mm_camera_super_buf_t* super_frame = NULL;
3275 if (mOfflinePpInfoList.size()) {
3276 if ((super_frame = mOfflinePpInfoList.begin()->callback_buffer)) {
3277 mOfflinePpInfoList.erase(mOfflinePpInfoList.begin());
3278 }
3279 }
3280 return super_frame;
3281 }
3282
3283 /*===========================================================================
3284 * FUNCTION : reprocessCbRoutine
3285 *
3286 * DESCRIPTION: callback function for the reprocessed frame. This frame now
3287 * should be returned to the framework. This same callback is
3288 * used during input reprocessing or offline postprocessing
3289 *
3290 * PARAMETERS :
3291 * @resultBuffer : buffer containing the reprocessed data
3292 * @resultFrameNumber : frame number on which the buffer was requested
3293 *
3294 * RETURN : NONE
3295 *
3296 *==========================================================================*/
3297 void QCamera3YUVChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer,
3298 uint32_t resultFrameNumber)
3299 {
3300 LOGD("E: frame number %d", resultFrameNumber);
3301 Vector<mm_camera_super_buf_t *> pendingCbs;
3302
3303 /* release the input buffer and input metadata buffer if used */
3304 if (0 > mMemory.getHeapBufferIndex(resultFrameNumber)) {
3305 /* mOfflineMemory and mOfflineMetaMemory used only for input reprocessing */
3306 int32_t rc = releaseOfflineMemory(resultFrameNumber);
3307 if (NO_ERROR != rc) {
3308 LOGE("Error releasing offline memory rc = %d", rc);
3309 }
3310 /* Since reprocessing is done, send the callback to release the input buffer */
3311 if (mChannelCB) {
3312 mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
3313 }
3314 }
3315
3316 if (mBypass) {
3317 int32_t rc = handleOfflinePpCallback(resultFrameNumber, pendingCbs);
3318 if (rc != NO_ERROR) {
3319 return;
3320 }
3321 }
3322
3323 issueChannelCb(resultBuffer, resultFrameNumber);
3324
3325 // Call all pending callbacks to return buffers
3326 for (size_t i = 0; i < pendingCbs.size(); i++) {
3327 QCamera3ProcessingChannel::streamCbRoutine(
3328 pendingCbs[i], mStreams[0]);
3329 }
3330
3331 }
3332
3333 /*===========================================================================
3334 * FUNCTION : needsFramePostprocessing
3335 *
3336 * DESCRIPTION: decide whether the current frame needs offline postprocessing,
3337 * based on the edge mode, noise reduction mode and crop region in metadata
3338 *
3339 * PARAMETERS : @meta : metadata buffer associated with the request
3340 * RETURN :
3341 * TRUE if frame needs to be postprocessed
3342 * FALSE if the frame does not need to be postprocessed
3343 *
3344 *==========================================================================*/
3345 bool QCamera3YUVChannel::needsFramePostprocessing(metadata_buffer_t *meta)
3346 {
3347 bool ppNeeded = false;
3348
3349 //sharpness
3350 IF_META_AVAILABLE(cam_edge_application_t, edgeMode,
3351 CAM_INTF_META_EDGE_MODE, meta) {
3352 mEdgeMode = *edgeMode;
3353 }
3354
3355 //wnr
3356 IF_META_AVAILABLE(uint32_t, noiseRedMode,
3357 CAM_INTF_META_NOISE_REDUCTION_MODE, meta) {
3358 mNoiseRedMode = *noiseRedMode;
3359 }
3360
3361 //crop region
3362 IF_META_AVAILABLE(cam_crop_region_t, scalerCropRegion,
3363 CAM_INTF_META_SCALER_CROP_REGION, meta) {
3364 mCropRegion = *scalerCropRegion;
3365 }
3366
3367 if ((CAM_EDGE_MODE_OFF != mEdgeMode.edge_mode) &&
3368 (CAM_EDGE_MODE_ZERO_SHUTTER_LAG != mEdgeMode.edge_mode)) {
3369 ppNeeded = true;
3370 }
3371 if ((CAM_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG != mNoiseRedMode) &&
3372 (CAM_NOISE_REDUCTION_MODE_OFF != mNoiseRedMode) &&
3373 (CAM_NOISE_REDUCTION_MODE_MINIMAL != mNoiseRedMode)) {
3374 ppNeeded = true;
3375 }
3376 if ((mCropRegion.width < (int32_t)mCamera3Stream->width) ||
3377 (mCropRegion.height < (int32_t)mCamera3Stream->height)) {
3378 ppNeeded = true;
3379 }
3380
3381 return ppNeeded;
3382 }
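/*
 * Illustrative use of needsFramePostprocessing() (hypothetical, simplified):
 * in bypass mode the per-request offline-pp flag stored in mOfflinePpInfoList
 * is derived from this check, e.g.
 *
 *   PpInfo ppInfo;
 *   ppInfo.frameNumber = frameNumber;
 *   ppInfo.offlinePpFlag = needsFramePostprocessing(metadata);
 *   mOfflinePpInfoList.push_back(ppInfo);
 */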
3383
3384 /*===========================================================================
3385 * FUNCTION : handleOfflinePpCallback
3386 *
3387 * DESCRIPTION: callback function for the reprocessed frame from offline
3388 * postprocessing.
3389 *
3390 * PARAMETERS :
3391 * @resultFrameNumber : frame number on which the buffer was requested
3392 * @pendingCbs : pending buffers to be returned first
3393 *
3394 * RETURN : int32_t type of status
3395 * NO_ERROR -- success
3396 * non-zero failure code
3397 *==========================================================================*/
3398 int32_t QCamera3YUVChannel::handleOfflinePpCallback(uint32_t resultFrameNumber,
3399 Vector<mm_camera_super_buf_t *>& pendingCbs)
3400 {
3401 Mutex::Autolock lock(mOfflinePpLock);
3402 List<PpInfo>::iterator ppInfo;
3403
3404 for (ppInfo = mOfflinePpInfoList.begin();
3405 ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
3406 if (ppInfo->frameNumber == resultFrameNumber) {
3407 break;
3408 }
3409 }
3410
3411 if (ppInfo == mOfflinePpInfoList.end()) {
3412 LOGI("Request of frame number %d is reprocessing",
3413 resultFrameNumber);
3414 return NO_ERROR;
3415 } else if (ppInfo != mOfflinePpInfoList.begin()) {
3416 LOGE("callback for frame number %d should be head of list",
3417 resultFrameNumber);
3418 return BAD_VALUE;
3419 }
3420
3421 if (ppInfo->offlinePpFlag) {
3422 // Need to get the input buffer frame index from the
3423 // mMemory object and add that to the free heap buffers list.
3424 int32_t bufferIndex =
3425 mMemory.getHeapBufferIndex(resultFrameNumber);
3426 if (bufferIndex < 0) {
3427 LOGE("Fatal %d: no buffer index for frame number %d",
3428 bufferIndex, resultFrameNumber);
3429 return BAD_VALUE;
3430 }
3431 mMemory.markFrameNumber(bufferIndex, -1);
3432 mFreeHeapBufferList.push_back(bufferIndex);
3433 //Move heap buffer into free pool and invalidate the frame number
3434 ppInfo = mOfflinePpInfoList.erase(ppInfo);
3435
3436 /* return any pending buffers that were received out of order
3437 earlier */
3438 mm_camera_super_buf_t* super_frame;
3439 while((super_frame = getNextPendingCbBuffer())) {
3440 pendingCbs.push_back(super_frame);
3441 }
3442 } else {
3443 LOGE("Fatal: request of frame number %d doesn't need"
3444 " offline postprocessing. However there is"
3445 " reprocessing callback.",
3446 resultFrameNumber);
3447 return BAD_VALUE;
3448 }
3449
3450 return NO_ERROR;
3451 }
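/*
 * Ordering note: offline postprocess callbacks are expected in request order,
 * so the matching entry must sit at the head of mOfflinePpInfoList. Regular
 * (non-reprocessed) buffers that completed while an older reprocess was still
 * pending were cached in callback_buffer and are drained via
 * getNextPendingCbBuffer() once the head entry is removed.
 */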
3452
3453 /*===========================================================================
3454 * FUNCTION : getReprocessType
3455 *
3456 * DESCRIPTION: get the type of reprocess output supported by this channel
3457 *
3458 * PARAMETERS : NONE
3459 *
3460 * RETURN : reprocess_type_t : type of reprocess
3461 *==========================================================================*/
3462 reprocess_type_t QCamera3YUVChannel::getReprocessType()
3463 {
3464 return REPROCESS_TYPE_YUV;
3465 }
3466
3467 /* QCamera3PicChannel methods */
3468
3469 /*===========================================================================
3470 * FUNCTION : jpegEvtHandle
3471 *
3472 * DESCRIPTION: Function registered to mm-jpeg-interface to handle jpeg events.
3473 * Construct result payload and call mChannelCb to deliver buffer
3474 * to framework.
3475 *
3476 * PARAMETERS :
3477 * @status : status of jpeg job
3478 * @client_hdl: jpeg client handle
3479 * @jobId : jpeg job Id
3480 * @p_output : ptr to jpeg output result struct
3481 * @userdata : user data ptr
3482 *
3483 * RETURN : none
3484 *==========================================================================*/
3485 void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
3486 uint32_t /*client_hdl*/,
3487 uint32_t jobId,
3488 mm_jpeg_output_t *p_output,
3489 void *userdata)
3490 {
3491 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_JPEG_EVT_HANDLE);
3492 buffer_handle_t *resultBuffer = NULL;
3493 buffer_handle_t *jpegBufferHandle = NULL;
3494 int resultStatus = CAMERA3_BUFFER_STATUS_OK;
3495 camera3_stream_buffer_t result;
3496 camera3_jpeg_blob_t jpegHeader;
3497
3498 QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
3499 if (obj) {
3500 //Construct payload for process_capture_result. Call mChannelCb
3501
3502 qcamera_hal3_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
3503
3504 if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
3505 LOGE("Error in jobId: (%d) with status: %d", jobId, status);
3506 resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
3507 }
3508
3509 if (NULL != job) {
3510 uint32_t bufIdx = (uint32_t)job->jpeg_settings->out_buf_index;
3511 LOGD("jpeg out_buf_index: %d", bufIdx);
3512
3513 //Construct jpeg transient header of type camera3_jpeg_blob_t
3514 //Append at the end of jpeg image of buf_filled_len size
3515
3516 jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
3517 if (JPEG_JOB_STATUS_DONE == status) {
3518 jpegHeader.jpeg_size = (uint32_t)p_output->buf_filled_len;
3519 char* jpeg_buf = (char *)p_output->buf_vaddr;
3520 cam_frame_len_offset_t offset;
3521 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
3522 mm_camera_buf_def_t *jpeg_dump_buffer = NULL;
3523 cam_dimension_t dim;
3524 dim.width = obj->mCamera3Stream->width;
3525 dim.height = obj->mCamera3Stream->height;
3526 jpeg_dump_buffer = (mm_camera_buf_def_t *)malloc(sizeof(mm_camera_buf_def_t));
3527 if(!jpeg_dump_buffer) {
3528 LOGE("Could not allocate jpeg dump buffer");
3529 } else {
3530 jpeg_dump_buffer->buffer = p_output->buf_vaddr;
3531 jpeg_dump_buffer->frame_len = p_output->buf_filled_len;
3532 jpeg_dump_buffer->frame_idx = obj->mMemory.getFrameNumber(bufIdx);
3533 obj->dumpYUV(jpeg_dump_buffer, dim, offset, QCAMERA_DUMP_FRM_OUTPUT_JPEG);
3534 free(jpeg_dump_buffer);
3535 }
3536
3537 ssize_t maxJpegSize = -1;
3538
3539 // Gralloc buffer may have additional padding for 4K page size
3540 // Follow size guidelines based on spec since framework relies
3541 // on that to reach end of buffer and with it the header
3542
3543 //Handle same as resultBuffer, but for readability
3544 jpegBufferHandle =
3545 (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
3546
3547 if (NULL != jpegBufferHandle) {
3548 maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
3549 if (maxJpegSize > obj->mMemory.getSize(bufIdx)) {
3550 maxJpegSize = obj->mMemory.getSize(bufIdx);
3551 }
3552
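// Per the camera3 JPEG contract, the camera3_jpeg_blob_t trailer is written
// into the last sizeof(jpegHeader) bytes of the usable buffer; the framework
// scans back from the end of the gralloc buffer for CAMERA3_JPEG_BLOB_ID to
// recover the actual jpeg_size.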
3553 size_t jpeg_eof_offset =
3554 (size_t)(maxJpegSize - (ssize_t)sizeof(jpegHeader));
3555 char *jpeg_eof = &jpeg_buf[jpeg_eof_offset];
3556 memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
3557 obj->mMemory.cleanInvalidateCache(bufIdx);
3558 } else {
3559 LOGE("JPEG buffer not found and index: %d",
3560 bufIdx);
3561 resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
3562 }
3563 }
3564
3565 //Use below data to issue framework callback
3566 resultBuffer =
3567 (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
3568 int32_t resultFrameNumber = obj->mMemory.getFrameNumber(bufIdx);
3569 int32_t rc = obj->mMemory.unregisterBuffer(bufIdx);
3570 if (NO_ERROR != rc) {
3571 LOGE("Error %d unregistering stream buffer %d",
3572 rc, bufIdx);
3573 }
3574
3575 result.stream = obj->mCamera3Stream;
3576 result.buffer = resultBuffer;
3577 result.status = resultStatus;
3578 result.acquire_fence = -1;
3579 result.release_fence = -1;
3580
3581 // Release any snapshot buffers before calling
3582 // the user callback. The callback can potentially
3583 // unblock pending requests to snapshot stream.
3584 int32_t snapshotIdx = -1;
3585 mm_camera_super_buf_t* src_frame = NULL;
3586
3587 if (job->src_reproc_frame)
3588 src_frame = job->src_reproc_frame;
3589 else
3590 src_frame = job->src_frame;
3591
3592 if (src_frame) {
3593 if (obj->mStreams[0]->getMyHandle() ==
3594 src_frame->bufs[0]->stream_id) {
3595 snapshotIdx = (int32_t)src_frame->bufs[0]->buf_idx;
3596 if (0 <= snapshotIdx) {
3597 Mutex::Autolock lock(obj->mFreeBuffersLock);
3598 obj->mFreeBufferList.push_back((uint32_t)snapshotIdx);
3599 }
3600 }
3601 }
3602
3603 LOGI("Issue Jpeg Callback frameNumber = %d status = %d",
3604 resultFrameNumber, resultStatus);
3605 ATRACE_ASYNC_END("SNAPSHOT", resultFrameNumber);
3606 if (obj->mChannelCB) {
3607 obj->mChannelCB(NULL,
3608 &result,
3609 (uint32_t)resultFrameNumber,
3610 false,
3611 obj->mUserData);
3612 }
3613
3614 // release internal data for jpeg job
3615 if ((NULL != job->fwk_frame) || (NULL != job->fwk_src_buffer)) {
3616 /* unregister offline input buffer */
3617 int32_t inputBufIndex =
3618 obj->mOfflineMemory.getGrallocBufferIndex((uint32_t)resultFrameNumber);
3619 if (0 <= inputBufIndex) {
3620 rc = obj->mOfflineMemory.unregisterBuffer(inputBufIndex);
3621 } else {
3622 LOGE("could not find the input buf index, frame number %d",
3623 resultFrameNumber);
3624 }
3625 if (NO_ERROR != rc) {
3626 LOGE("Error %d unregistering input buffer %d",
3627 rc, bufIdx);
3628 }
3629
3630 /* unregister offline meta buffer */
3631 int32_t metaBufIndex =
3632 obj->mOfflineMetaMemory.getHeapBufferIndex((uint32_t)resultFrameNumber);
3633 if (0 <= metaBufIndex) {
3634 Mutex::Autolock lock(obj->mFreeOfflineMetaBuffersLock);
3635 obj->mFreeOfflineMetaBuffersList.push_back((uint32_t)metaBufIndex);
3636 } else {
3637 LOGE("could not find the input meta buf index, frame number %d",
3638 resultFrameNumber);
3639 }
3640 }
3641 obj->m_postprocessor.releaseOfflineBuffers(false);
3642 obj->m_postprocessor.releaseJpegJobData(job);
3643 free(job);
3644 }
3645
3646 return;
3647 // }
3648 } else {
3649 LOGE("Null userdata in jpeg callback");
3650 }
3651 }
3652
3653 QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
3654 uint32_t channel_handle,
3655 mm_camera_ops_t *cam_ops,
3656 channel_cb_routine cb_routine,
3657 channel_cb_buffer_err cb_buf_err,
3658 cam_padding_info_t *paddingInfo,
3659 void *userData,
3660 camera3_stream_t *stream,
3661 cam_feature_mask_t postprocess_mask,
3662 __unused bool is4KVideo,
3663 bool isInputStreamConfigured,
3664 bool useY8,
3665 QCamera3Channel *metadataChannel,
3666 uint32_t numBuffers) :
3667 QCamera3ProcessingChannel(cam_handle, channel_handle,
3668 cam_ops, cb_routine, cb_buf_err, paddingInfo, userData,
3669 stream, CAM_STREAM_TYPE_SNAPSHOT,
3670 postprocess_mask, metadataChannel, numBuffers),
3671 mNumSnapshotBufs(0),
3672 mInputBufferHint(isInputStreamConfigured),
3673 mYuvMemory(NULL),
3674 mFrameLen(0)
3675 {
3676 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
3677 m_max_pic_dim = hal_obj->calcMaxJpegDim();
3678 mYuvWidth = stream->width;
3679 mYuvHeight = stream->height;
3680 mStreamType = CAM_STREAM_TYPE_SNAPSHOT;
3681 // Use same pixelformat for 4K video case
3682 mStreamFormat = useY8 ? CAM_FORMAT_Y_ONLY : getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
3683 stream->width, stream->height, m_bUBWCenable, IS_TYPE_NONE);
3684 int32_t rc = m_postprocessor.initJpeg(jpegEvtHandle, &m_max_pic_dim, this);
3685 if (rc != 0) {
3686 LOGE("Init Postprocessor failed");
3687 }
3688 }
3689
3690 /*===========================================================================
3691 * FUNCTION : flush
3692 *
3693 * DESCRIPTION: flush pic channel, which will stop all processing within, including
3694 * the reprocessing channel in postprocessor and YUV stream.
3695 *
3696 * PARAMETERS : none
3697 *
3698 * RETURN : int32_t type of status
3699 * NO_ERROR -- success
3700 * non-zero failure code
3701 *==========================================================================*/
3702 int32_t QCamera3PicChannel::flush()
3703 {
3704 int32_t rc = NO_ERROR;
3705 if(!m_bIsActive) {
3706 LOGE("Attempt to flush inactive channel");
3707 return NO_INIT;
3708 }
3709
3710 rc = m_postprocessor.flush();
3711 if (rc != 0) {
3712 LOGE("Postprocessor flush failed, rc = %d", rc);
3713 return rc;
3714 }
3715
3716 if (0 < mOfflineMetaMemory.getCnt()) {
3717 mOfflineMetaMemory.deallocate();
3718 }
3719 if (0 < mOfflineMemory.getCnt()) {
3720 mOfflineMemory.unregisterBuffers();
3721 }
3722 Mutex::Autolock lock(mFreeBuffersLock);
3723 mFreeBufferList.clear();
3724
3725 for (uint32_t i = 0; i < mCamera3Stream->max_buffers; i++) {
3726 mFreeBufferList.push_back(i);
3727 }
3728 return rc;
3729 }
3730
3731
3732 QCamera3PicChannel::~QCamera3PicChannel()
3733 {
3734 if (mYuvMemory != nullptr) {
3735 mYuvMemory->deallocate();
3736 delete mYuvMemory;
3737 mYuvMemory = nullptr;
3738 }
3739 }
3740
3741 /*===========================================================================
3742 * FUNCTION : metadataBufDone
3743 *
3744 * DESCRIPTION: Buffer done method for a metadata buffer
3745 *
3746 * PARAMETERS :
3747 * @recvd_frame : received metadata frame
3748 *
3749 * RETURN : int32_t type of status
3750 * OK -- success
3751 * non-zero failure code
3752 *==========================================================================*/
3753 int32_t QCamera3PicChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
3754 {
3755 // Check if this is an external metadata
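// "External" metadata refers to the shared_ptr<metadata_buffer_t> instances
// queued by returnYuvBufferAndEncode(); they are matched by raw pointer and
// dropped from mPendingExternalMetadata instead of being returned to the
// metadata stream.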
3756 if (recvd_frame != nullptr && recvd_frame->num_bufs == 1) {
3757 Mutex::Autolock lock(mPendingExternalMetadataLock);
3758 auto iter = mPendingExternalMetadata.begin();
3759 while (iter != mPendingExternalMetadata.end()) {
3760 if (iter->get() == recvd_frame->bufs[0]->buffer) {
3761 // Remove the metadata allocated externally.
3762 mPendingExternalMetadata.erase(iter);
3763 return OK;
3764 }
3765
3766 iter++;
3767 }
3768 }
3769
3770 // If this is not an external metadata, return the metadata.
3771 return QCamera3ProcessingChannel::metadataBufDone(recvd_frame);
3772 }
3773
3774 int32_t QCamera3PicChannel::initialize(cam_is_type_t isType)
3775 {
3776 int32_t rc = NO_ERROR;
3777 cam_dimension_t streamDim;
3778 cam_stream_type_t streamType;
3779 cam_format_t streamFormat;
3780
3781 if (NULL == mCamera3Stream) {
3782 LOGE("Camera stream uninitialized");
3783 return NO_INIT;
3784 }
3785
3786 if (1 <= m_numStreams) {
3787 // Only one stream per channel supported in v3 Hal
3788 return NO_ERROR;
3789 }
3790
3791 mIsType = isType;
3792 streamType = mStreamType;
3793 streamFormat = mStreamFormat;
3794 streamDim.width = (int32_t)mYuvWidth;
3795 streamDim.height = (int32_t)mYuvHeight;
3796
3797 mNumSnapshotBufs = mCamera3Stream->max_buffers;
3798 rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
3799 ROTATE_0, (uint8_t)mCamera3Stream->max_buffers, mPostProcMask,
3800 mIsType);
3801
3802 if (NO_ERROR != rc) {
3803 LOGE("Initialize failed, rc = %d", rc);
3804 return rc;
3805 }
3806
3807 mYuvMemory = new QCamera3StreamMem(mCamera3Stream->max_buffers);
3808 if (!mYuvMemory) {
3809 LOGE("unable to create YUV buffers");
3810 return NO_MEMORY;
3811 }
3812 cam_stream_buf_plane_info_t buf_planes;
3813 cam_padding_info_t paddingInfo = mPaddingInfo;
3814
3815 memset(&buf_planes, 0, sizeof(buf_planes));
3816 //to ensure a big enough buffer size set the height and width
3817 //padding to max(height padding, width padding)
3818 paddingInfo.width_padding = MAX(paddingInfo.width_padding, paddingInfo.height_padding);
3819 paddingInfo.height_padding = paddingInfo.width_padding;
3820
3821 rc = mm_stream_calc_offset_snapshot(mStreamFormat, &streamDim, &paddingInfo,
3822 &buf_planes);
3823 if (rc < 0) {
3824 LOGE("mm_stream_calc_offset_preview failed");
3825 return rc;
3826 }
3827 mFrameLen = buf_planes.plane_info.frame_len;
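// Cache mFrameLen so additional internal YUV buffers can be allocated on
// demand later (see request() and getYuvBufferForRequest()).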
3828
3829 /* initialize offline meta memory for input reprocess */
3830 rc = QCamera3ProcessingChannel::initialize(isType);
3831 if (NO_ERROR != rc) {
3832 LOGE("Processing Channel initialize failed, rc = %d",
3833 rc);
3834 }
3835
3836 return rc;
3837 }
3838
3839 /*===========================================================================
3840 * FUNCTION : request
3841 *
3842 * DESCRIPTION: handle the request - either with an input buffer or a direct
3843 * output request
3844 *
3845 * PARAMETERS :
3846 * @buffer : pointer to the output buffer
3847 * @frameNumber : frame number of the request
3848 * @pInputBuffer : pointer to input buffer if an input request
3849 * @metadata : parameters associated with the request
3850 * @internalRequest : boolean to indicate if this is purely an internal
3851 * request needing internal buffer allocation
3852 * @meteringOnly : boolean indicating a metering-only frame, a subset of
3853 * internal requests not consumed by the postprocessor
3854 *
3855 * RETURN : 0 on a success start of capture
3856 * -EINVAL on invalid input
3857 * -ENODEV on serious error
3858 *==========================================================================*/
3859 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
3860 uint32_t frameNumber,
3861 camera3_stream_buffer_t *pInputBuffer,
3862 metadata_buffer_t *metadata, int &indexUsed,
3863 bool internalRequest, bool meteringOnly)
3864 {
3865 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_REQ);
3866 //FIX ME: Return buffer back in case of failures below.
3867
3868 int32_t rc = NO_ERROR;
3869
3870 reprocess_config_t reproc_cfg;
3871 cam_dimension_t dim;
3872 memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
3873 //make sure to set the correct input stream dim in case of YUV size override
3874 //and recalculate the plane info
3875 dim.width = (int32_t)mYuvWidth;
3876 dim.height = (int32_t)mYuvHeight;
3877
3878 setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim);
3879
3880 // Start postprocessor
3881 startPostProc(reproc_cfg);
3882
3883 if (!internalRequest) {
3884 int index = mMemory.getMatchBufIndex((void*)buffer);
3885
3886 if(index < 0) {
3887 rc = registerBuffer(buffer, mIsType);
3888 if (NO_ERROR != rc) {
3889 LOGE("On-the-fly buffer registration failed %d",
3890 rc);
3891 return rc;
3892 }
3893
3894 index = mMemory.getMatchBufIndex((void*)buffer);
3895 if (index < 0) {
3896 LOGE("Could not find object among registered buffers");
3897 return DEAD_OBJECT;
3898 }
3899 }
3900 LOGD("buffer index %d, frameNumber: %u", index, frameNumber);
3901
3902 rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
3903
3904 // Queue jpeg settings
3905 rc = queueJpegSetting((uint32_t)index, metadata);
3906
3907 } else {
3908 LOGD("Internal request @ Picchannel");
3909 }
3910
3911
3912 if (pInputBuffer == NULL) {
3913 Mutex::Autolock lock(mFreeBuffersLock);
3914 uint32_t bufIdx;
3915 if (mFreeBufferList.empty()) {
3916 rc = mYuvMemory->allocateOne(mFrameLen, /*isCached*/false);
3917 if (rc < 0) {
3918 LOGE("Failed to allocate heap buffer. Fatal");
3919 return rc;
3920 } else {
3921 bufIdx = (uint32_t)rc;
3922 }
3923 } else {
3924 List<uint32_t>::iterator it = mFreeBufferList.begin();
3925 bufIdx = *it;
3926 mFreeBufferList.erase(it);
3927 }
3928 if (meteringOnly) {
3929 mYuvMemory->markFrameNumber(bufIdx, 0xFFFFFFFF);
3930 } else {
3931 mYuvMemory->markFrameNumber(bufIdx, frameNumber);
3932 }
3933 if (m_bIsActive) {
3934 mStreams[0]->bufDone(bufIdx);
3935 }
3936 indexUsed = bufIdx;
3937 } else {
3938 qcamera_fwk_input_pp_data_t *src_frame = NULL;
3939 src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
3940 sizeof(qcamera_fwk_input_pp_data_t));
3941 if (src_frame == NULL) {
3942 LOGE("No memory for src frame");
3943 return NO_MEMORY;
3944 }
3945 rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata,
3946 NULL /*fwk output buffer*/, frameNumber);
3947 if (NO_ERROR != rc) {
3948 LOGE("Error %d while setting framework input PP data", rc);
3949 free(src_frame);
3950 return rc;
3951 }
3952 LOGH("Post-process started");
3953 m_postprocessor.processData(src_frame);
3954 }
3955 return rc;
3956 }
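/*
 * Illustrative caller flow for request() (hypothetical, simplified):
 *
 *   int indexUsed;
 *   // Regular snapshot: only the output JPEG buffer is supplied; an internal
 *   // YUV buffer is queued to the snapshot stream.
 *   picChannel->request(jpegBuffer, frameNumber, NULL, metadata, indexUsed,
 *           false, false);
 *
 *   // Framework input reprocess: the input buffer is wrapped into a
 *   // qcamera_fwk_input_pp_data_t and handed straight to m_postprocessor.
 *   picChannel->request(jpegBuffer, frameNumber, &inputStreamBuffer, metadata,
 *           indexUsed, false, false);
 */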
3957
3958
3959
3960 /*===========================================================================
3961 * FUNCTION : dataNotifyCB
3962 *
3963 * DESCRIPTION: Channel Level callback used for super buffer data notify.
3964 * This function is registered with mm-camera-interface to handle
3965 * data notify
3966 *
3967 * PARAMETERS :
3968 * @recvd_frame : stream frame received
3969 * userdata : user data ptr
3970 *
3971 * RETURN : none
3972 *==========================================================================*/
3973 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
3974 void *userdata)
3975 {
3976 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_DATA_NOTIFY_CB);
3977 LOGD("E\n");
3978 QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
3979
3980 if (channel == NULL) {
3981 LOGE("invalid channel pointer");
3982 return;
3983 }
3984
3985 if(channel->m_numStreams != 1) {
3986 LOGE("Error: Bug: This callback assumes one stream per channel");
3987 return;
3988 }
3989
3990
3991 if(channel->mStreams[0] == NULL) {
3992 LOGE("Error: Invalid Stream object");
3993 return;
3994 }
3995
3996 channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
3997
3998 LOGD("X\n");
3999 return;
4000 }
4001
4002 /*===========================================================================
4003 * FUNCTION : streamCbRoutine
4004 *
4005 * DESCRIPTION: callback routine for a filled snapshot stream buffer
4006 *
4007 * PARAMETERS :
4008 * @super_frame : the super frame with filled buffer
4009 * @stream : stream on which the buffer was requested and filled
4010 *
4011 * RETURN : none
4012 *==========================================================================*/
4013 void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
4014 QCamera3Stream *stream)
4015 {
4016 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CAPTURE_CH_CB);
4017 //TODO
4018 //Used only for getting YUV. Jpeg callback will be sent back from channel
4019 //directly to HWI. Refer to func jpegEvtHandle
4020
4021 //Got the yuv callback. Calling yuv callback handler in PostProc
4022 uint8_t frameIndex;
4023 mm_camera_super_buf_t* frame = NULL;
4024 cam_dimension_t dim;
4025 cam_frame_len_offset_t offset;
4026
4027 memset(&dim, 0, sizeof(dim));
4028 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
4029
4030 if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
4031 LOGE("Error with the stream callback");
4032 return;
4033 }
4034
4035 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
4036 LOGD("recvd buf_idx: %u for further processing",
4037 (uint32_t)frameIndex);
4038 if(frameIndex >= mNumSnapshotBufs) {
4039 LOGE("Error, Invalid index for buffer");
4040 if(stream) {
4041 Mutex::Autolock lock(mFreeBuffersLock);
4042 mFreeBufferList.push_back(frameIndex);
4043 stream->bufDone(frameIndex);
4044 }
4045 return;
4046 }
4047
4048 if ((uint32_t)mYuvMemory->getFrameNumber(frameIndex) == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4049 LOGD("Internal Request recycle frame");
4050 Mutex::Autolock lock(mFreeBuffersLock);
4051 mFreeBufferList.push_back(frameIndex);
4052 return;
4053 }
4054
4055 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
4056 if (frame == NULL) {
4057 LOGE("Error allocating memory to save received_frame structure.");
4058 if(stream) {
4059 Mutex::Autolock lock(mFreeBuffersLock);
4060 mFreeBufferList.push_back(frameIndex);
4061 stream->bufDone(frameIndex);
4062 }
4063 return;
4064 }
4065 *frame = *super_frame;
4066 stream->getFrameDimension(dim);
4067 stream->getFrameOffset(offset);
4068 dumpYUV(frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
4069
4070 if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
4071 mChannelCbBufErr(this, mYuvMemory->getFrameNumber(frameIndex),
4072 CAMERA3_BUFFER_STATUS_ERROR, mUserData);
4073 }
4074
4075 m_postprocessor.processData(frame);
4076 free(super_frame);
4077 return;
4078 }
4079
4080 QCamera3StreamMem* QCamera3PicChannel::getStreamBufs(uint32_t /*len*/)
4081 {
4082 return mYuvMemory;
4083 }
4084
4085 void QCamera3PicChannel::putStreamBufs()
4086 {
4087 QCamera3ProcessingChannel::putStreamBufs();
4088 Mutex::Autolock lock(mFreeBuffersLock);
4089 mFreeBufferList.clear();
4090
4091 if (nullptr != mYuvMemory) {
4092 uint32_t count = mYuvMemory->getCnt();
4093 for (uint32_t i = 0; i < count; i++) {
4094 mFreeBufferList.push_back(i);
4095 }
4096 }
4097 }
4098
4099 int32_t QCamera3PicChannel::queueJpegSetting(uint32_t index, metadata_buffer_t *metadata)
4100 {
4101 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
4102 jpeg_settings_t *settings =
4103 (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t));
4104
4105 if (!settings) {
4106 LOGE("out of memory allocating jpeg_settings");
4107 return -ENOMEM;
4108 }
4109
4110 memset(settings, 0, sizeof(jpeg_settings_t));
4111
4112 settings->out_buf_index = index;
4113
4114 settings->jpeg_orientation = 0;
4115 IF_META_AVAILABLE(int32_t, orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
4116 settings->jpeg_orientation = *orientation;
4117 }
4118
4119 settings->jpeg_quality = 85;
4120 IF_META_AVAILABLE(uint32_t, quality1, CAM_INTF_META_JPEG_QUALITY, metadata) {
4121 settings->jpeg_quality = (uint8_t) *quality1;
4122 }
4123
4124 IF_META_AVAILABLE(uint32_t, quality2, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
4125 settings->jpeg_thumb_quality = (uint8_t) *quality2;
4126 }
4127
4128 IF_META_AVAILABLE(cam_dimension_t, dimension, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4129 settings->thumbnail_size = *dimension;
4130 }
4131
4132 settings->gps_timestamp_valid = 0;
4133 IF_META_AVAILABLE(int64_t, timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4134 settings->gps_timestamp = *timestamp;
4135 settings->gps_timestamp_valid = 1;
4136 }
4137
4138 settings->gps_coordinates_valid = 0;
4139 IF_META_AVAILABLE(double, coordinates, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4140 memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double));
4141 settings->gps_coordinates_valid = 1;
4142 }
4143
4144 IF_META_AVAILABLE(uint8_t, proc_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4145 memset(settings->gps_processing_method, 0,
4146 sizeof(settings->gps_processing_method));
4147 strlcpy(settings->gps_processing_method, (const char *)proc_methods,
4148 sizeof(settings->gps_processing_method));
4149 }
4150
4151 settings->hdr_snapshot = 0;
4152 IF_META_AVAILABLE(cam_hdr_param_t, hdr_info, CAM_INTF_PARM_HAL_BRACKETING_HDR, metadata) {
4153 if (hdr_info->hdr_enable) {
4154 settings->hdr_snapshot = 1;
4155 }
4156 }
4157
4158
4159 // Image description
4160 const char *eepromVersion = hal_obj->getEepromVersionInfo();
4161 const uint32_t *ldafCalib = hal_obj->getLdafCalib();
4162 const char *easelFwVersion = hal_obj->getEaselFwVersion();
4163 if ((eepromVersion && strlen(eepromVersion)) ||
4164 ldafCalib || easelFwVersion) {
4165 uint32_t len = 0;
4166 settings->image_desc_valid = true;
4167 if (eepromVersion && strlen(eepromVersion)) {
4168 len = snprintf(settings->image_desc, sizeof(settings->image_desc),
4169 "%s", eepromVersion);
4170 }
4171 if (ldafCalib) {
4172 len += snprintf(settings->image_desc + len,
4173 sizeof(settings->image_desc) - len, "L:%u-%u",
4174 ldafCalib[0], ldafCalib[1]);
4175 }
4176 if (easelFwVersion) {
4177 ALOGD("%s: Easel FW version %s", __FUNCTION__, easelFwVersion);
4178 if (len > 0 && len < sizeof(settings->image_desc)) {
4179 settings->image_desc[len] = ',';
4180 len++;
4181 }
4182 len += snprintf(settings->image_desc + len,
4183 sizeof(settings->image_desc) - len, "E-ver:%s", easelFwVersion);
4184 }
4185 }
4186
4187 return m_postprocessor.processJpegSettingData(settings);
4188 }
4189
4190
4191 void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
4192 {
4193 mYuvWidth = width;
4194 mYuvHeight = height;
4195 }
4196
4197 /*===========================================================================
4198 * FUNCTION : getReprocessType
4199 *
4200 * DESCRIPTION: get the type of reprocess output supported by this channel
4201 *
4202 * PARAMETERS : NONE
4203 *
4204 * RETURN : reprocess_type_t : type of reprocess
4205 *==========================================================================*/
4206 reprocess_type_t QCamera3PicChannel::getReprocessType()
4207 {
4208 /* a picture channel could either use the postprocessor for reprocess+jpeg
4209 or only for reprocess */
4210 reprocess_type_t expectedReprocess;
4211 if (mPostProcMask == CAM_QCOM_FEATURE_NONE || mInputBufferHint) {
4212 expectedReprocess = REPROCESS_TYPE_JPEG;
4213 } else {
4214 expectedReprocess = REPROCESS_TYPE_NONE;
4215 }
4216 LOGH("expectedReprocess from Pic Channel is %d", expectedReprocess);
4217 return expectedReprocess;
4218 }
4219
4220
4221 /*===========================================================================
4222 * FUNCTION : timeoutFrame
4223 *
4224 * DESCRIPTION: Method to indicate to channel that a given frame has taken
4225 * too long to be generated
4226 *
4227 * PARAMETERS : @frameNumber : frame number of the buffer that is timing out
4228 *
4229 * RETURN : int32_t type of status
4230 * NO_ERROR -- success
4231 * non-zero failure code
4232 *==========================================================================*/
4233 int32_t QCamera3PicChannel::timeoutFrame(uint32_t frameNumber)
4234 {
4235 int32_t bufIdx;
4236
4237 bufIdx = mYuvMemory->getBufferIndex(frameNumber);
4238
4239 if (bufIdx < 0) {
4240 LOGE("%s: Buffer not found for frame:%d", __func__, frameNumber);
4241 return -1;
4242 }
4243
4244 mStreams[0]->timeoutFrame(bufIdx);
4245
4246 return NO_ERROR;
4247 }
4248
4249 int32_t QCamera3PicChannel::getYuvBufferForRequest(mm_camera_buf_def_t *frame,
4250 uint32_t frameNumber)
4251 {
4252 uint32_t bufIdx;
4253 status_t rc;
4254
4255 Mutex::Autolock lock(mFreeBuffersLock);
4256
4257 // Get an available YUV buffer.
4258 if (mFreeBufferList.empty()) {
4259 // Allocate a buffer if no one is available.
4260 rc = mYuvMemory->allocateOne(mFrameLen, /*isCached*/false);
4261 if (rc < 0) {
4262 LOGE("Failed to allocate heap buffer. Fatal");
4263 return rc;
4264 } else {
4265 bufIdx = (uint32_t)rc;
4266 }
4267 } else {
4268 List<uint32_t>::iterator it = mFreeBufferList.begin();
4269 bufIdx = *it;
4270 mFreeBufferList.erase(it);
4271 }
4272
4273 mYuvMemory->markFrameNumber(bufIdx, frameNumber);
4274
4275 cam_frame_len_offset_t offset = {};
4276 mStreams[0]->getFrameOffset(offset);
4277
4278 // Get a buffer from YUV memory.
4279 rc = mYuvMemory->getBufDef(offset, *frame, bufIdx, mMapStreamBuffers);
4280 if (rc != 0) {
4281 ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc);
4282 return rc;
4283 }
4284
4285 // Set the frame's stream ID because it's not set in getBufDef.
4286 frame->stream_id = mStreams[0]->getMyHandle();
4287 return 0;
4288 }
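/*
 * A buffer handed out by getYuvBufferForRequest() is expected to come back
 * either through returnYuvBuffer() (discard and recycle) or through
 * returnYuvBufferAndEncode() (encode to JPEG via the postprocessor).
 */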
4289
4290 int32_t QCamera3PicChannel::returnYuvBuffer(mm_camera_buf_def_t *frame)
4291 {
4292 Mutex::Autolock lock(mFreeBuffersLock);
4293 mFreeBufferList.push_back(frame->buf_idx);
4294 return 0;
4295 }
4296
4297 int32_t QCamera3PicChannel::returnYuvBufferAndEncode(mm_camera_buf_def_t *frame,
4298 buffer_handle_t *outBuffer, uint32_t frameNumber,
4299 std::shared_ptr<metadata_buffer_t> metadata)
4300 {
4301 int32_t rc = OK;
4302
4303 // Picture stream must have been started before any request comes in.
4304 if (!m_bIsActive) {
4305 LOGE("Channel not started!!");
4306 return NO_INIT;
4307 }
4308
4309 // Set up reprocess configuration
4310 reprocess_config_t reproc_cfg = {};
4311 cam_dimension_t dim;
4312 dim.width = (int32_t)mYuvWidth;
4313 dim.height = (int32_t)mYuvHeight;
4314 setReprocConfig(reproc_cfg, nullptr, metadata.get(), mStreamFormat, dim);
4315
4316 // Override reprocess type to just JPEG encoding without reprocessing.
4317 reproc_cfg.reprocess_type = REPROCESS_TYPE_NONE;
4318
4319 // Get the index of the output jpeg buffer.
4320 int index = mMemory.getMatchBufIndex((void*)outBuffer);
4321 if(index < 0) {
4322 rc = registerBuffer(outBuffer, mIsType);
4323 if (OK != rc) {
4324 LOGE("On-the-fly buffer registration failed %d",
4325 rc);
4326 return rc;
4327 }
4328
4329 index = mMemory.getMatchBufIndex((void*)outBuffer);
4330 if (index < 0) {
4331 LOGE("Could not find object among registered buffers");
4332 return DEAD_OBJECT;
4333 }
4334 }
4335
4336 rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
4337 if (rc != OK) {
4338 ALOGE("%s: Marking frame number (%u) for jpeg buffer (%d) failed: %s (%d)", __FUNCTION__,
4339 frameNumber, index, strerror(-rc), rc);
4340 return rc;
4341 }
4342
4343 // Start postprocessor
4344 startPostProc(reproc_cfg);
4345
4346 // Queue jpeg settings
4347 rc = queueJpegSetting((uint32_t)index, metadata.get());
4348 if (rc != OK) {
4349 ALOGE("%s: Queueing Jpeg setting for frame number (%u) buffer index (%d) failed: %s (%d)",
4350 __FUNCTION__, frameNumber, index, strerror(-rc), rc);
4351 return rc;
4352 }
4353
4354 // Allocate a buffer for the YUV input. It will be freed in QCamera3PostProc.
4355 mm_camera_super_buf_t *src_frame =
4356 (mm_camera_super_buf_t *)calloc(1, sizeof(mm_camera_super_buf_t));
4357 if (src_frame == nullptr) {
4358 LOGE("%s: No memory for src frame", __FUNCTION__);
4359 return NO_MEMORY;
4360 }
4361 src_frame->camera_handle = m_camHandle;
4362 src_frame->ch_id = getMyHandle();
4363 src_frame->num_bufs = 1;
4364 src_frame->bufs[0] = frame;
4365
4366 // Start processing the YUV buffer.
4367 ALOGD("%s: %d: Post-process started", __FUNCTION__, __LINE__);
4368 rc = m_postprocessor.processData(src_frame);
4369 if (rc != OK) {
4370 ALOGE("%s: Post processing frame (frame number: %u, jpeg buffer: %d) failed: %s (%d)",
4371 __FUNCTION__, frameNumber, index, strerror(-rc), rc);
4372 return rc;
4373 }
4374
4375 // Allocate a buffer for the metadata. It will be freed in QCamera3PostProc.
4376 mm_camera_super_buf_t *metadataBuf =
4377 (mm_camera_super_buf_t *)calloc(1, sizeof(mm_camera_super_buf_t));
4378 if (metadataBuf == nullptr) {
4379 LOGE("%s: No memory for metadata", __FUNCTION__);
4380 return NO_MEMORY;
4381 }
4382 metadataBuf->camera_handle = m_camHandle;
4383 metadataBuf->ch_id = getMyHandle();
4384 metadataBuf->num_bufs = 1;
4385 metadataBuf->bufs[0] = (mm_camera_buf_def_t *)calloc(1, sizeof(mm_camera_buf_def_t));
4386 metadataBuf->bufs[0]->buffer = metadata.get();
4387
4388 // Start processing the metadata
4389 rc = m_postprocessor.processPPMetadata(metadataBuf);
4390 if (rc != OK) {
4391 ALOGE("%s: Post processing metadata (frame number: %u, jpeg buffer: %d) failed: %s (%d)",
4392 __FUNCTION__, frameNumber, index, strerror(-rc), rc);
4393 return rc;
4394 }
4395
4396 // Queue the external metadata.
4397 {
4398 Mutex::Autolock lock(mPendingExternalMetadataLock);
4399 mPendingExternalMetadata.push_back(metadata);
4400 }
4401
4402 return OK;
4403 }
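/*
 * Illustrative usage (hypothetical, simplified):
 *
 *   mm_camera_buf_def_t yuvFrame;
 *   picChannel->getYuvBufferForRequest(&yuvFrame, frameNumber);
 *   // ... an external producer fills yuvFrame with processed YUV data ...
 *   picChannel->returnYuvBufferAndEncode(&yuvFrame, jpegOutputBuffer,
 *           frameNumber, metadata);
 */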
4404
4405 /*===========================================================================
4406 * FUNCTION : QCamera3ReprocessChannel
4407 *
4408 * DESCRIPTION: constructor of QCamera3ReprocessChannel
4409 *
4410 * PARAMETERS :
4411 * @cam_handle : camera handle
4412 * @cam_ops : ptr to camera ops table
4413 * @pp_mask : post-process feature mask
4414 *
4415 * RETURN : none
4416 *==========================================================================*/
4417 QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
4418 uint32_t channel_handle,
4419 mm_camera_ops_t *cam_ops,
4420 channel_cb_routine cb_routine,
4421 channel_cb_buffer_err cb_buf_err,
4422 cam_padding_info_t *paddingInfo,
4423 cam_feature_mask_t postprocess_mask,
4424 void *userData, void *ch_hdl) :
4425 /* In case of framework reprocessing, pproc and jpeg operations could be
4426 * parallelized by allowing 1 extra buffer for reprocessing output:
4427 * ch_hdl->getNumBuffers() + 1 */
4428 QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine, cb_buf_err, paddingInfo,
4429 postprocess_mask, userData,
4430 ((QCamera3ProcessingChannel *)ch_hdl)->getNumBuffers()
4431 + (MAX_REPROCESS_PIPELINE_STAGES - 1)),
4432 inputChHandle(ch_hdl),
4433 mOfflineBuffersIndex(-1),
4434 mFrameLen(0),
4435 mReprocessType(REPROCESS_TYPE_NONE),
4436 m_pSrcChannel(NULL),
4437 m_pMetaChannel(NULL),
4438 mMemory(NULL),
4439 mGrallocMemory(0),
4440 mReprocessPerfMode(false)
4441 {
4442 memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
4443 mOfflineBuffersIndex = mNumBuffers -1;
4444 mOfflineMetaIndex = (int32_t) (2*mNumBuffers -1);
4445 }
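/*
 * mOfflineBuffersIndex and mOfflineMetaIndex seed the stream buffer index
 * ranges used when framework-supplied offline input and meta buffers are
 * mapped onto the reprocess stream, keeping them clear of the regular
 * gralloc output buffer indices.
 */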
4446
4447
4448 /*===========================================================================
4449 * FUNCTION : initialize
4450 *
4451 * DESCRIPTION: initialize the reprocess channel by adding its backing channel
4452 * to mm-camera-interface
4453 *
4454 * PARAMETERS :
4455 * @isType : image stabilization type on the stream
4456 *
4457 * RETURN : int32_t type of status
4458 * NO_ERROR -- success, non-zero failure code
4459 *==========================================================================*/
4460 int32_t QCamera3ReprocessChannel::initialize(cam_is_type_t isType)
4461 {
4462 int32_t rc = NO_ERROR;
4463 mm_camera_channel_attr_t attr;
4464
4465 memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4466 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4467 attr.max_unmatched_frames = 1;
4468
4469 m_handle = m_camOps->add_channel(m_camHandle,
4470 &attr,
4471 NULL,
4472 this);
4473 if (m_handle == 0) {
4474 LOGE("Add channel failed");
4475 return UNKNOWN_ERROR;
4476 }
4477
4478 mIsType = isType;
4479 return rc;
4480 }
4481
4482 /*===========================================================================
4483 * FUNCTION : registerBuffer
4484 *
4485 * DESCRIPTION: register streaming buffer to the channel object
4486 *
4487 * PARAMETERS :
4488 * @buffer : buffer to be registered
4489 * @isType : the image stabilization type for the buffer
4490 *
4491 * RETURN : int32_t type of status
4492 * NO_ERROR -- success
4493 * non-zero failure code
4494 *==========================================================================*/
4495 int32_t QCamera3ReprocessChannel::registerBuffer(buffer_handle_t *buffer,
4496 cam_is_type_t isType)
4497 {
4498 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_REG_BUF);
4499 int rc = 0;
4500 mIsType = isType;
4501 cam_stream_type_t streamType;
4502
4503 if (buffer == NULL) {
4504 LOGE("Error: Cannot register a NULL buffer");
4505 return BAD_VALUE;
4506 }
4507
4508 if ((uint32_t)mGrallocMemory.getCnt() > (mNumBuffers - 1)) {
4509 LOGE("Trying to register more buffers than initially requested");
4510 return BAD_VALUE;
4511 }
4512
4513 if (0 == m_numStreams) {
4514 rc = initialize(mIsType);
4515 if (rc != NO_ERROR) {
4516 LOGE("Couldn't initialize camera stream %d",
4517 rc);
4518 return rc;
4519 }
4520 }
4521
4522 streamType = mStreams[0]->getMyType();
4523 rc = mGrallocMemory.registerBuffer(buffer, streamType);
4524 if (ALREADY_EXISTS == rc) {
4525 return NO_ERROR;
4526 } else if (NO_ERROR != rc) {
4527 LOGE("Buffer %p couldn't be registered %d", buffer, rc);
4528 return rc;
4529 }
4530
4531 return rc;
4532 }
4533
4534 /*===========================================================================
4535 * FUNCTION : streamCbRoutine
4536 *
4537 * DESCRIPTION: callback routine for a filled reprocess stream buffer.
4538 * JPEG-bound frames are forwarded to the postprocessor; framework
4539 * gralloc buffers are returned through reprocessCbRoutine
4540 *
4541 * PARAMETERS :
4542 * @super_frame : the super frame with filled buffer
4543 * @stream : stream on which the buffer was requested and filled
4544 * RETURN : none
4545 *==========================================================================*/
4546 void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
4547 QCamera3Stream *stream)
4548 {
4549 //Got the pproc data callback. Now send to jpeg encoding
4550 uint8_t frameIndex;
4551 uint32_t resultFrameNumber;
4552 ATRACE_CALL();
4553 mm_camera_super_buf_t* frame = NULL;
4554 QCamera3ProcessingChannel *obj = (QCamera3ProcessingChannel *)inputChHandle;
4555 cam_dimension_t dim;
4556 cam_frame_len_offset_t offset;
4557
4558 memset(&dim, 0, sizeof(dim));
4559 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
4560 if(!super_frame) {
4561 LOGE("Invalid Super buffer");
4562 return;
4563 }
4564
4565 if(super_frame->num_bufs != 1) {
4566 LOGE("Multiple streams are not supported");
4567 return;
4568 }
4569 if(super_frame->bufs[0] == NULL ) {
4570 LOGE("Error, Super buffer frame does not contain valid buffer");
4571 return;
4572 }
4573 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
4574
4575
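// Two delivery paths: for REPROCESS_TYPE_JPEG the reprocessed output is an
// internal YUV buffer that is forwarded to the JPEG encoder via processPPData();
// otherwise the output is a framework gralloc buffer that is unregistered and
// returned through the input channel's reprocessCbRoutine().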
4576 if (mReprocessType == REPROCESS_TYPE_JPEG) {
4577 resultFrameNumber = mMemory->getFrameNumber(frameIndex);
4578 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
4579 if (frame == NULL) {
4580 LOGE("Error allocating memory to save received_frame structure.");
4581 if(stream) {
4582 stream->bufDone(frameIndex);
4583 }
4584 return;
4585 }
4586 LOGI("bufIndex: %u recvd from post proc",
4587 (uint32_t)frameIndex);
4588 *frame = *super_frame;
4589
4590 stream->getFrameDimension(dim);
4591 stream->getFrameOffset(offset);
4592 dumpYUV(frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_INPUT_JPEG);
4593 // Release offline buffers.
4594 int32_t rc = obj->releaseOfflineMemory(resultFrameNumber);
4595 if (NO_ERROR != rc) {
4596 LOGE("Error releasing offline memory %d", rc);
4597 }
4598 /* Since reprocessing is done, send the callback to release the input buffer */
4599 if (mChannelCB) {
4600 mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
4601 }
4602 obj->m_postprocessor.processPPData(frame);
4603 } else {
4604 buffer_handle_t *resultBuffer;
4605 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
4606 resultBuffer = (buffer_handle_t *)mGrallocMemory.getBufferHandle(frameIndex);
4607 resultFrameNumber = mGrallocMemory.getFrameNumber(frameIndex);
4608 int32_t rc = stream->bufRelease(frameIndex);
4609 if (NO_ERROR != rc) {
4610 LOGE("Error %d releasing stream buffer %d",
4611 rc, frameIndex);
4612 }
4613 rc = mGrallocMemory.unregisterBuffer(frameIndex);
4614 if (NO_ERROR != rc) {
4615 LOGE("Error %d unregistering stream buffer %d",
4616 rc, frameIndex);
4617 }
4618 obj->reprocessCbRoutine(resultBuffer, resultFrameNumber);
4619
4620 obj->m_postprocessor.releaseOfflineBuffers(false);
4621 qcamera_hal3_pp_data_t *pp_job = obj->m_postprocessor.dequeuePPJob(resultFrameNumber);
4622 if (pp_job != NULL) {
4623 obj->m_postprocessor.releasePPJobData(pp_job);
4624 }
4625 free(pp_job);
4626 resetToCamPerfNormal(resultFrameNumber);
4627 }
4628 free(super_frame);
4629 return;
4630 }
4631
4632 /*===========================================================================
4633 * FUNCTION : resetToCamPerfNormal
4634 *
4635 * DESCRIPTION: Set the perf mode to normal if all the priority frames
4636 * have been reprocessed
4637 *
4638 * PARAMETERS :
4639 * @frameNumber: Frame number of the reprocess completed frame
4640 *
4641 * RETURN : int32_t type of status
4642 *==========================================================================*/
4643 int32_t QCamera3ReprocessChannel::resetToCamPerfNormal(uint32_t frameNumber)
4644 {
4645 int32_t rc = NO_ERROR;
4646 bool resetToPerfNormal = false;
4647 {
4648 Mutex::Autolock lock(mPriorityFramesLock);
4649 /* remove the priority frame number from the list */
4650 for (size_t i = 0; i < mPriorityFrames.size(); i++) {
4651 if (mPriorityFrames[i] == frameNumber) {
4652 mPriorityFrames.removeAt(i);
4653 }
4654 }
4655 /* reset the perf mode if pending priority frame list is empty */
4656 if (mReprocessPerfMode && mPriorityFrames.empty()) {
4657 resetToPerfNormal = true;
4658 }
4659 }
4660 if (resetToPerfNormal) {
4661 QCamera3Stream *pStream = mStreams[0];
4662 cam_stream_parm_buffer_t param;
4663 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
4664
4665 param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE;
4666 param.perf_mode = CAM_PERF_NORMAL;
4667 rc = pStream->setParameter(param);
4668 {
4669 Mutex::Autolock lock(mPriorityFramesLock);
4670 mReprocessPerfMode = false;
4671 }
4672 }
4673 return rc;
4674 }
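/*
 * mPriorityFrames holds the frame numbers queued while the reprocess stream
 * runs in high-performance mode; once the last of them has been reprocessed,
 * the stream is switched back to CAM_PERF_NORMAL above, with mReprocessPerfMode
 * guarding against issuing that request more than once.
 */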
4675
4676 /*===========================================================================
4677 * FUNCTION : getStreamBufs
4678 *
4679 * DESCRIPTION: register the buffers of the reprocess channel
4680 *
4681 * PARAMETERS : none
4682 *
4683 * RETURN : QCamera3StreamMem *
4684 *==========================================================================*/
4685 QCamera3StreamMem* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
4686 {
4687 if (mReprocessType == REPROCESS_TYPE_JPEG) {
4688 mMemory = new QCamera3StreamMem(mNumBuffers);
4689 if (!mMemory) {
4690 LOGE("unable to create reproc memory");
4691 return NULL;
4692 }
4693 mFrameLen = len;
4694 return mMemory;
4695 }
4696 return &mGrallocMemory;
4697 }
4698
4699 /*===========================================================================
4700 * FUNCTION : putStreamBufs
4701 *
4702 * DESCRIPTION: release the reprocess channel buffers
4703 *
4704 * PARAMETERS : none
4705 *
4706 * RETURN :
4707 *==========================================================================*/
4708 void QCamera3ReprocessChannel::putStreamBufs()
4709 {
4710 if (mReprocessType == REPROCESS_TYPE_JPEG) {
4711 mMemory->deallocate();
4712 delete mMemory;
4713 mMemory = NULL;
4714 mFreeBufferList.clear();
4715 } else {
4716 mGrallocMemory.unregisterBuffers();
4717 }
4718 }
4719
4720 /*===========================================================================
4721 * FUNCTION : ~QCamera3ReprocessChannel
4722 *
4723 * DESCRIPTION: destructor of QCamera3ReprocessChannel
4724 *
4725 * PARAMETERS : none
4726 *
4727 * RETURN : none
4728 *==========================================================================*/
4729 QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
4730 {
4731 destroy();
4732
4733 if (m_handle) {
4734 m_camOps->delete_channel(m_camHandle, m_handle);
4735 LOGD("deleting channel %d", m_handle);
4736 m_handle = 0;
4737 }
4738 }
4739
4740 /*===========================================================================
4741 * FUNCTION : start
4742 *
4743 * DESCRIPTION: start reprocess channel.
4744 *
4745 * PARAMETERS :
4746 *
4747 * RETURN : int32_t type of status
4748 * NO_ERROR -- success
4749 * non-zero failure code
4750 *==========================================================================*/
4751 int32_t QCamera3ReprocessChannel::start()
4752 {
4753 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_START);
4754 int32_t rc = NO_ERROR;
4755
4756 rc = QCamera3Channel::start();
4757
4758 if (rc == NO_ERROR) {
4759 rc = m_camOps->start_channel(m_camHandle, m_handle, /*start_sensor_streaming*/true);
4760
4761 // Check failure
4762 if (rc != NO_ERROR) {
4763 LOGE("start_channel failed %d", rc);
4764 QCamera3Channel::stop();
4765 }
4766 }
4767 return rc;
4768 }
4769
4770 /*===========================================================================
4771 * FUNCTION : stop
4772 *
4773 * DESCRIPTION: stop reprocess channel.
4774 *
4775 * PARAMETERS : none
4776 *
4777 * RETURN : int32_t type of status
4778 * NO_ERROR -- success
4779 * non-zero failure code
4780 *==========================================================================*/
4781 int32_t QCamera3ReprocessChannel::stop()
4782 {
4783 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_STOP);
4784 int32_t rc = NO_ERROR;
4785
4786 rc = QCamera3Channel::stop();
4787 rc |= m_camOps->stop_channel(m_camHandle, m_handle, /*stop_channel_immediately*/false);
4788 // Unmapping the buffers
4789 unmapOfflineBuffers(true);
4790 return rc;
4791 }
4792
4793 /*===========================================================================
4794 * FUNCTION : getStreamBySrcHandle
4795 *
4796 * DESCRIPTION: find reprocess stream by its source stream handle
4797 *
4798 * PARAMETERS :
4799 * @srcHandle : source stream handle
4800 *
4801 * RETURN : ptr to reprocess stream if found. NULL if not found
4802 *==========================================================================*/
4803 QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle)
4804 {
4805 QCamera3Stream *pStream = NULL;
4806
4807 for (uint32_t i = 0; i < m_numStreams; i++) {
4808 if (mSrcStreamHandles[i] == srcHandle) {
4809 pStream = mStreams[i];
4810 break;
4811 }
4812 }
4813 return pStream;
4814 }
4815
4816 /*===========================================================================
4817 * FUNCTION : getSrcStreamBySrcHandle
4818 *
4819 * DESCRIPTION: find source stream by source stream handle
4820 *
4821 * PARAMETERS :
4822 * @srcHandle : source stream handle
4823 *
4824 * RETURN : ptr to source stream if found. NULL if not found
4825 *==========================================================================*/
4826 QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle)
4827 {
4828 QCamera3Stream *pStream = NULL;
4829
4830 if (NULL == m_pSrcChannel) {
4831 return NULL;
4832 }
4833
4834 for (uint32_t i = 0; i < m_numStreams; i++) {
4835 if (mSrcStreamHandles[i] == srcHandle) {
4836 pStream = m_pSrcChannel->getStreamByIndex(i);
4837 break;
4838 }
4839 }
4840 return pStream;
4841 }
4842
4843 /*===========================================================================
4844 * FUNCTION : unmapOfflineBuffers
4845 *
4846 * DESCRIPTION: Unmaps offline buffers
4847 *
4848 * PARAMETERS : none
4849 *
4850 * RETURN : int32_t type of status
4851 * NO_ERROR -- success
4852 * non-zero failure code
4853 *==========================================================================*/
4854 int32_t QCamera3ReprocessChannel::unmapOfflineBuffers(bool all)
4855 {
4856 int rc = NO_ERROR;
4857 Mutex::Autolock l(mOfflineBuffersLock);
4858 if (!mOfflineBuffers.empty()) {
4859 QCamera3Stream *stream = NULL;
4860 List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
4861 for (; it != mOfflineBuffers.end(); it++) {
4862 stream = (*it).stream;
4863 if (NULL != stream) {
4864 rc = stream->unmapBuf((*it).type,
4865 (*it).index,
4866 -1);
4867 if (NO_ERROR != rc) {
4868 LOGE("Error during offline buffer unmap %d",
4869 rc);
4870 }
4871 LOGD("Unmapped buffer with index %d", (*it).index);
4872 }
4873 if (!all) {
4874 mOfflineBuffers.erase(it);
4875 break;
4876 }
4877 }
4878 if (all) {
4879 mOfflineBuffers.clear();
4880 }
4881 }
4882
4883 if (!mOfflineMetaBuffers.empty()) {
4884 QCamera3Stream *stream = NULL;
4885 List<OfflineBuffer>::iterator it = mOfflineMetaBuffers.begin();
4886 for (; it != mOfflineMetaBuffers.end(); it++) {
4887 stream = (*it).stream;
4888 if (NULL != stream) {
4889 rc = stream->unmapBuf((*it).type,
4890 (*it).index,
4891 -1);
4892 if (NO_ERROR != rc) {
4893 LOGE("Error during offline buffer unmap %d",
4894 rc);
4895 }
4896 LOGD("Unmapped meta buffer with index %d", (*it).index);
4897 }
4898 if (!all) {
4899 mOfflineMetaBuffers.erase(it);
4900 break;
4901 }
4902 }
4903 if (all) {
4904 mOfflineMetaBuffers.clear();
4905 }
4906 }
4907 return rc;
4908 }
4909
4910 /*===========================================================================
4911 * FUNCTION : bufDone
4912 *
4913 * DESCRIPTION: Return reprocess stream buffer to free buffer list.
4914 * Note that this function doesn't queue buffer back to kernel.
4915 * It's up to doReprocessOffline to do that instead.
4916 * PARAMETERS :
4917 * @recvd_frame : stream buf frame to be returned
4918 *
4919 * RETURN : int32_t type of status
4920 * NO_ERROR -- success
4921 * non-zero failure code
4922 *==========================================================================*/
4923 int32_t QCamera3ReprocessChannel::bufDone(mm_camera_super_buf_t *recvd_frame)
4924 {
4925 int rc = NO_ERROR;
4926 if (recvd_frame && recvd_frame->num_bufs == 1) {
4927 Mutex::Autolock lock(mFreeBuffersLock);
4928 uint32_t buf_idx = recvd_frame->bufs[0]->buf_idx;
4929 mFreeBufferList.push_back(buf_idx);
4930
4931 } else {
4932 LOGE("Fatal. Not supposed to be here");
4933 rc = BAD_VALUE;
4934 }
4935
4936 return rc;
4937 }
4938
4939 /*===========================================================================
4940 * FUNCTION : overrideMetadata
4941 *
4942 * DESCRIPTION: Override metadata entries such as rotation, crop, and CDS info.
4943 *
4944 * PARAMETERS :
4945 * @pp_buffer : post-process buffer holding the input frame from the source stream
4946 * @meta_buffer : metadata buffer
4947 * @jpeg_settings : jpeg settings, used to derive the rotation setting
4948 * @fwk_frame : framework input frame populated for offline reprocess
4949 *
4950 * RETURN : int32_t type of status
4951 * NO_ERROR -- success
4952 * non-zero failure code
4953 *==========================================================================*/
4954 int32_t QCamera3ReprocessChannel::overrideMetadata(qcamera_hal3_pp_buffer_t *pp_buffer,
4955 mm_camera_buf_def_t *meta_buffer, jpeg_settings_t *jpeg_settings,
4956 qcamera_fwk_input_pp_data_t &fwk_frame)
4957 {
4958 int32_t rc = NO_ERROR;
4959 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
4960 if ((NULL == meta_buffer) || (NULL == pp_buffer) || (NULL == pp_buffer->input) ||
4961 (NULL == hal_obj)) {
4962 return BAD_VALUE;
4963 }
4964
4965 metadata_buffer_t *meta = (metadata_buffer_t *)meta_buffer->buffer;
4966 mm_camera_super_buf_t *frame = pp_buffer->input;
4967 if (NULL == meta) {
4968 return BAD_VALUE;
4969 }
4970
4971 for (uint32_t i = 0; i < frame->num_bufs; i++) {
4972 QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
4973 QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);
4974
4975 if (pStream != NULL && pSrcStream != NULL) {
4976 if (jpeg_settings) {
4977 // Find rotation info for reprocess stream
4978 cam_rotation_info_t rotation_info;
4979 memset(&rotation_info, 0, sizeof(rotation_info));
4980 if (jpeg_settings->jpeg_orientation == 0) {
4981 rotation_info.rotation = ROTATE_0;
4982 } else if (jpeg_settings->jpeg_orientation == 90) {
4983 rotation_info.rotation = ROTATE_90;
4984 } else if (jpeg_settings->jpeg_orientation == 180) {
4985 rotation_info.rotation = ROTATE_180;
4986 } else if (jpeg_settings->jpeg_orientation == 270) {
4987 rotation_info.rotation = ROTATE_270;
4988 }
4989
4990 rotation_info.device_rotation = ROTATE_0;
4991 rotation_info.streamId = mStreams[0]->getMyServerID();
4992 ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info);
4993 }
4994
4995 // Find and insert crop info for reprocess stream
4996 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) {
4997 if (MAX_NUM_STREAMS > crop_data->num_of_streams) {
4998 for (int j = 0; j < crop_data->num_of_streams; j++) {
4999 if (crop_data->crop_info[j].stream_id ==
5000 pSrcStream->getMyServerID()) {
5001
5002 // Store crop/roi information for offline reprocess
5003 // in the reprocess stream slot
5004 crop_data->crop_info[crop_data->num_of_streams].crop =
5005 crop_data->crop_info[j].crop;
5006 crop_data->crop_info[crop_data->num_of_streams].roi_map =
5007 crop_data->crop_info[j].roi_map;
5008 crop_data->crop_info[crop_data->num_of_streams].stream_id =
5009 mStreams[0]->getMyServerID();
5010 crop_data->num_of_streams++;
5011
5012 LOGD("Reprocess stream server id: %d",
5013 mStreams[0]->getMyServerID());
5014 LOGD("Found offline reprocess crop %dx%d %dx%d",
5015 crop_data->crop_info[j].crop.left,
5016 crop_data->crop_info[j].crop.top,
5017 crop_data->crop_info[j].crop.width,
5018 crop_data->crop_info[j].crop.height);
5019 LOGD("Found offline reprocess roimap %dx%d %dx%d",
5020 crop_data->crop_info[j].roi_map.left,
5021 crop_data->crop_info[j].roi_map.top,
5022 crop_data->crop_info[j].roi_map.width,
5023 crop_data->crop_info[j].roi_map.height);
5024
5025 break;
5026 }
5027 }
5028 } else {
5029 LOGE("No space to add reprocess stream crop/roi information");
5030 }
5031 }
5032
5033 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, meta) {
5034 uint8_t cnt = cdsInfo->num_of_streams;
5035 if (cnt <= MAX_NUM_STREAMS) {
5036 cam_stream_cds_info_t repro_cds_info;
5037 memset(&repro_cds_info, 0, sizeof(repro_cds_info));
5038 repro_cds_info.stream_id = mStreams[0]->getMyServerID();
5039 for (size_t i = 0; i < cnt; i++) {
5040 if (cdsInfo->cds_info[i].stream_id ==
5041 pSrcStream->getMyServerID()) {
5042 repro_cds_info.cds_enable =
5043 cdsInfo->cds_info[i].cds_enable;
5044 break;
5045 }
5046 }
5047 cdsInfo->num_of_streams = 1;
5048 cdsInfo->cds_info[0] = repro_cds_info;
5049 } else {
5050 LOGE("No space to add reprocess stream cds information");
5051 }
5052 }
5053
5054 fwk_frame.input_buffer = *frame->bufs[i];
5055 fwk_frame.metadata_buffer = *meta_buffer;
5056 fwk_frame.output_buffer = pp_buffer->output;
5057 break;
5058 } else {
5059 LOGE("Source/Re-process streams are invalid");
5060 rc |= BAD_VALUE;
5061 }
5062 }
5063
5064 return rc;
5065 }
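/* Illustrative sketch (assumption, not existing HAL code): the jpeg
 * orientation handling above can be read as the following mapping. Only the
 * four canonical orientations are expected in jpeg_settings; anything else
 * keeps the ROTATE_0 default established by the memset() above.
 *
 *   static cam_rotation_t rotationFromJpegOrientation(int32_t orientation) {
 *       switch (orientation) {
 *           case 90:  return ROTATE_90;
 *           case 180: return ROTATE_180;
 *           case 270: return ROTATE_270;
 *           case 0:
 *           default:  return ROTATE_0;
 *       }
 *   }
 */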
5066
5067 /*===========================================================================
5068 * FUNCTION : overrideFwkMetadata
5069 *
5070 * DESCRIPTION: Override framework metadata such as rotation, crop, and CDS data.
5071 *
5072 * PARAMETERS :
5073 * @frame : input frame for reprocessing
5074 *
5075 * RETURN : int32_t type of status
5076 * NO_ERROR -- success
5077 * non-zero failure code
5078 *==========================================================================*/
5079 int32_t QCamera3ReprocessChannel::overrideFwkMetadata(
5080 qcamera_fwk_input_pp_data_t *frame)
5081 {
5082 if (NULL == frame) {
5083 LOGE("Incorrect input frame");
5084 return BAD_VALUE;
5085 }
5086
5087 if (NULL == frame->metadata_buffer.buffer) {
5088 LOGE("No metadata available");
5089 return BAD_VALUE;
5090 }
5091 metadata_buffer_t *meta = (metadata_buffer_t *) frame->metadata_buffer.buffer;
5092
5093 // Not doing rotation at all for YUV to YUV reprocess
5094 if (mReprocessType != REPROCESS_TYPE_JPEG) {
5095 LOGD("Override rotation to 0 for channel reprocess type %d",
5096 mReprocessType);
5097 cam_rotation_info_t rotation_info;
5098 memset(&rotation_info, 0, sizeof(rotation_info));
5099 rotation_info.rotation = ROTATE_0;
5100 rotation_info.streamId = mStreams[0]->getMyServerID();
5101 ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info);
5102 }
5103
5104 // Find and insert crop info for reprocess stream
5105 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) {
5106 if (1 == crop_data->num_of_streams) {
5107 // Store crop/roi information for offline reprocess
5108 // in the reprocess stream slot
5109 crop_data->crop_info[crop_data->num_of_streams].crop =
5110 crop_data->crop_info[0].crop;
5111 crop_data->crop_info[crop_data->num_of_streams].roi_map =
5112 crop_data->crop_info[0].roi_map;
5113 crop_data->crop_info[crop_data->num_of_streams].stream_id =
5114 mStreams[0]->getMyServerID();
5115 crop_data->num_of_streams++;
5116
5117 LOGD("Reprocess stream server id: %d",
5118 mStreams[0]->getMyServerID());
5119 LOGD("Found offline reprocess crop %dx%d %dx%d",
5120 crop_data->crop_info[0].crop.left,
5121 crop_data->crop_info[0].crop.top,
5122 crop_data->crop_info[0].crop.width,
5123 crop_data->crop_info[0].crop.height);
5124 LOGD("Found offline reprocess roi map %dx%d %dx%d",
5125 crop_data->crop_info[0].roi_map.left,
5126 crop_data->crop_info[0].roi_map.top,
5127 crop_data->crop_info[0].roi_map.width,
5128 crop_data->crop_info[0].roi_map.height);
5129 } else {
5130 LOGE("Incorrect number of offline crop data entries %d",
5131 crop_data->num_of_streams);
5132 return BAD_VALUE;
5133 }
5134 } else {
5135 LOGW("Crop data not present");
5136 }
5137
5138 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, meta) {
5139 if (1 == cdsInfo->num_of_streams) {
5140 cdsInfo->cds_info[0].stream_id = mStreams[0]->getMyServerID();
5141 } else {
5142 LOGE("Incorrect number of offline cds info entries %d",
5143 cdsInfo->num_of_streams);
5144 return BAD_VALUE;
5145 }
5146 }
5147
5148 return NO_ERROR;
5149 }
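/* Illustrative note: overrideMetadata() and overrideFwkMetadata() rely on the
 * same convention -- the crop array carries the source stream's crop, and one
 * extra slot is appended whose stream_id is the reprocess stream's server ID
 * so the backend can look up crop/ROI by that ID. A minimal sketch of the
 * append step, using the same fields as the code above:
 *
 *   uint8_t n = crop_data->num_of_streams;
 *   crop_data->crop_info[n].crop      = crop_data->crop_info[0].crop;
 *   crop_data->crop_info[n].roi_map   = crop_data->crop_info[0].roi_map;
 *   crop_data->crop_info[n].stream_id = mStreams[0]->getMyServerID();
 *   crop_data->num_of_streams = n + 1;
 */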
5150
5151 /*===========================================================================
5152 * FUNCTION : doReprocessOffline
5153 *
5154 * DESCRIPTION: request to do a reprocess on the frame
5155 *
5156 * PARAMETERS :
5157 * @frame : input frame for reprocessing
5158 * @isPriorityFrame: Hint that this frame is high priority, equivalent to
5159 * real time, even though it is processed through the offline mechanism
5160 *
5161 * RETURN : int32_t type of status
5162 * NO_ERROR -- success
5163 * non-zero failure code
5164 *==========================================================================*/
5165 int32_t QCamera3ReprocessChannel::doReprocessOffline(
5166 qcamera_fwk_input_pp_data_t *frame, bool isPriorityFrame)
5167 {
5168 int32_t rc = 0;
5169 int index;
5170 OfflineBuffer mappedBuffer;
5171 ATRACE_CALL();
5172
5173 if (m_numStreams < 1) {
5174 LOGE("No reprocess stream is created");
5175 return -1;
5176 }
5177
5178 if (NULL == frame) {
5179 LOGE("Incorrect input frame");
5180 return BAD_VALUE;
5181 }
5182
5183 if (NULL == frame->metadata_buffer.buffer) {
5184 LOGE("No metadata available");
5185 return BAD_VALUE;
5186 }
5187
5188 if (0 > frame->input_buffer.fd) {
5189 LOGE("No input buffer available");
5190 return BAD_VALUE;
5191 }
5192
5193 if ((0 == m_numStreams) || (NULL == mStreams[0])) {
5194 LOGE("Reprocess stream not initialized!");
5195 return NO_INIT;
5196 }
5197
5198 QCamera3Stream *pStream = mStreams[0];
5199
5200 //qbuf the output buffer if it was allocated by the framework
5201 if (mReprocessType != REPROCESS_TYPE_JPEG && frame->output_buffer != NULL) {
5202 index = mGrallocMemory.getMatchBufIndex((void*)frame->output_buffer);
5203 if(index < 0) {
5204 rc = registerBuffer(frame->output_buffer, mIsType);
5205 if (NO_ERROR != rc) {
5206 LOGE("On-the-fly buffer registration failed %d",
5207 rc);
5208 return rc;
5209 }
5210
5211 index = mGrallocMemory.getMatchBufIndex((void*)frame->output_buffer);
5212 if (index < 0) {
5213 LOGE("Could not find object among registered buffers");
5214 return DEAD_OBJECT;
5215 }
5216 }
5217 rc = mGrallocMemory.markFrameNumber(index, frame->frameNumber);
5218 if(rc != NO_ERROR) {
5219 LOGE("Failed to mark frame#:%d, index:%d",frame->frameNumber,index);
5220 return rc;
5221 }
5222 if(!m_bIsActive) {
5223 rc = start();
5224 if (NO_ERROR != rc) {
5225 return rc;
5226 }
5227 } else {
5228 rc = pStream->bufDone(index);
5229 if(rc != NO_ERROR) {
5230 LOGE("Failed to Q new buffer to stream %d", rc);
5231 mGrallocMemory.markFrameNumber(index, -1);
5232 return rc;
5233 }
5234 }
5235
5236 } else if (mReprocessType == REPROCESS_TYPE_JPEG) {
5237 Mutex::Autolock lock(mFreeBuffersLock);
5238 uint32_t bufIdx;
5239 if (mFreeBufferList.empty()) {
5240 rc = mMemory->allocateOne(mFrameLen);
5241 if (rc < 0) {
5242 LOGE("Failed allocating heap buffer. Fatal");
5243 return BAD_VALUE;
5244 } else {
5245 bufIdx = (uint32_t)rc;
5246 }
5247 } else {
5248 bufIdx = *(mFreeBufferList.begin());
5249 mFreeBufferList.erase(mFreeBufferList.begin());
5250 }
5251
5252 mMemory->markFrameNumber(bufIdx, frame->frameNumber);
5253 rc = pStream->bufDone(bufIdx);
5254 if (rc != NO_ERROR) {
5255 LOGE("Failed to queue new buffer to stream");
5256 return rc;
5257 }
5258 }
5259
5260 int32_t max_idx = (int32_t) (mNumBuffers - 1);
5261 //loop back the indices if max burst count reached
5262 if (mOfflineBuffersIndex == max_idx) {
5263 mOfflineBuffersIndex = -1;
5264 }
5265 uint32_t buf_idx = (uint32_t)(mOfflineBuffersIndex + 1);
5266
5267 //Do cache ops before sending for reprocess
5268 if (mMemory != NULL) {
5269 mMemory->cleanInvalidateCache(buf_idx);
5270 }
5271
5272 rc = pStream->mapBuf(
5273 CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
5274 buf_idx, -1,
5275 frame->input_buffer.fd, frame->input_buffer.buffer,
5276 frame->input_buffer.frame_len);
5277 if (NO_ERROR == rc) {
5278 Mutex::Autolock l(mOfflineBuffersLock);
5279 mappedBuffer.index = buf_idx;
5280 mappedBuffer.stream = pStream;
5281 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
5282 mOfflineBuffers.push_back(mappedBuffer);
5283 mOfflineBuffersIndex = (int32_t)buf_idx;
5284 LOGD("Mapped buffer with index %d", mOfflineBuffersIndex);
5285 }
5286
5287 max_idx = (int32_t) ((mNumBuffers * 2) - 1);
5288 //loop back the indices if max burst count reached
5289 if (mOfflineMetaIndex == max_idx) {
5290 mOfflineMetaIndex = (int32_t) (mNumBuffers - 1);
5291 }
5292 uint32_t meta_buf_idx = (uint32_t)(mOfflineMetaIndex + 1);
5293 rc |= pStream->mapBuf(
5294 CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,
5295 meta_buf_idx, -1,
5296 frame->metadata_buffer.fd, frame->metadata_buffer.buffer,
5297 frame->metadata_buffer.frame_len);
5298 if (NO_ERROR == rc) {
5299 Mutex::Autolock l(mOfflineBuffersLock);
5300 mappedBuffer.index = meta_buf_idx;
5301 mappedBuffer.stream = pStream;
5302 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF;
5303 mOfflineMetaBuffers.push_back(mappedBuffer);
5304 mOfflineMetaIndex = (int32_t)meta_buf_idx;
5305 LOGD("Mapped meta buffer with index %d", mOfflineMetaIndex);
5306 }
5307
5308 if (rc == NO_ERROR) {
5309 cam_stream_parm_buffer_t param;
5310 uint32_t numPendingPriorityFrames = 0;
5311
5312 if(isPriorityFrame && (mReprocessType != REPROCESS_TYPE_JPEG)) {
5313 Mutex::Autolock lock(mPriorityFramesLock);
5314 /* read the length before pushing the frame number to check if
5315 * vector is empty */
5316 numPendingPriorityFrames = mPriorityFrames.size();
5317 mPriorityFrames.push(frame->frameNumber);
5318 }
5319
5320 if(isPriorityFrame && !numPendingPriorityFrames &&
5321 (mReprocessType != REPROCESS_TYPE_JPEG)) {
5322 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
5323 param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE;
5324 param.perf_mode = CAM_PERF_HIGH_PERFORMANCE;
5325 rc = pStream->setParameter(param);
5326 if (rc != NO_ERROR) {
5327 LOGE("%s: setParameter for CAM_PERF_HIGH_PERFORMANCE failed",
5328 __func__);
5329 }
5330 {
5331 Mutex::Autolock lock(mPriorityFramesLock);
5332 mReprocessPerfMode = true;
5333 }
5334 }
5335
5336 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
5337 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
5338 param.reprocess.buf_index = buf_idx;
5339 param.reprocess.frame_idx = frame->input_buffer.frame_idx;
5340 param.reprocess.meta_present = 1;
5341 param.reprocess.meta_buf_index = meta_buf_idx;
5342
5343 LOGI("Offline reprocessing id = %d buf Id = %d meta index = %d",
5344 param.reprocess.frame_idx, param.reprocess.buf_index,
5345 param.reprocess.meta_buf_index);
5346 rc = pStream->setParameter(param);
5347 if (rc != NO_ERROR) {
5348 LOGE("stream setParameter for reprocess failed");
5349 resetToCamPerfNormal(frame->frameNumber);
5350 }
5351 } else {
5352 LOGE("Input buffer memory map failed: %d", rc);
5353 }
5354
5355 return rc;
5356 }
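/* Illustrative note on the offline mapping indices used above (example
 * values, assuming mNumBuffers == 4 and steady-state operation):
 *
 *   input buf_idx       cycles 0, 1, 2, 3, 0, ...
 *                       (mOfflineBuffersIndex is reset to -1 at mNumBuffers - 1)
 *   meta  meta_buf_idx  advances up to 7 (2 * mNumBuffers - 1) and then wraps
 *                       back to 4 (mNumBuffers), so in steady state metadata
 *                       mappings occupy the upper half of the index range
 *
 * Every successful mapBuf() is recorded in mOfflineBuffers /
 * mOfflineMetaBuffers so that unmapOfflineBuffers() can undo it later.
 */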
5357
5358 /*===========================================================================
5359 * FUNCTION : doReprocess
5360 *
5361 * DESCRIPTION: request to do a reprocess on the frame
5362 *
5363 * PARAMETERS :
5364 * @buf_fd : fd to the input buffer that needs reprocess
5365 * @buffer : Buffer ptr
5366 * @buf_length : length of the input buffer
5367 * @ret_val : result of reprocess.
5368 * Example: Could be faceID in case of register face image.
5369 * @meta_frame : metadata frame.
5370 *
5371 * RETURN : int32_t type of status
5372 * NO_ERROR -- success
5373 * non-zero failure code
5374 *==========================================================================*/
5375 int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd, void *buffer, size_t buf_length,
5376 int32_t &ret_val, mm_camera_super_buf_t *meta_frame)
5377 {
5378 int32_t rc = 0;
5379 if (m_numStreams < 1) {
5380 LOGE("No reprocess stream is created");
5381 return -1;
5382 }
5383 if (meta_frame == NULL) {
5384 LOGE("Did not get corresponding metadata in time");
5385 return -1;
5386 }
5387
5388 uint8_t buf_idx = 0;
5389 for (uint32_t i = 0; i < m_numStreams; i++) {
5390 rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
5391 buf_idx, -1,
5392 buf_fd, buffer, buf_length);
5393
5394 //Do cache ops before sending for reprocess
5395 if (mMemory != NULL) {
5396 mMemory->cleanInvalidateCache(buf_idx);
5397 }
5398
5399 if (rc == NO_ERROR) {
5400 cam_stream_parm_buffer_t param;
5401 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
5402 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
5403 param.reprocess.buf_index = buf_idx;
5404 param.reprocess.meta_present = 1;
5405 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
5406 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
5407
5408 LOGI("Online reprocessing id = %d buf Id = %d meta index = %d",
5409 param.reprocess.frame_idx, param.reprocess.buf_index,
5410 param.reprocess.meta_buf_index);
5411 rc = mStreams[i]->setParameter(param);
5412 if (rc == NO_ERROR) {
5413 ret_val = param.reprocess.ret_val;
5414 }
5415 mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
5416 buf_idx, -1);
5417 }
5418 }
5419 return rc;
5420 }
5421
5422 /*===========================================================================
5423 * FUNCTION : addReprocStreamsFromSource
5424 *
5425 * DESCRIPTION: add reprocess streams from input source channel
5426 *
5427 * PARAMETERS :
5428 * @pp_config : pp feature configuration
5429 * @src_config : source reprocess configuration
5430 * @is_type : type of image stabilization required on this stream
5431 * @pMetaChannel : ptr to metadata channel to get corresp. metadata
5432 *
5433 *
5434 * RETURN : int32_t type of status
5435 * NO_ERROR -- success
5436 * non-zero failure code
5437 *==========================================================================*/
5438 int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
5439 const reprocess_config_t &src_config , cam_is_type_t is_type,
5440 QCamera3Channel *pMetaChannel)
5441 {
5442 int32_t rc = 0;
5443 cam_stream_reproc_config_t reprocess_config;
5444 cam_stream_type_t streamType;
5445
5446 cam_dimension_t streamDim = src_config.output_stream_dim;
5447
5448 if (NULL != src_config.src_channel) {
5449 QCamera3Stream *pSrcStream = src_config.src_channel->getStreamByIndex(0);
5450 if (pSrcStream == NULL) {
5451 LOGE("source channel doesn't have a stream");
5452 return BAD_VALUE;
5453 }
5454 mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
5455 }
5456
5457 streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
5458 reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
5459
5460 reprocess_config.offline.input_fmt = src_config.input_stream_format;
5461 reprocess_config.offline.input_dim = src_config.input_stream_dim;
5462 reprocess_config.offline.input_buf_planes.plane_info =
5463 src_config.input_stream_plane_info.plane_info;
5464 reprocess_config.offline.num_of_bufs = (uint8_t)mNumBuffers;
5465 reprocess_config.offline.input_type = src_config.stream_type;
5466
5467 LOGH("input_fmt is %d, fmt is %d, input_dim is %d x %d", reprocess_config.offline.input_fmt,
5468 src_config.stream_format, reprocess_config.offline.input_dim.width,
5469 reprocess_config.offline.input_dim.height);
5470 reprocess_config.pp_feature_config = pp_config;
5471 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
5472 m_handle,
5473 m_camOps,
5474 &mPaddingInfo,
5475 (QCamera3Channel*)this,
5476 false/*mapStreamBuffers*/);
5477 if (pStream == NULL) {
5478 LOGE("No mem for Stream");
5479 return NO_MEMORY;
5480 }
5481
5482 rc = pStream->init(streamType, src_config.stream_format,
5483 streamDim, ROTATE_0, &reprocess_config,
5484 (uint8_t)mNumBuffers,
5485 reprocess_config.pp_feature_config.feature_mask,
5486 is_type,
5487 0,/* batchSize */
5488 QCamera3Channel::streamCbRoutine, this);
5489
5490 if (rc == 0) {
5491 mStreams[m_numStreams] = pStream;
5492 m_numStreams++;
5493 } else {
5494 LOGE("failed to create reprocess stream");
5495 delete pStream;
5496 }
5497
5498 if (rc == NO_ERROR) {
5499 m_pSrcChannel = src_config.src_channel;
5500 m_pMetaChannel = pMetaChannel;
5501 mReprocessType = src_config.reprocess_type;
5502 LOGD("mReprocessType is %d", mReprocessType);
5503 }
5504 mm_camera_req_buf_t buf;
5505 memset(&buf, 0x0, sizeof(buf));
5506 buf.type = MM_CAMERA_REQ_SUPER_BUF;
5507 buf.num_buf_requested = 1;
5508 if(m_camOps->request_super_buf(m_camHandle,m_handle, &buf) < 0) {
5509 LOGE("Request for super buffer failed");
5510 }
5511 return rc;
5512 }
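/* Illustrative sketch (hypothetical caller; the real wiring lives in the HAL
 * post-processor): a reprocess channel is configured from a source channel
 * and then fed frames offline. reprocChannel, ppConfig, srcConfig and
 * fwkFrame are assumed names for the example only.
 *
 *   reprocChannel->addReprocStreamsFromSource(ppConfig, srcConfig,
 *           isType, pMetaChannel);
 *   ...
 *   reprocChannel->doReprocessOffline(&fwkFrame, false);
 *   ...
 *   reprocChannel->stop();
 */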
5513
5514 /* QCamera3SupportChannel methods */
5515
5516 cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};
5517
5518 QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
5519 uint32_t channel_handle,
5520 mm_camera_ops_t *cam_ops,
5521 cam_padding_info_t *paddingInfo,
5522 cam_feature_mask_t postprocess_mask,
5523 cam_stream_type_t streamType,
5524 cam_dimension_t *dim,
5525 cam_format_t streamFormat,
5526 uint8_t hw_analysis_supported,
5527 cam_color_filter_arrangement_t color_arrangement,
5528 void *userData, uint32_t numBuffers) :
5529 QCamera3Channel(cam_handle, channel_handle, cam_ops,
5530 NULL, NULL, paddingInfo, postprocess_mask,
5531 userData, numBuffers),
5532 mMemory(NULL),
5533 mHwAnalysisSupported(hw_analysis_supported),
5534 mColorArrangement(color_arrangement)
5535 {
5536 memcpy(&mDim, dim, sizeof(cam_dimension_t));
5537 mStreamType = streamType;
5538 mStreamFormat = streamFormat;
5539 }
5540
5541 QCamera3SupportChannel::~QCamera3SupportChannel()
5542 {
5543 destroy();
5544
5545 if (mMemory) {
5546 mMemory->deallocate();
5547 delete mMemory;
5548 mMemory = NULL;
5549 }
5550 }
5551
5552 int32_t QCamera3SupportChannel::initialize(cam_is_type_t isType)
5553 {
5554 int32_t rc;
5555
5556 if (mMemory || m_numStreams > 0) {
5557 LOGE("QCamera3SupportChannel already initialized");
5558 return -EINVAL;
5559 }
5560
5561 mIsType = isType;
5562 // Make Analysis same as Preview format
5563 if (!mHwAnalysisSupported && mStreamType == CAM_STREAM_TYPE_ANALYSIS &&
5564 mColorArrangement != CAM_FILTER_ARRANGEMENT_Y) {
5565 mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_PREVIEW,
5566 mDim.width, mDim.height, m_bUBWCenable, mIsType);
5567 }
5568
5569 rc = QCamera3Channel::addStream(mStreamType,
5570 mStreamFormat, mDim, ROTATE_0, MIN_STREAMING_BUFFER_NUM,
5571 mPostProcMask, mIsType);
5572 if (rc < 0) {
5573 LOGE("addStream failed");
5574 }
5575 return rc;
5576 }
5577
5578 int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
5579 uint32_t /*frameNumber*/,
5580 int & /*indexUsed*/)
5581 {
5582 return NO_ERROR;
5583 }
5584
5585 void QCamera3SupportChannel::streamCbRoutine(
5586 mm_camera_super_buf_t *super_frame,
5587 QCamera3Stream * /*stream*/)
5588 {
5589 if (super_frame == NULL || super_frame->num_bufs != 1) {
5590 LOGE("super_frame is not valid");
5591 return;
5592 }
5593 bufDone(super_frame);
5594 free(super_frame);
5595 }
5596
5597 QCamera3StreamMem* QCamera3SupportChannel::getStreamBufs(uint32_t len)
5598 {
5599 int rc;
5600 mMemory = new QCamera3StreamMem(mNumBuffers);
5601 if (!mMemory) {
5602 LOGE("unable to create heap memory");
5603 return NULL;
5604 }
5605 rc = mMemory->allocateAll(len);
5606 if (rc < 0) {
5607 LOGE("unable to allocate heap memory");
5608 delete mMemory;
5609 mMemory = NULL;
5610 return NULL;
5611 }
5612 return mMemory;
5613 }
5614
5615 void QCamera3SupportChannel::putStreamBufs()
5616 {
5617 mMemory->deallocate();
5618 delete mMemory;
5619 mMemory = NULL;
5620 }
5621
5622 QCamera3DepthChannel::~QCamera3DepthChannel() {
5623 unmapAllBuffers();
5624 }
5625
5626 /*===========================================================================
5627 * FUNCTION : mapBuffer
5628 *
5629 * DESCRIPTION: Maps stream depth buffer
5630 *
5631 * PARAMETERS :
5632 * @buffer : Depth buffer
5633 * @frameNumber : Frame number
5634 *
5635 *
5636 * RETURN : int32_t type of status
5637 * NO_ERROR -- success
5638 * non-zero failure code
5639 *==========================================================================*/
5640 int32_t QCamera3DepthChannel::mapBuffer(buffer_handle_t *buffer,
5641 uint32_t frameNumber) {
5642 int32_t rc = NO_ERROR;
5643
5644 int32_t index = mGrallocMem.getMatchBufIndex((void*)buffer);
5645 if (0 > index) {
5646 rc = mGrallocMem.registerBuffer(buffer, CAM_STREAM_TYPE_DEFAULT);
5647 if (NO_ERROR != rc) {
5648 LOGE("Buffer registration failed %d", rc);
5649 return rc;
5650 }
5651
5652 index = mGrallocMem.getMatchBufIndex((void*)buffer);
5653 if (index < 0) {
5654 LOGE("Could not find object among registered buffers");
5655 return DEAD_OBJECT;
5656 }
5657 } else {
5658 LOGE("Buffer: %p is already present at index: %d!", buffer, index);
5659 return ALREADY_EXISTS;
5660 }
5661
5662 rc = mGrallocMem.markFrameNumber((uint32_t)index, frameNumber);
5663
5664 return rc;
5665 }
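/* Illustrative sketch (hypothetical caller): for one depth request the
 * channel methods pair up as follows; depthChannel, buffer and depthData are
 * assumed names for the example only.
 *
 *   depthChannel->mapBuffer(buffer, frameNumber);             // at request time
 *   ...
 *   depthChannel->populateDepthData(depthData, frameNumber);  // when depth data arrives
 *   ...
 *   depthChannel->unmapBuffer(frameNumber);                   // once the buffer is returned
 */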
5666
5667 /*===========================================================================
5668 * FUNCTION : populateDepthData
5669 *
5670 * DESCRIPTION: Copies the incoming depth data in the respective depth buffer
5671 *
5672 * PARAMETERS :
5673 * @data : Incoming Depth data
5674 * @frameNumber : Frame number of incoming depth data
5675 *
5676 *
5677 * RETURN : int32_t type of status
5678 * NO_ERROR -- success
5679 * non-zero failure code
5680 *==========================================================================*/
5681 int32_t QCamera3DepthChannel::populateDepthData(const cam_depth_data_t &data,
5682 uint32_t frameNumber) {
5683 if (nullptr == mStream) {
5684 LOGE("Invalid depth stream!");
5685 return BAD_VALUE;
5686 }
5687
5688 ssize_t length = data.length;
5689 int32_t index = mGrallocMem.getBufferIndex(frameNumber);
5690 if (0 > index) {
5691 LOGE("Frame number: %u not present!");
5692 return BAD_VALUE;
5693 }
5694
5695 void *dst = mGrallocMem.getPtr(index);
5696 if (nullptr == dst) {
5697 LOGE("Invalid mapped buffer");
5698 return BAD_VALUE;
5699 }
5700
5701 camera3_jpeg_blob_t jpegHeader;
5702 ssize_t headerSize = sizeof jpegHeader;
5703 buffer_handle_t *blobBufferHandle = static_cast<buffer_handle_t *>
5704 (mGrallocMem.getBufferHandle(index));
5705 ssize_t maxBlobSize;
5706 if (nullptr != blobBufferHandle) {
5707 maxBlobSize = ((private_handle_t*)(*blobBufferHandle))->width;
5708 } else {
5709 LOGE("Couldn't query buffer handle!");
5710 return BAD_VALUE;
5711 }
5712
5713 if ((length + headerSize) > maxBlobSize) {
5714 LOGE("Depth buffer size mismatch expected: %d actual: %d",
5715 (length + headerSize), maxBlobSize);
5716 return BAD_VALUE;
5717 }
5718
5719 if (0 < length) {
5720 memcpy(dst, data.depth_data, length);
5721 }
5722
5723 memset(&jpegHeader, 0, headerSize);
5724 jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
5725 jpegHeader.jpeg_size = length;
5726 size_t jpeg_eof_offset = static_cast<size_t> (maxBlobSize - headerSize);
5727 uint8_t *jpegBuffer = static_cast<uint8_t *> (dst);
5728 uint8_t *jpegEOF = &jpegBuffer[jpeg_eof_offset];
5729 memcpy(jpegEOF, &jpegHeader, headerSize);
5730
5731 return NO_ERROR;
5732 }
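/* Illustrative note: the populated gralloc buffer follows the camera3 BLOB
 * layout -- depth payload at offset 0 and a camera3_jpeg_blob_t footer at the
 * very end of the buffer, with jpeg_size holding the payload length. A
 * consumer-side sketch (buf and capacity are assumed names; capacity matches
 * maxBlobSize above):
 *
 *   uint8_t *buf = ...;       // mapped BLOB buffer
 *   size_t capacity = ...;    // total blob capacity
 *   camera3_jpeg_blob_t footer;
 *   memcpy(&footer, buf + capacity - sizeof(footer), sizeof(footer));
 *   if (footer.jpeg_blob_id == CAMERA3_JPEG_BLOB_ID) {
 *       size_t depthLen = footer.jpeg_size;  // bytes of depth data at buf[0]
 *   }
 */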
5733
5734 /*===========================================================================
5735 * FUNCTION : getOldestFrame
5736 *
5737 * DESCRIPTION: Return oldest mapped buffer
5738 *
5739 * PARAMETERS :
5740 * @frameNumber : Sets oldest frame number if present
5741 *
5742 *
5743 * RETURN : buffer_handle_t pointer
5744 * NULL in case of error
5745 *==========================================================================*/
getOldestFrame(uint32_t & frameNumber)5746 buffer_handle_t *QCamera3DepthChannel::getOldestFrame(uint32_t &frameNumber) {
5747 uint32_t oldestIndex = UINT32_MAX;
5748 int32_t frameNumberResult = mGrallocMem.getOldestFrameNumber(oldestIndex);
5749 if (0 > frameNumberResult) {
5750 LOGD("Invalid frame number!");
5751 return nullptr;
5752 }
5753 frameNumber = static_cast<uint32_t> (frameNumberResult);
5754
5755 buffer_handle_t *ret = static_cast<buffer_handle_t *>
5756 (mGrallocMem.getBufferHandle(oldestIndex));
5757 if (nullptr == ret) {
5758 LOGE("Invalid buffer handle!");
5759 return nullptr;
5760 }
5761
5762 return ret;
5763 }
5764
5765 /*===========================================================================
5766 * FUNCTION : unmapBuffer
5767 *
5768 * DESCRIPTION: Unmap a single buffer
5769 *
5770 * PARAMETERS :
5771 * @frameNumber : Frame number of buffer that should get unmapped
5772 *
5773 *
5774 * RETURN : int32_t type of status
5775 * NO_ERROR -- success
5776 * non-zero failure code
5777 *==========================================================================*/
5778 int32_t QCamera3DepthChannel::unmapBuffer(uint32_t frameNumber) {
5779 int32_t index = mGrallocMem.getBufferIndex(frameNumber);
5780 if (0 > index) {
5781 LOGE("Frame number: %u not present!", frameNumber);
5782 return BAD_VALUE;
5783 }
5784
5785 return mGrallocMem.unregisterBuffer(index);
5786 }
5787
5788 /*===========================================================================
5789 * FUNCTION : unmapAllBuffers
5790 *
5791 * DESCRIPTION: This will unmap all buffers
5792 *
5793 * PARAMETERS :
5794 *
5795 * RETURN : int32_t type of status
5796 * NO_ERROR -- success
5797 * non-zero failure code
5798 *==========================================================================*/
5799 int32_t QCamera3DepthChannel::unmapAllBuffers() {
5800 mGrallocMem.unregisterBuffers();
5801
5802 return NO_ERROR;
5803 }
5804
5805 }; // namespace qcamera
5806