1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2 *
3 * Redistribution and use in source and binary forms, with or without
4 * modification, are permitted provided that the following conditions are
5 * met:
6 * * Redistributions of source code must retain the above copyright
7 * notice, this list of conditions and the following disclaimer.
8 * * Redistributions in binary form must reproduce the above
9 * copyright notice, this list of conditions and the following
10 * disclaimer in the documentation and/or other materials provided
11 * with the distribution.
12 * * Neither the name of The Linux Foundation nor the names of its
13 * contributors may be used to endorse or promote products derived
14 * from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 *
28 */
29
30
31 #define LOG_TAG "QCamera3Channel"
32
33 // To remove
34 #include <cutils/properties.h>
35
36 // System dependencies
37 #include <fcntl.h>
38 #include <stdio.h>
39 #include <stdlib.h>
40 #include "hardware/gralloc.h"
41 #include <utils/Timers.h>
42 #include <sys/stat.h>
43
44 // Camera dependencies
45 #include "QCamera3Channel.h"
46 #include "QCamera3HWI.h"
47 #include "QCameraTrace.h"
48 #include "QCameraFormat.h"
49 extern "C" {
50 #include "mm_camera_dbg.h"
51 }
52
53 using namespace android;
54
55 namespace qcamera {
56 #define IS_BUFFER_ERROR(x) (((x) & V4L2_BUF_FLAG_ERROR) == V4L2_BUF_FLAG_ERROR)
57
58 /*===========================================================================
59 * FUNCTION : QCamera3Channel
60 *
61 * DESCRIPTION: constructor of QCamera3Channel
62 *
63 * PARAMETERS :
64 * @cam_handle : camera handle
65 * @channel_handle : channel handle
* @cam_ops : ptr to camera ops table
* @cb_routine : callback routine to frame aggregator
* @cb_buffer_err : callback routine for buffer errors
* @paddingInfo : stream padding info
* @postprocess_mask : feature mask for postprocessing
* @userData : HWI handle
* @numBuffers : number of max dequeued buffers
66 *
67 * RETURN : none
68 *==========================================================================*/
69 QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
70 uint32_t channel_handle,
71 mm_camera_ops_t *cam_ops,
72 channel_cb_routine cb_routine,
73 channel_cb_buffer_err cb_buffer_err,
74 cam_padding_info_t *paddingInfo,
75 cam_feature_mask_t postprocess_mask,
76 void *userData, uint32_t numBuffers)
77 {
78 m_camHandle = cam_handle;
79 m_handle = channel_handle;
80 m_camOps = cam_ops;
81 m_bIsActive = false;
82 m_bUBWCenable = true;
83
84 m_numStreams = 0;
85 memset(mStreams, 0, sizeof(mStreams));
86 mUserData = userData;
87
88 mStreamInfoBuf = NULL;
89 mChannelCB = cb_routine;
90 mChannelCbBufErr = cb_buffer_err;
91 mPaddingInfo = *paddingInfo;
92 mPaddingInfo.offset_info.offset_x = 0;
93 mPaddingInfo.offset_info.offset_y = 0;
94
95 mPostProcMask = postprocess_mask;
96
97 mIsType = IS_TYPE_NONE;
98 mNumBuffers = numBuffers;
99 mPerFrameMapUnmapEnable = true;
100 mDumpFrmCnt = 0;
101 mNRMode = 0;
102
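// persist.camera.dumpimg selects frame dumping; stream buffers are mapped
// into HAL address space only when dumping is requested.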
103 mYUVDump = property_get_int32("persist.camera.dumpimg", 0);
104 mMapStreamBuffers = mYUVDump;
105 }
106
107 /*===========================================================================
108 * FUNCTION : ~QCamera3Channel
109 *
110 * DESCRIPTION: destructor of QCamera3Channel
111 *
112 * PARAMETERS : none
113 *
114 * RETURN : none
115 *==========================================================================*/
116 QCamera3Channel::~QCamera3Channel()
117 {
118 }
119
120 /*===========================================================================
121 * FUNCTION : destroy
122 *
123 * DESCRIPTION: internal destructor of QCamera3Channel, called by the subclasses.
124 * This destructor will call pure virtual functions: stop() will eventually call
125 * QCamera3Stream::putBufs, and putBufs in turn calls
126 * QCamera3Channel::putStreamBufs, which is pure virtual
127 *
128 * PARAMETERS : none
129 *
130 * RETURN : none
131 *==========================================================================*/
132 void QCamera3Channel::destroy()
133 {
134 if (m_bIsActive)
135 stop();
136
137 for (uint32_t i = 0; i < m_numStreams; i++) {
138 if (mStreams[i] != NULL) {
139 delete mStreams[i];
140 mStreams[i] = 0;
141 }
142 }
143 m_numStreams = 0;
144 }
145
146 /*===========================================================================
147 * FUNCTION : addStream
148 *
149 * DESCRIPTION: add a stream into channel
150 *
151 * PARAMETERS :
152 * @streamType : stream type
153 * @streamFormat : stream format
154 * @streamDim : stream dimension
155 * @streamRotation : rotation of the stream
156 * @minStreamBufNum : minimal buffer count for particular stream type
157 * @postprocessMask : post-process feature mask
158 * @isType : type of image stabilization required on the stream
159 *
160 * RETURN : int32_t type of status
161 * NO_ERROR -- success
162 * non-zero failure code
163 *==========================================================================*/
164 int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
165 cam_format_t streamFormat,
166 cam_dimension_t streamDim,
167 cam_rotation_t streamRotation,
168 uint8_t minStreamBufNum,
169 cam_feature_mask_t postprocessMask,
170 cam_is_type_t isType,
171 uint32_t batchSize)
172 {
173 int32_t rc = NO_ERROR;
174
175 if (m_numStreams >= 1) {
176 LOGE("Only one stream per channel supported in v3 Hal");
177 return BAD_VALUE;
178 }
179
180 if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
181 LOGE("stream number (%d) exceeds max limit (%d)",
182 m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
183 return BAD_VALUE;
184 }
185 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
186 m_handle,
187 m_camOps,
188 &mPaddingInfo,
189 this,
190 mMapStreamBuffers);
191 if (pStream == NULL) {
192 LOGE("No mem for Stream");
193 return NO_MEMORY;
194 }
195 LOGD("batch size is %d", batchSize);
196
197 rc = pStream->init(streamType, streamFormat, streamDim, streamRotation,
198 NULL, minStreamBufNum, postprocessMask, isType, batchSize,
199 streamCbRoutine, this);
200 if (rc == 0) {
201 mStreams[m_numStreams] = pStream;
202 m_numStreams++;
203 } else {
204 delete pStream;
205 }
206 return rc;
207 }
208
209 /*===========================================================================
210 * FUNCTION : start
211 *
212 * DESCRIPTION: start channel, which will start all streams belonging to this channel
213 *
214 * PARAMETERS :
215 *
216 * RETURN : int32_t type of status
217 * NO_ERROR -- success
218 * non-zero failure code
219 *==========================================================================*/
220 int32_t QCamera3Channel::start()
221 {
222 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_START);
223 int32_t rc = NO_ERROR;
224
225 if (m_numStreams > 1) {
226 LOGW("bundle not supported");
227 } else if (m_numStreams == 0) {
228 return NO_INIT;
229 }
230
231 if(m_bIsActive) {
232 LOGW("Attempt to start active channel");
233 return rc;
234 }
235
236 for (uint32_t i = 0; i < m_numStreams; i++) {
237 if (mStreams[i] != NULL) {
238 mStreams[i]->start();
239 }
240 }
241
242 m_bIsActive = true;
243
244 return rc;
245 }
246
247 /*===========================================================================
248 * FUNCTION : stop
249 *
250 * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
251 *
252 * PARAMETERS : none
253 *
254 * RETURN : int32_t type of status
255 * NO_ERROR -- success
256 * non-zero failure code
257 *==========================================================================*/
258 int32_t QCamera3Channel::stop()
259 {
260 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_STOP);
261 int32_t rc = NO_ERROR;
262 if(!m_bIsActive) {
263 LOGE("Attempt to stop inactive channel");
264 return rc;
265 }
266
267 for (uint32_t i = 0; i < m_numStreams; i++) {
268 if (mStreams[i] != NULL) {
269 mStreams[i]->stop();
270 }
271 }
272
273 m_bIsActive = false;
274 return rc;
275 }
276
277 /*===========================================================================
278 * FUNCTION : setBatchSize
279 *
280 * DESCRIPTION: Set batch size for the channel. This is a dummy implementation
281 * for the base class
282 *
283 * PARAMETERS :
284 * @batchSize : Number of image buffers in a batch
285 *
286 * RETURN : int32_t type of status
287 * NO_ERROR -- success always
288 * non-zero failure code
289 *==========================================================================*/
290 int32_t QCamera3Channel::setBatchSize(uint32_t batchSize)
291 {
292 LOGD("Dummy method. batchSize: %d unused ", batchSize);
293 return NO_ERROR;
294 }
295
296 /*===========================================================================
297 * FUNCTION : queueBatchBuf
298 *
299 * DESCRIPTION: This is a dummy implementation for the base class
300 *
301 * PARAMETERS :
302 *
303 * RETURN : int32_t type of status
304 * NO_ERROR -- success always
305 * non-zero failure code
306 *==========================================================================*/
307 int32_t QCamera3Channel::queueBatchBuf()
308 {
309 LOGD("Dummy method. Unused ");
310 return NO_ERROR;
311 }
312
313 /*===========================================================================
314 * FUNCTION : setPerFrameMapUnmap
315 *
316 * DESCRIPTION: Sets internal enable flag
317 *
318 * PARAMETERS :
319 * @enable : Bool value for the enable flag
320 *
321 * RETURN : int32_t type of status
322 * NO_ERROR -- success always
323 * non-zero failure code
324 *==========================================================================*/
325 int32_t QCamera3Channel::setPerFrameMapUnmap(bool enable)
326 {
327 mPerFrameMapUnmapEnable = enable;
328 return NO_ERROR;
329 }
330
331 /*===========================================================================
332 * FUNCTION : flush
333 *
334 * DESCRIPTION: flush a channel
335 *
336 * PARAMETERS : none
337 *
338 * RETURN : int32_t type of status
339 * NO_ERROR -- success
340 * non-zero failure code
341 *==========================================================================*/
342 int32_t QCamera3Channel::flush()
343 {
344 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_FLUSH);
345 return NO_ERROR;
346 }
347
348 /*===========================================================================
349 * FUNCTION : bufDone
350 *
351 * DESCRIPTION: return a stream buffer back to the kernel
352 *
353 * PARAMETERS :
354 * @recvd_frame : stream buf frame to be returned
355 *
356 * RETURN : int32_t type of status
357 * NO_ERROR -- success
358 * non-zero failure code
359 *==========================================================================*/
360 int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
361 {
362 int32_t rc = NO_ERROR;
363 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
364 if (recvd_frame->bufs[i] != NULL) {
365 for (uint32_t j = 0; j < m_numStreams; j++) {
366 if (mStreams[j] != NULL &&
367 mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
368 rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
369 break; // break loop j
370 }
371 }
372 }
373 }
374
375 return rc;
376 }
377
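/*===========================================================================
* FUNCTION : setBundleInfo
*
* DESCRIPTION: pass the bundle configuration to the channel's stream as a
* stream parameter
*
* PARAMETERS :
* @bundleInfo : bundle configuration to be set
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/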
378 int32_t QCamera3Channel::setBundleInfo(const cam_bundle_config_t &bundleInfo)
379 {
380 int32_t rc = NO_ERROR;
381 cam_stream_parm_buffer_t param;
382 memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
383 param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
384 param.bundleInfo = bundleInfo;
385 if (m_numStreams > 0 && mStreams[0]) {
386 rc = mStreams[0]->setParameter(param);
387 if (rc != NO_ERROR) {
388 LOGE("stream setParameter for set bundle failed");
389 }
390 }
391 return rc;
392 }
393
394 /*===========================================================================
395 * FUNCTION : getStreamTypeMask
396 *
397 * DESCRIPTION: Get bit mask of all stream types in this channel
398 *
399 * PARAMETERS : None
400 *
401 * RETURN : Bit mask of all stream types in this channel
402 *==========================================================================*/
403 uint32_t QCamera3Channel::getStreamTypeMask()
404 {
405 uint32_t mask = 0;
406 for (uint32_t i = 0; i < m_numStreams; i++) {
407 mask |= (1U << mStreams[i]->getMyType());
408 }
409 return mask;
410 }
411
412 /*===========================================================================
413 * FUNCTION : getStreamID
414 *
415 * DESCRIPTION: Get StreamID of requested stream type
416 *
417 * PARAMETERS : streamMask
418 *
419 * RETURN : Stream ID
420 *==========================================================================*/
421 uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
422 {
423 uint32_t streamID = 0;
424 for (uint32_t i = 0; i < m_numStreams; i++) {
425 if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) {
426 streamID = mStreams[i]->getMyServerID();
427 break;
428 }
429 }
430 return streamID;
431 }
432
433 /*===========================================================================
434 * FUNCTION : getStreamByHandle
435 *
436 * DESCRIPTION: return stream object by stream handle
437 *
438 * PARAMETERS :
439 * @streamHandle : stream handle
440 *
441 * RETURN : stream object. NULL if not found
442 *==========================================================================*/
443 QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
444 {
445 for (uint32_t i = 0; i < m_numStreams; i++) {
446 if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
447 return mStreams[i];
448 }
449 }
450 return NULL;
451 }
452
453 /*===========================================================================
454 * FUNCTION : getStreamByIndex
455 *
456 * DESCRIPTION: return stream object by index
457 *
458 * PARAMETERS :
459 * @index : index of the stream within the channel
460 *
461 * RETURN : stream object. NULL if not found
462 *==========================================================================*/
463 QCamera3Stream *QCamera3Channel::getStreamByIndex(uint32_t index)
464 {
465 if (index < m_numStreams) {
466 return mStreams[index];
467 }
468 return NULL;
469 }
470
471 /*===========================================================================
472 * FUNCTION : streamCbRoutine
473 *
474 * DESCRIPTION: callback routine for stream
475 *
476 * PARAMETERS :
477 * @super_frame : the super frame with the filled buffer
478 * @stream : stream on which the buffer was requested and filled
479 * @userdata : opaque pointer to the owning QCamera3Channel object
*
* RETURN : none
480 *==========================================================================*/
481 void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
482 QCamera3Stream *stream, void *userdata)
483 {
484 QCamera3Channel *channel = (QCamera3Channel *)userdata;
485 if (channel == NULL) {
486 LOGE("invalid channel pointer");
487 return;
488 }
489 channel->streamCbRoutine(super_frame, stream);
490 }
491
492 /*===========================================================================
493 * FUNCTION : dumpYUV
494 *
495 * DESCRIPTION: function to dump the YUV data from ISP/pproc
496 *
497 * PARAMETERS :
498 * @frame : frame to be dumped
499 * @dim : dimension of the stream
500 * @offset : offset of the data
501 * @dump_type : dump point of the frame (e.g. ISP output/pproc input, pproc output)
502 *
503 * RETURN :
504 *==========================================================================*/
505 void QCamera3Channel::dumpYUV(mm_camera_buf_def_t *frame, cam_dimension_t dim,
506 cam_frame_len_offset_t offset, uint8_t dump_type)
507 {
508 char buf[FILENAME_MAX];
509 memset(buf, 0, sizeof(buf));
510 static int counter = 0;
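// mYUVDump (persist.camera.dumpimg) packs the dump configuration:
// bits [7:0] dump-type mask, bits [15:8] frame-skip interval,
// bits [31:16] number of frames to dump (defaults to 10, capped at 256).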
511 if (mYUVDump & dump_type) {
512 mFrmNum = ((mYUVDump & 0xffff0000) >> 16);
513 if (mFrmNum == 0) {
514 mFrmNum = 10;
515 }
516 if (mFrmNum > 256) {
517 mFrmNum = 256;
518 }
519 mSkipMode = ((mYUVDump & 0x0000ff00) >> 8);
520 if (mSkipMode == 0) {
521 mSkipMode = 1;
522 }
523 if (mDumpSkipCnt == 0) {
524 mDumpSkipCnt = 1;
525 }
526 if (mDumpSkipCnt % mSkipMode == 0) {
527 if (mDumpFrmCnt < mFrmNum) {
528 /* Note that the image dimension will be the unrotated stream dimension.
529 * If you feel that the image would have been rotated during reprocess
530 * then swap the dimensions while opening the file
531 * */
532 switch (dump_type) {
533 case QCAMERA_DUMP_FRM_PREVIEW:
534 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"p_%d_%d_%dx%d.yuv",
535 counter, frame->frame_idx, dim.width, dim.height);
536 break;
537 case QCAMERA_DUMP_FRM_VIDEO:
538 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"v_%d_%d_%dx%d.yuv",
539 counter, frame->frame_idx, dim.width, dim.height);
540 break;
541 case QCAMERA_DUMP_FRM_INPUT_JPEG:
542 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.yuv",
543 counter, frame->frame_idx, dim.width, dim.height);
544 break;
545 case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
546 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"ir_%d_%d_%dx%d.yuv",
547 counter, frame->frame_idx, dim.width, dim.height);
548 break;
549 case QCAMERA_DUMP_FRM_CALLBACK:
550 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"c_%d_%d_%dx%d.yuv",
551 counter, frame->frame_idx, dim.width, dim.height);
552 break;
553 case QCAMERA_DUMP_FRM_OUTPUT_JPEG:
554 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"j_%d_%d_%dx%d.jpg",
555 counter, frame->frame_idx, dim.width, dim.height);
556 break;
557 default :
558 LOGE("dumping not enabled for stream type %d",dump_type);
559 break;
560 }
561 counter++;
562 int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
563 ssize_t written_len = 0;
564 if (file_fd >= 0) {
565 void *data = NULL;
566 fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
567 if( dump_type == QCAMERA_DUMP_FRM_OUTPUT_JPEG ) {
568 written_len = write(file_fd, frame->buffer, frame->frame_len);
569 }
570 else {
571 for (uint32_t i = 0; i < offset.num_planes; i++) {
572 uint32_t index = offset.mp[i].offset;
573 if (i > 0) {
574 index += offset.mp[i-1].len;
575 }
576 for (int j = 0; j < offset.mp[i].height; j++) {
577 data = (void *)((uint8_t *)frame->buffer + index);
578 written_len += write(file_fd, data,
579 (size_t)offset.mp[i].width);
580 index += (uint32_t)offset.mp[i].stride;
581 }
582 }
583 }
584 LOGH("written number of bytes %ld\n", written_len);
585 mDumpFrmCnt++;
586 frame->cache_flags |= CPU_HAS_READ;
587 close(file_fd);
588 } else {
589 LOGE("failed to open file to dump image");
590 }
591 }
592 } else {
593 mDumpSkipCnt++;
594 }
595 }
596 }
597
598 /*===========================================================================
599 * FUNCTION : isUBWCEnabled
600 *
601 * DESCRIPTION: Function to get UBWC hardware support.
602 *
603 * PARAMETERS : None
604 *
605 * RETURN : TRUE -- UBWC format supported
606 * FALSE -- UBWC is not supported.
607 *==========================================================================*/
608 bool QCamera3Channel::isUBWCEnabled()
609 {
610 #ifdef UBWC_PRESENT
611 char value[PROPERTY_VALUE_MAX];
612 int prop_value = 0;
613 memset(value, 0, sizeof(value));
614 property_get("debug.gralloc.gfx_ubwc_disable", value, "0");
615 prop_value = atoi(value);
616 if (prop_value) {
617 return FALSE;
618 }
619
620 //Disable UBWC if Eztune is enabled
621 //EzTune process CPP output frame and cannot understand UBWC.
622 memset(value, 0, sizeof(value));
623 property_get("persist.camera.eztune.enable", value, "0");
624 prop_value = atoi(value);
625 if (prop_value) {
626 return FALSE;
627 }
628 return TRUE;
629 #else
630 return FALSE;
631 #endif
632 }
633
634 /*===========================================================================
635 * FUNCTION : setUBWCEnabled
636 *
637 * DESCRIPTION: set UBWC enable
638 *
639 * PARAMETERS : UBWC enable value
640 *
641 * RETURN : none
642 *
643 *==========================================================================*/
644 void QCamera3Channel::setUBWCEnabled(bool val)
645 {
646 m_bUBWCenable = val;
647 }
648
649 /*===========================================================================
650 * FUNCTION : getStreamDefaultFormat
651 *
652 * DESCRIPTION: return default buffer format for the stream
653 *
654 * PARAMETERS : type : Stream type
655 *
656 * RETURN : format for stream type
657 *
658 *==========================================================================*/
659 cam_format_t QCamera3Channel::getStreamDefaultFormat(cam_stream_type_t type,
660 uint32_t width, uint32_t height, bool forcePreviewUBWC, cam_is_type_t isType)
661 {
662 cam_format_t streamFormat;
663
664 switch (type) {
665 case CAM_STREAM_TYPE_PREVIEW:
666 if (isUBWCEnabled()) {
667
668 char prop[PROPERTY_VALUE_MAX];
669 int pFormat;
670 memset(prop, 0, sizeof(prop));
671 property_get("persist.camera.preview.ubwc", prop, "1");
672 pFormat = atoi(prop);
673
674 // When goog_zoom is linked to the preview stream, disable ubwc to preview
675 property_get("persist.camera.gzoom.at", prop, "0");
676 bool is_goog_zoom_preview_enabled = ((atoi(prop) & 2) > 0) && isType == IS_TYPE_EIS_3_0;
677
678 if (pFormat == 1 && forcePreviewUBWC && !is_goog_zoom_preview_enabled) {
679 streamFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
680 } else {
681 /* Changed to macro to ensure format sent to gralloc for preview
682 is also changed if the preview format is changed at camera HAL */
683 streamFormat = PREVIEW_STREAM_FORMAT;
684 }
685 } else {
686 /* Changed to macro to ensure format sent to gralloc for preview
687 is also changed if the preview format is changed at camera HAL */
688 streamFormat = PREVIEW_STREAM_FORMAT;
689 }
690 break;
691 case CAM_STREAM_TYPE_VIDEO:
692 {
693 /* Disable UBWC for smaller video resolutions due to CPP downscale
694 limits. Refer cpp_hw_params.h::CPP_DOWNSCALE_LIMIT_UBWC */
695 if (isUBWCEnabled() && (width >= 640) && (height >= 480)) {
696 // When goog_zoom is linked to the video stream, disable ubwc to video
697 char prop[PROPERTY_VALUE_MAX];
698 property_get("persist.camera.gzoom.at", prop, "0");
699 bool is_goog_zoom_video_enabled = ((atoi(prop) & 1) > 0) && isType == IS_TYPE_EIS_3_0;
700
701 property_get("persist.camera.gzoom.4k", prop, "0");
702 bool is_goog_zoom_4k_enabled = (atoi(prop) > 0);
703 bool is_4k_video = (width >= 3840 && height >= 2160);
704
705 if ((QCameraCommon::isVideoUBWCEnabled()) && (!is_goog_zoom_video_enabled
706 || (is_4k_video && !is_goog_zoom_4k_enabled))) {
707 streamFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
708 } else {
709 streamFormat = CAM_FORMAT_YUV_420_NV12_VENUS;
710 }
711 } else {
712 #if VENUS_PRESENT
713 streamFormat = CAM_FORMAT_YUV_420_NV12_VENUS;
714 #else
715 streamFormat = CAM_FORMAT_YUV_420_NV12;
716 #endif
717 }
718 break;
719 }
720 case CAM_STREAM_TYPE_SNAPSHOT:
721 streamFormat = CAM_FORMAT_YUV_420_NV21;
722 break;
723 case CAM_STREAM_TYPE_CALLBACK:
724 /* Changed to macro to ensure format sent to gralloc for callback
725 is also changed if the preview format is changed at camera HAL */
726 streamFormat = CALLBACK_STREAM_FORMAT;
727 break;
728 case CAM_STREAM_TYPE_RAW:
729 streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
730 break;
731 default:
732 streamFormat = CAM_FORMAT_YUV_420_NV21;
733 break;
734 }
735 return streamFormat;
736 }
737
738
739 /* QCamera3ProcessingChannel methods */
740
741 /*===========================================================================
742 * FUNCTION : QCamera3ProcessingChannel
743 *
744 * DESCRIPTION: constructor of QCamera3ProcessingChannel
745 *
746 * PARAMETERS :
747 * @cam_handle : camera handle
748 * @cam_ops : ptr to camera ops table
749 * @cb_routine : callback routine to frame aggregator
750 * @paddingInfo: stream padding info
751 * @userData : HWI handle
752 * @stream : camera3_stream_t structure
753 * @stream_type: Channel stream type
754 * @postprocess_mask: the postprocess mask for streams of this channel
755 * @metadataChannel: handle to the metadataChannel
756 * @numBuffers : number of max dequeued buffers
757 * RETURN : none
758 *==========================================================================*/
759 QCamera3ProcessingChannel::QCamera3ProcessingChannel(uint32_t cam_handle,
760 uint32_t channel_handle,
761 mm_camera_ops_t *cam_ops,
762 channel_cb_routine cb_routine,
763 channel_cb_buffer_err cb_buffer_err,
764 cam_padding_info_t *paddingInfo,
765 void *userData,
766 camera3_stream_t *stream,
767 cam_stream_type_t stream_type,
768 cam_feature_mask_t postprocess_mask,
769 QCamera3Channel *metadataChannel,
770 uint32_t numBuffers) :
771 QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine,
772 cb_buffer_err, paddingInfo, postprocess_mask, userData, numBuffers),
773 m_postprocessor(this),
774 mFrameCount(0),
775 mLastFrameCount(0),
776 mLastFpsTime(0),
777 mMemory(numBuffers),
778 mCamera3Stream(stream),
779 mNumBufs(CAM_MAX_NUM_BUFS_PER_STREAM),
780 mStreamType(stream_type),
781 mPostProcStarted(false),
782 mReprocessType(REPROCESS_TYPE_NONE),
783 mInputBufferConfig(false),
784 m_pMetaChannel(metadataChannel),
785 mMetaFrame(NULL),
786 mOfflineMemory(0),
787 mOfflineMetaMemory(numBuffers + (MAX_REPROCESS_PIPELINE_STAGES - 1))
788 {
789 char prop[PROPERTY_VALUE_MAX];
790 property_get("persist.debug.sf.showfps", prop, "0");
791 mDebugFPS = (uint8_t) atoi(prop);
792
793 int32_t rc = m_postprocessor.init(&mMemory);
794 if (rc != 0) {
795 LOGE("Init Postprocessor failed");
796 }
797 }
798
799 /*===========================================================================
800 * FUNCTION : ~QCamera3ProcessingChannel
801 *
802 * DESCRIPTION: destructor of QCamera3ProcessingChannel
803 *
804 * PARAMETERS : none
805 *
806 * RETURN : none
807 *==========================================================================*/
808 QCamera3ProcessingChannel::~QCamera3ProcessingChannel()
809 {
810 destroy();
811
812 int32_t rc = m_postprocessor.deinit();
813 if (rc != 0) {
814 LOGE("De-init Postprocessor failed");
815 }
816
817 if (0 < mOfflineMetaMemory.getCnt()) {
818 mOfflineMetaMemory.deallocate();
819 }
820 if (0 < mOfflineMemory.getCnt()) {
821 mOfflineMemory.unregisterBuffers();
822 }
823
824 }
825
826 /*===========================================================================
827 * FUNCTION : streamCbRoutine
828 *
829 * DESCRIPTION: stream callback routine; returns the filled buffer to the framework
830 *
831 * PARAMETERS :
832 * @super_frame : the super frame with filled buffer
833 * @stream : stream on which the buffer was requested and filled
834 *
835 * RETURN : none
836 *==========================================================================*/
837 void QCamera3ProcessingChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
838 QCamera3Stream *stream)
839 {
840 if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
841 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PREVIEW_STRM_CB);
842 } else {
843 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_STRM_CB);
844 }
845 //FIXME Q Buf back in case of error?
846 uint8_t frameIndex;
847 buffer_handle_t *resultBuffer;
848 int32_t resultFrameNumber;
849 camera3_stream_buffer_t result;
850 cam_dimension_t dim;
851 cam_frame_len_offset_t offset;
852
853 memset(&dim, 0, sizeof(dim));
854 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
855 if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
856 LOGE("Error with the stream callback");
857 return;
858 }
859
860 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
861 if(frameIndex >= mNumBufs) {
862 LOGE("Error, Invalid index for buffer");
863 stream->bufDone(frameIndex);
864 return;
865 }
866
867 if (mDebugFPS) {
868 showDebugFPS(stream->getMyType());
869 }
870 stream->getFrameDimension(dim);
871 stream->getFrameOffset(offset);
872 if (stream->getMyType() == CAM_STREAM_TYPE_PREVIEW) {
873 dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_PREVIEW);
874 } else if (stream->getMyType() == CAM_STREAM_TYPE_VIDEO) {
875 dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_VIDEO);
876 } else if (stream->getMyType() == CAM_STREAM_TYPE_CALLBACK) {
877 dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_CALLBACK);
878 }
879
880 do {
881
882 //Use below data to issue framework callback
883 resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
884 resultFrameNumber = mMemory.getFrameNumber(frameIndex);
885 uint32_t oldestBufIndex;
886 int32_t lowestFrameNumber = mMemory.getOldestFrameNumber(oldestBufIndex);
887 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
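// An older frame number is still pending on another buffer and we are not in
// constrained high-speed mode: cancel the oldest buffer (if nothing is parked
// yet) and queue this frame in mOutOfSequenceBuffers so results go back in order.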
888 if ((lowestFrameNumber != -1 ) && (lowestFrameNumber < resultFrameNumber) &&
889 hal_obj->mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) {
890 LOGE("Error buffer dropped for framenumber:%d with bufidx:%d",
891 lowestFrameNumber, oldestBufIndex);
892 if (mOutOfSequenceBuffers.empty()) {
893 stream->cancelBuffer(oldestBufIndex);
894 }
895
896 //push in order!
897 auto itr = mOutOfSequenceBuffers.begin();
898 for (; itr != mOutOfSequenceBuffers.end(); itr++) {
899 mm_camera_super_buf_t *super_buf = *itr;
900 uint32_t buf_idx = super_buf->bufs[0]->buf_idx;
901 int32_t frame_num = mMemory.getFrameNumber(buf_idx);
902 if (resultFrameNumber < frame_num) {
903 LOGE("Out of order frame!! set buffer status error flag!");
904 mOutOfSequenceBuffers.insert(itr, super_frame);
905 super_buf->bufs[0]->flags |= V4L2_BUF_FLAG_ERROR;
906 break;
907 }
908 }
909
910 if (itr == mOutOfSequenceBuffers.end()) {
911 LOGE("Add the frame to the end of mOutOfSequenceBuffers");
912 // add the buffer
913 mOutOfSequenceBuffers.push_back(super_frame);
914 }
915 return;
916 }
917
918 if(hal_obj->mStreamConfig == true) {
919 switch (stream->getMyType()) {
920 case CAM_STREAM_TYPE_PREVIEW:
921 LOGH("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
922 break;
923 case CAM_STREAM_TYPE_VIDEO:
924 LOGH("[KPI Perf] : PROFILE_FIRST_VIDEO_FRAME");
925 break;
926 default:
927 break;
928 }
929 hal_obj->mStreamConfig = false;
930 }
931
932 result.stream = mCamera3Stream;
933 result.buffer = resultBuffer;
934 if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
935 result.status = CAMERA3_BUFFER_STATUS_ERROR;
936 LOGW("CAMERA3_BUFFER_STATUS_ERROR for stream_type: %d",
937 mStreams[0]->getMyType());
938 mChannelCbBufErr(this, resultFrameNumber, CAMERA3_BUFFER_STATUS_ERROR, mUserData);
939 } else {
940 result.status = CAMERA3_BUFFER_STATUS_OK;
941 }
942 result.acquire_fence = -1;
943 result.release_fence = -1;
944 if(mPerFrameMapUnmapEnable) {
945 int32_t rc = stream->bufRelease(frameIndex);
946 if (NO_ERROR != rc) {
947 LOGE("Error %d releasing stream buffer %d",
948 rc, frameIndex);
949 }
950
951 rc = mMemory.unregisterBuffer(frameIndex);
952 if (NO_ERROR != rc) {
953 LOGE("Error %d unregistering stream buffer %d",
954 rc, frameIndex);
955 }
956 }
957
958 if (0 <= resultFrameNumber) {
959 if (mChannelCB) {
960 mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, false, mUserData);
961 }
962 } else {
963 LOGE("Bad frame number");
964 }
965 free(super_frame);
966 super_frame = NULL;
967 if (mOutOfSequenceBuffers.empty()) {
968 break;
969 } else {
970 auto itr = mOutOfSequenceBuffers.begin();
971 super_frame = *itr;
972 frameIndex = super_frame->bufs[0]->buf_idx;
973 resultFrameNumber = mMemory.getFrameNumber(frameIndex);
974 lowestFrameNumber = mMemory.getOldestFrameNumber(oldestBufIndex);
975 LOGE("Attempting to recover next frame: result Frame#: %d, resultIdx: %d, "
976 "Lowest Frame#: %d, oldestBufIndex: %d",
977 resultFrameNumber, frameIndex, lowestFrameNumber, oldestBufIndex);
978 if ((lowestFrameNumber != -1) && (lowestFrameNumber < resultFrameNumber)) {
979 LOGE("Multiple frame dropped requesting cancel for frame %d, idx:%d",
980 lowestFrameNumber, oldestBufIndex);
981 stream->cancelBuffer(oldestBufIndex);
982 return;
983 } else if (lowestFrameNumber == resultFrameNumber) {
984 LOGE("Time to flush out head of list continue loop with this new super frame");
985 itr = mOutOfSequenceBuffers.erase(itr);
986 } else {
987 LOGE("Unexpected condition head of list is not the lowest frame number");
988 itr = mOutOfSequenceBuffers.erase(itr);
989 }
990 }
991 } while (1);
992 return;
993 }
994
995 /*===========================================================================
996 * FUNCTION : putStreamBufs
997 *
998 * DESCRIPTION: release the buffers allocated to the stream
999 *
1000 * PARAMETERS : NONE
1001 *
1002 * RETURN : NONE
1003 *==========================================================================*/
1004 void QCamera3YUVChannel::putStreamBufs()
1005 {
1006 QCamera3ProcessingChannel::putStreamBufs();
1007
1008 // Free allocated heap buffer.
1009 mMemory.deallocate();
1010 // Clear free heap buffer list.
1011 mFreeHeapBufferList.clear();
1012 // Clear offlinePpInfoList
1013 mOfflinePpInfoList.clear();
1014 }
1015
1016 /*===========================================================================
1017 * FUNCTION : timeoutFrame
1018 *
1019 * DESCRIPTION: Method to indicate to channel that a given frame has taken too
1020 * long to be generated
1021 *
1022 * PARAMETERS : @frameNumber : frame number of the buffer that is timing out
1023 *
1024 * RETURN : int32_t type of status
1025 * NO_ERROR -- success
1026 * non-zero failure code
1027 *==========================================================================*/
1028 int32_t QCamera3ProcessingChannel::timeoutFrame(uint32_t frameNumber)
1029 {
1030 int32_t bufIdx;
1031
1032 bufIdx = mMemory.getBufferIndex(frameNumber);
1033
1034 if (bufIdx < 0) {
1035 LOGE("%s: Buffer not found for frame:%d", __func__, frameNumber);
1036 return -1;
1037 }
1038
1039 mStreams[0]->timeoutFrame(bufIdx);
1040 return NO_ERROR;
1041 }
1042
1043 /*===========================================================================
1044 * FUNCTION : request
1045 *
1046 * DESCRIPTION: handle the request - either with an input buffer or a direct
1047 * output request
1048 *
1049 * PARAMETERS :
1050 * @buffer : pointer to the output buffer
1051 * @frameNumber : frame number of the request
1052 * @pInputBuffer : pointer to input buffer if an input request
1053 * @metadata : parameters associated with the request
1054 * @internalreq : boolean to indicate if this is a purely internal request
1055 * needing internal buffer allocation
1056 * @meteringonly : boolean indicating a metering-only frame (subset of internal
1057 * requests) that is not consumed by the postprocessor
1058 *
1059 * RETURN : 0 on a success start of capture
1060 * -EINVAL on invalid input
1061 * -ENODEV on serious error
1062 *==========================================================================*/
1063 int32_t QCamera3ProcessingChannel::request(buffer_handle_t *buffer,
1064 uint32_t frameNumber,
1065 camera3_stream_buffer_t* pInputBuffer,
1066 metadata_buffer_t* metadata,
1067 int &indexUsed,
1068 __unused bool internalRequest = false,
1069 __unused bool meteringOnly = false)
1070 {
1071 int32_t rc = NO_ERROR;
1072 int index;
1073
1074 if (NULL == buffer || NULL == metadata) {
1075 LOGE("Invalid buffer/metadata in channel request");
1076 return BAD_VALUE;
1077 }
1078
1079 if (pInputBuffer) {
1080 //need to send to reprocessing
1081 LOGD("Got a request with input buffer, output streamType = %d", mStreamType);
1082 reprocess_config_t reproc_cfg;
1083 cam_dimension_t dim;
1084 memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
1085 memset(&dim, 0, sizeof(dim));
1086 setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim);
1087 startPostProc(reproc_cfg);
1088
1089 qcamera_fwk_input_pp_data_t *src_frame = NULL;
1090 src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
1091 sizeof(qcamera_fwk_input_pp_data_t));
1092 if (src_frame == NULL) {
1093 LOGE("No memory for src frame");
1094 return NO_MEMORY;
1095 }
1096 rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata, buffer, frameNumber);
1097 if (NO_ERROR != rc) {
1098 LOGE("Error %d while setting framework input PP data", rc);
1099 free(src_frame);
1100 return rc;
1101 }
1102 LOGH("Post-process started");
1103 m_postprocessor.processData(src_frame);
1104 } else {
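// Direct output request: register the framework buffer on the fly if
// needed, tag it with the frame number and queue it to the stream.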
1105 index = mMemory.getMatchBufIndex((void*)buffer);
1106 if(index < 0) {
1107 rc = registerBuffer(buffer, mIsType);
1108 if (NO_ERROR != rc) {
1109 LOGE("On-the-fly buffer registration failed %d",
1110 rc);
1111 return rc;
1112 }
1113
1114 index = mMemory.getMatchBufIndex((void*)buffer);
1115 if (index < 0) {
1116 LOGE("Could not find object among registered buffers");
1117 return DEAD_OBJECT;
1118 }
1119 }
1120 rc = mMemory.markFrameNumber(index, frameNumber);
1121 if(rc != NO_ERROR) {
1122 LOGE("Error marking frame number:%d for index %d", frameNumber,
1123 index);
1124 return rc;
1125 }
1126 if (m_bIsActive) {
1127 rc = mStreams[0]->bufDone(index);
1128 if(rc != NO_ERROR) {
1129 LOGE("Failed to Q new buffer to stream");
1130 mMemory.markFrameNumber(index, -1);
1131 return rc;
1132 }
1133 }
1134 indexUsed = index;
1135 }
1136 return rc;
1137 }
1138
1139 /*===========================================================================
1140 * FUNCTION : initialize
1141 *
1142 * DESCRIPTION: allocate offline metadata buffers for input reprocess and reset internal buffer lists
1143 *
1144 * PARAMETERS : isType : type of image stabilization on the buffer
1145 *
1146 * RETURN : int32_t type of status
1147 * NO_ERROR -- success
1148 * non-zero failure code
1149 *==========================================================================*/
1150 int32_t QCamera3ProcessingChannel::initialize(__unused cam_is_type_t isType)
1151 {
1152 int32_t rc = NO_ERROR;
1153 rc = mOfflineMetaMemory.allocateAll(sizeof(metadata_buffer_t));
1154 if (rc == NO_ERROR) {
1155 Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1156 mFreeOfflineMetaBuffersList.clear();
1157 for (uint32_t i = 0; i < mNumBuffers + (MAX_REPROCESS_PIPELINE_STAGES - 1);
1158 i++) {
1159 mFreeOfflineMetaBuffersList.push_back(i);
1160 }
1161 } else {
1162 LOGE("Could not allocate offline meta buffers for input reprocess");
1163 }
1164 mOutOfSequenceBuffers.clear();
1165 return rc;
1166 }
1167
1168 /*===========================================================================
1169 * FUNCTION : registerBuffer
1170 *
1171 * DESCRIPTION: register streaming buffer to the channel object
1172 *
1173 * PARAMETERS :
1174 * @buffer : buffer to be registered
1175 * @isType : image stabilization type on the stream
1176 *
1177 * RETURN : int32_t type of status
1178 * NO_ERROR -- success
1179 * non-zero failure code
1180 *==========================================================================*/
1181 int32_t QCamera3ProcessingChannel::registerBuffer(buffer_handle_t *buffer,
1182 cam_is_type_t isType)
1183 {
1184 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_REG_BUF);
1185 int rc = 0;
1186 mIsType = isType;
1187 cam_stream_type_t streamType;
1188
1189 if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
1190 LOGE("Trying to register more buffers than initially requested");
1191 return BAD_VALUE;
1192 }
1193
1194 if (0 == m_numStreams) {
1195 rc = initialize(mIsType);
1196 if (rc != NO_ERROR) {
1197 LOGE("Couldn't initialize camera stream %d", rc);
1198 return rc;
1199 }
1200 }
1201
1202 streamType = mStreams[0]->getMyType();
1203 rc = mMemory.registerBuffer(buffer, streamType);
1204 if (ALREADY_EXISTS == rc) {
1205 return NO_ERROR;
1206 } else if (NO_ERROR != rc) {
1207 LOGE("Buffer %p couldn't be registered %d", buffer, rc);
1208 return rc;
1209 }
1210
1211 return rc;
1212 }
1213
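/*===========================================================================
* FUNCTION : registerBufferAndGetBufDef
*
* DESCRIPTION: register a framework buffer with the channel if needed and
* fill out the corresponding mm_camera_buf_def_t
*
* PARAMETERS :
* @buffer : framework buffer to be registered
* @frame : buffer definition to be filled out
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/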
1214 int32_t QCamera3ProcessingChannel::registerBufferAndGetBufDef(buffer_handle_t *buffer,
1215 mm_camera_buf_def_t *frame)
1216 {
1217 if (buffer == nullptr || frame == nullptr) {
1218 ALOGE("%s: buffer and frame cannot be nullptr.", __FUNCTION__);
1219 return BAD_VALUE;
1220 }
1221
1222 status_t rc;
1223
1224 // Get the buffer index.
1225 int index = mMemory.getMatchBufIndex((void*)buffer);
1226 if(index < 0) {
1227 // Register the buffer if it was not registered.
1228 rc = registerBuffer(buffer, mIsType);
1229 if (rc != OK) {
1230 ALOGE("%s: Registering buffer failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1231 return rc;
1232 }
1233
1234 index = mMemory.getMatchBufIndex((void*)buffer);
1235 if (index < 0) {
1236 ALOGE("%s: Could not find object among registered buffers", __FUNCTION__);
1237 return DEAD_OBJECT;
1238 }
1239 }
1240
1241 cam_frame_len_offset_t offset = {};
1242 mStreams[0]->getFrameOffset(offset);
1243
1244 // Get the buffer def.
1245 rc = mMemory.getBufDef(offset, *frame, index, mMapStreamBuffers);
1246 if (rc != 0) {
1247 ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc);
1248 return rc;
1249 }
1250
1251 // Set the frame's stream ID because it's not set in getBufDef.
1252 frame->stream_id = mStreams[0]->getMyHandle();
1253 return 0;
1254 }
1255
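/*===========================================================================
* FUNCTION : unregisterBuffer
*
* DESCRIPTION: unregister a previously registered stream buffer from the
* channel memory
*
* PARAMETERS :
* @frame : buffer definition of the buffer to be unregistered
*
* RETURN : none
*==========================================================================*/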
1256 void QCamera3ProcessingChannel::unregisterBuffer(mm_camera_buf_def_t *frame)
1257 {
1258 if (frame == nullptr) {
1259 ALOGE("%s: frame is nullptr", __FUNCTION__);
1260 return;
1261 }
1262
1263 mMemory.unregisterBuffer(frame->buf_idx);
1264 }
1265
1266 /*===========================================================================
1267 * FUNCTION : setFwkInputPPData
1268 *
1269 * DESCRIPTION: fill out the framework src frame information for reprocessing
1270 *
1271 * PARAMETERS :
1272 * @src_frame : input pp data to be filled out
1273 * @pInputBuffer : input buffer for reprocessing
1274 * @reproc_cfg : pointer to the reprocess config
1275 * @metadata : pointer to the metadata buffer
1276 * @output_buffer : output buffer for reprocessing; could be NULL if not
1277 * framework allocated
1278 * @frameNumber : frame number of the request
1279 *
1280 * RETURN : int32_t type of status
1281 * NO_ERROR -- success
1282 * non-zero failure code
1283 *==========================================================================*/
1284 int32_t QCamera3ProcessingChannel::setFwkInputPPData(qcamera_fwk_input_pp_data_t *src_frame,
1285 camera3_stream_buffer_t *pInputBuffer, reprocess_config_t *reproc_cfg,
1286 metadata_buffer_t *metadata, buffer_handle_t *output_buffer,
1287 uint32_t frameNumber)
1288 {
1289 int32_t rc = NO_ERROR;
1290 int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
1291 if(input_index < 0) {
1292 rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType);
1293 if (NO_ERROR != rc) {
1294 LOGE("On-the-fly input buffer registration failed %d",
1295 rc);
1296 return rc;
1297 }
1298 input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
1299 if (input_index < 0) {
1300 LOGE("Could not find object among registered buffers");
1301 return DEAD_OBJECT;
1302 }
1303 }
1304 mOfflineMemory.markFrameNumber(input_index, frameNumber);
1305
1306 src_frame->src_frame = *pInputBuffer;
1307 rc = mOfflineMemory.getBufDef(reproc_cfg->input_stream_plane_info.plane_info,
1308 src_frame->input_buffer, input_index, mMapStreamBuffers);
1309 if (rc != 0) {
1310 return rc;
1311 }
1312 dumpYUV(&src_frame->input_buffer, reproc_cfg->input_stream_dim,
1313 reproc_cfg->input_stream_plane_info.plane_info, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
1314 cam_dimension_t dim = {sizeof(metadata_buffer_t), 1};
1315 cam_stream_buf_plane_info_t meta_planes;
1316 rc = mm_stream_calc_offset_metadata(&dim, &mPaddingInfo, &meta_planes);
1317 if (rc != 0) {
1318 LOGE("Metadata stream plane info calculation failed!");
1319 return rc;
1320 }
1321 uint32_t metaBufIdx;
1322 {
1323 Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1324 if (mFreeOfflineMetaBuffersList.empty()) {
1325 LOGE("mFreeOfflineMetaBuffersList is null. Fatal");
1326 return BAD_VALUE;
1327 }
1328
1329 metaBufIdx = *(mFreeOfflineMetaBuffersList.begin());
1330 mFreeOfflineMetaBuffersList.erase(mFreeOfflineMetaBuffersList.begin());
1331 LOGD("erasing %d, mFreeOfflineMetaBuffersList.size %d", metaBufIdx,
1332 mFreeOfflineMetaBuffersList.size());
1333 }
1334
1335 mOfflineMetaMemory.markFrameNumber(metaBufIdx, frameNumber);
1336
1337 mm_camera_buf_def_t meta_buf;
1338 cam_frame_len_offset_t offset = meta_planes.plane_info;
1339 rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, metaBufIdx, true /*virtualAddr*/);
1340 if (NO_ERROR != rc) {
1341 return rc;
1342 }
1343 memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
1344 src_frame->metadata_buffer = meta_buf;
1345 src_frame->reproc_config = *reproc_cfg;
1346 src_frame->output_buffer = output_buffer;
1347 src_frame->frameNumber = frameNumber;
1348 return rc;
1349 }
1350
1351 /*===========================================================================
1352 * FUNCTION : checkStreamCbErrors
1353 *
1354 * DESCRIPTION: check the stream callback for errors
1355 *
1356 * PARAMETERS :
1357 * @super_frame : the super frame with filled buffer
1358 * @stream : stream on which the buffer was requested and filled
1359 *
1360 * RETURN : int32_t type of status
1361 * NO_ERROR -- success
1362 * non-zero failure code
1363 *==========================================================================*/
1364 int32_t QCamera3ProcessingChannel::checkStreamCbErrors(mm_camera_super_buf_t *super_frame,
1365 QCamera3Stream *stream)
1366 {
1367 if (NULL == stream) {
1368 LOGE("Invalid stream");
1369 return BAD_VALUE;
1370 }
1371
1372 if(NULL == super_frame) {
1373 LOGE("Invalid Super buffer");
1374 return BAD_VALUE;
1375 }
1376
1377 if(super_frame->num_bufs != 1) {
1378 LOGE("Multiple streams are not supported");
1379 return BAD_VALUE;
1380 }
1381 if(NULL == super_frame->bufs[0]) {
1382 LOGE("Error, Super buffer frame does not contain valid buffer");
1383 return BAD_VALUE;
1384 }
1385 return NO_ERROR;
1386 }
1387
1388 /*===========================================================================
1389 * FUNCTION : getStreamSize
1390 *
1391 * DESCRIPTION: get the size from the camera3_stream_t for the channel
1392 *
1393 * PARAMETERS :
1394 * @dim : Return the size of the stream
1395 *
1396 * RETURN : int32_t type of status
1397 * NO_ERROR -- success
1398 * non-zero failure code
1399 *==========================================================================*/
1400 int32_t QCamera3ProcessingChannel::getStreamSize(cam_dimension_t &dim)
1401 {
1402 if (mCamera3Stream) {
1403 dim.width = mCamera3Stream->width;
1404 dim.height = mCamera3Stream->height;
1405 return NO_ERROR;
1406 } else {
1407 return BAD_VALUE;
1408 }
1409 }
1410
1411 /*===========================================================================
1412 * FUNCTION : getStreamBufs
1413 *
1414 * DESCRIPTION: get the buffers allocated to the stream
1415 *
1416 * PARAMETERS :
1417 * @len : buffer length
1418 *
1419 * RETURN : int32_t type of status
1420 * NO_ERROR -- success
1421 * non-zero failure code
1422 *==========================================================================*/
1423 QCamera3StreamMem* QCamera3ProcessingChannel::getStreamBufs(uint32_t /*len*/)
1424 {
1425 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GETSTREAMBUFS);
1426 return &mMemory;
1427 }
1428
1429 /*===========================================================================
1430 * FUNCTION : putStreamBufs
1431 *
1432 * DESCRIPTION: release the buffers allocated to the stream
1433 *
1434 * PARAMETERS : NONE
1435 *
1436 * RETURN : NONE
1437 *==========================================================================*/
1438 void QCamera3ProcessingChannel::putStreamBufs()
1439 {
1440 mMemory.unregisterBuffers();
1441
1442 /* Reclaim all the offline metabuffers and push them to free list */
1443 {
1444 Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1445 mFreeOfflineMetaBuffersList.clear();
1446 for (uint32_t i = 0; i < mOfflineMetaMemory.getCnt(); i++) {
1447 mFreeOfflineMetaBuffersList.push_back(i);
1448 }
1449 }
1450 }
1451
1452
1453 /*===========================================================================
1454 * FUNCTION : stop
1455 *
1456 * DESCRIPTION: stop processing channel, which will stop all streams within,
1457 * including the reprocessing channel in postprocessor.
1458 *
1459 * PARAMETERS : none
1460 *
1461 * RETURN : int32_t type of status
1462 * NO_ERROR -- success
1463 * non-zero failure code
1464 *==========================================================================*/
1465 int32_t QCamera3ProcessingChannel::stop()
1466 {
1467 if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
1468 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
1469 }
1470 int32_t rc = NO_ERROR;
1471 if(!m_bIsActive) {
1472 LOGE("Attempt to stop inactive channel");
1473 return rc;
1474 }
1475
1476 m_postprocessor.stop();
1477 mPostProcStarted = false;
1478 rc |= QCamera3Channel::stop();
1479 return rc;
1480 }
1481
1482 /*===========================================================================
1483 * FUNCTION : startPostProc
1484 *
1485 * DESCRIPTION: figure out if the postprocessor needs to be restarted and if yes
1486 * start it
1487 *
1488 * PARAMETERS :
1489 * @inputBufExists : whether there is an input buffer for post processing
1490 * @config : reprocessing configuration
1491 * @metadata : metadata associated with the reprocessing request
1492 *
1493 * RETURN : NONE
1494 *==========================================================================*/
1495 void QCamera3ProcessingChannel::startPostProc(const reprocess_config_t &config)
1496 {
1497 if (mPostProcStarted) {
1498 if (config.reprocess_type != mReprocessType) {
1499 // If the reprocess type doesn't match, stop and start with the new type
1500 m_postprocessor.stop();
1501 mPostProcStarted = false;
1502 } else {
1503 // Return if reprocess type is the same.
1504 return;
1505 }
1506 }
1507
1508 m_postprocessor.start(config);
1509 mPostProcStarted = true;
1510 mReprocessType = config.reprocess_type;
1511 }
1512
1513 /*===========================================================================
1514 * FUNCTION : queueReprocMetadata
1515 *
1516 * DESCRIPTION: queue the reprocess metadata to the postprocessor
1517 *
1518 * PARAMETERS : metadata : the metadata corresponding to the pp frame
1519 *
1520 * RETURN : int32_t type of status
1521 * NO_ERROR -- success
1522 * non-zero failure code
1523 *==========================================================================*/
1524 int32_t QCamera3ProcessingChannel::queueReprocMetadata(mm_camera_super_buf_t *metadata)
1525 {
1526 return m_postprocessor.processPPMetadata(metadata);
1527 }
1528
1529 /*===========================================================================
1530 * FUNCTION : metadataBufDone
1531 *
1532 * DESCRIPTION: Buffer done method for a metadata buffer
1533 *
1534 * PARAMETERS :
1535 * @recvd_frame : received metadata frame
1536 *
1537 * RETURN : int32_t type of status
1538 * NO_ERROR -- success
1539 * non-zero failure code
1540 *==========================================================================*/
1541 int32_t QCamera3ProcessingChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
1542 {
1543 int32_t rc = NO_ERROR;
1544 if ((NULL == m_pMetaChannel) || (NULL == recvd_frame)) {
1545 LOGE("Metadata channel or metadata buffer invalid");
1546 return BAD_VALUE;
1547 }
1548
1549 rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);
1550
1551 return rc;
1552 }
1553
1554 /*===========================================================================
1555 * FUNCTION : translateStreamTypeAndFormat
1556 *
1557 * DESCRIPTION: translates the framework stream format into HAL stream type
1558 * and format
1559 *
1560 * PARAMETERS :
1561 * @streamType : translated stream type
1562 * @streamFormat : translated stream format
1563 * @stream : fwk stream
1564 *
1565 * RETURN : int32_t type of status
1566 * NO_ERROR -- success
1567 * non-zero failure code
1568 *==========================================================================*/
1569 int32_t QCamera3ProcessingChannel::translateStreamTypeAndFormat(camera3_stream_t *stream,
1570 cam_stream_type_t &streamType, cam_format_t &streamFormat)
1571 {
1572 switch (stream->format) {
1573 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1574 if(stream->stream_type == CAMERA3_STREAM_INPUT){
1575 streamType = CAM_STREAM_TYPE_SNAPSHOT;
1576 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
1577 stream->width, stream->height, m_bUBWCenable, mIsType);
1578 } else {
1579 streamType = CAM_STREAM_TYPE_CALLBACK;
1580 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_CALLBACK,
1581 stream->width, stream->height, m_bUBWCenable, mIsType);
1582 }
1583 break;
1584 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1585 if (stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
1586 streamType = CAM_STREAM_TYPE_VIDEO;
1587 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_VIDEO,
1588 stream->width, stream->height, m_bUBWCenable, mIsType);
1589 } else if(stream->stream_type == CAMERA3_STREAM_INPUT ||
1590 stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1591 IS_USAGE_ZSL(stream->usage)){
1592 streamType = CAM_STREAM_TYPE_SNAPSHOT;
1593 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
1594 stream->width, stream->height, m_bUBWCenable, mIsType);
1595 } else {
1596 streamType = CAM_STREAM_TYPE_PREVIEW;
1597 streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_PREVIEW,
1598 stream->width, stream->height, m_bUBWCenable, mIsType);
1599 }
1600 break;
1601 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1602 case HAL_PIXEL_FORMAT_RAW16:
1603 case HAL_PIXEL_FORMAT_RAW10:
1604 streamType = CAM_STREAM_TYPE_RAW;
1605 if ((HAL_DATASPACE_DEPTH == stream->data_space) &&
1606 (HAL_PIXEL_FORMAT_RAW16 == stream->format)) {
1607 streamFormat = CAM_FORMAT_META_RAW_10BIT;
1608 } else {
1609 streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
1610 }
1611 break;
1612 default:
1613 return -EINVAL;
1614 }
1615 LOGD("fwk_format = %d, streamType = %d, streamFormat = %d",
1616 stream->format, streamType, streamFormat);
1617 return NO_ERROR;
1618 }
1619
1620 /*===========================================================================
1621 * FUNCTION : setReprocConfig
1622 *
1623 * DESCRIPTION: sets the reprocessing parameters for the input buffer
1624 *
1625 * PARAMETERS :
1626 * @reproc_cfg : the configuration to be set
1627 * @pInputBuffer : pointer to the input buffer
1628 * @metadata : pointer to the reprocessing metadata buffer
1629 * @streamFormat : format of the input stream
1630 *
1631 * RETURN : int32_t type of status
1632 * NO_ERROR -- success
1633 * non-zero failure code
1634 *==========================================================================*/
setReprocConfig(reprocess_config_t & reproc_cfg,camera3_stream_buffer_t * pInputBuffer,__unused metadata_buffer_t * metadata,cam_format_t streamFormat,cam_dimension_t dim)1635 int32_t QCamera3ProcessingChannel::setReprocConfig(reprocess_config_t &reproc_cfg,
1636 camera3_stream_buffer_t *pInputBuffer,
1637 __unused metadata_buffer_t *metadata,
1638 cam_format_t streamFormat, cam_dimension_t dim)
1639 {
1640 int32_t rc = 0;
1641 reproc_cfg.padding = &mPaddingInfo;
1642 //to ensure a big enough buffer size set the height and width
1643 //padding to max(height padding, width padding)
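    //(e.g. width_padding 64 and height_padding 32 both become 64). Note that
    //reproc_cfg.padding points at mPaddingInfo, so this also updates the
    //channel's stored padding info.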
1644 if (reproc_cfg.padding->height_padding > reproc_cfg.padding->width_padding) {
1645 reproc_cfg.padding->width_padding = reproc_cfg.padding->height_padding;
1646 } else {
1647 reproc_cfg.padding->height_padding = reproc_cfg.padding->width_padding;
1648 }
1649 if (NULL != pInputBuffer) {
1650 reproc_cfg.input_stream_dim.width = (int32_t)pInputBuffer->stream->width;
1651 reproc_cfg.input_stream_dim.height = (int32_t)pInputBuffer->stream->height;
1652 } else {
1653 reproc_cfg.input_stream_dim.width = (int32_t)dim.width;
1654 reproc_cfg.input_stream_dim.height = (int32_t)dim.height;
1655 }
1656 reproc_cfg.src_channel = this;
1657 reproc_cfg.output_stream_dim.width = mCamera3Stream->width;
1658 reproc_cfg.output_stream_dim.height = mCamera3Stream->height;
1659 reproc_cfg.reprocess_type = getReprocessType();
1660
1661 //offset calculation
1662 if (NULL != pInputBuffer) {
1663 rc = translateStreamTypeAndFormat(pInputBuffer->stream,
1664 reproc_cfg.stream_type, reproc_cfg.stream_format);
1665 if (rc != NO_ERROR) {
1666 LOGE("Stream format %d is not supported",
1667 pInputBuffer->stream->format);
1668 return rc;
1669 }
1670 } else {
1671 reproc_cfg.stream_type = mStreamType;
1672 reproc_cfg.stream_format = streamFormat;
1673 }
1674
1675 switch (reproc_cfg.stream_type) {
1676 case CAM_STREAM_TYPE_PREVIEW:
1677 if (getStreamByIndex(0) == NULL) {
1678 LOGE("Could not find stream");
1679 rc = -1;
1680 break;
1681 }
1682 rc = mm_stream_calc_offset_preview(
1683 getStreamByIndex(0)->getStreamInfo(),
1684 &reproc_cfg.input_stream_dim,
1685 reproc_cfg.padding,
1686 &reproc_cfg.input_stream_plane_info);
1687 break;
1688 case CAM_STREAM_TYPE_VIDEO:
1689 rc = mm_stream_calc_offset_video(reproc_cfg.stream_format,
1690 &reproc_cfg.input_stream_dim,
1691 &reproc_cfg.input_stream_plane_info);
1692 break;
1693 case CAM_STREAM_TYPE_RAW:
1694 rc = mm_stream_calc_offset_raw(reproc_cfg.stream_format,
1695 &reproc_cfg.input_stream_dim,
1696 reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
1697 break;
1698 case CAM_STREAM_TYPE_SNAPSHOT:
1699 case CAM_STREAM_TYPE_CALLBACK:
1700 default:
1701 rc = mm_stream_calc_offset_snapshot(streamFormat, &reproc_cfg.input_stream_dim,
1702 reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
1703 break;
1704 }
1705 if (rc != 0) {
1706 LOGE("Stream %d plane info calculation failed!", mStreamType);
1707 return rc;
1708 }
1709
1710 IF_META_AVAILABLE(cam_hdr_param_t, hdr_info, CAM_INTF_PARM_HAL_BRACKETING_HDR, metadata) {
1711 reproc_cfg.hdr_param = *hdr_info;
1712 }
1713
1714 return rc;
1715 }
1716
1717 /*===========================================================================
1718 * FUNCTION : reprocessCbRoutine
1719 *
1720 * DESCRIPTION: callback function for the reprocessed frame. This frame now
1721 * should be returned to the framework
1722 *
1723 * PARAMETERS :
1724 * @resultBuffer : buffer containing the reprocessed data
1725 * @resultFrameNumber : frame number on which the buffer was requested
1726 *
1727 * RETURN : NONE
1728 *
1729 *==========================================================================*/
reprocessCbRoutine(buffer_handle_t * resultBuffer,uint32_t resultFrameNumber)1730 void QCamera3ProcessingChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer,
1731 uint32_t resultFrameNumber)
1732 {
1733 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_REPROC_CB);
1734 int rc = NO_ERROR;
1735
1736 rc = releaseOfflineMemory(resultFrameNumber);
1737 if (NO_ERROR != rc) {
1738 LOGE("Error releasing offline memory %d", rc);
1739 }
1740 /* Since reprocessing is done, send the callback to release the input buffer */
1741 if (mChannelCB) {
1742 mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
1743 }
1744 issueChannelCb(resultBuffer, resultFrameNumber);
1745
1746 return;
1747 }
1748
1749 /*===========================================================================
1750 * FUNCTION : issueChannelCb
1751 *
1752 * DESCRIPTION: function to set the result and issue channel callback
1753 *
1754 * PARAMETERS :
1755 * @resultBuffer : buffer containing the data
1756 * @resultFrameNumber : frame number on which the buffer was requested
1757 *
1758 * RETURN : NONE
1759 *
1760 *
1761 *==========================================================================*/
issueChannelCb(buffer_handle_t * resultBuffer,uint32_t resultFrameNumber)1762 void QCamera3ProcessingChannel::issueChannelCb(buffer_handle_t *resultBuffer,
1763 uint32_t resultFrameNumber)
1764 {
1765 camera3_stream_buffer_t result;
1766 //Use below data to issue framework callback
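    // acquire_fence/release_fence of -1 indicate that no fence wait is needed
    // before the framework consumes the returned buffer.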
1767 result.stream = mCamera3Stream;
1768 result.buffer = resultBuffer;
1769 result.status = CAMERA3_BUFFER_STATUS_OK;
1770 result.acquire_fence = -1;
1771 result.release_fence = -1;
1772
1773 if (mChannelCB) {
1774 mChannelCB(NULL, &result, resultFrameNumber, false, mUserData);
1775 }
1776 }
1777
1778 /*===========================================================================
1779 * FUNCTION : showDebugFPS
1780 *
1781 * DESCRIPTION: Function to log the fps for preview, video, callback and raw
1782 * streams
1783 *
1784 * PARAMETERS :
 * @streamType : type of stream whose FPS is being logged
1785 *
1786 * RETURN : None
1787 *==========================================================================*/
showDebugFPS(int32_t streamType)1788 void QCamera3ProcessingChannel::showDebugFPS(int32_t streamType)
1789 {
1790 double fps = 0;
1791 mFrameCount++;
1792 nsecs_t now = systemTime();
1793 nsecs_t diff = now - mLastFpsTime;
1794 if (diff > ms2ns(250)) {
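        // fps = frames seen in this logging window / window length in seconds,
        // e.g. 15 new frames over a 0.5 s window -> 30.0 fps. The window is
        // whatever has elapsed since the last log, once it exceeds 250 ms.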
1795 fps = (((double)(mFrameCount - mLastFrameCount)) *
1796 (double)(s2ns(1))) / (double)diff;
1797 switch(streamType) {
1798 case CAM_STREAM_TYPE_PREVIEW:
1799 LOGH("PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f: mFrameCount=%d",
1800 fps, mFrameCount);
1801 break;
1802 case CAM_STREAM_TYPE_VIDEO:
1803 LOGH("PROFILE_VIDEO_FRAMES_PER_SECOND : %.4f",
1804 fps);
1805 break;
1806 case CAM_STREAM_TYPE_CALLBACK:
1807 LOGH("PROFILE_CALLBACK_FRAMES_PER_SECOND : %.4f",
1808 fps);
1809 break;
1810 case CAM_STREAM_TYPE_RAW:
1811 LOGH("PROFILE_RAW_FRAMES_PER_SECOND : %.4f",
1812 fps);
1813 break;
1814 default:
1815 LOGH("logging not supported for the stream");
1816 break;
1817 }
1818 mLastFpsTime = now;
1819 mLastFrameCount = mFrameCount;
1820 }
1821 }
1822
1823 /*===========================================================================
1824 * FUNCTION : releaseOfflineMemory
1825 *
1826 * DESCRIPTION: function to clean up the offline memory used for input reprocess
1827 *
1828 * PARAMETERS :
1829 * @resultFrameNumber : frame number on which the buffer was requested
1830 *
1831 * RETURN : int32_t type of status
1832 * NO_ERROR -- success
1833 * non-zero failure code
1834 *
1835 *
1836 *==========================================================================*/
releaseOfflineMemory(uint32_t resultFrameNumber)1837 int32_t QCamera3ProcessingChannel::releaseOfflineMemory(uint32_t resultFrameNumber)
1838 {
1839 int32_t rc = NO_ERROR;
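    // Two pieces of offline memory are tied to an input-reprocess request:
    // the registered gralloc input buffer (unregistered here) and the heap
    // metadata copy (returned to the free list below for reuse).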
1840 int32_t inputBufIndex =
1841 mOfflineMemory.getGrallocBufferIndex(resultFrameNumber);
1842 if (0 <= inputBufIndex) {
1843 rc = mOfflineMemory.unregisterBuffer(inputBufIndex);
1844 } else {
1845 LOGW("Could not find offline input buffer, resultFrameNumber %d",
1846 resultFrameNumber);
1847 }
1848 if (rc != NO_ERROR) {
1849 LOGE("Failed to unregister offline input buffer");
1850 }
1851
1852 int32_t metaBufIndex =
1853 mOfflineMetaMemory.getHeapBufferIndex(resultFrameNumber);
1854 if (0 <= metaBufIndex) {
1855 Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
1856 mFreeOfflineMetaBuffersList.push_back((uint32_t)metaBufIndex);
1857 } else {
1858 LOGW("Could not find offline meta buffer, resultFrameNumber %d",
1859 resultFrameNumber);
1860 }
1861
1862 return rc;
1863 }
1864
1865 /* Regular Channel methods */
1866 /*===========================================================================
1867 * FUNCTION : QCamera3RegularChannel
1868 *
1869 * DESCRIPTION: constructor of QCamera3RegularChannel
1870 *
1871 * PARAMETERS :
1872 * @cam_handle : camera handle
1873 * @cam_ops : ptr to camera ops table
1874 * @cb_routine : callback routine to frame aggregator
1875 * @stream : camera3_stream_t structure
1876 * @stream_type: Channel stream type
1877 * @postprocess_mask: feature mask for postprocessing
1878 * @metadataChannel : metadata channel for the session
1879 * @numBuffers : number of max dequeued buffers
1880 *
1881 * RETURN : none
1882 *==========================================================================*/
QCamera3RegularChannel(uint32_t cam_handle,uint32_t channel_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,channel_cb_buffer_err cb_buffer_err,cam_padding_info_t * paddingInfo,void * userData,camera3_stream_t * stream,cam_stream_type_t stream_type,cam_feature_mask_t postprocess_mask,QCamera3Channel * metadataChannel,uint32_t numBuffers)1883 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
1884 uint32_t channel_handle,
1885 mm_camera_ops_t *cam_ops,
1886 channel_cb_routine cb_routine,
1887 channel_cb_buffer_err cb_buffer_err,
1888 cam_padding_info_t *paddingInfo,
1889 void *userData,
1890 camera3_stream_t *stream,
1891 cam_stream_type_t stream_type,
1892 cam_feature_mask_t postprocess_mask,
1893 QCamera3Channel *metadataChannel,
1894 uint32_t numBuffers) :
1895 QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops,
1896 cb_routine, cb_buffer_err, paddingInfo, userData, stream, stream_type,
1897 postprocess_mask, metadataChannel, numBuffers),
1898 mBatchSize(0),
1899 mRotation(ROTATE_0)
1900 {
1901 }
1902
1903 /*===========================================================================
1904 * FUNCTION : ~QCamera3RegularChannel
1905 *
1906 * DESCRIPTION: destructor of QCamera3RegularChannel
1907 *
1908 * PARAMETERS : none
1909 *
1910 * RETURN : none
1911 *==========================================================================*/
~QCamera3RegularChannel()1912 QCamera3RegularChannel::~QCamera3RegularChannel()
1913 {
1914 destroy();
1915 }
1916
1917 /*===========================================================================
1918 * FUNCTION : initialize
1919 *
1920 * DESCRIPTION: Initialize and add camera channel & stream
1921 *
1922 * PARAMETERS :
1923 * @isType : type of image stabilization required on this stream
1924 *
1925 * RETURN : int32_t type of status
1926 * NO_ERROR -- success
1927 * non-zero failure code
1928 *==========================================================================*/
1929
initialize(cam_is_type_t isType)1930 int32_t QCamera3RegularChannel::initialize(cam_is_type_t isType)
1931 {
1932 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REG_CH_INIT);
1933 int32_t rc = NO_ERROR;
1934
1935 cam_dimension_t streamDim;
1936
1937 if (NULL == mCamera3Stream) {
1938 LOGE("Camera stream uninitialized");
1939 return NO_INIT;
1940 }
1941
1942 if (1 <= m_numStreams) {
1943 // Only one stream per channel supported in v3 Hal
1944 return NO_ERROR;
1945 }
1946
1947 mIsType = isType;
1948
1949 rc = translateStreamTypeAndFormat(mCamera3Stream, mStreamType,
1950 mStreamFormat);
1951 if (rc != NO_ERROR) {
1952 return -EINVAL;
1953 }
1954
1955
1956 if ((mStreamType == CAM_STREAM_TYPE_VIDEO) ||
1957 (mStreamType == CAM_STREAM_TYPE_PREVIEW)) {
1958 if ((mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) &&
1959 ((mPostProcMask & CAM_QCOM_FEATURE_ROTATION) == 0)) {
1960 LOGE("attempting rotation %d when rotation is disabled",
1961 mCamera3Stream->rotation);
1962 return -EINVAL;
1963 }
1964
1965 switch (mCamera3Stream->rotation) {
1966 case CAMERA3_STREAM_ROTATION_0:
1967 mRotation = ROTATE_0;
1968 break;
1969 case CAMERA3_STREAM_ROTATION_90: {
1970 mRotation = ROTATE_90;
1971 break;
1972 }
1973 case CAMERA3_STREAM_ROTATION_180:
1974 mRotation = ROTATE_180;
1975 break;
1976 case CAMERA3_STREAM_ROTATION_270: {
1977 mRotation = ROTATE_270;
1978 break;
1979 }
1980 default:
1981 LOGE("Unknown rotation: %d",
1982 mCamera3Stream->rotation);
1983 return -EINVAL;
1984 }
1985
1986 // The Camera3/HAL3 spec expects counter-clockwise rotation, but the CPP
1987 // hardware rotates clockwise, so swap 90 and 270 here.
1988 if (mRotation == ROTATE_90) {
1989 mRotation = ROTATE_270;
1990 } else if (mRotation == ROTATE_270) {
1991 mRotation = ROTATE_90;
1992 }
1993
1994 } else if (mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) {
1995 LOGE("Rotation %d is not supported by stream type %d",
1996 mCamera3Stream->rotation,
1997 mStreamType);
1998 return -EINVAL;
1999 }
2000
2001 streamDim.width = mCamera3Stream->width;
2002 streamDim.height = mCamera3Stream->height;
2003
2004 LOGD("batch size is %d", mBatchSize);
2005 rc = QCamera3Channel::addStream(mStreamType,
2006 mStreamFormat,
2007 streamDim,
2008 mRotation,
2009 mNumBufs,
2010 mPostProcMask,
2011 mIsType,
2012 mBatchSize);
2013
2014 return rc;
2015 }
2016
2017 /*===========================================================================
2018 * FUNCTION : setBatchSize
2019 *
2020 * DESCRIPTION: Set batch size for the channel.
2021 *
2022 * PARAMETERS :
2023 * @batchSize : Number of image buffers in a batch
2024 *
2025 * RETURN : int32_t type of status
2026 * NO_ERROR -- success always
2027 * non-zero failure code
2028 *==========================================================================*/
setBatchSize(uint32_t batchSize)2029 int32_t QCamera3RegularChannel::setBatchSize(uint32_t batchSize)
2030 {
2031 int32_t rc = NO_ERROR;
2032
2033 mBatchSize = batchSize;
2034 LOGD("Batch size set: %d", mBatchSize);
2035 return rc;
2036 }
2037
2038 /*===========================================================================
2039 * FUNCTION : getStreamTypeMask
2040 *
2041 * DESCRIPTION: Get bit mask of all stream types in this channel.
2042 * If stream is not initialized, then generate mask based on
2043 * local streamType
2044 *
2045 * PARAMETERS : None
2046 *
2047 * RETURN : Bit mask of all stream types in this channel
2048 *==========================================================================*/
getStreamTypeMask()2049 uint32_t QCamera3RegularChannel::getStreamTypeMask()
2050 {
2051 if (mStreams[0]) {
2052 return QCamera3Channel::getStreamTypeMask();
2053 } else {
2054 return (1U << mStreamType);
2055 }
2056 }
2057
2058 /*===========================================================================
2059 * FUNCTION : queueBatchBuf
2060 *
2061 * DESCRIPTION: queue batch container to downstream
2062 *
2063 * PARAMETERS :
2064 *
2065 * RETURN : int32_t type of status
2066 * NO_ERROR -- success always
2067 * non-zero failure code
2068 *==========================================================================*/
queueBatchBuf()2069 int32_t QCamera3RegularChannel::queueBatchBuf()
2070 {
2071 int32_t rc = NO_ERROR;
2072
2073 if (mStreams[0]) {
2074 rc = mStreams[0]->queueBatchBuf();
2075 }
2076 if (rc != NO_ERROR) {
2077 LOGE("stream->queueBatchBuf failed");
2078 }
2079 return rc;
2080 }
2081
2082 /*===========================================================================
2083 * FUNCTION : request
2084 *
2085 * DESCRIPTION: process a request from the camera service. Streams on if necessary.
2086 *
2087 * PARAMETERS :
2088 * @buffer : buffer to be filled for this request
2089 *
2090 * RETURN : 0 on a success start of capture
2091 * -EINVAL on invalid input
2092 * -ENODEV on serious error
2093 *==========================================================================*/
request(buffer_handle_t * buffer,uint32_t frameNumber,int & indexUsed)2094 int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber, int &indexUsed)
2095 {
2096 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REG_CH_REQ);
2097 //FIX ME: Return buffer back in case of failures below.
2098
2099 int32_t rc = NO_ERROR;
2100 int index;
2101
2102 if (NULL == buffer) {
2103 LOGE("Invalid buffer in channel request");
2104 return BAD_VALUE;
2105 }
2106
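    // Look up the buffer among already-registered gralloc buffers; the first
    // time the framework hands this buffer to the channel it is registered
    // on the fly.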
2107 index = mMemory.getMatchBufIndex((void*)buffer);
2108 if(index < 0) {
2109 rc = registerBuffer(buffer, mIsType);
2110 if (NO_ERROR != rc) {
2111 LOGE("On-the-fly buffer registration failed %d",
2112 rc);
2113 return rc;
2114 }
2115
2116 index = mMemory.getMatchBufIndex((void*)buffer);
2117 if (index < 0) {
2118 LOGE("Could not find object among registered buffers");
2119 return DEAD_OBJECT;
2120 }
2121 }
2122
2123 rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
2124 if(rc != NO_ERROR) {
2125 LOGE("Failed to mark FrameNumber:%d,idx:%d",frameNumber,index);
2126 return rc;
2127 }
2128 if (m_bIsActive) {
2129 rc = mStreams[0]->bufDone((uint32_t)index);
2130 if(rc != NO_ERROR) {
2131 LOGE("Failed to Q new buffer to stream");
2132 mMemory.markFrameNumber(index, -1);
2133 return rc;
2134 }
2135 }
2136
2137 indexUsed = index;
2138 return rc;
2139 }
2140
2141 /*===========================================================================
2142 * FUNCTION : getReprocessType
2143 *
2144 * DESCRIPTION: get the type of reprocess output supported by this channel
2145 *
2146 * PARAMETERS : NONE
2147 *
2148 * RETURN : reprocess_type_t : type of reprocess
2149 *==========================================================================*/
getReprocessType()2150 reprocess_type_t QCamera3RegularChannel::getReprocessType()
2151 {
2152 return REPROCESS_TYPE_PRIVATE;
2153 }
2154
2155
QCamera3MetadataChannel(uint32_t cam_handle,uint32_t channel_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,channel_cb_buffer_err cb_buffer_err,cam_padding_info_t * paddingInfo,cam_feature_mask_t postprocess_mask,void * userData,uint32_t numBuffers)2156 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
2157 uint32_t channel_handle,
2158 mm_camera_ops_t *cam_ops,
2159 channel_cb_routine cb_routine,
2160 channel_cb_buffer_err cb_buffer_err,
2161 cam_padding_info_t *paddingInfo,
2162 cam_feature_mask_t postprocess_mask,
2163 void *userData, uint32_t numBuffers) :
2164 QCamera3Channel(cam_handle, channel_handle, cam_ops,
2165 cb_routine, cb_buffer_err, paddingInfo, postprocess_mask,
2166 userData, numBuffers),
2167 mMemory(NULL), mDepthDataPresent(false)
2168 {
2169 mMapStreamBuffers = true;
2170 }
2171
~QCamera3MetadataChannel()2172 QCamera3MetadataChannel::~QCamera3MetadataChannel()
2173 {
2174 destroy();
2175
2176 if (mMemory) {
2177 mMemory->deallocate();
2178 delete mMemory;
2179 mMemory = NULL;
2180 }
2181 }
2182
initialize(cam_is_type_t isType)2183 int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType)
2184 {
2185 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_METADATA_CH_INIT);
2186 int32_t rc;
2187 cam_dimension_t streamDim;
2188
2189 if (mMemory || m_numStreams > 0) {
2190 LOGE("metadata channel already initialized");
2191 return -EINVAL;
2192 }
2193
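    // The metadata stream carries a flat blob rather than an image: width is
    // the metadata struct size in bytes and height is fixed at 1.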
2194 streamDim.width = (int32_t)sizeof(metadata_buffer_t);
2195 streamDim.height = 1;
2196
2197 mIsType = isType;
2198 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
2199 streamDim, ROTATE_0, (uint8_t)mNumBuffers, mPostProcMask, mIsType);
2200 if (rc < 0) {
2201 LOGE("addStream failed");
2202 }
2203 return rc;
2204 }
2205
request(buffer_handle_t *,uint32_t,int &)2206 int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
2207 uint32_t /*frameNumber*/,
2208 int& /*indexUsed*/)
2209 {
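    // Metadata needs no per-request framework buffer: the first request simply
    // starts streaming, and later requests are no-ops.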
2210 if (!m_bIsActive) {
2211 return start();
2212 }
2213 else
2214 return 0;
2215 }
2216
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream *)2217 void QCamera3MetadataChannel::streamCbRoutine(
2218 mm_camera_super_buf_t *super_frame,
2219 QCamera3Stream * /*stream*/)
2220 {
2221 ATRACE_NAME("metadata_stream_cb_routine");
2222 uint32_t requestNumber = 0;
2223 if (super_frame == NULL || super_frame->num_bufs != 1) {
2224 LOGE("super_frame is not valid");
2225 return;
2226 }
2227 if (mChannelCB) {
2228 mChannelCB(super_frame, NULL, requestNumber, false, mUserData);
2229 }
2230 }
2231
getStreamBufs(uint32_t len)2232 QCamera3StreamMem* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
2233 {
2234 int rc;
2235 if (len < sizeof(metadata_buffer_t)) {
2236 LOGE("Metadata buffer size less than structure %u vs %zu",
2237 len,
2238 sizeof(metadata_buffer_t));
2239 return NULL;
2240 }
2241 mMemory = new QCamera3StreamMem(MIN_STREAMING_BUFFER_NUM);
2242 if (!mMemory) {
2243 LOGE("unable to create metadata memory");
2244 return NULL;
2245 }
2246 rc = mMemory->allocateAll(len);
2247 if (rc < 0) {
2248 LOGE("unable to allocate metadata memory");
2249 delete mMemory;
2250 mMemory = NULL;
2251 return NULL;
2252 }
2253 clear_metadata_buffer((metadata_buffer_t*)mMemory->getPtr(0));
2254
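    // When depth data is expected, attach a PD/depth-data scratch buffer of
    // PD_DATA_SIZE bytes to each metadata buffer; putStreamBufs() releases
    // these allocations again.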
2255 for (uint32_t i = 0; i < mMemory->getCnt(); i++) {
2256 if (mMemory->valid(i)) {
2257 metadata_buffer_t *metadata_buffer_t =
2258 static_cast<::metadata_buffer_t *> (mMemory->getPtr(i));
2259 metadata_buffer_t->depth_data.depth_data = nullptr;
2260 if (mDepthDataPresent) {
2261 metadata_buffer_t->depth_data.depth_data =
2262 new uint8_t[PD_DATA_SIZE];
2263 }
2264 } else {
2265 LOGE("Invalid meta buffer at index: %d", i);
2266 }
2267 }
2268
2269 return mMemory;
2270 }
2271
putStreamBufs()2272 void QCamera3MetadataChannel::putStreamBufs()
2273 {
2274 for (uint32_t i = 0; i < mMemory->getCnt(); i++) {
2275 if (mMemory->valid(i)) {
2276 metadata_buffer_t *metadata_buffer_t =
2277 static_cast<::metadata_buffer_t *> (mMemory->getPtr(i));
2278 if (nullptr != metadata_buffer_t->depth_data.depth_data) {
2279 delete [] metadata_buffer_t->depth_data.depth_data;
2280 metadata_buffer_t->depth_data.depth_data = nullptr;
2281 }
2282 } else {
2283 LOGE("Invalid meta buffer at index: %d", i);
2284 }
2285 }
2286
2287 mMemory->deallocate();
2288 delete mMemory;
2289 mMemory = NULL;
2290 }
2291 /*************************************************************************************/
2292 // RAW Channel related functions
QCamera3RawChannel(uint32_t cam_handle,uint32_t channel_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,channel_cb_buffer_err cb_buffer_err,cam_padding_info_t * paddingInfo,void * userData,camera3_stream_t * stream,cam_feature_mask_t postprocess_mask,QCamera3Channel * metadataChannel,bool raw_16,uint32_t numBuffers)2293 QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle,
2294 uint32_t channel_handle,
2295 mm_camera_ops_t *cam_ops,
2296 channel_cb_routine cb_routine,
2297 channel_cb_buffer_err cb_buffer_err,
2298 cam_padding_info_t *paddingInfo,
2299 void *userData,
2300 camera3_stream_t *stream,
2301 cam_feature_mask_t postprocess_mask,
2302 QCamera3Channel *metadataChannel,
2303 bool raw_16, uint32_t numBuffers) :
2304 QCamera3RegularChannel(cam_handle, channel_handle, cam_ops,
2305 cb_routine, cb_buffer_err, paddingInfo, userData, stream,
2306 CAM_STREAM_TYPE_RAW, postprocess_mask,
2307 metadataChannel, numBuffers),
2308 mIsRaw16(raw_16)
2309 {
2310 char prop[PROPERTY_VALUE_MAX];
2311 property_get("persist.camera.raw.debug.dump", prop, "0");
2312 mRawDump = atoi(prop);
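    // Map stream buffers into the HAL only when the CPU actually needs to
    // touch them (raw dump to file or in-place RAW16 conversion).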
2313 mMapStreamBuffers = (mRawDump || mIsRaw16);
2314 }
2315
~QCamera3RawChannel()2316 QCamera3RawChannel::~QCamera3RawChannel()
2317 {
2318 }
2319
2320 /*===========================================================================
2321 * FUNCTION : initialize
2322 *
2323 * DESCRIPTION: Initialize and add camera channel & stream
2324 *
2325 * PARAMETERS :
2326 * @isType : image stabilization type on the stream
2327 *
2328 * RETURN : int32_t type of status
2329 * NO_ERROR -- success
2330 * non-zero failure code
2331 *==========================================================================*/
2332
initialize(cam_is_type_t isType)2333 int32_t QCamera3RawChannel::initialize(cam_is_type_t isType)
2334 {
2335 return QCamera3RegularChannel::initialize(isType);
2336 }
2337
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream * stream)2338 void QCamera3RawChannel::streamCbRoutine(
2339 mm_camera_super_buf_t *super_frame,
2340 QCamera3Stream * stream)
2341 {
2342 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_RAW_CH_STRM_CB);
2343 /* Move this back down once verified */
2344 if (mRawDump)
2345 dumpRawSnapshot(super_frame->bufs[0]);
2346
2347 if (mIsRaw16) {
2348 cam_format_t streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_RAW,
2349 mCamera3Stream->width, mCamera3Stream->height, m_bUBWCenable, mIsType);
2350 if (streamFormat == CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG)
2351 convertMipiToRaw16(super_frame->bufs[0]);
2352 else
2353 convertLegacyToRaw16(super_frame->bufs[0]);
2354
2355 //Clean the cache so the CPU writes from the conversion above reach memory
2356 mMemory.cleanCache(super_frame->bufs[0]->buf_idx);
2357 }
2358
2359 QCamera3RegularChannel::streamCbRoutine(super_frame, stream);
2360 return;
2361 }
2362
dumpRawSnapshot(mm_camera_buf_def_t * frame)2363 void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
2364 {
2365 QCamera3Stream *stream = getStreamByIndex(0);
2366 if (stream != NULL) {
2367 char buf[FILENAME_MAX];
2368 memset(buf, 0, sizeof(buf));
2369 cam_dimension_t dim;
2370 memset(&dim, 0, sizeof(dim));
2371 stream->getFrameDimension(dim);
2372
2373 cam_frame_len_offset_t offset;
2374 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2375 stream->getFrameOffset(offset);
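        // The file name encodes the frame index plus padded stride x scanline
        // (e.g. r_57_4208x3120.raw) so the dump can be decoded offline.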
2376 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"r_%d_%dx%d.raw",
2377 frame->frame_idx, offset.mp[0].stride, offset.mp[0].scanline);
2378
2379 int file_fd = open(buf, O_RDWR| O_CREAT, 0644);
2380 if (file_fd >= 0) {
2381 ssize_t written_len = write(file_fd, frame->buffer, frame->frame_len);
2382 LOGD("written number of bytes %zd", written_len);
2383 frame->cache_flags |= CPU_HAS_READ;
2384 close(file_fd);
2385 } else {
2386 LOGE("failed to open file to dump image");
2387 }
2388 } else {
2389 LOGE("Could not find stream");
2390 }
2391
2392 }
2393
convertLegacyToRaw16(mm_camera_buf_def_t * frame)2394 void QCamera3RawChannel::convertLegacyToRaw16(mm_camera_buf_def_t *frame)
2395 {
2396 // Convert image buffer from Opaque raw format to RAW16 format
2397 // 10bit Opaque raw is stored in the format of:
2398 // 0000 - p5 - p4 - p3 - p2 - p1 - p0
2399 // where p0 to p5 are 6 pixels (each is 10 bit) and the most significant
2400 // 4 bits are 0s. Each 64-bit word contains 6 pixels.
2401
2402 QCamera3Stream *stream = getStreamByIndex(0);
2403 if (stream != NULL) {
2404 cam_dimension_t dim;
2405 memset(&dim, 0, sizeof(dim));
2406 stream->getFrameDimension(dim);
2407
2408 cam_frame_len_offset_t offset;
2409 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2410 stream->getFrameOffset(offset);
2411
2412 uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U;
2413 uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
2414
2415 // In-place format conversion.
2416 // Raw16 format always occupy more memory than opaque raw10.
2417 // Convert to Raw16 by iterating through all pixels from bottom-right
2418 // to top-left of the image.
2419 // A couple of notes:
2420 // 1. Cross-platform raw16's stride is 16 pixels.
2421 // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes.
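        // Example: pixel x = 7 of a row lives in 64-bit word x/6 = 1 at bit
        // offset 10*(x%6) = 10; masking with 0x3FF extracts the 10-bit value,
        // which is then widened to 16 bits at raw16_buffer[y*raw16_stride + x].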
2422 for (int32_t ys = dim.height - 1; ys >= 0; ys--) {
2423 uint32_t y = (uint32_t)ys;
2424 uint64_t* row_start = (uint64_t *)frame->buffer +
2425 y * (uint32_t)offset.mp[0].stride_in_bytes / 8;
2426 for (int32_t xs = dim.width - 1; xs >= 0; xs--) {
2427 uint32_t x = (uint32_t)xs;
2428 uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6)));
2429 raw16_buffer[y*raw16_stride+x] = raw16_pixel;
2430 }
2431 }
2432 } else {
2433 LOGE("Could not find stream");
2434 }
2435
2436 }
2437
convertMipiToRaw16(mm_camera_buf_def_t * frame)2438 void QCamera3RawChannel::convertMipiToRaw16(mm_camera_buf_def_t *frame)
2439 {
2440 // Convert image buffer from mipi10 raw format to RAW16 format
2441 // mipi10 opaque raw is stored in the format of:
2442 // P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2)
2443 // 4 pixels occupy 5 bytes, no padding needed
2444
2445 QCamera3Stream *stream = getStreamByIndex(0);
2446 if (stream != NULL) {
2447 cam_dimension_t dim;
2448 memset(&dim, 0, sizeof(dim));
2449 stream->getFrameDimension(dim);
2450
2451 cam_frame_len_offset_t offset;
2452 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2453 stream->getFrameOffset(offset);
2454
2455 uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U;
2456 uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
2457 uint8_t first_quintuple[5];
2458 memcpy(first_quintuple, raw16_buffer, sizeof(first_quintuple));
2459
2460 // In-place format conversion.
2461 // Raw16 format always occupy more memory than opaque raw10.
2462 // Convert to Raw16 by iterating through all pixels from bottom-right
2463 // to top-left of the image.
2464 // A couple of notes:
2465 // 1. Cross-platform raw16's stride is 16 pixels.
2466 // 2. mipi raw10's stride is 4 pixels, and aligned to 16 bytes.
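        // Example: pixel x = 5 sits in quintuple x/4 = 1: its upper 8 bits are
        // byte 5*1 + (5%4) = 6 and its low 2 bits are bits 2..3 of the shared
        // fifth byte at index 5*1 + 4 = 9, matching the shifts used below.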
2467 for (int32_t ys = dim.height - 1; ys >= 0; ys--) {
2468 uint32_t y = (uint32_t)ys;
2469 uint8_t* row_start = (uint8_t *)frame->buffer +
2470 y * (uint32_t)offset.mp[0].stride_in_bytes;
2471 for (int32_t xs = dim.width - 1; xs >= 0; xs--) {
2472 uint32_t x = (uint32_t)xs;
2473 uint8_t upper_8bit = row_start[5*(x/4)+x%4];
2474 uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> ((x%4) << 1)) & 0x3);
2475 uint16_t raw16_pixel =
2476 (uint16_t)(((uint16_t)upper_8bit)<<2 |
2477 (uint16_t)lower_2bit);
2478 raw16_buffer[y*raw16_stride+x] = raw16_pixel;
2479 }
2480 }
2481
2482 // Re-convert the first 2 pixels of the buffer because the loop above messes
2483 // them up by reading the first quintuple while modifying it.
2484 raw16_buffer[0] = ((uint16_t)first_quintuple[0]<<2) | (first_quintuple[4] & 0x3);
2485 raw16_buffer[1] = ((uint16_t)first_quintuple[1]<<2) | ((first_quintuple[4] >> 2) & 0x3);
2486
2487 } else {
2488 LOGE("Could not find stream");
2489 }
2490
2491 }
2492
2493 /*===========================================================================
2494 * FUNCTION : getReprocessType
2495 *
2496 * DESCRIPTION: get the type of reprocess output supported by this channel
2497 *
2498 * PARAMETERS : NONE
2499 *
2500 * RETURN : reprocess_type_t : type of reprocess
2501 *==========================================================================*/
getReprocessType()2502 reprocess_type_t QCamera3RawChannel::getReprocessType()
2503 {
2504 return REPROCESS_TYPE_RAW;
2505 }
2506
2507
2508 /*************************************************************************************/
2509 // RAW Dump Channel related functions
2510
2511 /*===========================================================================
2512 * FUNCTION : QCamera3RawDumpChannel
2513 *
2514 * DESCRIPTION: Constructor for RawDumpChannel
2515 *
2516 * PARAMETERS :
2517 * @cam_handle : Handle for Camera
2518 * @cam_ops : Function pointer table
2519 * @rawDumpSize : Dimensions for the Raw stream
2520 * @paddinginfo : Padding information for stream
2521 * @userData : Cookie for parent
2522 * @pp mask : PP feature mask for this stream
2523 * @numBuffers : number of max dequeued buffers
2524 *
2525 * RETURN : NA
2526 *==========================================================================*/
QCamera3RawDumpChannel(uint32_t cam_handle,uint32_t channel_handle,mm_camera_ops_t * cam_ops,cam_dimension_t rawDumpSize,cam_padding_info_t * paddingInfo,void * userData,cam_feature_mask_t postprocess_mask,uint32_t numBuffers)2527 QCamera3RawDumpChannel::QCamera3RawDumpChannel(uint32_t cam_handle,
2528 uint32_t channel_handle,
2529 mm_camera_ops_t *cam_ops,
2530 cam_dimension_t rawDumpSize,
2531 cam_padding_info_t *paddingInfo,
2532 void *userData,
2533 cam_feature_mask_t postprocess_mask, uint32_t numBuffers) :
2534 QCamera3Channel(cam_handle, channel_handle, cam_ops, NULL,
2535 NULL, paddingInfo, postprocess_mask,
2536 userData, numBuffers),
2537 mDim(rawDumpSize),
2538 mMemory(NULL)
2539 {
2540 char prop[PROPERTY_VALUE_MAX];
2541 property_get("persist.camera.raw.dump", prop, "0");
2542 mRawDump = atoi(prop);
2543 }
2544
2545 /*===========================================================================
2546 * FUNCTION : ~QCamera3RawDumpChannel
2547 *
2548 * DESCRIPTION: Destructor for RawDumpChannel
2549 *
2550 * PARAMETERS :
2551 *
2552 * RETURN : NA
2553 *==========================================================================*/
2554
~QCamera3RawDumpChannel()2555 QCamera3RawDumpChannel::~QCamera3RawDumpChannel()
2556 {
2557 destroy();
2558 }
2559
2560 /*===========================================================================
2561 * FUNCTION : dumpRawSnapshot
2562 *
2563 * DESCRIPTION: Helper function to dump Raw frames
2564 *
2565 * PARAMETERS :
2566 * @frame : stream buf frame to be dumped
2567 *
2568 * RETURN : NA
2569 *==========================================================================*/
dumpRawSnapshot(mm_camera_buf_def_t * frame)2570 void QCamera3RawDumpChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
2571 {
2572 QCamera3Stream *stream = getStreamByIndex(0);
2573 if (stream != NULL) {
2574 char buf[FILENAME_MAX];
2575 struct timeval tv;
2576 struct tm timeinfo_data;
2577 struct tm *timeinfo;
2578
2579 cam_dimension_t dim;
2580 memset(&dim, 0, sizeof(dim));
2581 stream->getFrameDimension(dim);
2582
2583 cam_frame_len_offset_t offset;
2584 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
2585 stream->getFrameOffset(offset);
2586
2587 gettimeofday(&tv, NULL);
2588 timeinfo = localtime_r(&tv.tv_sec, &timeinfo_data);
2589
2590 if (NULL != timeinfo) {
2591 memset(buf, 0, sizeof(buf));
2592 snprintf(buf, sizeof(buf),
2593 QCAMERA_DUMP_FRM_LOCATION
2594 "%04d-%02d-%02d-%02d-%02d-%02d-%06ld_%d_%dx%d.raw",
2595 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
2596 timeinfo->tm_mday, timeinfo->tm_hour,
2597 timeinfo->tm_min, timeinfo->tm_sec,tv.tv_usec,
2598 frame->frame_idx, dim.width, dim.height);
2599
2600 int file_fd = open(buf, O_RDWR| O_CREAT, 0777);
2601 if (file_fd >= 0) {
2602 ssize_t written_len =
2603 write(file_fd, frame->buffer, offset.frame_len);
2604 LOGD("written number of bytes %zd", written_len);
2605 frame->cache_flags |= CPU_HAS_READ;
2606 close(file_fd);
2607 } else {
2608 LOGE("failed to open file to dump image");
2609 }
2610 } else {
2611 LOGE("localtime_r() error");
2612 }
2613 } else {
2614 LOGE("Could not find stream");
2615 }
2616
2617 }
2618
2619 /*===========================================================================
2620 * FUNCTION : streamCbRoutine
2621 *
2622 * DESCRIPTION: Callback routine invoked for each frame generated for
2623 * Rawdump channel
2624 *
2625 * PARAMETERS :
2626 * @super_frame : stream buf frame generated
2627 * @stream : Underlying Stream object cookie
2628 *
2629 * RETURN : NA
2630 *==========================================================================*/
streamCbRoutine(mm_camera_super_buf_t * super_frame,__unused QCamera3Stream * stream)2631 void QCamera3RawDumpChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
2632 __unused QCamera3Stream *stream)
2633 {
2634 LOGD("E");
2635 if (super_frame == NULL || super_frame->num_bufs != 1) {
2636 LOGE("super_frame is not valid");
2637 return;
2638 }
2639
2640 if (mRawDump)
2641 dumpRawSnapshot(super_frame->bufs[0]);
2642
2643 bufDone(super_frame);
2644 free(super_frame);
2645 }
2646
2647 /*===========================================================================
2648 * FUNCTION : getStreamBufs
2649 *
2650 * DESCRIPTION: Callback function provided to interface to get buffers.
2651 *
2652 * PARAMETERS :
2653 * @len : Length of each buffer to be allocated
2654 *
2655 * RETURN : NULL on buffer allocation failure
2656 * QCamera3StreamMem object on success
2657 *==========================================================================*/
getStreamBufs(uint32_t len)2658 QCamera3StreamMem* QCamera3RawDumpChannel::getStreamBufs(uint32_t len)
2659 {
2660 int rc;
2661 mMemory = new QCamera3StreamMem(mNumBuffers);
2662
2663 if (!mMemory) {
2664 LOGE("unable to create heap memory");
2665 return NULL;
2666 }
2667 rc = mMemory->allocateAll((size_t)len);
2668 if (rc < 0) {
2669 LOGE("unable to allocate heap memory");
2670 delete mMemory;
2671 mMemory = NULL;
2672 return NULL;
2673 }
2674 return mMemory;
2675 }
2676
2677 /*===========================================================================
2678 * FUNCTION : putStreamBufs
2679 *
2680 * DESCRIPTION: Callback function provided to interface to return buffers.
2681 * Although no handles are actually returned, there is an implicit
2682 * assumption that the interface will no longer use the buffers and
2683 * the channel can deallocate them if necessary.
2684 *
2685 * PARAMETERS : NA
2686 *
2687 * RETURN : NA
2688 *==========================================================================*/
putStreamBufs()2689 void QCamera3RawDumpChannel::putStreamBufs()
2690 {
2691 mMemory->deallocate();
2692 delete mMemory;
2693 mMemory = NULL;
2694 }
2695
2696 /*===========================================================================
2697 * FUNCTION : request
2698 *
2699 * DESCRIPTION: Request function used as trigger
2700 *
2701 * PARAMETERS :
2702 * @buffer : always NULL since this is an internal channel
2703 * @frameNumber : undefined, since this is an internal stream
2704 *
2705 * RETURN : int32_t type of status
2706 * NO_ERROR -- success
2707 * non-zero failure code
2708 *==========================================================================*/
request(buffer_handle_t *,uint32_t,int &)2709 int32_t QCamera3RawDumpChannel::request(buffer_handle_t * /*buffer*/,
2710 uint32_t /*frameNumber*/,
2711 int & /*indexUsed*/)
2712 {
2713 if (!m_bIsActive) {
2714 return QCamera3Channel::start();
2715 }
2716 else
2717 return 0;
2718 }
2719
2720 /*===========================================================================
2721 * FUNCTION : initialize
2722 *
2723 * DESCRIPTION: Initializes channel params and creates underlying stream
2724 *
2725 * PARAMETERS :
2726 * @isType : type of image stabilization required on this stream
2727 *
2728 * RETURN : int32_t type of status
2729 * NO_ERROR -- success
2730 * non-zero failure code
2731 *==========================================================================*/
initialize(cam_is_type_t isType)2732 int32_t QCamera3RawDumpChannel::initialize(cam_is_type_t isType)
2733 {
2734 int32_t rc;
2735
2736 mIsType = isType;
2737 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_RAW,
2738 CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG, mDim, ROTATE_0, (uint8_t)mNumBuffers,
2739 mPostProcMask, mIsType);
2740 if (rc < 0) {
2741 LOGE("addStream failed");
2742 }
2743 return rc;
2744 }
2745
2746 /*************************************************************************************/
2747 // HDR+ RAW Source Channel related functions
QCamera3HdrPlusRawSrcChannel(uint32_t cam_handle,uint32_t channel_handle,mm_camera_ops_t * cam_ops,cam_dimension_t rawDumpSize,cam_padding_info_t * paddingInfo,void * userData,cam_feature_mask_t postprocess_mask,std::shared_ptr<HdrPlusClient> hdrPlusClient,uint32_t hdrPlusStreamId,uint32_t numBuffers)2748 QCamera3HdrPlusRawSrcChannel::QCamera3HdrPlusRawSrcChannel(uint32_t cam_handle,
2749 uint32_t channel_handle,
2750 mm_camera_ops_t *cam_ops,
2751 cam_dimension_t rawDumpSize,
2752 cam_padding_info_t *paddingInfo,
2753 void *userData,
2754 cam_feature_mask_t postprocess_mask,
2755 std::shared_ptr<HdrPlusClient> hdrPlusClient,
2756 uint32_t hdrPlusStreamId,
2757 uint32_t numBuffers) :
2758 QCamera3RawDumpChannel(cam_handle, channel_handle, cam_ops, rawDumpSize, paddingInfo, userData,
2759 postprocess_mask, numBuffers),
2760 mHdrPlusClient(hdrPlusClient),
2761 mHdrPlusStreamId(hdrPlusStreamId)
2762 {
2763
2764 }
2765
~QCamera3HdrPlusRawSrcChannel()2766 QCamera3HdrPlusRawSrcChannel::~QCamera3HdrPlusRawSrcChannel()
2767 {
2768 }
2769
streamCbRoutine(mm_camera_super_buf_t * super_frame,__unused QCamera3Stream * stream)2770 void QCamera3HdrPlusRawSrcChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
2771 __unused QCamera3Stream *stream)
2772 {
2773 if (super_frame == NULL || super_frame->num_bufs != 1) {
2774 LOGE("super_frame is not valid");
2775 return;
2776 }
2777
2778 // Send RAW buffer to HDR+ service
2779 sendRawToHdrPlusService(super_frame->bufs[0]);
2780
2781 bufDone(super_frame);
2782 free(super_frame);
2783 }
2784
sendRawToHdrPlusService(mm_camera_buf_def_t * frame)2785 void QCamera3HdrPlusRawSrcChannel::sendRawToHdrPlusService(mm_camera_buf_def_t *frame)
2786 {
2787 QCamera3Stream *stream = getStreamByIndex(0);
2788 if (stream == nullptr) {
2789 LOGE("%s: Could not find stream.", __FUNCTION__);
2790 return;
2791 }
2792
2793 cam_frame_len_offset_t offset = {};
2794 stream->getFrameOffset(offset);
2795
2796 pbcamera::StreamBuffer buffer;
2797 buffer.streamId = mHdrPlusStreamId;
2798 buffer.data = frame->buffer;
2799 buffer.dataSize = offset.frame_len;
2800
2801 // Use the frame timestamp as mock Easel timestamp.
2802 int64_t mockEaselTimestampNs = (int64_t)frame->ts.tv_sec * 1000000000 + frame->ts.tv_nsec;
2803 mHdrPlusClient->notifyInputBuffer(buffer, mockEaselTimestampNs);
2804 }
2805
2806 /*************************************************************************************/
2807
2808 /* QCamera3YUVChannel methods */
2809
2810 /*===========================================================================
2811 * FUNCTION : QCamera3YUVChannel
2812 *
2813 * DESCRIPTION: constructor of QCamera3YUVChannel
2814 *
2815 * PARAMETERS :
2816 * @cam_handle : camera handle
2817 * @cam_ops : ptr to camera ops table
2818 * @cb_routine : callback routine to frame aggregator
2819 * @paddingInfo : padding information for the stream
2820 * @stream : camera3_stream_t structure
2821 * @stream_type: Channel stream type
2822 * @postprocess_mask: the postprocess mask for streams of this channel
2823 * @metadataChannel: handle to the metadataChannel
2824 * RETURN : none
2825 *==========================================================================*/
QCamera3YUVChannel(uint32_t cam_handle,uint32_t channel_handle,mm_camera_ops_t * cam_ops,channel_cb_routine cb_routine,channel_cb_buffer_err cb_buf_err,cam_padding_info_t * paddingInfo,void * userData,camera3_stream_t * stream,cam_stream_type_t stream_type,cam_feature_mask_t postprocess_mask,QCamera3Channel * metadataChannel)2826 QCamera3YUVChannel::QCamera3YUVChannel(uint32_t cam_handle,
2827 uint32_t channel_handle,
2828 mm_camera_ops_t *cam_ops,
2829 channel_cb_routine cb_routine,
2830 channel_cb_buffer_err cb_buf_err,
2831 cam_padding_info_t *paddingInfo,
2832 void *userData,
2833 camera3_stream_t *stream,
2834 cam_stream_type_t stream_type,
2835 cam_feature_mask_t postprocess_mask,
2836 QCamera3Channel *metadataChannel) :
2837 QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops,
2838 cb_routine, cb_buf_err, paddingInfo, userData, stream, stream_type,
2839 postprocess_mask, metadataChannel)
2840 {
2841
2842 mBypass = (postprocess_mask == CAM_QCOM_FEATURE_NONE);
2843 mFrameLen = 0;
2844 mEdgeMode.edge_mode = CAM_EDGE_MODE_OFF;
2845 mEdgeMode.sharpness = 0;
2846 mNoiseRedMode = CAM_NOISE_REDUCTION_MODE_OFF;
2847 memset(&mCropRegion, 0, sizeof(mCropRegion));
2848 }
2849
2850 /*===========================================================================
2851 * FUNCTION : ~QCamera3YUVChannel
2852 *
2853 * DESCRIPTION: destructor of QCamera3YUVChannel
2854 *
2855 * PARAMETERS : none
2856 *
2857 *
2858 * RETURN : none
2859 *==========================================================================*/
~QCamera3YUVChannel()2860 QCamera3YUVChannel::~QCamera3YUVChannel()
2861 {
2862 // Heap buffers allocated in mMemory are freed automatically by its
2863 // destructor
2864 }
2865
2866 /*===========================================================================
2867 * FUNCTION : initialize
2868 *
2869 * DESCRIPTION: Initialize and add camera channel & stream
2870 *
2871 * PARAMETERS :
2872 * @isType : the image stabilization type
2873 *
2874 * RETURN : int32_t type of status
2875 * NO_ERROR -- success
2876 * non-zero failure code
2877 *==========================================================================*/
initialize(cam_is_type_t isType)2878 int32_t QCamera3YUVChannel::initialize(cam_is_type_t isType)
2879 {
2880 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_INIT);
2881 int32_t rc = NO_ERROR;
2882 cam_dimension_t streamDim;
2883
2884 if (NULL == mCamera3Stream) {
2885 LOGE("Camera stream uninitialized");
2886 return NO_INIT;
2887 }
2888
2889 if (1 <= m_numStreams) {
2890 // Only one stream per channel supported in v3 Hal
2891 return NO_ERROR;
2892 }
2893
2894 mIsType = isType;
2895 mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_CALLBACK,
2896 mCamera3Stream->width, mCamera3Stream->height, m_bUBWCenable, mIsType);
2897 streamDim.width = mCamera3Stream->width;
2898 streamDim.height = mCamera3Stream->height;
2899
2900 rc = QCamera3Channel::addStream(mStreamType,
2901 mStreamFormat,
2902 streamDim,
2903 ROTATE_0,
2904 mNumBufs,
2905 mPostProcMask,
2906 mIsType);
2907 if (rc < 0) {
2908 LOGE("addStream failed");
2909 return rc;
2910 }
2911
2912 cam_stream_buf_plane_info_t buf_planes;
2913 cam_padding_info_t paddingInfo = mPaddingInfo;
2914
2915 memset(&buf_planes, 0, sizeof(buf_planes));
2916 //to ensure a big enough buffer size set the height and width
2917 //padding to max(height padding, width padding)
2918 paddingInfo.width_padding = MAX(paddingInfo.width_padding, paddingInfo.height_padding);
2919 paddingInfo.height_padding = paddingInfo.width_padding;
2920
2921 rc = mm_stream_calc_offset_snapshot(mStreamFormat, &streamDim, &paddingInfo,
2922 &buf_planes);
2923 if (rc < 0) {
2924 LOGE("mm_stream_calc_offset_snapshot failed");
2925 return rc;
2926 }
2927
2928 mFrameLen = buf_planes.plane_info.frame_len;
2929
2930 if (NO_ERROR != rc) {
2931 LOGE("Initialize failed, rc = %d", rc);
2932 return rc;
2933 }
2934
2935 /* initialize offline meta memory for input reprocess */
2936 rc = QCamera3ProcessingChannel::initialize(isType);
2937 if (NO_ERROR != rc) {
2938 LOGE("Processing Channel initialize failed, rc = %d",
2939 rc);
2940 }
2941
2942 return rc;
2943 }
2944
2945 /*===========================================================================
2946 * FUNCTION : request
2947 *
2948 * DESCRIPTION: entry function for a request on a YUV stream. This function
2949 * has the logic to service a request based on its type
2950 *
2951 * PARAMETERS :
2952 * @buffer : pointer to the output buffer
2953 * @frameNumber : frame number of the request
2954 * @pInputBuffer : pointer to input buffer if an input request
2955 * @metadata : parameters associated with the request
2956 * @internalreq : true if this is a purely internal request needing
2957 * internal buffer allocation
2958 * @meteringonly : true for metering-only internal frames that are not
2959 * consumed by the postprocessor
2960 *
2961 * RETURN : 0 on a success start of capture
2962 * -EINVAL on invalid input
2963 * -ENODEV on serious error
2964 *==========================================================================*/
request(buffer_handle_t * buffer,uint32_t frameNumber,camera3_stream_buffer_t * pInputBuffer,metadata_buffer_t * metadata,bool & needMetadata,int & indexUsed,__unused bool internalRequest=false,__unused bool meteringOnly=false)2965 int32_t QCamera3YUVChannel::request(buffer_handle_t *buffer,
2966 uint32_t frameNumber,
2967 camera3_stream_buffer_t* pInputBuffer,
2968 metadata_buffer_t* metadata, bool &needMetadata,
2969 int &indexUsed,
2970 __unused bool internalRequest = false,
2971 __unused bool meteringOnly = false)
2972 {
2973 int32_t rc = NO_ERROR;
2974 Mutex::Autolock lock(mOfflinePpLock);
2975
2976 LOGD("pInputBuffer is %p frame number %d", pInputBuffer, frameNumber);
2977 if (NULL == buffer || NULL == metadata) {
2978 LOGE("Invalid buffer/metadata in channel request");
2979 return BAD_VALUE;
2980 }
2981
2982 PpInfo ppInfo;
2983 memset(&ppInfo, 0, sizeof(ppInfo));
2984 ppInfo.frameNumber = frameNumber;
2985 ppInfo.offlinePpFlag = false;
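    // In bypass mode every framework request is tracked in mOfflinePpInfoList,
    // whether or not it takes the offline CPP path, so buffers can still be
    // returned in request order when only some frames are postprocessed.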
2986 if (mBypass && !pInputBuffer ) {
2987 ppInfo.offlinePpFlag = needsFramePostprocessing(metadata);
2988 ppInfo.output = buffer;
2989 mOfflinePpInfoList.push_back(ppInfo);
2990 }
2991
2992 LOGD("offlinePpFlag is %d", ppInfo.offlinePpFlag);
2993 needMetadata = ppInfo.offlinePpFlag;
2994 if (!ppInfo.offlinePpFlag) {
2995 // regular request
2996 return QCamera3ProcessingChannel::request(buffer, frameNumber,
2997 pInputBuffer, metadata, indexUsed);
2998 } else {
2999
3000 //we need to send this frame through the CPP
3001 //Allocate heap memory, then buf done on the buffer
3002 uint32_t bufIdx;
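        // Reuse a scratch heap buffer from the free list when one is
        // available, otherwise grow the pool by allocating a new one of
        // mFrameLen bytes.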
3003 if (mFreeHeapBufferList.empty()) {
3004 rc = mMemory.allocateOne(mFrameLen);
3005 if (rc < 0) {
3006 LOGE("Failed allocating heap buffer. Fatal");
3007 return BAD_VALUE;
3008 } else {
3009 bufIdx = (uint32_t)rc;
3010 }
3011 } else {
3012 bufIdx = *(mFreeHeapBufferList.begin());
3013 mFreeHeapBufferList.erase(mFreeHeapBufferList.begin());
3014 }
3015
3016 /* Configure and start postproc if necessary */
3017 reprocess_config_t reproc_cfg;
3018 cam_dimension_t dim;
3019 memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
3020 memset(&dim, 0, sizeof(dim));
3021 mStreams[0]->getFrameDimension(dim);
3022 setReprocConfig(reproc_cfg, NULL, metadata, mStreamFormat, dim);
3023
3024 // Start postprocessor without input buffer
3025 startPostProc(reproc_cfg);
3026
3027 LOGD("erasing %d", bufIdx);
3028
3029 mMemory.markFrameNumber(bufIdx, frameNumber);
3030 indexUsed = bufIdx;
3031 if (m_bIsActive) {
3032 mStreams[0]->bufDone(bufIdx);
3033 }
3034
3035 }
3036 return rc;
3037 }
3038
3039 /*===========================================================================
3040 * FUNCTION : streamCbRoutine
3041 *
3042 * DESCRIPTION: callback for a filled YUV stream buffer; handles bypass-mode
 *              offline postprocessing bookkeeping before issuing the callback
3043 *
3044 * PARAMETERS :
3045 * @super_frame : the super frame with filled buffer
3046 * @stream : stream on which the buffer was requested and filled
3047 *
3048 * RETURN : none
3049 *==========================================================================*/
streamCbRoutine(mm_camera_super_buf_t * super_frame,QCamera3Stream * stream)3050 void QCamera3YUVChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
3051 QCamera3Stream *stream)
3052 {
3053 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_STRM_CB);
3054 uint8_t frameIndex;
3055 int32_t resultFrameNumber;
3056
3057 if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
3058 LOGE("Error with the stream callback");
3059 return;
3060 }
3061
3062 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
3063 if(frameIndex >= mNumBufs) {
3064 LOGE("Error, Invalid index for buffer");
3065 stream->bufDone(frameIndex);
3066 return;
3067 }
3068
3069 if (mBypass) {
3070 {
3071 List<PpInfo>::iterator ppInfo;
3072
3073 Mutex::Autolock lock(mOfflinePpLock);
3074 resultFrameNumber = mMemory.getFrameNumber(frameIndex);
3075 for (ppInfo = mOfflinePpInfoList.begin();
3076 ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
3077 if (ppInfo->frameNumber == (uint32_t)resultFrameNumber) {
3078 break;
3079 }
3080 }
3081 LOGD("frame index %d, frame number %d", frameIndex,
3082 resultFrameNumber);
3083 //check the reprocessing required flag against the frame number
3084 if (ppInfo == mOfflinePpInfoList.end()) {
3085 LOGE("Error, request for frame number is a reprocess.");
3086 stream->bufDone(frameIndex);
3087 return;
3088 }
3089
3090 if (ppInfo->offlinePpFlag) {
3091 mm_camera_super_buf_t *frame =
3092 (mm_camera_super_buf_t *)malloc(sizeof(
3093 mm_camera_super_buf_t));
3094 if (frame == NULL) {
3095 LOGE("Error allocating memory to save received_frame structure.");
3096 if(stream) {
3097 stream->bufDone(frameIndex);
3098 }
3099 return;
3100 }
3101
3102 *frame = *super_frame;
3103 m_postprocessor.processData(frame, ppInfo->output,
3104 resultFrameNumber);
3105 free(super_frame);
3106 return;
3107 } else {
3108 if (ppInfo != mOfflinePpInfoList.begin()) {
3109 // There is pending reprocess buffer, cache current buffer
3110 if (ppInfo->callback_buffer != NULL) {
3111 LOGE("Fatal: cached callback_buffer is already present");
3112 }
3113 ppInfo->callback_buffer = super_frame;
3114 return;
3115 } else {
3116 mOfflinePpInfoList.erase(ppInfo);
3117 }
3118 }
3119 }
3120
3121 if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
3122 mChannelCbBufErr(this, resultFrameNumber,
3123 CAMERA3_BUFFER_STATUS_ERROR, mUserData);
3124 }
3125 }
3126
3127 QCamera3ProcessingChannel::streamCbRoutine(super_frame, stream);
3128
3129 /* return any pending buffers that were received out of order earlier */
3130 while((super_frame = getNextPendingCbBuffer())) {
3131 QCamera3ProcessingChannel::streamCbRoutine(super_frame, stream);
3132 }
3133
3134 return;
3135 }
3136
3137 /*===========================================================================
3138 * FUNCTION : getNextPendingCbBuffer
3139 *
3140 * DESCRIPTION: Returns the callback_buffer from the first entry of
3141 * mOfflinePpInfoList
3142 *
3143 * PARAMETERS : none
3144 *
3145 * RETURN : callback_buffer
3146 *==========================================================================*/
getNextPendingCbBuffer()3147 mm_camera_super_buf_t* QCamera3YUVChannel::getNextPendingCbBuffer() {
3148 mm_camera_super_buf_t* super_frame = NULL;
3149 if (mOfflinePpInfoList.size()) {
3150 if ((super_frame = mOfflinePpInfoList.begin()->callback_buffer)) {
3151 mOfflinePpInfoList.erase(mOfflinePpInfoList.begin());
3152 }
3153 }
3154 return super_frame;
3155 }
3156
3157 /*===========================================================================
3158 * FUNCTION : reprocessCbRoutine
3159 *
3160 * DESCRIPTION: callback function for the reprocessed frame. This frame now
3161 * should be returned to the framework. This same callback is
3162 * used during input reprocessing or offline postprocessing
3163 *
3164 * PARAMETERS :
3165 * @resultBuffer : buffer containing the reprocessed data
3166 * @resultFrameNumber : frame number on which the buffer was requested
3167 *
3168 * RETURN : NONE
3169 *
3170 *==========================================================================*/
reprocessCbRoutine(buffer_handle_t * resultBuffer,uint32_t resultFrameNumber)3171 void QCamera3YUVChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer,
3172 uint32_t resultFrameNumber)
3173 {
3174 LOGD("E: frame number %d", resultFrameNumber);
3175 Vector<mm_camera_super_buf_t *> pendingCbs;
3176
3177 /* release the input buffer and input metadata buffer if used */
3178 if (0 > mMemory.getHeapBufferIndex(resultFrameNumber)) {
3179 /* mOfflineMemory and mOfflineMetaMemory used only for input reprocessing */
3180 int32_t rc = releaseOfflineMemory(resultFrameNumber);
3181 if (NO_ERROR != rc) {
3182 LOGE("Error releasing offline memory rc = %d", rc);
3183 }
3184 /* Since reprocessing is done, send the callback to release the input buffer */
3185 if (mChannelCB) {
3186 mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
3187 }
3188 }
3189
3190 if (mBypass) {
3191 int32_t rc = handleOfflinePpCallback(resultFrameNumber, pendingCbs);
3192 if (rc != NO_ERROR) {
3193 return;
3194 }
3195 }
3196
3197 issueChannelCb(resultBuffer, resultFrameNumber);
3198
3199 // Call all pending callbacks to return buffers
3200 for (size_t i = 0; i < pendingCbs.size(); i++) {
3201 QCamera3ProcessingChannel::streamCbRoutine(
3202 pendingCbs[i], mStreams[0]);
3203 }
3204
3205 }
3206
3207 /*===========================================================================
3208 * FUNCTION : needsFramePostprocessing
3209 *
3210 * DESCRIPTION: inspects the request settings (edge mode, noise reduction
3211 *              mode and scaler crop region) to decide whether the frame
3212 *              needs offline postprocessing
3213 * PARAMETERS :
 * @meta : metadata buffer carrying the request settings
3214 * RETURN :
3215 * TRUE if frame needs to be postprocessed
3216 * FALSE if frame does not need to be postprocessed
3217 *
3218 *==========================================================================*/
needsFramePostprocessing(metadata_buffer_t * meta)3219 bool QCamera3YUVChannel::needsFramePostprocessing(metadata_buffer_t *meta)
3220 {
3221 bool ppNeeded = false;
3222
3223 //sharpness
3224 IF_META_AVAILABLE(cam_edge_application_t, edgeMode,
3225 CAM_INTF_META_EDGE_MODE, meta) {
3226 mEdgeMode = *edgeMode;
3227 }
3228
3229 //wnr
3230 IF_META_AVAILABLE(uint32_t, noiseRedMode,
3231 CAM_INTF_META_NOISE_REDUCTION_MODE, meta) {
3232 mNoiseRedMode = *noiseRedMode;
3233 }
3234
3235 //crop region
3236 IF_META_AVAILABLE(cam_crop_region_t, scalerCropRegion,
3237 CAM_INTF_META_SCALER_CROP_REGION, meta) {
3238 mCropRegion = *scalerCropRegion;
3239 }
3240
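    // Postprocessing is required when edge enhancement or non-trivial noise
    // reduction is requested, or when the scaler crop region is smaller than
    // the stream dimensions (i.e. digital zoom must be applied by the CPP).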
3241 if ((CAM_EDGE_MODE_OFF != mEdgeMode.edge_mode) &&
3242 (CAM_EDGE_MODE_ZERO_SHUTTER_LAG != mEdgeMode.edge_mode)) {
3243 ppNeeded = true;
3244 }
3245 if ((CAM_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG != mNoiseRedMode) &&
3246 (CAM_NOISE_REDUCTION_MODE_OFF != mNoiseRedMode) &&
3247 (CAM_NOISE_REDUCTION_MODE_MINIMAL != mNoiseRedMode)) {
3248 ppNeeded = true;
3249 }
3250 if ((mCropRegion.width < (int32_t)mCamera3Stream->width) ||
3251 (mCropRegion.height < (int32_t)mCamera3Stream->height)) {
3252 ppNeeded = true;
3253 }
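    /* Illustrative example: a request with edge mode HIGH_QUALITY, noise
     * reduction mode FAST and a scaler crop region smaller than the stream
     * dimensions trips all three checks above, so the frame is routed through
     * offline postprocessing instead of being returned directly. */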
3254
3255 return ppNeeded;
3256 }
3257
3258 /*===========================================================================
3259 * FUNCTION : handleOfflinePpCallback
3260 *
3261 * DESCRIPTION: callback function for the reprocessed frame from offline
3262 * postprocessing.
3263 *
3264 * PARAMETERS :
3265 * @resultFrameNumber : frame number on which the buffer was requested
3266 * @pendingCbs : pending buffers to be returned first
3267 *
3268 * RETURN : int32_t type of status
3269 * NO_ERROR -- success
3270 * non-zero failure code
3271 *==========================================================================*/
3272 int32_t QCamera3YUVChannel::handleOfflinePpCallback(uint32_t resultFrameNumber,
3273 Vector<mm_camera_super_buf_t *>& pendingCbs)
3274 {
3275 Mutex::Autolock lock(mOfflinePpLock);
3276 List<PpInfo>::iterator ppInfo;
3277
3278 for (ppInfo = mOfflinePpInfoList.begin();
3279 ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
3280 if (ppInfo->frameNumber == resultFrameNumber) {
3281 break;
3282 }
3283 }
3284
3285 if (ppInfo == mOfflinePpInfoList.end()) {
3286 LOGI("Request of frame number %d is reprocessing",
3287 resultFrameNumber);
3288 return NO_ERROR;
3289 } else if (ppInfo != mOfflinePpInfoList.begin()) {
3290 LOGE("callback for frame number %d should be head of list",
3291 resultFrameNumber);
3292 return BAD_VALUE;
3293 }
3294
3295 if (ppInfo->offlinePpFlag) {
3296 // Need to get the input buffer frame index from the
3297 // mMemory object and add that to the free heap buffers list.
3298 int32_t bufferIndex =
3299 mMemory.getHeapBufferIndex(resultFrameNumber);
3300 if (bufferIndex < 0) {
3301 LOGE("Fatal %d: no buffer index for frame number %d",
3302 bufferIndex, resultFrameNumber);
3303 return BAD_VALUE;
3304 }
3305 mMemory.markFrameNumber(bufferIndex, -1);
3306 mFreeHeapBufferList.push_back(bufferIndex);
3307 //Move heap buffer into free pool and invalidate the frame number
3308 ppInfo = mOfflinePpInfoList.erase(ppInfo);
3309
3310 /* return any pending buffers that were received out of order
3311 earlier */
3312 mm_camera_super_buf_t* super_frame;
3313 while((super_frame = getNextPendingCbBuffer())) {
3314 pendingCbs.push_back(super_frame);
3315 }
3316 } else {
3317 LOGE("Fatal: request of frame number %d doesn't need"
3318 " offline postprocessing. However there is"
3319 " reprocessing callback.",
3320 resultFrameNumber);
3321 return BAD_VALUE;
3322 }
3323
3324 return NO_ERROR;
3325 }
3326
3327 /*===========================================================================
3328 * FUNCTION : getReprocessType
3329 *
3330 * DESCRIPTION: get the type of reprocess output supported by this channel
3331 *
3332 * PARAMETERS : NONE
3333 *
3334 * RETURN : reprocess_type_t : type of reprocess
3335 *==========================================================================*/
3336 reprocess_type_t QCamera3YUVChannel::getReprocessType()
3337 {
3338 return REPROCESS_TYPE_YUV;
3339 }
3340
3341 /* QCamera3PicChannel methods */
3342
3343 /*===========================================================================
3344 * FUNCTION : jpegEvtHandle
3345 *
3346 * DESCRIPTION: Function registered with mm-jpeg-interface to handle jpeg events.
3347 Construct result payload and call mChannelCb to deliver buffer
3348 to framework.
3349 *
3350 * PARAMETERS :
3351 * @status : status of jpeg job
3352 * @client_hdl: jpeg client handle
3353 * @jobId : jpeg job Id
3354 * @p_output : ptr to jpeg output result struct
3355 * @userdata : user data ptr
3356 *
3357 * RETURN : none
3358 *==========================================================================*/
3359 void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
3360 uint32_t /*client_hdl*/,
3361 uint32_t jobId,
3362 mm_jpeg_output_t *p_output,
3363 void *userdata)
3364 {
3365 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_JPEG_EVT_HANDLE);
3366 buffer_handle_t *resultBuffer = NULL;
3367 buffer_handle_t *jpegBufferHandle = NULL;
3368 int resultStatus = CAMERA3_BUFFER_STATUS_OK;
3369 camera3_stream_buffer_t result;
3370 camera3_jpeg_blob_t jpegHeader;
3371
3372 QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
3373 if (obj) {
3374 //Construct payload for process_capture_result. Call mChannelCb
3375
3376 qcamera_hal3_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
3377
3378 if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
3379 LOGE("Error in jobId: (%d) with status: %d", jobId, status);
3380 resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
3381 }
3382
3383 if (NULL != job) {
3384 uint32_t bufIdx = (uint32_t)job->jpeg_settings->out_buf_index;
3385 LOGD("jpeg out_buf_index: %d", bufIdx);
3386
3387 //Construct jpeg transient header of type camera3_jpeg_blob_t
3388 //Append at the end of jpeg image of buf_filled_len size
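            //Resulting layout of the gralloc blob buffer (sketch):
            //  | encoded JPEG (buf_filled_len bytes) | padding | camera3_jpeg_blob_t |
            //  offset 0                                ...       maxJpegSize - sizeof(jpegHeader)
            //The transient header is written at the very end so the framework
            //can walk back from the end of the buffer to recover jpeg_size.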
3389
3390 jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
3391 if (JPEG_JOB_STATUS_DONE == status) {
3392 jpegHeader.jpeg_size = (uint32_t)p_output->buf_filled_len;
3393 char* jpeg_buf = (char *)p_output->buf_vaddr;
3394 cam_frame_len_offset_t offset;
3395 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
3396 mm_camera_buf_def_t *jpeg_dump_buffer = NULL;
3397 cam_dimension_t dim;
3398 dim.width = obj->mCamera3Stream->width;
3399 dim.height = obj->mCamera3Stream->height;
3400 jpeg_dump_buffer = (mm_camera_buf_def_t *)malloc(sizeof(mm_camera_buf_def_t));
3401 if(!jpeg_dump_buffer) {
3402 LOGE("Could not allocate jpeg dump buffer");
3403 } else {
3404 jpeg_dump_buffer->buffer = p_output->buf_vaddr;
3405 jpeg_dump_buffer->frame_len = p_output->buf_filled_len;
3406 jpeg_dump_buffer->frame_idx = obj->mMemory.getFrameNumber(bufIdx);
3407 obj->dumpYUV(jpeg_dump_buffer, dim, offset, QCAMERA_DUMP_FRM_OUTPUT_JPEG);
3408 free(jpeg_dump_buffer);
3409 }
3410
3411 ssize_t maxJpegSize = -1;
3412
3413 // Gralloc buffer may have additional padding for 4K page size
3414 // Follow size guidelines based on spec since framework relies
3415 // on that to reach end of buffer and with it the header
3416
3417 //Handle is the same as resultBuffer, kept separate for readability
3418 jpegBufferHandle =
3419 (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
3420
3421 if (NULL != jpegBufferHandle) {
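                    // maxJpegSize comes from the handle's width field, which for
                    // BLOB-format gralloc buffers is defined as the buffer size in
                    // bytes; it is clamped to the registered memory size below as
                    // a safety bound.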
3422 maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
3423 if (maxJpegSize > obj->mMemory.getSize(bufIdx)) {
3424 maxJpegSize = obj->mMemory.getSize(bufIdx);
3425 }
3426
3427 size_t jpeg_eof_offset =
3428 (size_t)(maxJpegSize - (ssize_t)sizeof(jpegHeader));
3429 char *jpeg_eof = &jpeg_buf[jpeg_eof_offset];
3430 memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
3431 obj->mMemory.cleanInvalidateCache(bufIdx);
3432 } else {
3433 LOGE("JPEG buffer not found and index: %d",
3434 bufIdx);
3435 resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
3436 }
3437 }
3438
3439 //Use the data below to issue the framework callback
3440 resultBuffer =
3441 (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
3442 int32_t resultFrameNumber = obj->mMemory.getFrameNumber(bufIdx);
3443 int32_t rc = obj->mMemory.unregisterBuffer(bufIdx);
3444 if (NO_ERROR != rc) {
3445 LOGE("Error %d unregistering stream buffer %d",
3446 rc, bufIdx);
3447 }
3448
3449 result.stream = obj->mCamera3Stream;
3450 result.buffer = resultBuffer;
3451 result.status = resultStatus;
3452 result.acquire_fence = -1;
3453 result.release_fence = -1;
3454
3455 // Release any snapshot buffers before calling
3456 // the user callback. The callback can potentially
3457 // unblock pending requests to snapshot stream.
3458 int32_t snapshotIdx = -1;
3459 mm_camera_super_buf_t* src_frame = NULL;
3460
3461 if (job->src_reproc_frame)
3462 src_frame = job->src_reproc_frame;
3463 else
3464 src_frame = job->src_frame;
3465
3466 if (src_frame) {
3467 if (obj->mStreams[0]->getMyHandle() ==
3468 src_frame->bufs[0]->stream_id) {
3469 snapshotIdx = (int32_t)src_frame->bufs[0]->buf_idx;
3470 if (0 <= snapshotIdx) {
3471 Mutex::Autolock lock(obj->mFreeBuffersLock);
3472 obj->mFreeBufferList.push_back((uint32_t)snapshotIdx);
3473 }
3474 }
3475 }
3476
3477 LOGI("Issue Jpeg Callback frameNumber = %d status = %d",
3478 resultFrameNumber, resultStatus);
3479 ATRACE_ASYNC_END("SNAPSHOT", resultFrameNumber);
3480 if (obj->mChannelCB) {
3481 obj->mChannelCB(NULL,
3482 &result,
3483 (uint32_t)resultFrameNumber,
3484 false,
3485 obj->mUserData);
3486 }
3487
3488 // release internal data for jpeg job
3489 if ((NULL != job->fwk_frame) || (NULL != job->fwk_src_buffer)) {
3490 /* unregister offline input buffer */
3491 int32_t inputBufIndex =
3492 obj->mOfflineMemory.getGrallocBufferIndex((uint32_t)resultFrameNumber);
3493 if (0 <= inputBufIndex) {
3494 rc = obj->mOfflineMemory.unregisterBuffer(inputBufIndex);
3495 } else {
3496 LOGE("could not find the input buf index, frame number %d",
3497 resultFrameNumber);
3498 }
3499 if (NO_ERROR != rc) {
3500 LOGE("Error %d unregistering input buffer %d",
3501 rc, bufIdx);
3502 }
3503
3504 /* unregister offline meta buffer */
3505 int32_t metaBufIndex =
3506 obj->mOfflineMetaMemory.getHeapBufferIndex((uint32_t)resultFrameNumber);
3507 if (0 <= metaBufIndex) {
3508 Mutex::Autolock lock(obj->mFreeOfflineMetaBuffersLock);
3509 obj->mFreeOfflineMetaBuffersList.push_back((uint32_t)metaBufIndex);
3510 } else {
3511 LOGE("could not find the input meta buf index, frame number %d",
3512 resultFrameNumber);
3513 }
3514 }
3515 obj->m_postprocessor.releaseOfflineBuffers(false);
3516 obj->m_postprocessor.releaseJpegJobData(job);
3517 free(job);
3518 }
3519
3520 return;
3522 } else {
3523 LOGE("Null userdata in jpeg callback");
3524 }
3525 }
3526
3527 QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
3528 uint32_t channel_handle,
3529 mm_camera_ops_t *cam_ops,
3530 channel_cb_routine cb_routine,
3531 channel_cb_buffer_err cb_buf_err,
3532 cam_padding_info_t *paddingInfo,
3533 void *userData,
3534 camera3_stream_t *stream,
3535 cam_feature_mask_t postprocess_mask,
3536 __unused bool is4KVideo,
3537 bool isInputStreamConfigured,
3538 QCamera3Channel *metadataChannel,
3539 uint32_t numBuffers) :
3540 QCamera3ProcessingChannel(cam_handle, channel_handle,
3541 cam_ops, cb_routine, cb_buf_err, paddingInfo, userData,
3542 stream, CAM_STREAM_TYPE_SNAPSHOT,
3543 postprocess_mask, metadataChannel, numBuffers),
3544 mNumSnapshotBufs(0),
3545 mInputBufferHint(isInputStreamConfigured),
3546 mYuvMemory(NULL),
3547 mFrameLen(0)
3548 {
3549 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
3550 m_max_pic_dim = hal_obj->calcMaxJpegDim();
3551 mYuvWidth = stream->width;
3552 mYuvHeight = stream->height;
3553 mStreamType = CAM_STREAM_TYPE_SNAPSHOT;
3554 // Use same pixelformat for 4K video case
3555 mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
3556 stream->width, stream->height, m_bUBWCenable, IS_TYPE_NONE);
3557 int32_t rc = m_postprocessor.initJpeg(jpegEvtHandle, &m_max_pic_dim, this);
3558 if (rc != 0) {
3559 LOGE("Init Postprocessor failed");
3560 }
3561 }
3562
3563 /*===========================================================================
3564 * FUNCTION : flush
3565 *
3566 * DESCRIPTION: flush pic channel, which will stop all processing within, including
3567 * the reprocessing channel in postprocessor and YUV stream.
3568 *
3569 * PARAMETERS : none
3570 *
3571 * RETURN : int32_t type of status
3572 * NO_ERROR -- success
3573 * non-zero failure code
3574 *==========================================================================*/
3575 int32_t QCamera3PicChannel::flush()
3576 {
3577 int32_t rc = NO_ERROR;
3578 if(!m_bIsActive) {
3579 LOGE("Attempt to flush inactive channel");
3580 return NO_INIT;
3581 }
3582
3583 rc = m_postprocessor.flush();
3584 if (rc != NO_ERROR) {
3585 LOGE("Postprocessor flush failed, rc = %d", rc);
3586 return rc;
3587 }
3588
3589 if (0 < mOfflineMetaMemory.getCnt()) {
3590 mOfflineMetaMemory.deallocate();
3591 }
3592 if (0 < mOfflineMemory.getCnt()) {
3593 mOfflineMemory.unregisterBuffers();
3594 }
3595 Mutex::Autolock lock(mFreeBuffersLock);
3596 mFreeBufferList.clear();
3597
3598 for (uint32_t i = 0; i < mCamera3Stream->max_buffers; i++) {
3599 mFreeBufferList.push_back(i);
3600 }
3601 return rc;
3602 }
3603
3604
3605 QCamera3PicChannel::~QCamera3PicChannel()
3606 {
3607 if (mYuvMemory != nullptr) {
3608 mYuvMemory->deallocate();
3609 delete mYuvMemory;
3610 mYuvMemory = nullptr;
3611 }
3612 }
3613
3614 /*===========================================================================
3615 * FUNCTION : metadataBufDone
3616 *
3617 * DESCRIPTION: Buffer done method for a metadata buffer
3618 *
3619 * PARAMETERS :
3620 * @recvd_frame : received metadata frame
3621 *
3622 * RETURN : int32_t type of status
3623 * OK -- success
3624 * non-zero failure code
3625 *==========================================================================*/
3626 int32_t QCamera3PicChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
3627 {
3628 // Check if this is an external metadata
3629 if (recvd_frame != nullptr && recvd_frame->num_bufs == 1) {
3630 Mutex::Autolock lock(mPendingExternalMetadataLock);
3631 auto iter = mPendingExternalMetadata.begin();
3632 while (iter != mPendingExternalMetadata.end()) {
3633 if (iter->get() == recvd_frame->bufs[0]->buffer) {
3634 // Remove the metadata allocated externally.
3635 mPendingExternalMetadata.erase(iter);
3636 return OK;
3637 }
3638
3639 iter++;
3640 }
3641 }
3642
3643 // If this is not an external metadata, return the metadata.
3644 return QCamera3ProcessingChannel::metadataBufDone(recvd_frame);
3645 }
3646
3647 int32_t QCamera3PicChannel::initialize(cam_is_type_t isType)
3648 {
3649 int32_t rc = NO_ERROR;
3650 cam_dimension_t streamDim;
3651 cam_stream_type_t streamType;
3652 cam_format_t streamFormat;
3653
3654 if (NULL == mCamera3Stream) {
3655 LOGE("Camera stream uninitialized");
3656 return NO_INIT;
3657 }
3658
3659 if (1 <= m_numStreams) {
3660 // Only one stream per channel supported in v3 Hal
3661 return NO_ERROR;
3662 }
3663
3664 mIsType = isType;
3665 streamType = mStreamType;
3666 streamFormat = mStreamFormat;
3667 streamDim.width = (int32_t)mYuvWidth;
3668 streamDim.height = (int32_t)mYuvHeight;
3669
3670 mNumSnapshotBufs = mCamera3Stream->max_buffers;
3671 rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
3672 ROTATE_0, (uint8_t)mCamera3Stream->max_buffers, mPostProcMask,
3673 mIsType);
3674
3675 if (NO_ERROR != rc) {
3676 LOGE("Initialize failed, rc = %d", rc);
3677 return rc;
3678 }
3679
3680 mYuvMemory = new QCamera3StreamMem(mCamera3Stream->max_buffers);
3681 if (!mYuvMemory) {
3682 LOGE("unable to create YUV buffers");
3683 return NO_MEMORY;
3684 }
3685 cam_stream_buf_plane_info_t buf_planes;
3686 cam_padding_info_t paddingInfo = mPaddingInfo;
3687
3688 memset(&buf_planes, 0, sizeof(buf_planes));
3689 //to ensure a big enough buffer size set the height and width
3690 //padding to max(height padding, width padding)
3691 paddingInfo.width_padding = MAX(paddingInfo.width_padding, paddingInfo.height_padding);
3692 paddingInfo.height_padding = paddingInfo.width_padding;
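    //e.g. width_padding = 64 and height_padding = 32 both become 64, so the
    //snapshot offset calculation below can never produce an undersized
    //frame_len for either dimension's alignment.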
3693
3694 rc = mm_stream_calc_offset_snapshot(mStreamFormat, &streamDim, &paddingInfo,
3695 &buf_planes);
3696 if (rc < 0) {
3697 LOGE("mm_stream_calc_offset_preview failed");
3698 return rc;
3699 }
3700 mFrameLen = buf_planes.plane_info.frame_len;
3701
3702 /* initialize offline meta memory for input reprocess */
3703 rc = QCamera3ProcessingChannel::initialize(isType);
3704 if (NO_ERROR != rc) {
3705 LOGE("Processing Channel initialize failed, rc = %d",
3706 rc);
3707 }
3708
3709 return rc;
3710 }
3711
3712 /*===========================================================================
3713 * FUNCTION : request
3714 *
3715 * DESCRIPTION: handle the request - either with an input buffer or a direct
3716 * output request
3717 *
3718 * PARAMETERS :
3719 * @buffer : pointer to the output buffer
3720 * @frameNumber : frame number of the request
3721 * @pInputBuffer : pointer to input buffer if an input request
3722 * @metadata : parameters associated with the request
3723 * @indexUsed : [out] index of the internal buffer used for this request
3724 * @internalRequest : true if this is a purely internal request needing
3725 * internal buffer allocation
3726 * @meteringOnly : true for an internal metering-only frame that is not consumed by the postprocessor
3727 *
3728 * RETURN : 0 on a success start of capture
3729 * -EINVAL on invalid input
3730 * -ENODEV on serious error
3731 *==========================================================================*/
3732 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
3733 uint32_t frameNumber,
3734 camera3_stream_buffer_t *pInputBuffer,
3735 metadata_buffer_t *metadata, int &indexUsed,
3736 bool internalRequest, bool meteringOnly)
3737 {
3738 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_REQ);
3739 //FIX ME: Return buffer back in case of failures below.
3740
3741 int32_t rc = NO_ERROR;
3742
3743 reprocess_config_t reproc_cfg;
3744 cam_dimension_t dim;
3745 memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
3746 //make sure to set the correct input stream dim in case of YUV size override
3747 //and recalculate the plane info
3748 dim.width = (int32_t)mYuvWidth;
3749 dim.height = (int32_t)mYuvHeight;
3750
3751 setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim);
3752
3753 // Start postprocessor
3754 startPostProc(reproc_cfg);
3755
3756 if (!internalRequest) {
3757 int index = mMemory.getMatchBufIndex((void*)buffer);
3758
3759 if(index < 0) {
3760 rc = registerBuffer(buffer, mIsType);
3761 if (NO_ERROR != rc) {
3762 LOGE("On-the-fly buffer registration failed %d",
3763 rc);
3764 return rc;
3765 }
3766
3767 index = mMemory.getMatchBufIndex((void*)buffer);
3768 if (index < 0) {
3769 LOGE("Could not find object among registered buffers");
3770 return DEAD_OBJECT;
3771 }
3772 }
3773 LOGD("buffer index %d, frameNumber: %u", index, frameNumber);
3774
3775 rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
3776
3777 // Queue jpeg settings
3778 rc = queueJpegSetting((uint32_t)index, metadata);
3779
3780 } else {
3781 LOGD("Internal request @ Picchannel");
3782 }
3783
3784
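    // No framework input buffer: this is a regular capture, so pick up (or
    // allocate) an internal YUV buffer and queue it to the stream. With an
    // input buffer, wrap it as framework input and feed the postprocessor
    // directly.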
3785 if (pInputBuffer == NULL) {
3786 Mutex::Autolock lock(mFreeBuffersLock);
3787 uint32_t bufIdx;
3788 if (mFreeBufferList.empty()) {
3789 rc = mYuvMemory->allocateOne(mFrameLen, /*isCached*/false);
3790 if (rc < 0) {
3791 LOGE("Failed to allocate heap buffer. Fatal");
3792 return rc;
3793 } else {
3794 bufIdx = (uint32_t)rc;
3795 }
3796 } else {
3797 List<uint32_t>::iterator it = mFreeBufferList.begin();
3798 bufIdx = *it;
3799 mFreeBufferList.erase(it);
3800 }
3801 if (meteringOnly) {
3802 mYuvMemory->markFrameNumber(bufIdx, 0xFFFFFFFF);
3803 } else {
3804 mYuvMemory->markFrameNumber(bufIdx, frameNumber);
3805 }
3806 if (m_bIsActive) {
3807 mStreams[0]->bufDone(bufIdx);
3808 }
3809 indexUsed = bufIdx;
3810 } else {
3811 qcamera_fwk_input_pp_data_t *src_frame = NULL;
3812 src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
3813 sizeof(qcamera_fwk_input_pp_data_t));
3814 if (src_frame == NULL) {
3815 LOGE("No memory for src frame");
3816 return NO_MEMORY;
3817 }
3818 rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata,
3819 NULL /*fwk output buffer*/, frameNumber);
3820 if (NO_ERROR != rc) {
3821 LOGE("Error %d while setting framework input PP data", rc);
3822 free(src_frame);
3823 return rc;
3824 }
3825 LOGH("Post-process started");
3826 m_postprocessor.processData(src_frame);
3827 }
3828 return rc;
3829 }
3830
3831
3832
3833 /*===========================================================================
3834 * FUNCTION : dataNotifyCB
3835 *
3836 * DESCRIPTION: Channel Level callback used for super buffer data notify.
3837 * This function is registered with mm-camera-interface to handle
3838 * data notify
3839 *
3840 * PARAMETERS :
3841 * @recvd_frame : stream frame received
3842 * userdata : user data ptr
3843 *
3844 * RETURN : none
3845 *==========================================================================*/
3846 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
3847 void *userdata)
3848 {
3849 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_DATA_NOTIFY_CB);
3850 LOGD("E\n");
3851 QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
3852
3853 if (channel == NULL) {
3854 LOGE("invalid channel pointer");
3855 return;
3856 }
3857
3858 if(channel->m_numStreams != 1) {
3859 LOGE("Error: Bug: This callback assumes one stream per channel");
3860 return;
3861 }
3862
3863
3864 if(channel->mStreams[0] == NULL) {
3865 LOGE("Error: Invalid Stream object");
3866 return;
3867 }
3868
3869 channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
3870
3871 LOGD("X\n");
3872 return;
3873 }
3874
3875 /*===========================================================================
3876 * FUNCTION : streamCbRoutine
3877 *
3878 * DESCRIPTION: YUV data callback for the snapshot stream; forwards the filled
3878                buffer to the postprocessor (JPEG results are delivered
3878                separately via jpegEvtHandle)
3879 *
3880 * PARAMETERS :
3881 * @super_frame : the super frame with filled buffer
3882 * @stream : stream on which the buffer was requested and filled
3883 *
3884 * RETURN : none
3885 *==========================================================================*/
3886 void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
3887 QCamera3Stream *stream)
3888 {
3889 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CAPTURE_CH_CB);
3890 //TODO
3891 //Used only for getting YUV. Jpeg callback will be sent back from channel
3892 //directly to HWI. Refer to func jpegEvtHandle
3893
3894 //Got the yuv callback. Calling yuv callback handler in PostProc
3895 uint8_t frameIndex;
3896 mm_camera_super_buf_t* frame = NULL;
3897 cam_dimension_t dim;
3898 cam_frame_len_offset_t offset;
3899
3900 memset(&dim, 0, sizeof(dim));
3901 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
3902
3903 if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
3904 LOGE("Error with the stream callback");
3905 return;
3906 }
3907
3908 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
3909 LOGD("recvd buf_idx: %u for further processing",
3910 (uint32_t)frameIndex);
3911 if(frameIndex >= mNumSnapshotBufs) {
3912 LOGE("Error, Invalid index for buffer");
3913 if(stream) {
3914 Mutex::Autolock lock(mFreeBuffersLock);
3915 mFreeBufferList.push_back(frameIndex);
3916 stream->bufDone(frameIndex);
3917 }
3918 return;
3919 }
3920
3921 if ((uint32_t)mYuvMemory->getFrameNumber(frameIndex) == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3922 LOGD("Internal Request recycle frame");
3923 Mutex::Autolock lock(mFreeBuffersLock);
3924 mFreeBufferList.push_back(frameIndex);
3925 return;
3926 }
3927
3928 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
3929 if (frame == NULL) {
3930 LOGE("Error allocating memory to save received_frame structure.");
3931 if(stream) {
3932 Mutex::Autolock lock(mFreeBuffersLock);
3933 mFreeBufferList.push_back(frameIndex);
3934 stream->bufDone(frameIndex);
3935 }
3936 return;
3937 }
3938 *frame = *super_frame;
3939 stream->getFrameDimension(dim);
3940 stream->getFrameOffset(offset);
3941 dumpYUV(frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
3942
3943 if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
3944 mChannelCbBufErr(this, mYuvMemory->getFrameNumber(frameIndex),
3945 CAMERA3_BUFFER_STATUS_ERROR, mUserData);
3946 }
3947
3948 m_postprocessor.processData(frame);
3949 free(super_frame);
3950 return;
3951 }
3952
3953 QCamera3StreamMem* QCamera3PicChannel::getStreamBufs(uint32_t /*len*/)
3954 {
3955 return mYuvMemory;
3956 }
3957
3958 void QCamera3PicChannel::putStreamBufs()
3959 {
3960 QCamera3ProcessingChannel::putStreamBufs();
3961 Mutex::Autolock lock(mFreeBuffersLock);
3962 mFreeBufferList.clear();
3963
3964 if (nullptr != mYuvMemory) {
3965 uint32_t count = mYuvMemory->getCnt();
3966 for (uint32_t i = 0; i < count; i++) {
3967 mFreeBufferList.push_back(i);
3968 }
3969 }
3970 }
3971
3972 int32_t QCamera3PicChannel::queueJpegSetting(uint32_t index, metadata_buffer_t *metadata)
3973 {
3974 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
3975 jpeg_settings_t *settings =
3976 (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t));
3977
3978 if (!settings) {
3979 LOGE("out of memory allocating jpeg_settings");
3980 return -ENOMEM;
3981 }
3982
3983 memset(settings, 0, sizeof(jpeg_settings_t));
3984
3985 settings->out_buf_index = index;
3986
3987 settings->jpeg_orientation = 0;
3988 IF_META_AVAILABLE(int32_t, orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
3989 settings->jpeg_orientation = *orientation;
3990 }
3991
3992 settings->jpeg_quality = 85;
3993 IF_META_AVAILABLE(uint32_t, quality1, CAM_INTF_META_JPEG_QUALITY, metadata) {
3994 settings->jpeg_quality = (uint8_t) *quality1;
3995 }
3996
3997 IF_META_AVAILABLE(uint32_t, quality2, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
3998 settings->jpeg_thumb_quality = (uint8_t) *quality2;
3999 }
4000
4001 IF_META_AVAILABLE(cam_dimension_t, dimension, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
4002 settings->thumbnail_size = *dimension;
4003 }
4004
4005 settings->gps_timestamp_valid = 0;
4006 IF_META_AVAILABLE(int64_t, timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
4007 settings->gps_timestamp = *timestamp;
4008 settings->gps_timestamp_valid = 1;
4009 }
4010
4011 settings->gps_coordinates_valid = 0;
4012 IF_META_AVAILABLE(double, coordinates, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
4013 memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double));
4014 settings->gps_coordinates_valid = 1;
4015 }
4016
4017 IF_META_AVAILABLE(uint8_t, proc_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
4018 memset(settings->gps_processing_method, 0,
4019 sizeof(settings->gps_processing_method));
4020 strlcpy(settings->gps_processing_method, (const char *)proc_methods,
4021 sizeof(settings->gps_processing_method));
4022 }
4023
4024 settings->hdr_snapshot = 0;
4025 IF_META_AVAILABLE(cam_hdr_param_t, hdr_info, CAM_INTF_PARM_HAL_BRACKETING_HDR, metadata) {
4026 if (hdr_info->hdr_enable) {
4027 settings->hdr_snapshot = 1;
4028 }
4029 }
4030
4031
4032 // Image description
4033 const char *eepromVersion = hal_obj->getEepromVersionInfo();
4034 const uint32_t *ldafCalib = hal_obj->getLdafCalib();
4035 const char *easelFwVersion = hal_obj->getEaselFwVersion();
4036 if ((eepromVersion && strlen(eepromVersion)) ||
4037 ldafCalib) {
4038 int len = 0;
4039 settings->image_desc_valid = true;
4040 if (eepromVersion && strlen(eepromVersion)) {
4041 len = snprintf(settings->image_desc, sizeof(settings->image_desc),
4042 "%s", eepromVersion);
4043 }
4044 if (ldafCalib) {
4045 len += snprintf(settings->image_desc + len,
4046 sizeof(settings->image_desc) - len, "L:%u-%u",
4047 ldafCalib[0], ldafCalib[1]);
4048 }
4049 if (easelFwVersion) {
4050 ALOGD("%s: Easel FW version %s", __FUNCTION__, easelFwVersion);
4051 len += snprintf(settings->image_desc + len,
4052 sizeof(settings->image_desc) - len, ":%s", easelFwVersion);
4053 }
4054 }
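    // Illustrative result (hypothetical values): eepromVersion "V1.0",
    // ldafCalib {100, 200} and easelFwVersion "8.0" produce an image_desc of
    // "V1.0L:100-200:8.0".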
4055
4056 return m_postprocessor.processJpegSettingData(settings);
4057 }
4058
4059
4060 void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
4061 {
4062 mYuvWidth = width;
4063 mYuvHeight = height;
4064 }
4065
4066 /*===========================================================================
4067 * FUNCTION : getReprocessType
4068 *
4069 * DESCRIPTION: get the type of reprocess output supported by this channel
4070 *
4071 * PARAMETERS : NONE
4072 *
4073 * RETURN : reprocess_type_t : type of reprocess
4074 *==========================================================================*/
4075 reprocess_type_t QCamera3PicChannel::getReprocessType()
4076 {
4077 /* a picture channel could either use the postprocessor for reprocess+jpeg
4078 or only for reprocess */
4079 reprocess_type_t expectedReprocess;
4080 if (mPostProcMask == CAM_QCOM_FEATURE_NONE || mInputBufferHint) {
4081 expectedReprocess = REPROCESS_TYPE_JPEG;
4082 } else {
4083 expectedReprocess = REPROCESS_TYPE_NONE;
4084 }
4085 LOGH("expectedReprocess from Pic Channel is %d", expectedReprocess);
4086 return expectedReprocess;
4087 }
4088
4089
4090 /*===========================================================================
4091 * FUNCTION : timeoutFrame
4092 *
4093 * DESCRIPTION: Method to indicate to the channel that a given frame has taken too
4094 * long to be generated
4095 *
4096 * PARAMETERS : @frameNumber : frame number of the buffer that timed out
4097 *
4098 * RETURN : int32_t type of status
4099 * NO_ERROR -- success
4100 * non-zero failure code
4101 *==========================================================================*/
4102 int32_t QCamera3PicChannel::timeoutFrame(uint32_t frameNumber)
4103 {
4104 int32_t bufIdx;
4105
4106 bufIdx = mYuvMemory->getBufferIndex(frameNumber);
4107
4108 if (bufIdx < 0) {
4109 LOGE("%s: Buffer not found for frame:%d", __func__, frameNumber);
4110 return -1;
4111 }
4112
4113 mStreams[0]->timeoutFrame(bufIdx);
4114
4115 return NO_ERROR;
4116 }
4117
4118 int32_t QCamera3PicChannel::getYuvBufferForRequest(mm_camera_buf_def_t *frame,
4119 uint32_t frameNumber)
4120 {
4121 uint32_t bufIdx;
4122 status_t rc;
4123
4124 Mutex::Autolock lock(mFreeBuffersLock);
4125
4126 // Get an available YUV buffer.
4127 if (mFreeBufferList.empty()) {
4128 // Allocate a buffer if no one is available.
4129 rc = mYuvMemory->allocateOne(mFrameLen, /*isCached*/false);
4130 if (rc < 0) {
4131 LOGE("Failed to allocate heap buffer. Fatal");
4132 return rc;
4133 } else {
4134 bufIdx = (uint32_t)rc;
4135 }
4136 } else {
4137 List<uint32_t>::iterator it = mFreeBufferList.begin();
4138 bufIdx = *it;
4139 mFreeBufferList.erase(it);
4140 }
4141
4142 mYuvMemory->markFrameNumber(bufIdx, frameNumber);
4143
4144 cam_frame_len_offset_t offset = {};
4145 mStreams[0]->getFrameOffset(offset);
4146
4147 // Get a buffer from YUV memory.
4148 rc = mYuvMemory->getBufDef(offset, *frame, bufIdx, mMapStreamBuffers);
4149 if (rc != 0) {
4150 ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc);
4151 return rc;
4152 }
4153
4154 // Set the frame's stream ID because it's not set in getBufDef.
4155 frame->stream_id = mStreams[0]->getMyHandle();
4156 return 0;
4157 }
4158
4159 int32_t QCamera3PicChannel::returnYuvBuffer(mm_camera_buf_def_t *frame)
4160 {
4161 Mutex::Autolock lock(mFreeBuffersLock);
4162 mFreeBufferList.push_back(frame->buf_idx);
4163 return 0;
4164 }
4165
4166 int32_t QCamera3PicChannel::returnYuvBufferAndEncode(mm_camera_buf_def_t *frame,
4167 buffer_handle_t *outBuffer, uint32_t frameNumber,
4168 std::shared_ptr<metadata_buffer_t> metadata)
4169 {
4170 int32_t rc = OK;
4171
4172 // Picture stream must have been started before any request comes in.
4173 if (!m_bIsActive) {
4174 LOGE("Channel not started!!");
4175 return NO_INIT;
4176 }
4177
4178 // Set up reprocess configuration
4179 reprocess_config_t reproc_cfg = {};
4180 cam_dimension_t dim;
4181 dim.width = (int32_t)mYuvWidth;
4182 dim.height = (int32_t)mYuvHeight;
4183 setReprocConfig(reproc_cfg, nullptr, metadata.get(), mStreamFormat, dim);
4184
4185 // Override reprocess type to just JPEG encoding without reprocessing.
4186 reproc_cfg.reprocess_type = REPROCESS_TYPE_NONE;
4187
4188 // Get the index of the output jpeg buffer.
4189 int index = mMemory.getMatchBufIndex((void*)outBuffer);
4190 if(index < 0) {
4191 rc = registerBuffer(outBuffer, mIsType);
4192 if (OK != rc) {
4193 LOGE("On-the-fly buffer registration failed %d",
4194 rc);
4195 return rc;
4196 }
4197
4198 index = mMemory.getMatchBufIndex((void*)outBuffer);
4199 if (index < 0) {
4200 LOGE("Could not find object among registered buffers");
4201 return DEAD_OBJECT;
4202 }
4203 }
4204
4205 rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
4206 if (rc != OK) {
4207 ALOGE("%s: Marking frame number (%u) for jpeg buffer (%d) failed: %s (%d)", __FUNCTION__,
4208 frameNumber, index, strerror(-rc), rc);
4209 return rc;
4210 }
4211
4212 // Start postprocessor
4213 startPostProc(reproc_cfg);
4214
4215 // Queue jpeg settings
4216 rc = queueJpegSetting((uint32_t)index, metadata.get());
4217 if (rc != OK) {
4218 ALOGE("%s: Queueing Jpeg setting for frame number (%u) buffer index (%d) failed: %s (%d)",
4219 __FUNCTION__, frameNumber, index, strerror(-rc), rc);
4220 return rc;
4221 }
4222
4223 // Allocate a buffer for the YUV input. It will be freed in QCamera3PostProc.
4224 mm_camera_super_buf_t *src_frame =
4225 (mm_camera_super_buf_t *)calloc(1, sizeof(mm_camera_super_buf_t));
4226 if (src_frame == nullptr) {
4227 LOGE("%s: No memory for src frame", __FUNCTION__);
4228 return NO_MEMORY;
4229 }
4230 src_frame->camera_handle = m_camHandle;
4231 src_frame->ch_id = getMyHandle();
4232 src_frame->num_bufs = 1;
4233 src_frame->bufs[0] = frame;
4234
4235 // Start processing the YUV buffer.
4236 ALOGD("%s: %d: Post-process started", __FUNCTION__, __LINE__);
4237 rc = m_postprocessor.processData(src_frame);
4238 if (rc != OK) {
4239 ALOGE("%s: Post processing frame (frame number: %u, jpeg buffer: %d) failed: %s (%d)",
4240 __FUNCTION__, frameNumber, index, strerror(-rc), rc);
4241 return rc;
4242 }
4243
4244 // Allocate a buffer for the metadata. It will be freed in QCamera3PostProc.
4245 mm_camera_super_buf_t *metadataBuf =
4246 (mm_camera_super_buf_t *)calloc(1, sizeof(mm_camera_super_buf_t));
4247 if (metadataBuf == nullptr) {
4248 LOGE("%s: No memory for metadata", __FUNCTION__);
4249 return NO_MEMORY;
4250 }
4251 metadataBuf->camera_handle = m_camHandle;
4252 metadataBuf->ch_id = getMyHandle();
4253 metadataBuf->num_bufs = 1;
4254 metadataBuf->bufs[0] = (mm_camera_buf_def_t *)calloc(1, sizeof(mm_camera_buf_def_t));
4255 metadataBuf->bufs[0]->buffer = metadata.get();
4256
4257 // Start processing the metadata
4258 rc = m_postprocessor.processPPMetadata(metadataBuf);
4259 if (rc != OK) {
4260 ALOGE("%s: Post processing metadata (frame number: %u, jpeg buffer: %d) failed: %s (%d)",
4261 __FUNCTION__, frameNumber, index, strerror(-rc), rc);
4262 return rc;
4263 }
4264
4265 // Queue the external metadata.
4266 {
4267 Mutex::Autolock lock(mPendingExternalMetadataLock);
4268 mPendingExternalMetadata.push_back(metadata);
4269 }
4270
4271 return OK;
4272 }
4273
4274 /*===========================================================================
4275 * FUNCTION : QCamera3ReprocessChannel
4276 *
4277 * DESCRIPTION: constructor of QCamera3ReprocessChannel
4278 *
4279 * PARAMETERS :
4280 * @cam_handle : camera handle
4281 * @cam_ops : ptr to camera ops table
4282 * @pp_mask : post-process feature mask
4283 *
4284 * RETURN : none
4285 *==========================================================================*/
4286 QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
4287 uint32_t channel_handle,
4288 mm_camera_ops_t *cam_ops,
4289 channel_cb_routine cb_routine,
4290 channel_cb_buffer_err cb_buf_err,
4291 cam_padding_info_t *paddingInfo,
4292 cam_feature_mask_t postprocess_mask,
4293 void *userData, void *ch_hdl) :
4294 /* In case of framework reprocessing, pproc and jpeg operations could be
4295 * parallelized by allowing 1 extra buffer for reprocessing output:
4296 * ch_hdl->getNumBuffers() + 1 */
4297 QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine, cb_buf_err, paddingInfo,
4298 postprocess_mask, userData,
4299 ((QCamera3ProcessingChannel *)ch_hdl)->getNumBuffers()
4300 + (MAX_REPROCESS_PIPELINE_STAGES - 1)),
4301 inputChHandle(ch_hdl),
4302 mOfflineBuffersIndex(-1),
4303 mFrameLen(0),
4304 mReprocessType(REPROCESS_TYPE_NONE),
4305 m_pSrcChannel(NULL),
4306 m_pMetaChannel(NULL),
4307 mMemory(NULL),
4308 mGrallocMemory(0),
4309 mReprocessPerfMode(false)
4310 {
4311 memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
4312 mOfflineBuffersIndex = mNumBuffers -1;
4313 mOfflineMetaIndex = (int32_t) (2*mNumBuffers -1);
4314 }
4315
4316
4317 /*===========================================================================
4318 * FUNCTION : initialize
4319 *
4320 * DESCRIPTION: initialize the reprocess channel by adding the backing mm-camera channel
4321 *
4322 * PARAMETERS :
4323 * @isType : image stabilization type on the stream
4324 *
4325 * RETURN : int32_t type of status
4326 * NO_ERROR -- success
4327 * non-zero failure code
4328 *==========================================================================*/
4329 int32_t QCamera3ReprocessChannel::initialize(cam_is_type_t isType)
4330 {
4331 int32_t rc = NO_ERROR;
4332 mm_camera_channel_attr_t attr;
4333
4334 memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
4335 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
4336 attr.max_unmatched_frames = 1;
4337
4338 m_handle = m_camOps->add_channel(m_camHandle,
4339 &attr,
4340 NULL,
4341 this);
4342 if (m_handle == 0) {
4343 LOGE("Add channel failed");
4344 return UNKNOWN_ERROR;
4345 }
4346
4347 mIsType = isType;
4348 return rc;
4349 }
4350
4351 /*===========================================================================
4352 * FUNCTION : registerBuffer
4353 *
4354 * DESCRIPTION: register streaming buffer to the channel object
4355 *
4356 * PARAMETERS :
4357 * @buffer : buffer to be registered
4358 * @isType : the image stabilization type for the buffer
4359 *
4360 * RETURN : int32_t type of status
4361 * NO_ERROR -- success
4362 * non-zero failure code
4363 *==========================================================================*/
4364 int32_t QCamera3ReprocessChannel::registerBuffer(buffer_handle_t *buffer,
4365 cam_is_type_t isType)
4366 {
4367 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_REG_BUF);
4368 int rc = 0;
4369 mIsType = isType;
4370 cam_stream_type_t streamType;
4371
4372 if (buffer == NULL) {
4373 LOGE("Error: Cannot register a NULL buffer");
4374 return BAD_VALUE;
4375 }
4376
4377 if ((uint32_t)mGrallocMemory.getCnt() > (mNumBuffers - 1)) {
4378 LOGE("Trying to register more buffers than initially requested");
4379 return BAD_VALUE;
4380 }
4381
4382 if (0 == m_numStreams) {
4383 rc = initialize(mIsType);
4384 if (rc != NO_ERROR) {
4385 LOGE("Couldn't initialize camera stream %d",
4386 rc);
4387 return rc;
4388 }
4389 }
4390
4391 streamType = mStreams[0]->getMyType();
4392 rc = mGrallocMemory.registerBuffer(buffer, streamType);
4393 if (ALREADY_EXISTS == rc) {
4394 return NO_ERROR;
4395 } else if (NO_ERROR != rc) {
4396 LOGE("Buffer %p couldn't be registered %d", buffer, rc);
4397 return rc;
4398 }
4399
4400 return rc;
4401 }
4402
4403 /*===========================================================================
4404 * FUNCTION : streamCbRoutine
4405 *
4406 * DESCRIPTION: callback routine invoked when a reprocessed frame is received
4407 * from the reprocess stream
4408 *
4409 * PARAMETERS :
4410 * @super_frame : the super frame with the filled reprocess buffer
4411 * @stream : stream on which the buffer was requested and filled
4412 *
4413 * RETURN : none
4414 *==========================================================================*/
4415 void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
4416 QCamera3Stream *stream)
4417 {
4418 //Got the pproc data callback. Now send to jpeg encoding
4419 uint8_t frameIndex;
4420 uint32_t resultFrameNumber;
4421 ATRACE_CALL();
4422 mm_camera_super_buf_t* frame = NULL;
4423 QCamera3ProcessingChannel *obj = (QCamera3ProcessingChannel *)inputChHandle;
4424 cam_dimension_t dim;
4425 cam_frame_len_offset_t offset;
4426
4427 memset(&dim, 0, sizeof(dim));
4428 memset(&offset, 0, sizeof(cam_frame_len_offset_t));
4429 if(!super_frame) {
4430 LOGE("Invalid Super buffer");
4431 return;
4432 }
4433
4434 if(super_frame->num_bufs != 1) {
4435 LOGE("Multiple streams are not supported");
4436 return;
4437 }
4438 if(super_frame->bufs[0] == NULL ) {
4439 LOGE("Error, Super buffer frame does not contain valid buffer");
4440 return;
4441 }
4442 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
4443
4444
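    // REPROCESS_TYPE_JPEG: the reprocessed YUV is handed back to the source
    // channel's postprocessor for JPEG encoding. Any other reprocess type:
    // the gralloc output buffer is returned to the framework through
    // reprocessCbRoutine and the pending PP job is released.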
4445 if (mReprocessType == REPROCESS_TYPE_JPEG) {
4446 resultFrameNumber = mMemory->getFrameNumber(frameIndex);
4447 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
4448 if (frame == NULL) {
4449 LOGE("Error allocating memory to save received_frame structure.");
4450 if(stream) {
4451 stream->bufDone(frameIndex);
4452 }
4453 return;
4454 }
4455 LOGI("bufIndex: %u recvd from post proc",
4456 (uint32_t)frameIndex);
4457 *frame = *super_frame;
4458
4459 stream->getFrameDimension(dim);
4460 stream->getFrameOffset(offset);
4461 dumpYUV(frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_INPUT_JPEG);
4462 // Release offline buffers.
4463 int32_t rc = obj->releaseOfflineMemory(resultFrameNumber);
4464 if (NO_ERROR != rc) {
4465 LOGE("Error releasing offline memory %d", rc);
4466 }
4467 /* Since reprocessing is done, send the callback to release the input buffer */
4468 if (mChannelCB) {
4469 mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
4470 }
4471 obj->m_postprocessor.processPPData(frame);
4472 } else {
4473 buffer_handle_t *resultBuffer;
4474 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
4475 resultBuffer = (buffer_handle_t *)mGrallocMemory.getBufferHandle(frameIndex);
4476 resultFrameNumber = mGrallocMemory.getFrameNumber(frameIndex);
4477 int32_t rc = stream->bufRelease(frameIndex);
4478 if (NO_ERROR != rc) {
4479 LOGE("Error %d releasing stream buffer %d",
4480 rc, frameIndex);
4481 }
4482 rc = mGrallocMemory.unregisterBuffer(frameIndex);
4483 if (NO_ERROR != rc) {
4484 LOGE("Error %d unregistering stream buffer %d",
4485 rc, frameIndex);
4486 }
4487 obj->reprocessCbRoutine(resultBuffer, resultFrameNumber);
4488
4489 obj->m_postprocessor.releaseOfflineBuffers(false);
4490 qcamera_hal3_pp_data_t *pp_job = obj->m_postprocessor.dequeuePPJob(resultFrameNumber);
4491 if (pp_job != NULL) {
4492 obj->m_postprocessor.releasePPJobData(pp_job);
4493 }
4494 free(pp_job);
4495 resetToCamPerfNormal(resultFrameNumber);
4496 }
4497 free(super_frame);
4498 return;
4499 }
4500
4501 /*===========================================================================
4502 * FUNCTION : resetToCamPerfNormal
4503 *
4504 * DESCRIPTION: Set the perf mode to normal if all the priority frames
4505 * have been reprocessed
4506 *
4507 * PARAMETERS :
4508 * @frameNumber: Frame number of the reprocess completed frame
4509 *
4510 * RETURN : int32_t type of status
4511 *==========================================================================*/
4512 int32_t QCamera3ReprocessChannel::resetToCamPerfNormal(uint32_t frameNumber)
4513 {
4514 int32_t rc = NO_ERROR;
4515 bool resetToPerfNormal = false;
4516 {
4517 Mutex::Autolock lock(mPriorityFramesLock);
4518 /* remove the priority frame number from the list */
4519 for (size_t i = 0; i < mPriorityFrames.size(); i++) {
4520 if (mPriorityFrames[i] == frameNumber) {
4521 mPriorityFrames.removeAt(i);
4522 }
4523 }
4524 /* reset the perf mode if pending priority frame list is empty */
4525 if (mReprocessPerfMode && mPriorityFrames.empty()) {
4526 resetToPerfNormal = true;
4527 }
4528 }
4529 if (resetToPerfNormal) {
4530 QCamera3Stream *pStream = mStreams[0];
4531 cam_stream_parm_buffer_t param;
4532 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t));
4533
4534 param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE;
4535 param.perf_mode = CAM_PERF_NORMAL;
4536 rc = pStream->setParameter(param);
4537 {
4538 Mutex::Autolock lock(mPriorityFramesLock);
4539 mReprocessPerfMode = false;
4540 }
4541 }
4542 return rc;
4543 }
4544
4545 /*===========================================================================
4546 * FUNCTION : getStreamBufs
4547 *
4548 * DESCRIPTION: register the buffers of the reprocess channel
4549 *
4550 * PARAMETERS : none
4551 *
4552 * RETURN : QCamera3StreamMem *
4553 *==========================================================================*/
4554 QCamera3StreamMem* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
4555 {
4556 if (mReprocessType == REPROCESS_TYPE_JPEG) {
4557 mMemory = new QCamera3StreamMem(mNumBuffers);
4558 if (!mMemory) {
4559 LOGE("unable to create reproc memory");
4560 return NULL;
4561 }
4562 mFrameLen = len;
4563 return mMemory;
4564 }
4565 return &mGrallocMemory;
4566 }
4567
4568 /*===========================================================================
4569 * FUNCTION : putStreamBufs
4570 *
4571 * DESCRIPTION: release the reprocess channel buffers
4572 *
4573 * PARAMETERS : none
4574 *
4575 * RETURN :
4576 *==========================================================================*/
4577 void QCamera3ReprocessChannel::putStreamBufs()
4578 {
4579 if (mReprocessType == REPROCESS_TYPE_JPEG) {
4580 mMemory->deallocate();
4581 delete mMemory;
4582 mMemory = NULL;
4583 mFreeBufferList.clear();
4584 } else {
4585 mGrallocMemory.unregisterBuffers();
4586 }
4587 }
4588
4589 /*===========================================================================
4590 * FUNCTION : ~QCamera3ReprocessChannel
4591 *
4592 * DESCRIPTION: destructor of QCamera3ReprocessChannel
4593 *
4594 * PARAMETERS : none
4595 *
4596 * RETURN : none
4597 *==========================================================================*/
4598 QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
4599 {
4600 destroy();
4601
4602 if (m_handle) {
4603 m_camOps->delete_channel(m_camHandle, m_handle);
4604 LOGD("deleting channel %d", m_handle);
4605 m_handle = 0;
4606 }
4607 }
4608
4609 /*===========================================================================
4610 * FUNCTION : start
4611 *
4612 * DESCRIPTION: start reprocess channel.
4613 *
4614 * PARAMETERS :
4615 *
4616 * RETURN : int32_t type of status
4617 * NO_ERROR -- success
4618 * non-zero failure code
4619 *==========================================================================*/
4620 int32_t QCamera3ReprocessChannel::start()
4621 {
4622 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_START);
4623 int32_t rc = NO_ERROR;
4624
4625 rc = QCamera3Channel::start();
4626
4627 if (rc == NO_ERROR) {
4628 rc = m_camOps->start_channel(m_camHandle, m_handle, /*start_sensor_streaming*/true);
4629
4630 // Check failure
4631 if (rc != NO_ERROR) {
4632 LOGE("start_channel failed %d", rc);
4633 QCamera3Channel::stop();
4634 }
4635 }
4636 return rc;
4637 }
4638
4639 /*===========================================================================
4640 * FUNCTION : stop
4641 *
4642 * DESCRIPTION: stop reprocess channel.
4643 *
4644 * PARAMETERS : none
4645 *
4646 * RETURN : int32_t type of status
4647 * NO_ERROR -- success
4648 * non-zero failure code
4649 *==========================================================================*/
4650 int32_t QCamera3ReprocessChannel::stop()
4651 {
4652 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_STOP);
4653 int32_t rc = NO_ERROR;
4654
4655 rc = QCamera3Channel::stop();
4656 rc |= m_camOps->stop_channel(m_camHandle, m_handle, /*stop_channel_immediately*/false);
4657 // Unmapping the buffers
4658 unmapOfflineBuffers(true);
4659 return rc;
4660 }
4661
4662 /*===========================================================================
4663 * FUNCTION : getStreamBySrcHandle
4664 *
4665 * DESCRIPTION: find reprocess stream by its source stream handle
4666 *
4667 * PARAMETERS :
4668 * @srcHandle : source stream handle
4669 *
4670 * RETURN : ptr to reprocess stream if found. NULL if not found
4671 *==========================================================================*/
4672 QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle)
4673 {
4674 QCamera3Stream *pStream = NULL;
4675
4676 for (uint32_t i = 0; i < m_numStreams; i++) {
4677 if (mSrcStreamHandles[i] == srcHandle) {
4678 pStream = mStreams[i];
4679 break;
4680 }
4681 }
4682 return pStream;
4683 }
4684
4685 /*===========================================================================
4686 * FUNCTION : getSrcStreamBySrcHandle
4687 *
4688 * DESCRIPTION: find source stream by source stream handle
4689 *
4690 * PARAMETERS :
4691 * @srcHandle : source stream handle
4692 *
4693 * RETURN : ptr to source stream if found. NULL if not found
4694 *==========================================================================*/
4695 QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle)
4696 {
4697 QCamera3Stream *pStream = NULL;
4698
4699 if (NULL == m_pSrcChannel) {
4700 return NULL;
4701 }
4702
4703 for (uint32_t i = 0; i < m_numStreams; i++) {
4704 if (mSrcStreamHandles[i] == srcHandle) {
4705 pStream = m_pSrcChannel->getStreamByIndex(i);
4706 break;
4707 }
4708 }
4709 return pStream;
4710 }
4711
4712 /*===========================================================================
4713 * FUNCTION : unmapOfflineBuffers
4714 *
4715 * DESCRIPTION: Unmaps offline buffers
4716 *
4717 * PARAMETERS : none
4718 *
4719 * RETURN : int32_t type of status
4720 * NO_ERROR -- success
4721 * non-zero failure code
4722 *==========================================================================*/
4723 int32_t QCamera3ReprocessChannel::unmapOfflineBuffers(bool all)
4724 {
4725 int rc = NO_ERROR;
4726 Mutex::Autolock l(mOfflineBuffersLock);
4727 if (!mOfflineBuffers.empty()) {
4728 QCamera3Stream *stream = NULL;
4729 List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
4730 for (; it != mOfflineBuffers.end(); it++) {
4731 stream = (*it).stream;
4732 if (NULL != stream) {
4733 rc = stream->unmapBuf((*it).type,
4734 (*it).index,
4735 -1);
4736 if (NO_ERROR != rc) {
4737 LOGE("Error during offline buffer unmap %d",
4738 rc);
4739 }
4740 LOGD("Unmapped buffer with index %d", (*it).index);
4741 }
4742 if (!all) {
4743 mOfflineBuffers.erase(it);
4744 break;
4745 }
4746 }
4747 if (all) {
4748 mOfflineBuffers.clear();
4749 }
4750 }
4751
4752 if (!mOfflineMetaBuffers.empty()) {
4753 QCamera3Stream *stream = NULL;
4754 List<OfflineBuffer>::iterator it = mOfflineMetaBuffers.begin();
4755 for (; it != mOfflineMetaBuffers.end(); it++) {
4756 stream = (*it).stream;
4757 if (NULL != stream) {
4758 rc = stream->unmapBuf((*it).type,
4759 (*it).index,
4760 -1);
4761 if (NO_ERROR != rc) {
4762 LOGE("Error during offline buffer unmap %d",
4763 rc);
4764 }
4765 LOGD("Unmapped meta buffer with index %d", (*it).index);
4766 }
4767 if (!all) {
4768 mOfflineMetaBuffers.erase(it);
4769 break;
4770 }
4771 }
4772 if (all) {
4773 mOfflineMetaBuffers.clear();
4774 }
4775 }
4776 return rc;
4777 }
4778
4779 /*===========================================================================
4780 * FUNCTION : bufDone
4781 *
4782 * DESCRIPTION: Return reprocess stream buffer to free buffer list.
4783 * Note that this function doesn't queue buffer back to kernel.
4784 * It's up to doReprocessOffline to do that instead.
4785 * PARAMETERS :
4786 * @recvd_frame : stream buf frame to be returned
4787 *
4788 * RETURN : int32_t type of status
4789 * NO_ERROR -- success
4790 * non-zero failure code
4791 *==========================================================================*/
4792 int32_t QCamera3ReprocessChannel::bufDone(mm_camera_super_buf_t *recvd_frame)
4793 {
4794 int rc = NO_ERROR;
4795 if (recvd_frame && recvd_frame->num_bufs == 1) {
4796 Mutex::Autolock lock(mFreeBuffersLock);
4797 uint32_t buf_idx = recvd_frame->bufs[0]->buf_idx;
4798 mFreeBufferList.push_back(buf_idx);
4799
4800 } else {
4801 LOGE("Fatal. Not supposed to be here");
4802 rc = BAD_VALUE;
4803 }
4804
4805 return rc;
4806 }
4807
4808 /*===========================================================================
4809 * FUNCTION : overrideMetadata
4810 *
4811 * DESCRIPTION: Override metadata entry such as rotation, crop, and CDS info.
4812 *
4813 * PARAMETERS :
4814 * @pp_buffer : input postprocessing buffer from the source stream
4815 * @meta_buffer : metadata buffer for the request
4816 * @jpeg_settings : jpeg settings associated with the request
4817 * @fwk_frame : [out] framework frame filled with the overridden data
4818 *
4819 * RETURN : int32_t type of status
4820 * NO_ERROR -- success
4821 * non-zero failure code
4822 *==========================================================================*/
4823 int32_t QCamera3ReprocessChannel::overrideMetadata(qcamera_hal3_pp_buffer_t *pp_buffer,
4824 mm_camera_buf_def_t *meta_buffer, jpeg_settings_t *jpeg_settings,
4825 qcamera_fwk_input_pp_data_t &fwk_frame)
4826 {
4827 int32_t rc = NO_ERROR;
4828 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
4829 if ((NULL == meta_buffer) || (NULL == pp_buffer) || (NULL == pp_buffer->input) ||
4830 (NULL == hal_obj)) {
4831 return BAD_VALUE;
4832 }
4833
4834 metadata_buffer_t *meta = (metadata_buffer_t *)meta_buffer->buffer;
4835 mm_camera_super_buf_t *frame = pp_buffer->input;
4836 if (NULL == meta) {
4837 return BAD_VALUE;
4838 }
4839
4840 for (uint32_t i = 0; i < frame->num_bufs; i++) {
4841 QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
4842 QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);
4843
4844 if (pStream != NULL && pSrcStream != NULL) {
4845 if (jpeg_settings) {
4846 // Find rotation info for reprocess stream
4847 cam_rotation_info_t rotation_info;
4848 memset(&rotation_info, 0, sizeof(rotation_info));
4849 if (jpeg_settings->jpeg_orientation == 0) {
4850 rotation_info.rotation = ROTATE_0;
4851 } else if (jpeg_settings->jpeg_orientation == 90) {
4852 rotation_info.rotation = ROTATE_90;
4853 } else if (jpeg_settings->jpeg_orientation == 180) {
4854 rotation_info.rotation = ROTATE_180;
4855 } else if (jpeg_settings->jpeg_orientation == 270) {
4856 rotation_info.rotation = ROTATE_270;
4857 }
4858
4859 rotation_info.device_rotation = ROTATE_0;
4860 rotation_info.streamId = mStreams[0]->getMyServerID();
4861 ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info);
4862 }
4863
4864 // Find and insert crop info for reprocess stream
4865 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) {
4866 if (MAX_NUM_STREAMS > crop_data->num_of_streams) {
4867 for (int j = 0; j < crop_data->num_of_streams; j++) {
4868 if (crop_data->crop_info[j].stream_id ==
4869 pSrcStream->getMyServerID()) {
4870
4871 // Store crop/roi information for offline reprocess
4872 // in the reprocess stream slot
4873 crop_data->crop_info[crop_data->num_of_streams].crop =
4874 crop_data->crop_info[j].crop;
4875 crop_data->crop_info[crop_data->num_of_streams].roi_map =
4876 crop_data->crop_info[j].roi_map;
4877 crop_data->crop_info[crop_data->num_of_streams].stream_id =
4878 mStreams[0]->getMyServerID();
4879 crop_data->num_of_streams++;
4880
4881 LOGD("Reprocess stream server id: %d",
4882 mStreams[0]->getMyServerID());
4883 LOGD("Found offline reprocess crop %dx%d %dx%d",
4884 crop_data->crop_info[j].crop.left,
4885 crop_data->crop_info[j].crop.top,
4886 crop_data->crop_info[j].crop.width,
4887 crop_data->crop_info[j].crop.height);
4888 LOGD("Found offline reprocess roimap %dx%d %dx%d",
4889 crop_data->crop_info[j].roi_map.left,
4890 crop_data->crop_info[j].roi_map.top,
4891 crop_data->crop_info[j].roi_map.width,
4892 crop_data->crop_info[j].roi_map.height);
4893
4894 break;
4895 }
4896 }
4897 } else {
4898 LOGE("No space to add reprocess stream crop/roi information");
4899 }
4900 }
4901
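            // Copy the CDS enable flag from the matching source stream and
            // publish it as the only CDS entry, tagged with the reprocess
            // stream id.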
4902 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, meta) {
4903 uint8_t cnt = cdsInfo->num_of_streams;
4904 if (cnt <= MAX_NUM_STREAMS) {
4905 cam_stream_cds_info_t repro_cds_info;
4906 memset(&repro_cds_info, 0, sizeof(repro_cds_info));
4907 repro_cds_info.stream_id = mStreams[0]->getMyServerID();
4908 for (size_t i = 0; i < cnt; i++) {
4909 if (cdsInfo->cds_info[i].stream_id ==
4910 pSrcStream->getMyServerID()) {
4911 repro_cds_info.cds_enable =
4912 cdsInfo->cds_info[i].cds_enable;
4913 break;
4914 }
4915 }
4916 cdsInfo->num_of_streams = 1;
4917 cdsInfo->cds_info[0] = repro_cds_info;
4918 } else {
4919 LOGE("No space to add reprocess stream cds information");
4920 }
4921 }
4922
4923 fwk_frame.input_buffer = *frame->bufs[i];
4924 fwk_frame.metadata_buffer = *meta_buffer;
4925 fwk_frame.output_buffer = pp_buffer->output;
4926 break;
4927 } else {
4928 LOGE("Source/Re-process streams are invalid");
4929 rc |= BAD_VALUE;
4930 }
4931 }
4932
4933 return rc;
4934 }
4935
4936 /*===========================================================================
4937 * FUNCTION : overrideFwkMetadata
4938 *
4939  * DESCRIPTION: Override framework metadata such as rotation, crop, and CDS data.
4940 *
4941 * PARAMETERS :
4942 * @frame : input frame for reprocessing
4943 *
4944 * RETURN : int32_t type of status
4945 * NO_ERROR -- success
4946  *              non-zero failure code
4947 *==========================================================================*/
4948 int32_t QCamera3ReprocessChannel::overrideFwkMetadata(
4949 qcamera_fwk_input_pp_data_t *frame)
4950 {
4951 if (NULL == frame) {
4952 LOGE("Incorrect input frame");
4953 return BAD_VALUE;
4954 }
4955
4956 if (NULL == frame->metadata_buffer.buffer) {
4957 LOGE("No metadata available");
4958 return BAD_VALUE;
4959 }
4960 metadata_buffer_t *meta = (metadata_buffer_t *) frame->metadata_buffer.buffer;
4961
4962 // Not doing rotation at all for YUV to YUV reprocess
4963 if (mReprocessType != REPROCESS_TYPE_JPEG) {
4964 LOGD("Override rotation to 0 for channel reprocess type %d",
4965 mReprocessType);
4966 cam_rotation_info_t rotation_info;
4967 memset(&rotation_info, 0, sizeof(rotation_info));
4968 rotation_info.rotation = ROTATE_0;
4969 rotation_info.streamId = mStreams[0]->getMyServerID();
4970 ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info);
4971 }
4972
4973 // Find and insert crop info for reprocess stream
4974 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) {
4975 if (1 == crop_data->num_of_streams) {
4976 // Store crop/roi information for offline reprocess
4977 // in the reprocess stream slot
4978 crop_data->crop_info[crop_data->num_of_streams].crop =
4979 crop_data->crop_info[0].crop;
4980 crop_data->crop_info[crop_data->num_of_streams].roi_map =
4981 crop_data->crop_info[0].roi_map;
4982 crop_data->crop_info[crop_data->num_of_streams].stream_id =
4983 mStreams[0]->getMyServerID();
4984 crop_data->num_of_streams++;
4985
4986 LOGD("Reprocess stream server id: %d",
4987 mStreams[0]->getMyServerID());
4988 LOGD("Found offline reprocess crop %dx%d %dx%d",
4989 crop_data->crop_info[0].crop.left,
4990 crop_data->crop_info[0].crop.top,
4991 crop_data->crop_info[0].crop.width,
4992 crop_data->crop_info[0].crop.height);
4993 LOGD("Found offline reprocess roi map %dx%d %dx%d",
4994 crop_data->crop_info[0].roi_map.left,
4995 crop_data->crop_info[0].roi_map.top,
4996 crop_data->crop_info[0].roi_map.width,
4997 crop_data->crop_info[0].roi_map.height);
4998 } else {
4999 LOGE("Incorrect number of offline crop data entries %d",
5000 crop_data->num_of_streams);
5001 return BAD_VALUE;
5002 }
5003 } else {
5004 LOGW("Crop data not present");
5005 }
5006
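    // Retarget the single CDS entry to the reprocess stream id.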
5007 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, meta) {
5008 if (1 == cdsInfo->num_of_streams) {
5009 cdsInfo->cds_info[0].stream_id = mStreams[0]->getMyServerID();
5010 } else {
5011 LOGE("Incorrect number of offline cds info entries %d",
5012 cdsInfo->num_of_streams);
5013 return BAD_VALUE;
5014 }
5015 }
5016
5017 return NO_ERROR;
5018 }
5019
5020 /*===========================================================================
5021 * FUNCTION : doReprocessOffline
5022 *
5023 * DESCRIPTION: request to do a reprocess on the frame
5024 *
5025 * PARAMETERS :
5026 * @frame : input frame for reprocessing
5027  *   @isPriorityFrame: Hint that this frame is high priority, equivalent to
5028  *                     real time, even though it is processed through the offline mechanism
5029 *
5030 * RETURN : int32_t type of status
5031 * NO_ERROR -- success
5032  *              non-zero failure code
5033 *==========================================================================*/
5034 int32_t QCamera3ReprocessChannel::doReprocessOffline(
5035 qcamera_fwk_input_pp_data_t *frame, bool isPriorityFrame)
5036 {
5037 int32_t rc = 0;
5038 int index;
5039 OfflineBuffer mappedBuffer;
5040 ATRACE_CALL();
5041
5042 if (m_numStreams < 1) {
5043 LOGE("No reprocess stream is created");
5044 return -1;
5045 }
5046
5047 if (NULL == frame) {
5048 LOGE("Incorrect input frame");
5049 return BAD_VALUE;
5050 }
5051
5052 if (NULL == frame->metadata_buffer.buffer) {
5053 LOGE("No metadata available");
5054 return BAD_VALUE;
5055 }
5056
5057 if (0 > frame->input_buffer.fd) {
5058 LOGE("No input buffer available");
5059 return BAD_VALUE;
5060 }
5061
5062 if ((0 == m_numStreams) || (NULL == mStreams[0])) {
5063 LOGE("Reprocess stream not initialized!");
5064 return NO_INIT;
5065 }
5066
5067 QCamera3Stream *pStream = mStreams[0];
5068
5069 //qbuf the output buffer if it was allocated by the framework
5070 if (mReprocessType != REPROCESS_TYPE_JPEG && frame->output_buffer != NULL) {
5071 index = mGrallocMemory.getMatchBufIndex((void*)frame->output_buffer);
5072 if(index < 0) {
5073 rc = registerBuffer(frame->output_buffer, mIsType);
5074 if (NO_ERROR != rc) {
5075 LOGE("On-the-fly buffer registration failed %d",
5076 rc);
5077 return rc;
5078 }
5079
5080 index = mGrallocMemory.getMatchBufIndex((void*)frame->output_buffer);
5081 if (index < 0) {
5082 LOGE("Could not find object among registered buffers");
5083 return DEAD_OBJECT;
5084 }
5085 }
5086 rc = mGrallocMemory.markFrameNumber(index, frame->frameNumber);
5087 if(rc != NO_ERROR) {
5088 LOGE("Failed to mark frame#:%d, index:%d",frame->frameNumber,index);
5089 return rc;
5090 }
5091 if(!m_bIsActive) {
5092 rc = start();
5093 if (NO_ERROR != rc) {
5094 return rc;
5095 }
5096 } else {
5097 rc = pStream->bufDone(index);
5098 if(rc != NO_ERROR) {
5099 LOGE("Failed to Q new buffer to stream %d", rc);
5100 mGrallocMemory.markFrameNumber(index, -1);
5101 return rc;
5102 }
5103 }
5104
5105 } else if (mReprocessType == REPROCESS_TYPE_JPEG) {
5106 Mutex::Autolock lock(mFreeBuffersLock);
5107 uint32_t bufIdx;
5108 if (mFreeBufferList.empty()) {
5109 rc = mMemory->allocateOne(mFrameLen);
5110 if (rc < 0) {
5111 LOGE("Failed allocating heap buffer. Fatal");
5112 return BAD_VALUE;
5113 } else {
5114 bufIdx = (uint32_t)rc;
5115 }
5116 } else {
5117 bufIdx = *(mFreeBufferList.begin());
5118 mFreeBufferList.erase(mFreeBufferList.begin());
5119 }
5120
5121 mMemory->markFrameNumber(bufIdx, frame->frameNumber);
5122 rc = pStream->bufDone(bufIdx);
5123 if (rc != NO_ERROR) {
5124 LOGE("Failed to queue new buffer to stream");
5125 return rc;
5126 }
5127 }
5128
5129 int32_t max_idx = (int32_t) (mNumBuffers - 1);
5130 //loop back the indices if max burst count reached
5131 if (mOfflineBuffersIndex == max_idx) {
5132 mOfflineBuffersIndex = -1;
5133 }
5134 uint32_t buf_idx = (uint32_t)(mOfflineBuffersIndex + 1);
5135
5136 //Do cache ops before sending for reprocess
5137 if (mMemory != NULL) {
5138 mMemory->cleanInvalidateCache(buf_idx);
5139 }
5140
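    // Map the framework input buffer to the reprocess stream as an offline
    // input buffer at the next free index.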
5141 rc = pStream->mapBuf(
5142 CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
5143 buf_idx, -1,
5144 frame->input_buffer.fd, frame->input_buffer.buffer,
5145 frame->input_buffer.frame_len);
5146 if (NO_ERROR == rc) {
5147 Mutex::Autolock l(mOfflineBuffersLock);
5148 mappedBuffer.index = buf_idx;
5149 mappedBuffer.stream = pStream;
5150 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
5151 mOfflineBuffers.push_back(mappedBuffer);
5152 mOfflineBuffersIndex = (int32_t)buf_idx;
5153 LOGD("Mapped buffer with index %d", mOfflineBuffersIndex);
5154 }
5155
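    // Meta buffer indices are kept in the upper half of the index range
    // (wrapping back to mNumBuffers) so they stay distinct from the offline
    // input buffer indices mapped above.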
5156 max_idx = (int32_t) ((mNumBuffers * 2) - 1);
5157 //loop back the indices if max burst count reached
5158 if (mOfflineMetaIndex == max_idx) {
5159 mOfflineMetaIndex = (int32_t) (mNumBuffers - 1);
5160 }
5161 uint32_t meta_buf_idx = (uint32_t)(mOfflineMetaIndex + 1);
5162 rc |= pStream->mapBuf(
5163 CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,
5164 meta_buf_idx, -1,
5165 frame->metadata_buffer.fd, frame->metadata_buffer.buffer,
5166 frame->metadata_buffer.frame_len);
5167 if (NO_ERROR == rc) {
5168 Mutex::Autolock l(mOfflineBuffersLock);
5169 mappedBuffer.index = meta_buf_idx;
5170 mappedBuffer.stream = pStream;
5171 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF;
5172 mOfflineMetaBuffers.push_back(mappedBuffer);
5173 mOfflineMetaIndex = (int32_t)meta_buf_idx;
5174 LOGD("Mapped meta buffer with index %d", mOfflineMetaIndex);
5175 }
5176
5177 if (rc == NO_ERROR) {
5178 cam_stream_parm_buffer_t param;
5179 uint32_t numPendingPriorityFrames = 0;
5180
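        // For priority (real-time equivalent) non-JPEG frames, request high
        // performance mode on the stream when no other priority frames are
        // pending.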
5181 if(isPriorityFrame && (mReprocessType != REPROCESS_TYPE_JPEG)) {
5182 Mutex::Autolock lock(mPriorityFramesLock);
5183             /* record the pending count before pushing the frame number so
5184              * we can tell whether the queue was empty */
5185 numPendingPriorityFrames = mPriorityFrames.size();
5186 mPriorityFrames.push(frame->frameNumber);
5187 }
5188
5189 if(isPriorityFrame && !numPendingPriorityFrames &&
5190 (mReprocessType != REPROCESS_TYPE_JPEG)) {
5191             memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
5192 param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE;
5193 param.perf_mode = CAM_PERF_HIGH_PERFORMANCE;
5194 rc = pStream->setParameter(param);
5195 if (rc != NO_ERROR) {
5196 LOGE("%s: setParameter for CAM_PERF_HIGH_PERFORMANCE failed",
5197 __func__);
5198 }
5199 {
5200 Mutex::Autolock lock(mPriorityFramesLock);
5201 mReprocessPerfMode = true;
5202 }
5203 }
5204
5205         memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
5206 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
5207 param.reprocess.buf_index = buf_idx;
5208 param.reprocess.frame_idx = frame->input_buffer.frame_idx;
5209 param.reprocess.meta_present = 1;
5210 param.reprocess.meta_buf_index = meta_buf_idx;
5211
5212 LOGI("Offline reprocessing id = %d buf Id = %d meta index = %d",
5213 param.reprocess.frame_idx, param.reprocess.buf_index,
5214 param.reprocess.meta_buf_index);
5215 rc = pStream->setParameter(param);
5216 if (rc != NO_ERROR) {
5217 LOGE("stream setParameter for reprocess failed");
5218 resetToCamPerfNormal(frame->frameNumber);
5219 }
5220 } else {
5221 LOGE("Input buffer memory map failed: %d", rc);
5222 }
5223
5224 return rc;
5225 }
5226
5227 /*===========================================================================
5228 * FUNCTION : doReprocess
5229 *
5230 * DESCRIPTION: request to do a reprocess on the frame
5231 *
5232 * PARAMETERS :
5233 * @buf_fd : fd to the input buffer that needs reprocess
5234 * @buffer : Buffer ptr
5235  *   @buf_length : length of the input buffer
5236  *   @ret_val    : result of reprocess,
5237  *                 e.g. the face ID when registering a face image.
5238 * @meta_frame : metadata frame.
5239 *
5240 * RETURN : int32_t type of status
5241 * NO_ERROR -- success
5242  *              non-zero failure code
5243 *==========================================================================*/
5244 int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd, void *buffer, size_t buf_length,
5245 int32_t &ret_val, mm_camera_super_buf_t *meta_frame)
5246 {
5247 int32_t rc = 0;
5248 if (m_numStreams < 1) {
5249 LOGE("No reprocess stream is created");
5250 return -1;
5251 }
5252 if (meta_frame == NULL) {
5253 LOGE("Did not get corresponding metadata in time");
5254 return -1;
5255 }
5256
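    // For each reprocess stream: map the input as an offline buffer, issue a
    // DO_REPROCESS pointing at the metadata from the metadata channel, then
    // unmap the input buffer again.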
5257 uint8_t buf_idx = 0;
5258 for (uint32_t i = 0; i < m_numStreams; i++) {
5259 rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
5260 buf_idx, -1,
5261 buf_fd, buffer, buf_length);
5262
5263 //Do cache ops before sending for reprocess
5264 if (mMemory != NULL) {
5265 mMemory->cleanInvalidateCache(buf_idx);
5266 }
5267
5268 if (rc == NO_ERROR) {
5269 cam_stream_parm_buffer_t param;
5270             memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
5271 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
5272 param.reprocess.buf_index = buf_idx;
5273 param.reprocess.meta_present = 1;
5274 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
5275 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
5276
5277 LOGI("Online reprocessing id = %d buf Id = %d meta index = %d",
5278 param.reprocess.frame_idx, param.reprocess.buf_index,
5279 param.reprocess.meta_buf_index);
5280 rc = mStreams[i]->setParameter(param);
5281 if (rc == NO_ERROR) {
5282 ret_val = param.reprocess.ret_val;
5283 }
5284 mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
5285 buf_idx, -1);
5286 }
5287 }
5288 return rc;
5289 }
5290
5291 /*===========================================================================
5292 * FUNCTION : addReprocStreamsFromSource
5293 *
5294 * DESCRIPTION: add reprocess streams from input source channel
5295 *
5296 * PARAMETERS :
5297  *   @pp_config  : pp feature configuration
5298  *   @src_config : source reprocess configuration
5299  *   @is_type    : type of image stabilization required on this stream
5300 * @pMetaChannel : ptr to metadata channel to get corresp. metadata
5301 *
5302 *
5303 * RETURN : int32_t type of status
5304 * NO_ERROR -- success
5305  *              non-zero failure code
5306 *==========================================================================*/
5307 int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
5308 const reprocess_config_t &src_config , cam_is_type_t is_type,
5309 QCamera3Channel *pMetaChannel)
5310 {
5311 int32_t rc = 0;
5312 cam_stream_reproc_config_t reprocess_config;
5313 cam_stream_type_t streamType;
5314
5315 cam_dimension_t streamDim = src_config.output_stream_dim;
5316
5317 if (NULL != src_config.src_channel) {
5318 QCamera3Stream *pSrcStream = src_config.src_channel->getStreamByIndex(0);
5319 if (pSrcStream == NULL) {
5320 LOGE("source channel doesn't have a stream");
5321 return BAD_VALUE;
5322 }
5323 mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
5324 }
5325
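    // Set up an offline reprocess stream: the input format/dimensions come
    // from the source stream config, while the output uses the source's
    // output_stream_dim.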
5326 streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
5327 reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
5328
5329 reprocess_config.offline.input_fmt = src_config.stream_format;
5330 reprocess_config.offline.input_dim = src_config.input_stream_dim;
5331 reprocess_config.offline.input_buf_planes.plane_info =
5332 src_config.input_stream_plane_info.plane_info;
5333 reprocess_config.offline.num_of_bufs = (uint8_t)mNumBuffers;
5334 reprocess_config.offline.input_type = src_config.stream_type;
5335
5336 reprocess_config.pp_feature_config = pp_config;
5337 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
5338 m_handle,
5339 m_camOps,
5340 &mPaddingInfo,
5341 (QCamera3Channel*)this,
5342 false/*mapStreamBuffers*/);
5343 if (pStream == NULL) {
5344 LOGE("No mem for Stream");
5345 return NO_MEMORY;
5346 }
5347
5348 rc = pStream->init(streamType, src_config.stream_format,
5349 streamDim, ROTATE_0, &reprocess_config,
5350 (uint8_t)mNumBuffers,
5351 reprocess_config.pp_feature_config.feature_mask,
5352 is_type,
5353 0,/* batchSize */
5354 QCamera3Channel::streamCbRoutine, this);
5355
5356 if (rc == 0) {
5357 mStreams[m_numStreams] = pStream;
5358 m_numStreams++;
5359 } else {
5360 LOGE("failed to create reprocess stream");
5361 delete pStream;
5362 }
5363
5364 if (rc == NO_ERROR) {
5365 m_pSrcChannel = src_config.src_channel;
5366 m_pMetaChannel = pMetaChannel;
5367 mReprocessType = src_config.reprocess_type;
5368 LOGD("mReprocessType is %d", mReprocessType);
5369 }
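    // Request a single super buffer on this channel (num_buf_requested is 1);
    // presumably this triggers delivery of the reprocessed frame.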
5370 mm_camera_req_buf_t buf;
5371 memset(&buf, 0x0, sizeof(buf));
5372 buf.type = MM_CAMERA_REQ_SUPER_BUF;
5373 buf.num_buf_requested = 1;
5374 if(m_camOps->request_super_buf(m_camHandle,m_handle, &buf) < 0) {
5375 LOGE("Request for super buffer failed");
5376 }
5377 return rc;
5378 }
5379
5380 /* QCamera3SupportChannel methods */
5381
5382 cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};
5383
5384 QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
5385 uint32_t channel_handle,
5386 mm_camera_ops_t *cam_ops,
5387 cam_padding_info_t *paddingInfo,
5388 cam_feature_mask_t postprocess_mask,
5389 cam_stream_type_t streamType,
5390 cam_dimension_t *dim,
5391 cam_format_t streamFormat,
5392 uint8_t hw_analysis_supported,
5393 cam_color_filter_arrangement_t color_arrangement,
5394 void *userData, uint32_t numBuffers) :
5395 QCamera3Channel(cam_handle, channel_handle, cam_ops,
5396 NULL, NULL, paddingInfo, postprocess_mask,
5397 userData, numBuffers),
5398 mMemory(NULL),
5399 mHwAnalysisSupported(hw_analysis_supported),
5400 mColorArrangement(color_arrangement)
5401 {
5402 memcpy(&mDim, dim, sizeof(cam_dimension_t));
5403 mStreamType = streamType;
5404 mStreamFormat = streamFormat;
5405 }
5406
5407 QCamera3SupportChannel::~QCamera3SupportChannel()
5408 {
5409 destroy();
5410
5411 if (mMemory) {
5412 mMemory->deallocate();
5413 delete mMemory;
5414 mMemory = NULL;
5415 }
5416 }
5417
5418 int32_t QCamera3SupportChannel::initialize(cam_is_type_t isType)
5419 {
5420 int32_t rc;
5421
5422 if (mMemory || m_numStreams > 0) {
5423         LOGE("support channel already initialized");
5424 return -EINVAL;
5425 }
5426
5427 mIsType = isType;
5428 // Make Analysis same as Preview format
5429 if (!mHwAnalysisSupported && mStreamType == CAM_STREAM_TYPE_ANALYSIS &&
5430 mColorArrangement != CAM_FILTER_ARRANGEMENT_Y) {
5431 mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_PREVIEW,
5432 mDim.width, mDim.height, m_bUBWCenable, mIsType);
5433 }
5434
5435 rc = QCamera3Channel::addStream(mStreamType,
5436 mStreamFormat, mDim, ROTATE_0, MIN_STREAMING_BUFFER_NUM,
5437 mPostProcMask, mIsType);
5438 if (rc < 0) {
5439 LOGE("addStream failed");
5440 }
5441 return rc;
5442 }
5443
5444 int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
5445 uint32_t /*frameNumber*/,
5446 int & /*indexUsed*/)
5447 {
5448 return NO_ERROR;
5449 }
5450
5451 void QCamera3SupportChannel::streamCbRoutine(
5452 mm_camera_super_buf_t *super_frame,
5453 QCamera3Stream * /*stream*/)
5454 {
5455 if (super_frame == NULL || super_frame->num_bufs != 1) {
5456 LOGE("super_frame is not valid");
5457 return;
5458 }
5459 bufDone(super_frame);
5460 free(super_frame);
5461 }
5462
5463 QCamera3StreamMem* QCamera3SupportChannel::getStreamBufs(uint32_t len)
5464 {
5465 int rc;
5466 mMemory = new QCamera3StreamMem(mNumBuffers);
5467 if (!mMemory) {
5468 LOGE("unable to create heap memory");
5469 return NULL;
5470 }
5471 rc = mMemory->allocateAll(len);
5472 if (rc < 0) {
5473 LOGE("unable to allocate heap memory");
5474 delete mMemory;
5475 mMemory = NULL;
5476 return NULL;
5477 }
5478 return mMemory;
5479 }
5480
5481 void QCamera3SupportChannel::putStreamBufs()
5482 {
5483 mMemory->deallocate();
5484 delete mMemory;
5485 mMemory = NULL;
5486 }
5487
5488 QCamera3DepthChannel::~QCamera3DepthChannel() {
5489 unmapAllBuffers();
5490 }
5491
5492 /*===========================================================================
5493 * FUNCTION : mapBuffer
5494 *
5495 * DESCRIPTION: Maps stream depth buffer
5496 *
5497 * PARAMETERS :
5498 * @buffer : Depth buffer
5499 * @frameNumber : Frame number
5500 *
5501 *
5502 * RETURN : int32_t type of status
5503 * NO_ERROR -- success
5504  *              non-zero failure code
5505 *==========================================================================*/
5506 int32_t QCamera3DepthChannel::mapBuffer(buffer_handle_t *buffer,
5507 uint32_t frameNumber) {
5508 int32_t rc = NO_ERROR;
5509
5510 int32_t index = mGrallocMem.getMatchBufIndex((void*)buffer);
5511 if (0 > index) {
5512 rc = mGrallocMem.registerBuffer(buffer, CAM_STREAM_TYPE_DEFAULT);
5513 if (NO_ERROR != rc) {
5514 LOGE("Buffer registration failed %d", rc);
5515 return rc;
5516 }
5517
5518 index = mGrallocMem.getMatchBufIndex((void*)buffer);
5519 if (index < 0) {
5520 LOGE("Could not find object among registered buffers");
5521 return DEAD_OBJECT;
5522 }
5523 } else {
5524 LOGE("Buffer: %p is already present at index: %d!", buffer, index);
5525 return ALREADY_EXISTS;
5526 }
5527
5528 rc = mGrallocMem.markFrameNumber((uint32_t)index, frameNumber);
5529
5530 return rc;
5531 }
5532
5533 /*===========================================================================
5534 * FUNCTION : populateDepthData
5535 *
5536  * DESCRIPTION: Copies the incoming depth data into the corresponding depth buffer
5537 *
5538 * PARAMETERS :
5539 * @data : Incoming Depth data
5540 * @frameNumber : Frame number of incoming depth data
5541 *
5542 *
5543 * RETURN : int32_t type of status
5544 * NO_ERROR -- success
5545  *              non-zero failure code
5546 *==========================================================================*/
5547 int32_t QCamera3DepthChannel::populateDepthData(const cam_depth_data_t &data,
5548 uint32_t frameNumber) {
5549 if (nullptr == mStream) {
5550 LOGE("Invalid depth stream!");
5551 return BAD_VALUE;
5552 }
5553
5554 ssize_t length = data.length;
5555 int32_t index = mGrallocMem.getBufferIndex(frameNumber);
5556 if (0 > index) {
5557         LOGE("Frame number: %u not present!", frameNumber);
5558 return BAD_VALUE;
5559 }
5560
5561 void *dst = mGrallocMem.getPtr(index);
5562 if (nullptr == dst) {
5563 LOGE("Invalid mapped buffer");
5564 return BAD_VALUE;
5565 }
5566
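    // The depth payload is packed like a JPEG blob: the data goes at the start
    // of the gralloc buffer and a camera3_jpeg_blob_t trailer is written at the
    // very end of the buffer (maxBlobSize - headerSize).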
5567 camera3_jpeg_blob_t jpegHeader;
5568 ssize_t headerSize = sizeof jpegHeader;
5569 buffer_handle_t *blobBufferHandle = static_cast<buffer_handle_t *>
5570 (mGrallocMem.getBufferHandle(index));
5571 ssize_t maxBlobSize;
5572 if (nullptr != blobBufferHandle) {
5573 maxBlobSize = ((private_handle_t*)(*blobBufferHandle))->width;
5574 } else {
5575 LOGE("Couldn't query buffer handle!");
5576 return BAD_VALUE;
5577 }
5578
5579 if ((length + headerSize) > maxBlobSize) {
5580         LOGE("Depth buffer size mismatch expected: %zd actual: %zd",
5581                 (length + headerSize), maxBlobSize);
5582 return BAD_VALUE;
5583 }
5584
5585 if (0 < length) {
5586 memcpy(dst, data.depth_data, length);
5587 }
5588
5589 memset(&jpegHeader, 0, headerSize);
5590 jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
5591 jpegHeader.jpeg_size = length;
5592 size_t jpeg_eof_offset = static_cast<size_t> (maxBlobSize - headerSize);
5593 uint8_t *jpegBuffer = static_cast<uint8_t *> (dst);
5594 uint8_t *jpegEOF = &jpegBuffer[jpeg_eof_offset];
5595 memcpy(jpegEOF, &jpegHeader, headerSize);
5596
5597 return NO_ERROR;
5598 }
5599
5600 /*===========================================================================
5601 * FUNCTION : getOldestFrame
5602 *
5603 * DESCRIPTION: Return oldest mapped buffer
5604 *
5605 * PARAMETERS :
5606 * @frameNumber : Sets oldest frame number if present
5607 *
5608 *
5609 * RETURN : buffer_handle_t pointer
5610 * NULL in case of error
5611 *==========================================================================*/
5612 buffer_handle_t *QCamera3DepthChannel::getOldestFrame(uint32_t &frameNumber) {
5613 uint32_t oldestIndex = UINT32_MAX;
5614 int32_t frameNumberResult = mGrallocMem.getOldestFrameNumber(oldestIndex);
5615 if (0 > frameNumberResult) {
5616 LOGD("Invalid frame number!");
5617 return nullptr;
5618 }
5619 frameNumber = static_cast<uint32_t> (frameNumberResult);
5620
5621 buffer_handle_t *ret = static_cast<buffer_handle_t *>
5622 (mGrallocMem.getBufferHandle(oldestIndex));
5623 if (nullptr == ret) {
5624 LOGE("Invalid buffer handle!");
5625 return nullptr;
5626 }
5627
5628 return ret;
5629 }
5630
5631 /*===========================================================================
5632 * FUNCTION : unmapBuffer
5633 *
5634 * DESCRIPTION: Unmap a single buffer
5635 *
5636 * PARAMETERS :
5637 * @frameNumber : Frame number of buffer that should get unmapped
5638 *
5639 *
5640 * RETURN : int32_t type of status
5641 * NO_ERROR -- success
5642  *              non-zero failure code
5643 *==========================================================================*/
5644 int32_t QCamera3DepthChannel::unmapBuffer(uint32_t frameNumber) {
5645 int32_t index = mGrallocMem.getBufferIndex(frameNumber);
5646 if (0 > index) {
5647 LOGE("Frame number: %u not present!", frameNumber);
5648 return BAD_VALUE;
5649 }
5650
5651 return mGrallocMem.unregisterBuffer(index);
5652 }
5653
5654 /*===========================================================================
5655 * FUNCTION : unmapAllBuffers
5656 *
5657  * DESCRIPTION: Unmap all registered buffers
5658 *
5659 * PARAMETERS :
5660 *
5661 * RETURN : int32_t type of status
5662 * NO_ERROR -- success
5663  *              non-zero failure code
5664 *==========================================================================*/
5665 int32_t QCamera3DepthChannel::unmapAllBuffers() {
5666 mGrallocMem.unregisterBuffers();
5667
5668 return NO_ERROR;
5669 }
5670
5671 }; // namespace qcamera
5672