1 /*
2 **
3 ** Copyright 2008, The Android Open Source Project
4 ** Copyright 2012, Samsung Electronics Co. LTD
5 **
6 ** Licensed under the Apache License, Version 2.0 (the "License");
7 ** you may not use this file except in compliance with the License.
8 ** You may obtain a copy of the License at
9 **
10 **     http://www.apache.org/licenses/LICENSE-2.0
11 **
12 ** Unless required by applicable law or agreed to in writing, software
13 ** distributed under the License is distributed on an "AS IS" BASIS,
14 ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 ** See the License for the specific language governing permissions and
16 ** limitations under the License.
17 */
18 
19 /*!
20  * \file      ExynosCameraHWInterface2.cpp
21  * \brief     source file for Android Camera API 2.0 HAL
22  * \author    Sungjoong Kang(sj3.kang@samsung.com)
23  * \date      2012/07/10
24  *
25  * <b>Revision History: </b>
26  * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
27  *   Initial Release
28  *
29  * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
30  *   2nd Release
31  *
32  */
33 
34 //#define LOG_NDEBUG 0
35 #define LOG_TAG "ExynosCameraHAL2"
36 #include <utils/Log.h>
37 #include <math.h>
38 
39 #include "ExynosCameraHWInterface2.h"
40 #include "exynos_format.h"
41 
42 namespace android {
43 
44 void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
45 {
46     int nw;
47     int cnt = 0;
48     uint32_t written = 0;
49 
50     ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
51     int fd = open(fname, O_RDWR | O_CREAT, 0644);
52     if (fd < 0) {
53         ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
54         return;
55     }
56 
57     ALOGV("writing %d bytes to file [%s]", size, fname);
58     while (written < size) {
59         nw = ::write(fd, buf + written, size - written);
60         if (nw < 0) {
61             ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
62             break;
63         }
64         written += nw;
65         cnt++;
66     }
67     ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
68     ::close(fd);
69 }
70 
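// Returns the number of bits per pixel for the given V4L2 pixel format;
// callers below use it to size frame buffers (width * height * depth / 8).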
71 int get_pixel_depth(uint32_t fmt)
72 {
73     int depth = 0;
74 
75     switch (fmt) {
76     case V4L2_PIX_FMT_JPEG:
77         depth = 8;
78         break;
79 
80     case V4L2_PIX_FMT_NV12:
81     case V4L2_PIX_FMT_NV21:
82     case V4L2_PIX_FMT_YUV420:
83     case V4L2_PIX_FMT_YVU420M:
84     case V4L2_PIX_FMT_NV12M:
85     case V4L2_PIX_FMT_NV12MT:
86         depth = 12;
87         break;
88 
89     case V4L2_PIX_FMT_RGB565:
90     case V4L2_PIX_FMT_YUYV:
91     case V4L2_PIX_FMT_YVYU:
92     case V4L2_PIX_FMT_UYVY:
93     case V4L2_PIX_FMT_VYUY:
94     case V4L2_PIX_FMT_NV16:
95     case V4L2_PIX_FMT_NV61:
96     case V4L2_PIX_FMT_YUV422P:
97     case V4L2_PIX_FMT_SBGGR10:
98     case V4L2_PIX_FMT_SBGGR12:
99     case V4L2_PIX_FMT_SBGGR16:
100         depth = 16;
101         break;
102 
103     case V4L2_PIX_FMT_RGB32:
104         depth = 32;
105         break;
106     default:
107         ALOGE("Get depth failed(format : %d)", fmt);
108         break;
109     }
110 
111     return depth;
112 }
113 
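// The cam_int_* functions below are thin wrappers around the exynos_v4l2_*
// helpers: they issue S_FMT / REQBUFS / QBUF / DQBUF / STREAMON / STREAMOFF /
// S_INPUT on a node_info_t describing one multi-planar video node.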
114 int cam_int_s_fmt(node_info_t *node)
115 {
116     struct v4l2_format v4l2_fmt;
117     unsigned int framesize;
118     int ret;
119 
120     memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
121 
122     v4l2_fmt.type = node->type;
123     framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
124 
125     if (node->planes >= 1) {
126         v4l2_fmt.fmt.pix_mp.width       = node->width;
127         v4l2_fmt.fmt.pix_mp.height      = node->height;
128         v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
129         v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
130     } else {
131         ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
132     }
133 
134     /* Set up for capture */
135     ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
136 
137     if (ret < 0)
138         ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
139 
140 
141     return ret;
142 }
143 
144 int cam_int_reqbufs(node_info_t *node)
145 {
146     struct v4l2_requestbuffers req;
147     int ret;
148 
149     req.count = node->buffers;
150     req.type = node->type;
151     req.memory = node->memory;
152 
153     ret = exynos_v4l2_reqbufs(node->fd, &req);
154 
155     if (ret < 0)
156         ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
157 
158     return req.count;
159 }
160 
161 int cam_int_qbuf(node_info_t *node, int index)
162 {
163     struct v4l2_buffer v4l2_buf;
164     struct v4l2_plane planes[VIDEO_MAX_PLANES];
165     int i;
166     int ret = 0;
167 
168     v4l2_buf.m.planes   = planes;
169     v4l2_buf.type       = node->type;
170     v4l2_buf.memory     = node->memory;
171     v4l2_buf.index      = index;
172     v4l2_buf.length     = node->planes;
173 
174     for(i = 0; i < node->planes; i++){
175         v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
176         v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
177     }
178 
179     ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
180 
181     if (ret < 0)
182         ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
183 
184     return ret;
185 }
186 
187 int cam_int_streamon(node_info_t *node)
188 {
189     enum v4l2_buf_type type = node->type;
190     int ret;
191 
192 
193     ret = exynos_v4l2_streamon(node->fd, type);
194 
195     if (ret < 0)
196         ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
197 
198     ALOGV("On streaming I/O... ... fd(%d)", node->fd);
199 
200     return ret;
201 }
202 
203 int cam_int_streamoff(node_info_t *node)
204 {
205     enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
206     int ret;
207 
208 
209     ALOGV("Off streaming I/O... fd(%d)", node->fd);
210     ret = exynos_v4l2_streamoff(node->fd, type);
211 
212     if (ret < 0)
213         ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
214 
215     return ret;
216 }
217 
218 int isp_int_streamoff(node_info_t *node)
219 {
220     enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
221     int ret;
222 
223     ALOGV("Off streaming I/O... fd(%d)", node->fd);
224     ret = exynos_v4l2_streamoff(node->fd, type);
225 
226     if (ret < 0)
227         ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
228 
229     return ret;
230 }
231 
232 int cam_int_dqbuf(node_info_t *node)
233 {
234     struct v4l2_buffer v4l2_buf;
235     struct v4l2_plane planes[VIDEO_MAX_PLANES];
236     int ret;
237 
238     v4l2_buf.type       = node->type;
239     v4l2_buf.memory     = node->memory;
240     v4l2_buf.m.planes   = planes;
241     v4l2_buf.length     = node->planes;
242 
243     ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
244     if (ret < 0)
245         ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
246 
247     return v4l2_buf.index;
248 }
249 
250 int cam_int_dqbuf(node_info_t *node, int num_plane)
251 {
252     struct v4l2_buffer v4l2_buf;
253     struct v4l2_plane planes[VIDEO_MAX_PLANES];
254     int ret;
255 
256     v4l2_buf.type       = node->type;
257     v4l2_buf.memory     = node->memory;
258     v4l2_buf.m.planes   = planes;
259     v4l2_buf.length     = num_plane;
260 
261     ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
262     if (ret < 0)
263         ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
264 
265     return v4l2_buf.index;
266 }
267 
268 int cam_int_s_input(node_info_t *node, int index)
269 {
270     int ret;
271 
272     ret = exynos_v4l2_s_input(node->fd, index);
273     if (ret < 0)
274         ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
275 
276     return ret;
277 }
278 
279 
280 gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
281 
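// RequestManager tracks capture requests in a fixed-size circular buffer
// (entries[NUM_MAX_REQUEST_MGR_ENTRY]). Each entry moves through
// EMPTY -> REGISTERED -> REQUESTED -> CAPTURED -> METADONE -> COMPLETED
// as the request is registered, handed to the sensor/ISP, and its metadata
// and stream outputs come back.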
282 RequestManager::RequestManager(SignalDrivenThread* main_thread):
283     m_lastAeMode(0),
284     m_lastAaMode(0),
285     m_lastAwbMode(0),
286     m_vdisBubbleEn(false),
287     m_lastAeComp(0),
288     m_lastCompletedFrameCnt(-1)
289 {
290     m_metadataConverter = new MetadataConverter;
291     m_mainThread = main_thread;
292     ResetEntry();
293     m_sensorPipelineSkipCnt = 0;
294     return;
295 }
296 
297 RequestManager::~RequestManager()
298 {
299     ALOGV("%s", __FUNCTION__);
300     if (m_metadataConverter != NULL) {
301         delete m_metadataConverter;
302         m_metadataConverter = NULL;
303     }
304 
305     releaseSensorQ();
306     return;
307 }
308 
309 void RequestManager::ResetEntry()
310 {
311     Mutex::Autolock lock(m_requestMutex);
312     for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
313         memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
314         entries[i].internal_shot.shot.ctl.request.frameCount = -1;
315     }
316     m_numOfEntries = 0;
317     m_entryInsertionIndex = -1;
318     m_entryProcessingIndex = -1;
319     m_entryFrameOutputIndex = -1;
320 }
321 
322 int RequestManager::GetNumEntries()
323 {
324     return m_numOfEntries;
325 }
326 
327 void RequestManager::SetDefaultParameters(int cropX)
328 {
329     m_cropX = cropX;
330 }
331 
332 bool RequestManager::IsRequestQueueFull()
333 {
334     Mutex::Autolock lock(m_requestMutex);
335     if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
336         return true;
337     else
338         return false;
339 }
340 
341 void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion)
342 {
343     ALOGV("DEBUG(%s):", __FUNCTION__);
344 
345     Mutex::Autolock lock(m_requestMutex);
346 
347     request_manager_entry * newEntry = NULL;
348     int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
349     ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries );
350 
351 
352     newEntry = &(entries[newInsertionIndex]);
353 
354     if (newEntry->status!=EMPTY) {
355         ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
356         return;
357     }
358     newEntry->status = REGISTERED;
359     newEntry->original_request = new_request;
360     memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
361     m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
362     newEntry->output_stream_count = 0;
363     if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
364         newEntry->output_stream_count++;
365 
366     if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
367         newEntry->output_stream_count++;
368 
369     m_numOfEntries++;
370     m_entryInsertionIndex = newInsertionIndex;
371 
372 
373     *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
374     afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0];
375     afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1];
376     afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2];
377     afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3];
378     ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
379     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
380 }
381 
382 void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
383 {
384     ALOGV("DEBUG(%s):", __FUNCTION__);
385     int frame_index;
386     request_manager_entry * currentEntry;
387 
388     Mutex::Autolock lock(m_requestMutex);
389 
390     frame_index = GetCompletedIndex();
391     currentEntry =  &(entries[frame_index]);
392     if (currentEntry->status != COMPLETED) {
393         CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
394                        m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
395         return;
396     }
397     if (deregistered_request)  *deregistered_request = currentEntry->original_request;
398 
399     m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;
400 
401     currentEntry->status = EMPTY;
402     currentEntry->original_request = NULL;
403     memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
404     currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
405     currentEntry->output_stream_count = 0;
406     m_numOfEntries--;
407     ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
408      m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
409 
410     CheckCompleted(GetNextIndex(frame_index));
411     return;
412 }
413 
414 bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
415                 camera_metadata_t ** prepared_frame, int afState)
416 {
417     ALOGV("DEBUG(%s):", __FUNCTION__);
418     Mutex::Autolock lock(m_requestMutex);
419     status_t res = NO_ERROR;
420     int tempFrameOutputIndex = GetCompletedIndex();
421     request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
422     ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
423         m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
424 
425     if (currentEntry->status != COMPLETED) {
426         ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
427 
428         return false;
429     }
430     m_entryFrameOutputIndex = tempFrameOutputIndex;
431     m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated
432     add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
433     res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
434                 m_tempFrameMetadata);
435     if (res!=NO_ERROR) {
436         ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
437         return false;
438     }
439     *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
440     *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
441     *prepared_frame = m_tempFrameMetadata;
442     ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
443         currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
444     // Dump();
445     return true;
446 }
447 
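// Picks the next REGISTERED entry, copies its controls into the camera2_shot_ext
// that lives in the metadata plane (virt.extP[1]) of the given sensor buffer,
// and marks the entry REQUESTED. Returns the new processing index, or -1 on underrun.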
448 int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
449 {
450     struct camera2_shot_ext * shot_ext;
451     struct camera2_shot_ext * request_shot;
452     int targetStreamIndex = 0;
453     request_manager_entry * newEntry = NULL;
454     static int count = 0;
455 
456     Mutex::Autolock lock(m_requestMutex);
457     if (m_numOfEntries == 0)  {
458         CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
459         return -1;
460     }
461 
462     if ((m_entryProcessingIndex == m_entryInsertionIndex)
463         && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
464         ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
465          m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
466         return -1;
467     }
468 
469     int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
470     ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
471 
472     newEntry = &(entries[newProcessingIndex]);
473     request_shot = &(newEntry->internal_shot);
474     if (newEntry->status != REGISTERED) {
475         CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
476         for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
477                 CAM_LOGD("DBG: entries[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
478         }
479         return -1;
480     }
481 
482     newEntry->status = REQUESTED;
483 
484     shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
485 
486     memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
487     shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
488     shot_ext->request_sensor = 1;
489     shot_ext->dis_bypass = 1;
490     shot_ext->dnr_bypass = 1;
491     shot_ext->fd_bypass = 1;
492     shot_ext->setfile = 0;
493 
494     targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
495     shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
496     if (targetStreamIndex & MASK_OUTPUT_SCP)
497         shot_ext->request_scp = 1;
498 
499     if (targetStreamIndex & MASK_OUTPUT_SCC)
500         shot_ext->request_scc = 1;
501 
502     if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
503         shot_ext->fd_bypass = 0;
504 
505     if (count == 0){
506         shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
507     } else
508         shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
509 
510     count++;
511     shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
512     shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
513     shot_ext->shot.magicNumber = 0x23456789;
514     shot_ext->shot.ctl.sensor.exposureTime = 0;
515     shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
516     shot_ext->shot.ctl.sensor.sensitivity = 0;
517 
518 
519     shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
520     shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
521     shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];
522 
523     m_entryProcessingIndex = newProcessingIndex;
524     return newProcessingIndex;
525 }
526 
527 void RequestManager::NotifyStreamOutput(int frameCnt)
528 {
529     int index;
530 
531     Mutex::Autolock lock(m_requestMutex);
532     ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
533 
534     index = FindEntryIndexByFrameCnt(frameCnt);
535     if (index == -1) {
536         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
537         return;
538     }
539     ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt,   entries[index].output_stream_count);
540 
541     entries[index].output_stream_count--;  //TODO : match stream id also
542     CheckCompleted(index);
543 }
544 
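// An entry is complete once its metadata has arrived (METADONE) and all of its
// requested stream outputs have been counted down; the main thread is signalled
// only when the completed frame count is the next one in sequence.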
545 void RequestManager::CheckCompleted(int index)
546 {
547     if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
548         && (entries[index].output_stream_count <= 0)){
549         ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
550                 index, entries[index].internal_shot.shot.ctl.request.frameCount );
551         entries[index].status = COMPLETED;
552         if (m_lastCompletedFrameCnt + 1 == entries[index].internal_shot.shot.ctl.request.frameCount)
553             m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
554     }
555 }
556 
557 int RequestManager::GetCompletedIndex()
558 {
559     return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
560 }
561 
562 void  RequestManager::pushSensorQ(int index)
563 {
564     Mutex::Autolock lock(m_requestMutex);
565     m_sensorQ.push_back(index);
566 }
567 
568 int RequestManager::popSensorQ()
569 {
570    List<int>::iterator sensor_token;
571    int index;
572 
573     Mutex::Autolock lock(m_requestMutex);
574 
575     if(m_sensorQ.size() == 0)
576         return -1;
577 
578     sensor_token = m_sensorQ.begin();
579     index = *sensor_token;
580     m_sensorQ.erase(sensor_token);
581 
582     return (index);
583 }
584 
585 void RequestManager::releaseSensorQ()
586 {
587     List<int>::iterator r;
588 
589     Mutex::Autolock lock(m_requestMutex);
590     ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());
591 
592     while(m_sensorQ.size() > 0){
593         r = m_sensorQ.begin();
594         m_sensorQ.erase(r);
595     }
596     return;
597 }
598 
599 void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
600 {
601     int index;
602     struct camera2_shot_ext * request_shot;
603     nsecs_t timeStamp;
604     int i;
605 
606     Mutex::Autolock lock(m_requestMutex);
607     ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
608 
609     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
610         if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
611             && (entries[i].status == CAPTURED)){
612             entries[i].status = METADONE;
613             break;
614         }
615     }
616 
617     if (i == NUM_MAX_REQUEST_MGR_ENTRY){
618         ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
619         return;
620     }
621 
622     request_manager_entry * newEntry = &(entries[i]);
623     request_shot = &(newEntry->internal_shot);
624 
625     timeStamp = request_shot->shot.dm.sensor.timeStamp;
626     memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
627     request_shot->shot.dm.sensor.timeStamp = timeStamp;
628     m_lastTimeStamp = timeStamp;
629     CheckCompleted(i);
630 }
631 
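// Builds the per-frame shot for the ISP from the registered request. 3A settings
// (aa/ae/awb mode, exposure compensation) are forwarded only when they differ from
// the last value applied, and video stabilization toggles the DIS/DNR bypass flags.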
632 void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
633 {
634     int index, targetStreamIndex;
635     struct camera2_shot_ext * request_shot;
636 
637     ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
638     if (frameCnt < 0)
639         return;
640 
641     index = FindEntryIndexByFrameCnt(frameCnt);
642     if (index == -1) {
643         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
644         return;
645     }
646 
647     request_manager_entry * newEntry = &(entries[index]);
648     request_shot = &(newEntry->internal_shot);
649     memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
650     shot_ext->shot.ctl.request.frameCount = frameCnt;
651     shot_ext->request_sensor = 1;
652     shot_ext->dis_bypass = 1;
653     shot_ext->dnr_bypass = 1;
654     shot_ext->fd_bypass = 1;
655     shot_ext->drc_bypass = 1;
656     shot_ext->setfile = 0;
657 
658     shot_ext->request_scc = 0;
659     shot_ext->request_scp = 0;
660 
661     shot_ext->isReprocessing = request_shot->isReprocessing;
662     shot_ext->reprocessInput = request_shot->reprocessInput;
663     shot_ext->shot.ctl.request.outputStreams[0] = 0;
664 
665     shot_ext->awb_mode_dm = request_shot->awb_mode_dm;
666 
667     shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
668     shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
669     shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];
670 
671     // mapping flash UI mode from aeMode
672     if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
673         if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
674             ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
675         else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD)
676             ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
677         request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
678     }
679 
680     // Apply ae/awb lock or unlock
681     if (request_shot->ae_lock == AEMODE_LOCK_ON)
682             request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
683     if (request_shot->awb_lock == AWBMODE_LOCK_ON)
684             request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
685 
686     if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
687         shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
688     }
689     else {
690         shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
691         m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
692     }
693     if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
694         shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
695     }
696     else {
697         shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
698         m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
699     }
700     if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
701         shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
702     }
703     else {
704         shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
705         m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
706     }
707     if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
708         shot_ext->shot.ctl.aa.aeExpCompensation = 0;
709     }
710     else {
711         shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
712         m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
713     }
714 
715     if (request_shot->shot.ctl.aa.videoStabilizationMode) {
716         m_vdisBubbleEn = true;
717         shot_ext->dis_bypass = 0;
718         shot_ext->dnr_bypass = 0;
719     } else {
720         m_vdisBubbleEn = false;
721         shot_ext->dis_bypass = 1;
722         shot_ext->dnr_bypass = 1;
723     }
724 
725     shot_ext->shot.ctl.aa.afTrigger = 0;
726 
727     targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
728     shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
729     if (targetStreamIndex & MASK_OUTPUT_SCP)
730         shot_ext->request_scp = 1;
731 
732     if (targetStreamIndex & MASK_OUTPUT_SCC)
733         shot_ext->request_scc = 1;
734 
735     if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
736         shot_ext->fd_bypass = 0;
737 
738     shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0];
739     shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1];
740 
741     ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
742     (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
743     (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
744     (int)(shot_ext->shot.ctl.aa.afMode));
745 }
746 
747 bool    RequestManager::IsVdisEnable(void)
748 {
749         return m_vdisBubbleEn;
750 }
751 
752 int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
753 {
754     for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
755         if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
756             return i;
757     }
758     return -1;
759 }
760 
761 void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
762 {
763     int index = FindEntryIndexByFrameCnt(frameCnt);
764     if (index == -1) {
765         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
766         return;
767     }
768 
769     request_manager_entry * currentEntry = &(entries[index]);
770     if (currentEntry->internal_shot.isReprocessing == 1) {
771         ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
772         index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
773     } else {
774         currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
775         ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
776             index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
777     }
778 }
779 
780 
781 nsecs_t  RequestManager::GetTimestampByFrameCnt(int frameCnt)
782 {
783     int index = FindEntryIndexByFrameCnt(frameCnt);
784     if (index == -1) {
785         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
786         return m_lastTimeStamp;
787     }
788     else
789         return GetTimestamp(index);
790 }
791 
792 nsecs_t  RequestManager::GetTimestamp(int index)
793 {
794     Mutex::Autolock lock(m_requestMutex);
795     if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
796         ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
797         return 0;
798     }
799 
800     request_manager_entry * currentEntry = &(entries[index]);
801     nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
802     if (frameTime == 0) {
803         ALOGV("DEBUG(%s): timestamp null,  returning saved value", __FUNCTION__);
804         frameTime = m_lastTimeStamp;
805     }
806     ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
807     return frameTime;
808 }
809 
810 uint8_t  RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
811 {
812     int index = FindEntryIndexByFrameCnt(frameCnt);
813     if (index == -1) {
814         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
815         return 0;
816     }
817     else
818         return GetOutputStream(index);
819 }
820 
821 uint8_t  RequestManager::GetOutputStream(int index)
822 {
823     Mutex::Autolock lock(m_requestMutex);
824     if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
825         ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
826         return 0;
827     }
828 
829     request_manager_entry * currentEntry = &(entries[index]);
830     return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
831 }
832 
833 camera2_shot_ext *  RequestManager::GetInternalShotExtByFrameCnt(int frameCnt)
834 {
835     int index = FindEntryIndexByFrameCnt(frameCnt);
836     if (index == -1) {
837         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
838         return 0;
839     }
840     else
841         return GetInternalShotExt(index);
842 }
843 
844 camera2_shot_ext *  RequestManager::GetInternalShotExt(int index)
845 {
846     Mutex::Autolock lock(m_requestMutex);
847     if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
848         ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
849         return 0;
850     }
851 
852     request_manager_entry * currentEntry = &(entries[index]);
853     return &currentEntry->internal_shot;
854 }
855 
856 int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
857 {
858     Mutex::Autolock lock(m_requestMutex);
859     int i;
860 
861     if (m_numOfEntries == 0) {
862         CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
863         return -1;
864     }
865 
866     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
867         if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
868             continue;
869 
870         if (entries[i].status == REQUESTED) {
871             entries[i].status = CAPTURED;
872             return entries[i].internal_shot.shot.ctl.request.frameCount;
873         }
874         CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);
875 
876     }
877     CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
878 
879     return -1;
880 }
881 
882 void     RequestManager::SetInitialSkip(int count)
883 {
884     ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
885     if (count > m_sensorPipelineSkipCnt)
886         m_sensorPipelineSkipCnt = count;
887 }
888 
889 int     RequestManager::GetSkipCnt()
890 {
891     ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
892     if (m_sensorPipelineSkipCnt == 0)
893         return m_sensorPipelineSkipCnt;
894     else
895         return --m_sensorPipelineSkipCnt;
896 }
897 
898 void RequestManager::Dump(void)
899 {
900     int i = 0;
901     request_manager_entry * currentEntry;
902     ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
903     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
904 
905     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
906         currentEntry =  &(entries[i]);
907         ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
908         currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
909             currentEntry->output_stream_count,
910             currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
911     }
912 }
913 
914 int     RequestManager::GetNextIndex(int index)
915 {
916     index++;
917     if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
918         index = 0;
919 
920     return index;
921 }
922 
923 int     RequestManager::GetPrevIndex(int index)
924 {
925     index--;
926     if (index < 0)
927         index = NUM_MAX_REQUEST_MGR_ENTRY-1;
928 
929     return index;
930 }
931 
932 ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
933             m_requestQueueOps(NULL),
934             m_frameQueueOps(NULL),
935             m_callbackCookie(NULL),
936             m_numOfRemainingReqInSvc(0),
937             m_isRequestQueuePending(false),
938             m_isRequestQueueNull(true),
939             m_isIspStarted(false),
940             m_ionCameraClient(0),
941             m_zoomRatio(1),
942             m_scp_closing(false),
943             m_scp_closed(false),
944             m_afState(HAL_AFSTATE_INACTIVE),
945             m_afMode(NO_CHANGE),
946             m_afMode2(NO_CHANGE),
947             m_vdisBubbleCnt(0),
948             m_vdisDupFrame(0),
949             m_IsAfModeUpdateRequired(false),
950             m_IsAfTriggerRequired(false),
951             m_IsAfLockRequired(false),
952             m_sccLocalBufferValid(false),
953             m_wideAspect(false),
954             m_scpOutputSignalCnt(0),
955             m_scpOutputImageCnt(0),
956             m_afTriggerId(0),
957             m_afPendingTriggerId(0),
958             m_afModeWaitingCnt(0),
959             m_jpegEncodingCount(0),
960             m_scpForceSuspended(false),
961             m_halDevice(dev),
962             m_nightCaptureCnt(0),
963             m_nightCaptureFrameCnt(0),
964             m_lastSceneMode(0),
965             m_cameraId(cameraId),
966             m_thumbNailW(160),
967             m_thumbNailH(120)
968 {
969     ALOGD("(%s): ENTER", __FUNCTION__);
970     int ret = 0;
971     int res = 0;
972 
973     m_exynosPictureCSC = NULL;
974     m_exynosVideoCSC = NULL;
975 
976     if (!m_grallocHal) {
977         ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
978         if (ret)
979             ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
980     }
981 
982     m_camera2 = camera;
983     m_ionCameraClient = createIonClient(m_ionCameraClient);
984     if(m_ionCameraClient == 0)
985         ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
986 
987 
988     m_BayerManager = new BayerBufManager();
989     m_mainThread    = new MainThread(this);
990     m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
991     *openInvalid = InitializeISPChain();
992     if (*openInvalid < 0) {
993         ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
994         // clean process
995         // 1. close video nodes
996         // SCP
997         res = exynos_v4l2_close(m_camera_info.scp.fd);
998         if (res != NO_ERROR ) {
999             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1000         }
1001         // SCC
1002         res = exynos_v4l2_close(m_camera_info.capture.fd);
1003         if (res != NO_ERROR ) {
1004             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1005         }
1006         // Sensor
1007         res = exynos_v4l2_close(m_camera_info.sensor.fd);
1008         if (res != NO_ERROR ) {
1009             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1010         }
1011         // ISP
1012         res = exynos_v4l2_close(m_camera_info.isp.fd);
1013         if (res != NO_ERROR ) {
1014             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1015         }
1016     } else {
1017         m_sensorThread  = new SensorThread(this);
1018         m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
1019         m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1020         ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);
1021 
1022         for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
1023             m_subStreams[i].type =  SUBSTREAM_TYPE_NONE;
1024         CSC_METHOD cscMethod = CSC_METHOD_HW;
1025         m_exynosPictureCSC = csc_init(cscMethod);
1026         if (m_exynosPictureCSC == NULL)
1027             ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
1028         csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
1029 
1030         m_exynosVideoCSC = csc_init(cscMethod);
1031         if (m_exynosVideoCSC == NULL)
1032             ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
1033         csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
1034 
1035         m_setExifFixedAttribute();
1036 
1037         // control information clear
1038         // flash
1039         m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
1040         m_ctlInfo.flash.m_afFlashDoneFlg= false;
1041         m_ctlInfo.flash.m_flashEnableFlg = false;
1042         m_ctlInfo.flash.m_flashFrameCount = 0;
1043         m_ctlInfo.flash.m_flashCnt = 0;
1044         m_ctlInfo.flash.m_flashTimeOut = 0;
1045         m_ctlInfo.flash.m_flashDecisionResult = false;
1046         m_ctlInfo.flash.m_flashTorchMode = false;
1047         m_ctlInfo.flash.m_precaptureState = 0;
1048         m_ctlInfo.flash.m_precaptureTriggerId = 0;
1049         // ae
1050         m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
1051         // af
1052         m_ctlInfo.af.m_afTriggerTimeOut = 0;
1053         // scene
1054         m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
1055     }
1056     ALOGD("(%s): EXIT", __FUNCTION__);
1057 }
1058 
1059 ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
1060 {
1061     ALOGD("(%s): ENTER", __FUNCTION__);
1062     this->release();
1063     ALOGD("(%s): EXIT", __FUNCTION__);
1064 }
1065 
1066 void ExynosCameraHWInterface2::release()
1067 {
1068     int i, res;
1069     ALOGD("(HAL2::release): ENTER");
1070 
1071     if (m_streamThreads[1] != NULL) {
1072         m_streamThreads[1]->release();
1073         m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
1074     }
1075 
1076     if (m_streamThreads[0] != NULL) {
1077         m_streamThreads[0]->release();
1078         m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
1079     }
1080 
1081     if (m_sensorThread != NULL) {
1082         m_sensorThread->release();
1083     }
1084 
1085     if (m_mainThread != NULL) {
1086         m_mainThread->release();
1087     }
1088 
1089     if (m_exynosPictureCSC)
1090         csc_deinit(m_exynosPictureCSC);
1091     m_exynosPictureCSC = NULL;
1092 
1093     if (m_exynosVideoCSC)
1094         csc_deinit(m_exynosVideoCSC);
1095     m_exynosVideoCSC = NULL;
1096 
1097     if (m_streamThreads[1] != NULL) {
1098         ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
1099         while (!m_streamThreads[1]->IsTerminated())
1100             usleep(SIG_WAITING_TICK);
1101         ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 1 termination");
1102         m_streamThreads[1] = NULL;
1103     }
1104 
1105     if (m_streamThreads[0] != NULL) {
1106         ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
1107         while (!m_streamThreads[0]->IsTerminated())
1108             usleep(SIG_WAITING_TICK);
1109         ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 0 termination");
1110         m_streamThreads[0] = NULL;
1111     }
1112 
1113     if (m_sensorThread != NULL) {
1114         ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
1115         while (!m_sensorThread->IsTerminated())
1116             usleep(SIG_WAITING_TICK);
1117         ALOGD("(HAL2::release): END   Waiting for (indirect) sensor thread termination");
1118         m_sensorThread = NULL;
1119     }
1120 
1121     if (m_mainThread != NULL) {
1122         ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
1123         while (!m_mainThread->IsTerminated())
1124             usleep(SIG_WAITING_TICK);
1125         ALOGD("(HAL2::release): END   Waiting for (indirect) main thread termination");
1126         m_mainThread = NULL;
1127     }
1128 
1129     if (m_requestManager != NULL) {
1130         delete m_requestManager;
1131         m_requestManager = NULL;
1132     }
1133 
1134     if (m_BayerManager != NULL) {
1135         delete m_BayerManager;
1136         m_BayerManager = NULL;
1137     }
1138     for (i = 0; i < NUM_BAYER_BUFFERS; i++)
1139         freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1140 
1141     if (m_sccLocalBufferValid) {
1142         for (i = 0; i < NUM_SCC_BUFFERS; i++)
1143 #ifdef ENABLE_FRAME_SYNC
1144             freeCameraMemory(&m_sccLocalBuffer[i], 2);
1145 #else
1146             freeCameraMemory(&m_sccLocalBuffer[i], 1);
1147 #endif
1148     }
1149     else {
1150         for (i = 0; i < NUM_SCC_BUFFERS; i++)
1151             freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1152     }
1153 
1154     ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
1155     res = exynos_v4l2_close(m_camera_info.sensor.fd);
1156     if (res != NO_ERROR ) {
1157         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1158     }
1159 
1160     ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
1161     res = exynos_v4l2_close(m_camera_info.isp.fd);
1162     if (res != NO_ERROR ) {
1163         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1164     }
1165 
1166     ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
1167     res = exynos_v4l2_close(m_camera_info.capture.fd);
1168     if (res != NO_ERROR ) {
1169         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1170     }
1171 
1172     ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
1173     res = exynos_v4l2_close(m_camera_info.scp.fd);
1174     if (res != NO_ERROR ) {
1175         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1176     }
1177     ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
1178     deleteIonClient(m_ionCameraClient);
1179 
1180     ALOGD("(HAL2::release): EXIT");
1181 }
1182 
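// Opens the FIMC-IS video nodes (sensor: NODE_PREFIX+40, ISP: +41, capture/SCC: +42,
// preview/SCP: +44), selects the sensor, allocates the Bayer buffers, and points the
// ISP output buffers at the same dmabuf fds so sensor and ISP share memory.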
1183 int ExynosCameraHWInterface2::InitializeISPChain()
1184 {
1185     char node_name[30];
1186     int fd = 0;
1187     int i;
1188     int ret = 0;
1189 
1190     /* Open Sensor */
1191     memset(&node_name, 0x00, sizeof(char[30]));
1192     sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1193     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1194 
1195     if (fd < 0) {
1196         ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1197     }
1198     else {
1199         ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1200     }
1201     m_camera_info.sensor.fd = fd;
1202 
1203     /* Open ISP */
1204     memset(&node_name, 0x00, sizeof(char[30]));
1205     sprintf(node_name, "%s%d", NODE_PREFIX, 41);
1206     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1207 
1208     if (fd < 0) {
1209         ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1210     }
1211     else {
1212         ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1213     }
1214     m_camera_info.isp.fd = fd;
1215 
1216     /* Open ScalerC */
1217     memset(&node_name, 0x00, sizeof(char[30]));
1218     sprintf(node_name, "%s%d", NODE_PREFIX, 42);
1219     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1220 
1221     if (fd < 0) {
1222         ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1223     }
1224     else {
1225         ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1226     }
1227     m_camera_info.capture.fd = fd;
1228 
1229     /* Open ScalerP */
1230     memset(&node_name, 0x00, sizeof(char[30]));
1231     sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1232     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1233     if (fd < 0) {
1234         ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1235     }
1236     else {
1237         ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1238     }
1239     m_camera_info.scp.fd = fd;
1240 
1241     if(m_cameraId == 0)
1242         m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1243     else
1244         m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1245 
1246     memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1247     m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1248     m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1249 
1250     m_camera_info.dummy_shot.dis_bypass = 1;
1251     m_camera_info.dummy_shot.dnr_bypass = 1;
1252     m_camera_info.dummy_shot.fd_bypass = 1;
1253 
1254     /*sensor setting*/
1255     m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1256     m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1257     m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1258 
1259     m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1260     m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1261 
1262     /*request setting*/
1263     m_camera_info.dummy_shot.request_sensor = 1;
1264     m_camera_info.dummy_shot.request_scc = 0;
1265     m_camera_info.dummy_shot.request_scp = 0;
1266     m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1267 
1268     m_camera_info.sensor.width = m_camera2->getSensorRawW();
1269     m_camera_info.sensor.height = m_camera2->getSensorRawH();
1270 
1271     m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1272     m_camera_info.sensor.planes = 2;
1273     m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1274     m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1275     m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1276 
1277     for(i = 0; i < m_camera_info.sensor.buffers; i++){
1278         initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1279         m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
1280         m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: driver uses 8*1024; should use a predefined value
1281         allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
1282     }
1283 
1284     m_camera_info.isp.width = m_camera_info.sensor.width;
1285     m_camera_info.isp.height = m_camera_info.sensor.height;
1286     m_camera_info.isp.format = m_camera_info.sensor.format;
1287     m_camera_info.isp.planes = m_camera_info.sensor.planes;
1288     m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1289     m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1290     m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
1291 
1292     for(i = 0; i < m_camera_info.isp.buffers; i++){
1293         initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
1294         m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
1295         m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
1296         m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
1297         m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
1298         m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
1299         m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
1300     }
1301 
1302     /* init ISP */
1303     ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
1304     if (ret < 0) {
1305         ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
1306         return false;
1307     }
1308     cam_int_s_fmt(&(m_camera_info.isp));
1309     ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
1310     cam_int_reqbufs(&(m_camera_info.isp));
1311     ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
1312     ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
1313 
1314     /* init Sensor */
1315     cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
1316     ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
1317     if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
1318         ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
1319     }
1320     ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
1321     cam_int_reqbufs(&(m_camera_info.sensor));
1322     ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
1323     for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1324         ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1325         m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1326         m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1327         memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1328                 sizeof(struct camera2_shot_ext));
1329     }
1330 
1331     for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1332         cam_int_qbuf(&(m_camera_info.sensor), i);
1333 
1334     for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1335         m_requestManager->pushSensorQ(i);
1336 
1337     ALOGV("== stream_on :: sensor");
1338     cam_int_streamon(&(m_camera_info.sensor));
1339     m_camera_info.sensor.status = true;
1340 
1341     /* init Capture */
1342     m_camera_info.capture.width = m_camera2->getSensorW();
1343     m_camera_info.capture.height = m_camera2->getSensorH();
1344     m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
1345 #ifdef ENABLE_FRAME_SYNC
1346     m_camera_info.capture.planes = 2;
1347 #else
1348     m_camera_info.capture.planes = 1;
1349 #endif
1350     m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1351     m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1352     m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
1353 
1354     m_camera_info.capture.status = false;
1355 
1356     return true;
1357 }
1358 
1359 void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
1360 {
1361     ALOGV("(%s)", __FUNCTION__);
1362     StreamThread *AllocatedStream;
1363     stream_parameters_t newParameters;
1364     uint32_t format_actual;
1365 
1366 
1367     if (!threadExists) {
1368         m_streamThreads[1]  = new StreamThread(this, 1);
1369     }
1370     AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1371     if (!threadExists) {
1372         AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1373         m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1374         AllocatedStream->m_numRegisteredStream = 1;
1375     }
1376     AllocatedStream->m_index        = 1;
1377 
1378     format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1379 
1380     newParameters.width             = m_camera2->getSensorW();
1381     newParameters.height            = m_camera2->getSensorH();
1382     newParameters.format            = format_actual;
1383     newParameters.streamOps         = NULL;
1384     newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
1385 #ifdef ENABLE_FRAME_SYNC
1386     newParameters.planes            = 2;
1387 #else
1388     newParameters.planes            = 1;
1389 #endif
1390 
1391     newParameters.numSvcBufsInHal   = 0;
1392 
1393     newParameters.node              = &m_camera_info.capture;
1394 
1395     AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
1396     ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1397 
1398     if (!threadExists) {
1399         if (!m_sccLocalBufferValid) {
1400             for (int i = 0; i < m_camera_info.capture.buffers; i++){
1401                 initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
1402                 m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1403 #ifdef ENABLE_FRAME_SYNC
1404                 m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK: driver uses 4*1024; should use a predefined value
1405                 allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
1406 #else
1407                 allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1408 #endif
1409                 m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
1410             }
1411             m_sccLocalBufferValid = true;
1412         }
1413     } else {
1414         if (m_sccLocalBufferValid) {
1415              for (int i = 0; i < m_camera_info.capture.buffers; i++)
1416                 m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
1417         } else {
1418             ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
1419         }
1420     }
1421     cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
1422     m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1423     cam_int_s_fmt(newParameters.node);
1424     ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1425     cam_int_reqbufs(newParameters.node);
1426     ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1427 
1428     for (int i = 0; i < newParameters.node->buffers; i++) {
1429         ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1430         cam_int_qbuf(newParameters.node, i);
1431         newParameters.svcBufStatus[i] = ON_DRIVER;
1432     }
1433 
1434     ALOGV("== stream_on :: capture");
1435     if (cam_int_streamon(newParameters.node) < 0) {
1436         ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1437     } else {
1438         m_camera_info.capture.status = true;
1439     }
1440 
1441     AllocatedStream->setParameter(&newParameters);
1442     AllocatedStream->m_activated    = true;
1443     AllocatedStream->m_isBufferInit = true;
1444 }
1445 
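/*
 * Turns on streaming for the ISP node and asks the sensor subdevice
 * (via V4L2_CID_IS_S_STREAM) to start streaming.
 */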
1446 void ExynosCameraHWInterface2::StartISP()
1447 {
1448     ALOGV("== stream_on :: isp");
1449     cam_int_streamon(&(m_camera_info.isp));
1450     exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1451 }
1452 
1453 int ExynosCameraHWInterface2::getCameraId() const
1454 {
1455     return m_cameraId;
1456 }
1457 
1458 int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1459 {
1460     ALOGV("DEBUG(%s):", __FUNCTION__);
1461     if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1462             && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1463         m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1464         return 0;
1465     }
1466     else {
1467         ALOGE("ERR(%s): setRequestQueueSrcOps: NULL arguments", __FUNCTION__);
1468         return 1;
1469     }
1470 }
1471 
1472 int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1473 {
1474     int i = 0;
1475 
1476     ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1477     if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1478         ALOGE("ERR(%s): queue ops NULL, ignoring request", __FUNCTION__);
1479         return 0;
1480     }
1481     m_isRequestQueueNull = false;
1482     if (m_requestManager->GetNumEntries() == 0)
1483         m_requestManager->SetInitialSkip(0);
1484 
1485     if (m_isIspStarted == false) {
1486         /* isp */
1487         m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1488         m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1489         cam_int_s_fmt(&(m_camera_info.isp));
1490         cam_int_reqbufs(&(m_camera_info.isp));
1491 
1492         /* sensor */
1493         if (m_camera_info.sensor.status == false) {
1494             cam_int_s_fmt(&(m_camera_info.sensor));
1495             cam_int_reqbufs(&(m_camera_info.sensor));
1496 
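            /* Pre-fill the second plane of every sensor buffer with the dummy shot
             * metadata (33 ms frame duration, frameCount -1), presumably so that the
             * initial QBUFs below already carry valid per-frame settings. */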
1497             for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1498                 ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1499                 m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1500                 m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1501                 memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1502                         sizeof(struct camera2_shot_ext));
1503             }
1504             for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1505                 cam_int_qbuf(&(m_camera_info.sensor), i);
1506 
1507             for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1508                 m_requestManager->pushSensorQ(i);
1509             ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
1510             cam_int_streamon(&(m_camera_info.sensor));
1511             m_camera_info.sensor.status = true;
1512         }
1513     }
1514     if (!(m_streamThreads[1].get())) {
1515         ALOGV("DEBUG(%s): stream thread 1 does not exist. starting without stream", __FUNCTION__);
1516         StartSCCThread(false);
1517     } else {
1518         if (m_streamThreads[1]->m_activated ==  false) {
1519             ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1520             StartSCCThread(true);
1521         } else {
1522             if (m_camera_info.capture.status == false) {
1523                 m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1524                 cam_int_s_fmt(&(m_camera_info.capture));
1525                 ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1526                 cam_int_reqbufs(&(m_camera_info.capture));
1527                 ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1528 
1529                 if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
1530                     StreamThread *          targetStream = m_streamThreads[1].get();
1531                     stream_parameters_t     *targetStreamParms = &(targetStream->m_parameters);
1532                     node_info_t             *currentNode = targetStreamParms->node;
1533 
1534                     struct v4l2_buffer v4l2_buf;
1535                     struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1536 
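                    /* Re-queue the service-registered buffers of the direct (ZSL) stream
                     * to the capture node; with ENABLE_FRAME_SYNC an extra metadata plane
                     * is appended to each queued buffer. */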
1537                     for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
1538                         v4l2_buf.m.planes   = planes;
1539                         v4l2_buf.type       = currentNode->type;
1540                         v4l2_buf.memory     = currentNode->memory;
1541 
1542                         v4l2_buf.length     = currentNode->planes;
1543                         v4l2_buf.index      = i;
1544                         ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];
1545 
1546                         if (i < currentNode->buffers) {
1547 #ifdef ENABLE_FRAME_SYNC
1548                             v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
1549                             v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
1550                             v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
1551                             v4l2_buf.length += targetStreamParms->metaPlanes;
1552                             v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
1553                             v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
1554 
1555                             ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
1556 #endif
1557                             if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1558                                 ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
1559                             }
1560                             ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
1561                             targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
1562                         }
1563                         else {
1564                             targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1565                         }
1566 
1567                     }
1568 
1569                 } else {
1570                     for (int i = 0; i < m_camera_info.capture.buffers; i++) {
1571                         ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1572                         cam_int_qbuf(&(m_camera_info.capture), i);
1573                     }
1574                 }
1575                 ALOGV("== stream_on :: capture");
1576                 if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1577                     ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1578                 } else {
1579                     m_camera_info.capture.status = true;
1580                 }
1581             }
1582             if (m_scpForceSuspended) {
1583                 m_scpForceSuspended = false;
1584             }
1585         }
1586     }
1587     if (m_isIspStarted == false) {
1588         StartISP();
1589         ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
1590         m_requestManager->SetInitialSkip(6);
1591         m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1592         m_isIspStarted = true;
1593     }
1594     m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1595     return 0;
1596 }
1597 
1598 int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1599 {
1600     ALOGV("DEBUG(%s):", __FUNCTION__);
1601     if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1602             && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1603         m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1604         return 0;
1605     }
1606     else {
1607         ALOGE("ERR(%s): setFrameQueueDstOps: NULL arguments", __FUNCTION__);
1608         return 1;
1609     }
1610 }
1611 
1612 int ExynosCameraHWInterface2::getInProgressCount()
1613 {
1614     int inProgressCount = m_requestManager->GetNumEntries();
1615     ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__,
1616         inProgressCount, m_jpegEncodingCount, (inProgressCount + m_jpegEncodingCount));
1617     return (inProgressCount + m_jpegEncodingCount);
1618 }
1619 
1620 int ExynosCameraHWInterface2::flushCapturesInProgress()
1621 {
1622     return 0;
1623 }
1624 
1625 int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1626 {
1627     ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1628 
1629     if (request == NULL) return BAD_VALUE;
1630     if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1631         return BAD_VALUE;
1632     }
1633     status_t res;
1634     // Pass 1, calculate size and allocate
1635     res = m_camera2->constructDefaultRequest(request_template,
1636             request,
1637             true);
1638     if (res != OK) {
1639         return res;
1640     }
1641     // Pass 2, build request
1642     res = m_camera2->constructDefaultRequest(request_template,
1643             request,
1644             false);
1645     if (res != OK) {
1646         ALOGE("Unable to populate new request for template %d",
1647                 request_template);
1648     }
1649 
1650     return res;
1651 }
1652 
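/*
 * Allocates an output stream for the framework. The branch taken depends on the
 * requested format: implementation-defined/opaque formats map to the preview (SCP)
 * path or a record substream, ZSL maps to the SCC capture node, BLOB maps to a JPEG
 * substream, and YCrCb_420_SP/YV12 map to a preview-callback substream.
 */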
1653 int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1654                                     uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1655 {
1656     ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
1657     bool useDirectOutput = false;
1658     StreamThread *AllocatedStream;
1659     stream_parameters_t newParameters;
1660     substream_parameters_t *subParameters;
1661     StreamThread *parentStream;
1662     status_t res;
1663     int allocCase = 0;
1664 
1665     if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)  &&
1666             m_camera2->isSupportedResolution(width, height)) {
1667         if (!(m_streamThreads[0].get())) {
1668             ALOGV("DEBUG(%s): stream 0 does not exist", __FUNCTION__);
1669             allocCase = 0;
1670         }
1671         else {
1672             if ((m_streamThreads[0].get())->m_activated == true) {
1673                 ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1674                 allocCase = 1;
1675             }
1676             else {
1677                 ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1678                 allocCase = 2;
1679             }
1680         }
1681 
1682         // TODO: instead of this fixed list, calculate the aspect ratio and select based on it.
1683         if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
1684                     || (width == 720 && height == 480) || (width == 1440 && height == 960)
1685                     || (width == 1344 && height == 896)) {
1686             m_wideAspect = true;
1687         } else {
1688             m_wideAspect = false;
1689         }
1690         ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1691 
1692         if (allocCase == 0 || allocCase == 2) {
1693             *stream_id = STREAM_ID_PREVIEW;
1694 
1695             m_streamThreads[0]  = new StreamThread(this, *stream_id);
1696 
1697             AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1698             AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1699             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1700 
1701             *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1702             *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
1703             *max_buffers                        = 6;
1704 
1705             newParameters.width                 = width;
1706             newParameters.height                = height;
1707             newParameters.format                = *format_actual;
1708             newParameters.streamOps             = stream_ops;
1709             newParameters.usage                 = *usage;
1710             newParameters.numHwBuffers          = NUM_SCP_BUFFERS;
1711             newParameters.numOwnSvcBuffers      = *max_buffers;
1712             newParameters.planes                = NUM_PLANES(*format_actual);
1713             newParameters.metaPlanes            = 1;
1714             newParameters.numSvcBufsInHal       = 0;
1715             newParameters.minUndequedBuffer     = 3;
1716             newParameters.needsIonMap           = true;
1717 
1718             newParameters.node                  = &m_camera_info.scp;
1719             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1720             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1721 
1722             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1723             AllocatedStream->m_index            = 0;
1724             AllocatedStream->setParameter(&newParameters);
1725             AllocatedStream->m_activated = true;
1726             AllocatedStream->m_numRegisteredStream = 1;
1727             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1728             m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1729             m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1730             if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
1731                 AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
1732             if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
1733                 AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
1734             return 0;
1735         } else if (allocCase == 1) {
1736             *stream_id = STREAM_ID_RECORD;
1737 
1738             subParameters = &m_subStreams[STREAM_ID_RECORD];
1739             memset(subParameters, 0, sizeof(substream_parameters_t));
1740 
1741             parentStream = (StreamThread*)(m_streamThreads[0].get());
1742             if (!parentStream) {
1743                 return 1;
1744             }
1745 
1746             *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1747             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1748             *max_buffers = 6;
1749 
1750             subParameters->type         = SUBSTREAM_TYPE_RECORD;
1751             subParameters->width        = width;
1752             subParameters->height       = height;
1753             subParameters->format       = *format_actual;
1754             subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1755             subParameters->streamOps     = stream_ops;
1756             subParameters->usage         = *usage;
1757             subParameters->numOwnSvcBuffers = *max_buffers;
1758             subParameters->numSvcBufsInHal  = 0;
1759             subParameters->needBufferInit    = false;
1760             subParameters->minUndequedBuffer = 2;
1761 
1762             res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
1763             if (res != NO_ERROR) {
1764                 ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1765                 return 1;
1766             }
1767             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1768             ALOGV("(%s): Enabling Record", __FUNCTION__);
1769             return 0;
1770         }
1771     }
1772     else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
1773             && (width == m_camera2->getSensorW()) && (height == m_camera2->getSensorH())) {
1774 
1775         if (!(m_streamThreads[1].get())) {
1776             ALOGV("DEBUG(%s): stream thread 1 does not exist", __FUNCTION__);
1777             useDirectOutput = true;
1778         }
1779         else {
1780             ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
1781             useDirectOutput = false;
1782         }
1783         if (useDirectOutput) {
1784             *stream_id = STREAM_ID_ZSL;
1785 
1786             m_streamThreads[1]  = new StreamThread(this, *stream_id);
1787             AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1788             AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1789             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1790 
1791             *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1792             *max_buffers                        = 6;
1793 
1794             *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1795             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1796             *max_buffers = 6;
1797 
1798             newParameters.width                 = width;
1799             newParameters.height                = height;
1800             newParameters.format                = *format_actual;
1801             newParameters.streamOps             = stream_ops;
1802             newParameters.usage                 = *usage;
1803             newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1804             newParameters.numOwnSvcBuffers      = *max_buffers;
1805             newParameters.planes                = NUM_PLANES(*format_actual);
1806             newParameters.metaPlanes            = 1;
1807 
1808             newParameters.numSvcBufsInHal       = 0;
1809             newParameters.minUndequedBuffer     = 2;
1810             newParameters.needsIonMap           = false;
1811 
1812             newParameters.node                  = &m_camera_info.capture;
1813             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1814             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1815 
1816             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1817             AllocatedStream->m_index            = 1;
1818             AllocatedStream->setParameter(&newParameters);
1819             AllocatedStream->m_activated = true;
1820             AllocatedStream->m_numRegisteredStream = 1;
1821             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1822             return 0;
1823         } else {
1824             bool bJpegExists = false;
1825             AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1826             subParameters = &m_subStreams[STREAM_ID_JPEG];
1827             if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
1828                 ALOGD("(%s): jpeg stream exists", __FUNCTION__);
1829                 bJpegExists = true;
1830                 AllocatedStream->detachSubStream(STREAM_ID_JPEG);
1831             }
1832             AllocatedStream->m_releasing = true;
1833             ALOGD("START stream thread 1 release %d", __LINE__);
1834             do {
1835                 AllocatedStream->release();
1836                 usleep(SIG_WAITING_TICK);
1837             } while (AllocatedStream->m_releasing);
1838             ALOGD("END   stream thread 1 release %d", __LINE__);
1839 
1840             *stream_id = STREAM_ID_ZSL;
1841 
1842             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1843 
1844             *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1845             *max_buffers                        = 6;
1846 
1847             *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1848             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1849             *max_buffers = 6;
1850 
1851             newParameters.width                 = width;
1852             newParameters.height                = height;
1853             newParameters.format                = *format_actual;
1854             newParameters.streamOps             = stream_ops;
1855             newParameters.usage                 = *usage;
1856             newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1857             newParameters.numOwnSvcBuffers      = *max_buffers;
1858             newParameters.planes                = NUM_PLANES(*format_actual);
1859             newParameters.metaPlanes            = 1;
1860 
1861             newParameters.numSvcBufsInHal       = 0;
1862             newParameters.minUndequedBuffer     = 2;
1863             newParameters.needsIonMap           = false;
1864 
1865             newParameters.node                  = &m_camera_info.capture;
1866             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1867             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1868 
1869             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1870             AllocatedStream->m_index            = 1;
1871             AllocatedStream->setParameter(&newParameters);
1872             AllocatedStream->m_activated = true;
1873             AllocatedStream->m_numRegisteredStream = 1;
1874             if (bJpegExists) {
1875                 AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
1876             }
1877             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1878             return 0;
1879 
1880         }
1881     }
1882     else if (format == HAL_PIXEL_FORMAT_BLOB
1883             && m_camera2->isSupportedJpegResolution(width, height)) {
1884         *stream_id = STREAM_ID_JPEG;
1885 
1886         subParameters = &m_subStreams[*stream_id];
1887         memset(subParameters, 0, sizeof(substream_parameters_t));
1888 
1889         if (!(m_streamThreads[1].get())) {
1890             ALOGV("DEBUG(%s): stream thread 1 does not exist", __FUNCTION__);
1891             StartSCCThread(false);
1892         }
1893         else if (m_streamThreads[1]->m_activated ==  false) {
1894             ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1895             StartSCCThread(true);
1896         }
1897         parentStream = (StreamThread*)(m_streamThreads[1].get());
1898 
1899         *format_actual = HAL_PIXEL_FORMAT_BLOB;
1900         *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1901         *max_buffers = 4;
1902 
1903         subParameters->type          = SUBSTREAM_TYPE_JPEG;
1904         subParameters->width         = width;
1905         subParameters->height        = height;
1906         subParameters->format        = *format_actual;
1907         subParameters->svcPlanes     = 1;
1908         subParameters->streamOps     = stream_ops;
1909         subParameters->usage         = *usage;
1910         subParameters->numOwnSvcBuffers = *max_buffers;
1911         subParameters->numSvcBufsInHal  = 0;
1912         subParameters->needBufferInit    = false;
1913         subParameters->minUndequedBuffer = 2;
1914 
1915         res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
1916         if (res != NO_ERROR) {
1917             ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1918             return 1;
1919         }
1920         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1921         ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
1922         return 0;
1923     }
1924     else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
1925         *stream_id = STREAM_ID_PRVCB;
1926 
1927         subParameters = &m_subStreams[STREAM_ID_PRVCB];
1928         memset(subParameters, 0, sizeof(substream_parameters_t));
1929 
1930         parentStream = (StreamThread*)(m_streamThreads[0].get());
1931         if (!parentStream) {
1932             return 1;
1933         }
1934 
1935         *format_actual = format;
1936         *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1937         *max_buffers = 6;
1938 
1939         subParameters->type         = SUBSTREAM_TYPE_PRVCB;
1940         subParameters->width        = width;
1941         subParameters->height       = height;
1942         subParameters->format       = *format_actual;
1943         subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1944         subParameters->streamOps     = stream_ops;
1945         subParameters->usage         = *usage;
1946         subParameters->numOwnSvcBuffers = *max_buffers;
1947         subParameters->numSvcBufsInHal  = 0;
1948         subParameters->needBufferInit    = false;
1949         subParameters->minUndequedBuffer = 2;
1950 
1951         if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
1952             subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
1953             subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
1954         }
1955         else {
1956             subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1957             subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
1958         }
1959 
1960         res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
1961         if (res != NO_ERROR) {
1962             ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1963             return 1;
1964         }
1965         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1966         ALOGV("(%s): Enabling previewcb", __FUNCTION__);
1967         return 0;
1968     }
1969     ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
1970     return 1;
1971 }
1972 
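/*
 * Registers the gralloc buffers handed in by the framework. For direct streams
 * (preview/ZSL) the buffer fds are queued straight to the V4L2 node; for substreams
 * (JPEG/record/preview callback) only the gralloc handles and mapped virtual
 * addresses are recorded for later copies.
 */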
1973 int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
1974         int num_buffers, buffer_handle_t *registeringBuffers)
1975 {
1976     int                     i,j;
1977     void                    *virtAddr[3];
1978     int                     plane_index = 0;
1979     StreamThread *          targetStream;
1980     stream_parameters_t     *targetStreamParms;
1981     node_info_t             *currentNode;
1982 
1983     struct v4l2_buffer v4l2_buf;
1984     struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1985 
1986     ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__,
1987         stream_id, num_buffers, (uint32_t)registeringBuffers);
1988 
1989     if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
1990         targetStream = m_streamThreads[0].get();
1991         targetStreamParms = &(m_streamThreads[0]->m_parameters);
1992 
1993     }
1994     else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
1995         substream_parameters_t  *targetParms;
1996         targetParms = &m_subStreams[stream_id];
1997 
1998         targetParms->numSvcBuffers = num_buffers;
1999 
2000         for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
2001             ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
2002                 stream_id, i, (uint32_t)(registeringBuffers[i]));
2003             if (m_grallocHal) {
2004                 if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
2005                        targetParms->usage, 0, 0,
2006                        targetParms->width, targetParms->height, virtAddr) != 0) {
2007                     ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2008                 }
2009                 else {
2010                     ExynosBuffer currentBuf;
2011                     const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2012                     if (targetParms->svcPlanes == 1) {
2013                         currentBuf.fd.extFd[0] = priv_handle->fd;
2014                         currentBuf.size.extS[0] = priv_handle->size;
2015                         currentBuf.size.extS[1] = 0;
2016                         currentBuf.size.extS[2] = 0;
2017                     } else if (targetParms->svcPlanes == 2) {
2018                         currentBuf.fd.extFd[0] = priv_handle->fd;
2019                         currentBuf.fd.extFd[1] = priv_handle->fd1;
2020 
2021                     } else if (targetParms->svcPlanes == 3) {
2022                         currentBuf.fd.extFd[0] = priv_handle->fd;
2023                         currentBuf.fd.extFd[1] = priv_handle->fd1;
2024                         currentBuf.fd.extFd[2] = priv_handle->fd2;
2025                     }
2026                     for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
2027                         currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
2028                         CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
2029                              __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
2030                              (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
2031                     }
2032                     targetParms->svcBufStatus[i]  = ON_SERVICE;
2033                     targetParms->svcBuffers[i]    = currentBuf;
2034                     targetParms->svcBufHandle[i]  = registeringBuffers[i];
2035                 }
2036             }
2037         }
2038         targetParms->needBufferInit = true;
2039         return 0;
2040     }
2041     else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
2042         targetStream = m_streamThreads[1].get();
2043         targetStreamParms = &(m_streamThreads[1]->m_parameters);
2044     }
2045     else {
2046         ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
2047         return 1;
2048     }
2049 
2050     if (targetStream->streamType == STREAM_TYPE_DIRECT) {
2051         if (num_buffers < targetStreamParms->numHwBuffers) {
2052             ALOGE("ERR(%s): registering insufficient number of buffers (%d) < (%d)",
2053                 __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
2054             return 1;
2055         }
2056     }
2057     CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) planes(%d)",
2058             __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
2059             targetStreamParms->height, targetStreamParms->planes);
2060     targetStreamParms->numSvcBuffers = num_buffers;
2061     currentNode = targetStreamParms->node;
2062     currentNode->width      = targetStreamParms->width;
2063     currentNode->height     = targetStreamParms->height;
2064     currentNode->format     = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
2065     currentNode->planes     = targetStreamParms->planes;
2066     currentNode->buffers    = targetStreamParms->numHwBuffers;
2067     cam_int_s_input(currentNode, m_camera_info.sensor_id);
2068     cam_int_s_fmt(currentNode);
2069     cam_int_reqbufs(currentNode);
2070     for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
2071         ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
2072             i, (uint32_t)(registeringBuffers[i]));
2073                 v4l2_buf.m.planes   = planes;
2074                 v4l2_buf.type       = currentNode->type;
2075                 v4l2_buf.memory     = currentNode->memory;
2076                 v4l2_buf.index      = i;
2077                 v4l2_buf.length     = currentNode->planes;
2078 
2079                 ExynosBuffer currentBuf;
2080                 ExynosBuffer metaBuf;
2081                 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2082 
2083                 m_getAlignedYUVSize(currentNode->format,
2084                     currentNode->width, currentNode->height, &currentBuf);
2085 
2086                 ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
2087                 if (currentNode->planes == 1) {
2088                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2089                     currentBuf.fd.extFd[0] = priv_handle->fd;
2090                     currentBuf.size.extS[0] = priv_handle->size;
2091                     currentBuf.size.extS[1] = 0;
2092                     currentBuf.size.extS[2] = 0;
2093                 } else if (currentNode->planes == 2) {
2094                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2095                     v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
2096                     currentBuf.fd.extFd[0] = priv_handle->fd;
2097                     currentBuf.fd.extFd[1] = priv_handle->fd1;
2098 
2099                 } else if (currentNode->planes == 3) {
2100                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2101                     v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
2102                     v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
2103                     currentBuf.fd.extFd[0] = priv_handle->fd;
2104                     currentBuf.fd.extFd[2] = priv_handle->fd1;
2105                     currentBuf.fd.extFd[1] = priv_handle->fd2;
2106                 }
2107 
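                /* Optionally map each plane into the HAL's address space and record the
                 * plane lengths. For 3-plane formats the chroma fds queued to the driver
                 * are swapped relative to the gralloc handle order, apparently to match
                 * the plane layout expected by the capture node. */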
2108                 for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
2109                     if (targetStreamParms->needsIonMap)
2110                         currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
2111                     v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
2112                     ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)",
2113                          __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
2114                          (unsigned int)currentBuf.virt.extP[plane_index],
2115                          v4l2_buf.m.planes[plane_index].length);
2116                 }
2117 
2118                 if (i < currentNode->buffers) {
2119 
2120 
2121 #ifdef ENABLE_FRAME_SYNC
2122                     /* add plane for metadata*/
2123                     metaBuf.size.extS[0] = 4*1024;
2124                     allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0);
2125 
2126                     v4l2_buf.length += targetStreamParms->metaPlanes;
2127                     v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
2128                     v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
2129 
2130                     ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
2131 #endif
2132                     if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
2133                         ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
2134                             __FUNCTION__, stream_id, currentNode->fd);
2135                     }
2136                     ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
2137                             __FUNCTION__, stream_id, currentNode->fd);
2138                     targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
2139                 }
2140                 else {
2141                     targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
2142                 }
2143 
2144                 targetStreamParms->svcBuffers[i]       = currentBuf;
2145                 targetStreamParms->metaBuffers[i] = metaBuf;
2146                 targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
2147             }
2148 
2149     ALOGV("DEBUG(%s): calling  streamon stream id = %d", __FUNCTION__, stream_id);
2150     cam_int_streamon(targetStreamParms->node);
2151     ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
2152     currentNode->status = true;
2153     ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
2154 
2155     return 0;
2156 }
2157 
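/*
 * Releases a stream created by allocateStream(). Substreams are detached from their
 * parent stream thread; releasing the main preview or ZSL stream unmaps its ION
 * buffers, stops the owning stream thread and, for preview, also shuts down the
 * sensor thread and marks the ISP as stopped.
 */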
2158 int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
2159 {
2160     StreamThread *targetStream;
2161     status_t res = NO_ERROR;
2162     ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2163     bool releasingScpMain = false;
2164 
2165     if (stream_id == STREAM_ID_PREVIEW) {
2166         targetStream = (StreamThread*)(m_streamThreads[0].get());
2167         if (!targetStream) {
2168             ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2169             return NO_ERROR;
2170         }
2171         targetStream->m_numRegisteredStream--;
2172         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2173         releasingScpMain = true;
2174         if (targetStream->m_parameters.needsIonMap) {
2175             for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2176                 for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2177                     ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2178                                     targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2179                     ALOGV("(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2180                                   targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
2181                 }
2182             }
2183         }
2184     } else if (stream_id == STREAM_ID_JPEG) {
2185         if (m_resizeBuf.size.s != 0) {
2186             freeCameraMemory(&m_resizeBuf, 1);
2187         }
2188         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2189 
2190         targetStream = (StreamThread*)(m_streamThreads[1].get());
2191         if (!targetStream) {
2192             ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2193             return NO_ERROR;
2194         }
2195 
2196         if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
2197             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2198             return 1;
2199         }
2200         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2201         return 0;
2202     } else if (stream_id == STREAM_ID_RECORD) {
2203         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2204 
2205         targetStream = (StreamThread*)(m_streamThreads[0].get());
2206         if (!targetStream) {
2207             ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2208             return NO_ERROR;
2209         }
2210 
2211         if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
2212             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2213             return 1;
2214         }
2215 
2216         if (targetStream->m_numRegisteredStream != 0)
2217             return 0;
2218     } else if (stream_id == STREAM_ID_PRVCB) {
2219         if (m_previewCbBuf.size.s != 0) {
2220             freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
2221         }
2222         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2223 
2224         targetStream = (StreamThread*)(m_streamThreads[0].get());
2225         if (!targetStream) {
2226             ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2227             return NO_ERROR;
2228         }
2229 
2230         if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
2231             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2232             return 1;
2233         }
2234 
2235         if (targetStream->m_numRegisteredStream != 0)
2236             return 0;
2237     } else if (stream_id == STREAM_ID_ZSL) {
2238         targetStream = (StreamThread*)(m_streamThreads[1].get());
2239         if (!targetStream) {
2240             ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2241             return NO_ERROR;
2242         }
2243 
2244         targetStream->m_numRegisteredStream--;
2245         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2246         if (targetStream->m_parameters.needsIonMap) {
2247             for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2248                 for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2249                     ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2250                                     targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2251                     ALOGV("(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2252                                   targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
2253                 }
2254             }
2255         }
2256     } else {
2257         ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
2258         return 1;
2259     }
2260 
2261     if (m_sensorThread != NULL && releasingScpMain) {
2262         m_sensorThread->release();
2263         ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
2264         while (!m_sensorThread->IsTerminated())
2265             usleep(SIG_WAITING_TICK);
2266         ALOGD("(%s): END   Waiting for (indirect) sensor thread termination", __FUNCTION__);
2267     }
2268 
2269     if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
2270         ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
2271         targetStream = (StreamThread*)(m_streamThreads[1].get());
2272         targetStream->m_releasing = true;
2273         ALOGD("START stream thread release %d", __LINE__);
2274         do {
2275             targetStream->release();
2276             usleep(SIG_WAITING_TICK);
2277         } while (targetStream->m_releasing);
2278         m_camera_info.capture.status = false;
2279         ALOGD("END   stream thread release %d", __LINE__);
2280     }
2281 
2282     if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
2283         ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
2284         targetStream = (StreamThread*)(m_streamThreads[0].get());
2285         targetStream->m_releasing = true;
2286         ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2287         do {
2288             targetStream->release();
2289             usleep(SIG_WAITING_TICK);
2290         } while (targetStream->m_releasing);
2291         ALOGD("(%s): END   Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2292         targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
2293 
2294         if (targetStream != NULL) {
2295             ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
2296             while (!targetStream->IsTerminated())
2297                 usleep(SIG_WAITING_TICK);
2298             ALOGD("(%s): END   Waiting for (indirect) stream thread termination", __FUNCTION__);
2299             m_streamThreads[0] = NULL;
2300         }
2301         if (m_camera_info.capture.status == true) {
2302             m_scpForceSuspended = true;
2303         }
2304         m_isIspStarted = false;
2305     }
2306     ALOGV("(%s): END", __FUNCTION__);
2307     return 0;
2308 }
2309 
2310 int ExynosCameraHWInterface2::allocateReprocessStream(
2311     uint32_t width, uint32_t height, uint32_t format,
2312     const camera2_stream_in_ops_t *reprocess_stream_ops,
2313     uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
2314 {
2315     ALOGV("DEBUG(%s):", __FUNCTION__);
2316     return 0;
2317 }
2318 
2319 int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
2320             uint32_t output_stream_id,
2321             const camera2_stream_in_ops_t *reprocess_stream_ops,
2322             // outputs
2323             uint32_t *stream_id)
2324 {
2325     ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
2326     *stream_id = STREAM_ID_JPEG_REPROCESS;
2327 
2328     m_reprocessStreamId = *stream_id;
2329     m_reprocessOps = reprocess_stream_ops;
2330     m_reprocessOutputStreamId = output_stream_id;
2331     return 0;
2332 }
2333 
2334 int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
2335 {
2336     ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2337     if (stream_id == STREAM_ID_JPEG_REPROCESS) {
2338         m_reprocessStreamId = 0;
2339         m_reprocessOps = NULL;
2340         m_reprocessOutputStreamId = 0;
2341         return 0;
2342     }
2343     return 1;
2344 }
2345 
2346 int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
2347 {
2348     Mutex::Autolock lock(m_afModeTriggerLock);
2349     ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
2350 
2351     switch (trigger_id) {
2352     case CAMERA2_TRIGGER_AUTOFOCUS:
2353         ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2354         OnAfTrigger(ext1);
2355         break;
2356 
2357     case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
2358         ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2359         OnAfCancel(ext1);
2360         break;
2361     case CAMERA2_TRIGGER_PRECAPTURE_METERING:
2362         ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
2363         OnPrecaptureMeteringTriggerStart(ext1);
2364         break;
2365     default:
2366         break;
2367     }
2368     return 0;
2369 }
2370 
2371 int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
2372 {
2373     ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
2374     m_notifyCb = notify_cb;
2375     m_callbackCookie = user;
2376     return 0;
2377 }
2378 
2379 int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
2380 {
2381     ALOGV("DEBUG(%s):", __FUNCTION__);
2382     return 0;
2383 }
2384 
2385 int ExynosCameraHWInterface2::dump(int fd)
2386 {
2387     ALOGV("DEBUG(%s):", __FUNCTION__);
2388     return 0;
2389 }
2390 
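/*
 * Computes the per-plane sizes (including hardware alignment) for the given V4L2
 * color format and stores them in buf->size.extS[0..2]; unused planes are set to 0.
 */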
2391 void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
2392 {
2393     switch (colorFormat) {
2394     // 1p
2395     case V4L2_PIX_FMT_RGB565 :
2396     case V4L2_PIX_FMT_YUYV :
2397     case V4L2_PIX_FMT_UYVY :
2398     case V4L2_PIX_FMT_VYUY :
2399     case V4L2_PIX_FMT_YVYU :
2400         buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
2401         buf->size.extS[1] = 0;
2402         buf->size.extS[2] = 0;
2403         break;
2404     // 2p
2405     case V4L2_PIX_FMT_NV12 :
2406     case V4L2_PIX_FMT_NV12T :
2407     case V4L2_PIX_FMT_NV21 :
2408         buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
2409         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
2410         buf->size.extS[2] = 0;
2411         break;
2412     case V4L2_PIX_FMT_NV12M :
2413     case V4L2_PIX_FMT_NV12MT_16X16 :
2414     case V4L2_PIX_FMT_NV21M:
2415         buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
2416         buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
2417         buf->size.extS[2] = 0;
2418         break;
2419     case V4L2_PIX_FMT_NV16 :
2420     case V4L2_PIX_FMT_NV61 :
2421         buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2422         buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
2423         buf->size.extS[2] = 0;
2424         break;
2425      // 3p
2426     case V4L2_PIX_FMT_YUV420 :
2427     case V4L2_PIX_FMT_YVU420 :
2428         buf->size.extS[0] = (w * h);
2429         buf->size.extS[1] = (w * h) >> 2;
2430         buf->size.extS[2] = (w * h) >> 2;
2431         break;
2432     case V4L2_PIX_FMT_YUV420M:
2433     case V4L2_PIX_FMT_YVU420M :
2434         buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
2435         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2436         buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2437         break;
2438     case V4L2_PIX_FMT_YUV422P :
2439         buf->size.extS[0] = ALIGN(w,  16) * ALIGN(h,  16);
2440         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2441         buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2442         break;
2443     default:
2444         ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
2445         return;
2446         break;
2447     }
2448 }
2449 
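/*
 * Derives a centered crop rectangle inside the source image that matches the
 * destination aspect ratio, shrinks it further according to the zoom value and
 * aligns the resulting width, height and offsets to even values.
 */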
2450 bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
2451                                              int  dst_w,  int   dst_h,
2452                                              int *crop_x, int *crop_y,
2453                                              int *crop_w, int *crop_h,
2454                                              int zoom)
2455 {
2456     *crop_w = src_w;
2457     *crop_h = src_h;
2458 
2459     if (   src_w != dst_w
2460         || src_h != dst_h) {
2461         float src_ratio = 1.0f;
2462         float dst_ratio = 1.0f;
2463 
2464         // ex : 1024 / 768
2465         src_ratio = (float)src_w / (float)src_h;
2466 
2467         // ex : 352  / 288
2468         dst_ratio = (float)dst_w / (float)dst_h;
2469 
2470         if (dst_w * dst_h < src_w * src_h) {
2471             if (dst_ratio <= src_ratio) {
2472                 // shrink w
2473                 *crop_w = src_h * dst_ratio;
2474                 *crop_h = src_h;
2475             } else {
2476                 // shrink h
2477                 *crop_w = src_w;
2478                 *crop_h = src_w / dst_ratio;
2479             }
2480         } else {
2481             if (dst_ratio <= src_ratio) {
2482                 // shrink w
2483                 *crop_w = src_h * dst_ratio;
2484                 *crop_h = src_h;
2485             } else {
2486                 // shrink h
2487                 *crop_w = src_w;
2488                 *crop_h = src_w / dst_ratio;
2489             }
2490         }
2491     }
2492 
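    /* zoom is applied linearly: zoom == 0 keeps the full crop, while zoom == 10
     * yields zoomLevel 2.0 and therefore halves the crop width and height. */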
2493     if (zoom != 0) {
2494         float zoomLevel = ((float)zoom + 10.0) / 10.0;
2495         *crop_w = (int)((float)*crop_w / zoomLevel);
2496         *crop_h = (int)((float)*crop_h / zoomLevel);
2497     }
2498 
2499     #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
2500     unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
2501     if (w_align != 0) {
2502         if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
2503             && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
2504             *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
2505         }
2506         else
2507             *crop_w -= w_align;
2508     }
2509 
2510     #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
2511     unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
2512     if (h_align != 0) {
2513         if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
2514             && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
2515             *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
2516         }
2517         else
2518             *crop_h -= h_align;
2519     }
2520 
2521     *crop_x = (src_w - *crop_w) >> 1;
2522     *crop_y = (src_h - *crop_h) >> 1;
2523 
2524     if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
2525         *crop_x -= 1;
2526 
2527     if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
2528         *crop_y -= 1;
2529 
2530     return true;
2531 }
2532 
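/*
 * BayerBufManager tracks each bayer buffer through a fixed cycle:
 * HAL(empty) -> sensor -> HAL(filled) -> ISP -> HAL(empty),
 * using simple ring-buffer heads for the sensor and ISP queues.
 */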
2533 BayerBufManager::BayerBufManager()
2534 {
2535     ALOGV("DEBUG(%s): ", __FUNCTION__);
2536     for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
2537         entries[i].status = BAYER_ON_HAL_EMPTY;
2538         entries[i].reqFrameCnt = 0;
2539     }
2540     sensorEnqueueHead = 0;
2541     sensorDequeueHead = 0;
2542     ispEnqueueHead = 0;
2543     ispDequeueHead = 0;
2544     numOnSensor = 0;
2545     numOnIsp = 0;
2546     numOnHalFilled = 0;
2547     numOnHalEmpty = NUM_BAYER_BUFFERS;
2548 }
2549 
2550 BayerBufManager::~BayerBufManager()
2551 {
2552     ALOGV("%s", __FUNCTION__);
2553 }
2554 
2555 int     BayerBufManager::GetIndexForSensorEnqueue()
2556 {
2557     int ret = 0;
2558     if (numOnHalEmpty == 0)
2559         ret = -1;
2560     else
2561         ret = sensorEnqueueHead;
2562     ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
2563     return ret;
2564 }
2565 
2566 int    BayerBufManager::MarkSensorEnqueue(int index)
2567 {
2568     ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2569 
2570     // sanity check
2571     if (index != sensorEnqueueHead) {
2572         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
2573         return -1;
2574     }
2575     if (entries[index].status != BAYER_ON_HAL_EMPTY) {
2576         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2577             index, entries[index].status, BAYER_ON_HAL_EMPTY);
2578         return -1;
2579     }
2580 
2581     entries[index].status = BAYER_ON_SENSOR;
2582     entries[index].reqFrameCnt = 0;
2583     numOnHalEmpty--;
2584     numOnSensor++;
2585     sensorEnqueueHead = GetNextIndex(index);
2586     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2587         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2588     return 0;
2589 }
2590 
2591 int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
2592 {
2593     ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2594 
2595     if (entries[index].status != BAYER_ON_SENSOR) {
2596         ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2597             index, entries[index].status, BAYER_ON_SENSOR);
2598         return -1;
2599     }
2600 
2601     entries[index].status = BAYER_ON_HAL_FILLED;
2602     numOnHalFilled++;
2603     numOnSensor--;
2604 
2605     return 0;
2606 }
2607 
2608 int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2609 {
2610     int ret = 0;
2611     if (numOnHalFilled == 0)
2612         ret = -1;
2613     else {
2614         *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2615         ret = ispEnqueueHead;
2616     }
2617     ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2618     return ret;
2619 }
2620 
2621 int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2622 {
2623     int ret = 0;
2624     if (numOnIsp == 0)
2625         ret = -1;
2626     else {
2627         *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2628         ret = ispDequeueHead;
2629     }
2630     ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2631     return ret;
2632 }
2633 
2634 int    BayerBufManager::MarkIspEnqueue(int index)
2635 {
2636     ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2637 
2638     // sanity check
2639     if (index != ispEnqueueHead) {
2640         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2641         return -1;
2642     }
2643     if (entries[index].status != BAYER_ON_HAL_FILLED) {
2644         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2645             index, entries[index].status, BAYER_ON_HAL_FILLED);
2646         return -1;
2647     }
2648 
2649     entries[index].status = BAYER_ON_ISP;
2650     numOnHalFilled--;
2651     numOnIsp++;
2652     ispEnqueueHead = GetNextIndex(index);
2653     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2654         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2655     return 0;
2656 }
2657 
2658 int    BayerBufManager::MarkIspDequeue(int index)
2659 {
2660     ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2661 
2662     // sanity check
2663     if (index != ispDequeueHead) {
2664         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2665         return -1;
2666     }
2667     if (entries[index].status != BAYER_ON_ISP) {
2668         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2669             index, entries[index].status, BAYER_ON_ISP);
2670         return -1;
2671     }
2672 
2673     entries[index].status = BAYER_ON_HAL_EMPTY;
2674     entries[index].reqFrameCnt = 0;
2675     numOnHalEmpty++;
2676     numOnIsp--;
2677     ispDequeueHead = GetNextIndex(index);
2678     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2679         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2680     return 0;
2681 }
2682 
2683 int BayerBufManager::GetNumOnSensor()
2684 {
2685     return numOnSensor;
2686 }
2687 
2688 int BayerBufManager::GetNumOnHalFilled()
2689 {
2690     return numOnHalFilled;
2691 }
2692 
2693 int BayerBufManager::GetNumOnIsp()
2694 {
2695     return numOnIsp;
2696 }
2697 
2698 int     BayerBufManager::GetNextIndex(int index)
2699 {
2700     index++;
2701     if (index >= NUM_BAYER_BUFFERS)
2702         index = 0;
2703 
2704     return index;
2705 }
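/*
 * Illustrative caller-side sketch of one buffer pass (hypothetical code, not
 * the actual HAL control flow; qbuf()/dqbuf() stand in for the V4L2 calls):
 *
 *   int idx = mgr.GetIndexForSensorEnqueue();          // -1 when nothing empty
 *   if (idx >= 0 && mgr.MarkSensorEnqueue(idx) == 0)
 *       qbuf(sensorNode, idx);                         // BAYER_ON_SENSOR
 *   idx = dqbuf(sensorNode);
 *   mgr.MarkSensorDequeue(idx, reqFrameCnt, &ts);      // BAYER_ON_HAL_FILLED
 *   idx = mgr.GetIndexForIspEnqueue(&reqFrameCnt);
 *   if (idx >= 0 && mgr.MarkIspEnqueue(idx) == 0)
 *       qbuf(ispNode, idx);                            // BAYER_ON_ISP
 *   idx = mgr.GetIndexForIspDequeue(&reqFrameCnt);
 *   if (idx >= 0 && dqbuf(ispNode) == idx)
 *       mgr.MarkIspDequeue(idx);                       // back to BAYER_ON_HAL_EMPTY
 */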
2706 
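/*
 * Main thread handler (behavior as implemented below):
 *  - SIGNAL_MAIN_REQ_Q_NOT_EMPTY: dequeue service requests into the request
 *    manager, apply the requested AF mode/region, and wake the sensor thread.
 *  - SIGNAL_MAIN_STREAM_OUTPUT_DONE: prepare the result metadata, free the
 *    matching request, append the metadata to a dequeued frame, and hand the
 *    frame back to the service.
 */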
2707 void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2708 {
2709     camera_metadata_t *currentRequest = NULL;
2710     camera_metadata_t *currentFrame = NULL;
2711     size_t numEntries = 0;
2712     size_t frameSize = 0;
2713     camera_metadata_t * preparedFrame = NULL;
2714     camera_metadata_t *deregisteredRequest = NULL;
2715     uint32_t currentSignal = self->GetProcessingSignal();
2716     MainThread *  selfThread      = ((MainThread*)self);
2717     int res = 0;
2718 
2719     int ret;
2720     int afMode;
2721     uint32_t afRegion[4];
2722 
2723     ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2724 
2725     if (currentSignal & SIGNAL_THREAD_RELEASE) {
2726         ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2727 
2728         ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2729         selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2730         return;
2731     }
2732 
2733     if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2734         ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2735         if (m_requestManager->IsRequestQueueFull()==false) {
2736             Mutex::Autolock lock(m_afModeTriggerLock);
2737             m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2738             if (NULL == currentRequest) {
2739                 ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
2740                 m_isRequestQueueNull = true;
2741                 if (m_requestManager->IsVdisEnable())
2742                     m_vdisBubbleCnt = 1;
2743             }
2744             else {
2745                 m_requestManager->RegisterRequest(currentRequest, &afMode, afRegion);
2746 
2747                 SetAfMode((enum aa_afmode)afMode);
2748                 SetAfRegion(afRegion);
2749 
2750                 m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2751                 ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2752                 if (m_requestManager->IsRequestQueueFull()==false)
2753                     selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2754 
2755                 m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2756             }
2757         }
2758         else {
2759             m_isRequestQueuePending = true;
2760         }
2761     }
2762 
2763     if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2764         ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2765         /*while (1)*/ {
2766             ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2767             if (ret == false)
2768                 CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
2769 
2770             m_requestManager->DeregisterRequest(&deregisteredRequest);
2771 
2772             ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2773             if (ret < 0)
2774                 CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
2775 
2776             ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2777             if (ret < 0)
2778                 CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
2779 
2780             if (currentFrame==NULL) {
2781                 ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
2782             }
2783             else {
2784                 ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2785             }
2786             res = append_camera_metadata(currentFrame, preparedFrame);
2787             if (res==0) {
2788                 ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2789                 m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2790             }
2791             else {
2792                 ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2793             }
2794         }
2795         if (!m_isRequestQueueNull) {
2796             selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2797         }
2798 
2799         if (getInProgressCount()>0) {
2800             ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2801             m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2802         }
2803     }
2804     ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2805     return;
2806 }
2807 
2808 void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
2809 {
2810     ALOGD("####  common Section");
2811     ALOGD("####                 magic(%x) ",
2812         shot_ext->shot.magicNumber);
2813     ALOGD("####  ctl Section");
2814     ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2815         shot_ext->shot.ctl.request.metadataMode,
2816         shot_ext->shot.ctl.lens.aperture,
2817         shot_ext->shot.ctl.sensor.exposureTime,
2818         shot_ext->shot.ctl.sensor.frameDuration,
2819         shot_ext->shot.ctl.sensor.sensitivity,
2820         shot_ext->shot.ctl.aa.awbMode);
2821 
2822     ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
2823         shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
2824         shot_ext->shot.ctl.request.outputStreams[0]);
2825 
2826     ALOGD("####  DM Section");
2827     ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2828         shot_ext->shot.dm.request.metadataMode,
2829         shot_ext->shot.dm.lens.aperture,
2830         shot_ext->shot.dm.sensor.exposureTime,
2831         shot_ext->shot.dm.sensor.frameDuration,
2832         shot_ext->shot.dm.sensor.sensitivity,
2833         shot_ext->shot.dm.sensor.timeStamp,
2834         shot_ext->shot.dm.aa.awbMode,
2835         shot_ext->shot.dm.request.frameCount );
2836 }
2837 
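/*
 * Flash pre-capture state machine, as read from the switch statements in
 * m_preCaptureSetter / m_preCaptureListenerSensor / m_preCaptureListenerISP:
 *
 *   pre-flash : ON -> ON_WAIT -> ON_DONE -> AUTO_AE_AWB_LOCK
 *               -> AE_AWB_LOCK_WAIT -> AUTO_WAIT -> AUTO_DONE -> AUTO_OFF
 *   capture   : CAPTURE -> CAPTURE_WAIT -> CAPTURE_JPEG -> CAPTURE_END
 *
 * m_preCaptureSetter programs the per-frame flash controls for the current
 * state; the two listener functions advance the state from sensor/ISP metadata.
 */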
2838 void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
2839 {
2840     // Flash
2841     switch (m_ctlInfo.flash.m_flashCnt) {
2842     case IS_FLASH_STATE_ON:
2843         ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2844         // check AF locked
2845         if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
2846             if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2847                 if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2848                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2849                     m_ctlInfo.flash.m_flashTimeOut = 5;
2850                 } else
2851                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2852                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2853             } else {
2854                 m_ctlInfo.flash.m_flashTimeOut--;
2855             }
2856         } else {
2857             if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2858                 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2859                 m_ctlInfo.flash.m_flashTimeOut = 5;
2860             } else
2861                 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2862             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2863         }
2864         break;
2865     case IS_FLASH_STATE_ON_WAIT:
2866         break;
2867     case IS_FLASH_STATE_ON_DONE:
2868         if (!m_ctlInfo.flash.m_afFlashDoneFlg)
2869             // auto transition at pre-capture trigger
2870             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
2871         break;
2872     case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
2873         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2874         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
2875         //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
2876         shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
2877         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
2878         break;
2879     case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2880     case IS_FLASH_STATE_AUTO_WAIT:
2881         shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
2882         shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
2883         break;
2884     case IS_FLASH_STATE_AUTO_DONE:
2885         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2886         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2887         break;
2888     case IS_FLASH_STATE_AUTO_OFF:
2889         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2890         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2891         m_ctlInfo.flash.m_flashEnableFlg = false;
2892         break;
2893     case IS_FLASH_STATE_CAPTURE:
2894         ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2895         m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
2896         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
2897         shot_ext->request_scc = 0;
2898         shot_ext->request_scp = 0;
2899         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
2900         break;
2901     case IS_FLASH_STATE_CAPTURE_WAIT:
2902         shot_ext->request_scc = 0;
2903         shot_ext->request_scp = 0;
2904         break;
2905     case IS_FLASH_STATE_CAPTURE_JPEG:
2906         ALOGV("(%s): [Flash] Flash Capture  (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
2907         shot_ext->request_scc = 1;
2908         shot_ext->request_scp = 1;
2909         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
2910         break;
2911     case IS_FLASH_STATE_CAPTURE_END:
2912         ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2913         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2914         shot_ext->request_scc = 0;
2915         shot_ext->request_scp = 0;
2916         m_ctlInfo.flash.m_flashEnableFlg = false;
2917         m_ctlInfo.flash.m_flashCnt = 0;
2918         m_ctlInfo.flash.m_afFlashDoneFlg= false;
2919         break;
2920     case IS_FLASH_STATE_NONE:
2921         break;
2922     default:
2923         ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
2924     }
2925 }
2926 
2927 void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
2928 {
2929     // Flash
2930     switch (m_ctlInfo.flash.m_flashCnt) {
2931     case IS_FLASH_STATE_AUTO_WAIT:
2932         if (m_ctlInfo.flash.m_flashDecisionResult) {
2933             if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
2934                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2935                 ALOGV("(%s): [Flash] Lis :  AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
2936             } else {
2937                 ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
2938             }
2939         } else {
2940             // If flash wasn't activated in flash auto mode, skip flash auto control
2941             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2942             ALOGV("(%s): [Flash] Skip :  AUTO -> OFF", __FUNCTION__);
2943         }
2944         break;
2945     }
2946 }
2947 
2948 void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
2949 {
2950     // Flash
2951     switch (m_ctlInfo.flash.m_flashCnt) {
2952     case IS_FLASH_STATE_ON_WAIT:
2953         if (shot_ext->shot.dm.flash.decision > 0) {
2954             // store decision result to skip capture sequence
2955             ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
2956             if (shot_ext->shot.dm.flash.decision == 2)
2957                 m_ctlInfo.flash.m_flashDecisionResult = false;
2958             else
2959                 m_ctlInfo.flash.m_flashDecisionResult = true;
2960             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
2961         } else {
2962             if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2963                 ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
2964                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
2965                 m_ctlInfo.flash.m_flashDecisionResult = false;
2966             } else {
2967                 m_ctlInfo.flash.m_flashTimeOut--;
2968             }
2969         }
2970         break;
2971     case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2972         if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
2973             ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
2974             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
2975         } else {
2976             ALOGV("(%s):  [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
2977         }
2978         break;
2979     case IS_FLASH_STATE_CAPTURE_WAIT:
2980         if (m_ctlInfo.flash.m_flashDecisionResult) {
2981             if (shot_ext->shot.dm.flash.firingStable) {
2982                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2983             } else {
2984                 if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2985                     ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
2986                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2987                 } else {
2988                     ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
2989                     m_ctlInfo.flash.m_flashTimeOut--;
2990                 }
2991             }
2992         } else {
2993             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2994         }
2995         break;
2996     }
2997 }
2998 
2999 void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext)
3000 {
3001     switch (m_ctlInfo.flash.i_flashMode) {
3002     case AA_AEMODE_ON:
3003         // In flash-off mode, capture can be done as a ZSL capture
3004         shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED;
3005         break;
3006     case AA_AEMODE_ON_AUTO_FLASH:
3007         // In flash auto mode, the main flash has to be done if the pre-flash was done.
3008         if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg)
3009             shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED;
3010         break;
3011     }
3012 }
3013 
3014 void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext)
3015 {
3016     shot_ext->shot.ctl.aa.afRegions[0] = currentAfRegion[0];
3017     shot_ext->shot.ctl.aa.afRegions[1] = currentAfRegion[1];
3018     shot_ext->shot.ctl.aa.afRegions[2] = currentAfRegion[2];
3019     shot_ext->shot.ctl.aa.afRegions[3] = currentAfRegion[3];
3020 }
3021 
3022 void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion)
3023 {
3024     currentAfRegion[0] = afRegion[0];
3025     currentAfRegion[1] = afRegion[1];
3026     currentAfRegion[2] = afRegion[2];
3027     currentAfRegion[3] = afRegion[3];
3028 }
3029 
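/*
 * AF trigger helper (behavior per the code below): mode 0 is the normal,
 * service-initiated trigger and expects m_afState == HAL_AFSTATE_NEEDS_COMMAND
 * (or an already-scanning restart); a non-zero mode is used by the flash
 * sequence to fire the trigger without that state check.
 */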
3030 void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode)
3031 {
3032     if (m_afState == HAL_AFSTATE_SCANNING) {
3033         ALOGD("(%s): restarting trigger ", __FUNCTION__);
3034     } else if (!mode) {
3035         if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
3036             ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
3037         else
3038             m_afState = HAL_AFSTATE_STARTED;
3039     }
3040     ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState);
3041     shot_ext->shot.ctl.aa.afTrigger = 1;
3042     shot_ext->shot.ctl.aa.afMode = m_afMode;
3043     m_IsAfTriggerRequired = false;
3044 }
3045 
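/*
 * Sensor thread handler (summary of the code below): dequeue a bayer buffer
 * from the sensor node, match it to a pending request (or build a "bubble"
 * shot when nothing matches), patch the shot_ext controls (crop/zoom, AF mode
 * and trigger, scene / night-capture handling, FPS to frameDuration, flash
 * sequencing), round-trip the buffer through the ISP node, raise the SCC/SCP
 * "data coming" signals for the stream threads, and finally queue the bayer
 * buffer back to the sensor.
 */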
3046 void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
3047 {
3048     uint32_t        currentSignal = self->GetProcessingSignal();
3049     SensorThread *  selfThread      = ((SensorThread*)self);
3050     int index;
3051     int index_isp;
3052     status_t res;
3053     nsecs_t frameTime;
3054     int bayersOnSensor = 0, bayersOnIsp = 0;
3055     int j = 0;
3056     bool isCapture = false;
3057     ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
3058 
3059     if (currentSignal & SIGNAL_THREAD_RELEASE) {
3060         CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3061 
3062         ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
3063         cam_int_streamoff(&(m_camera_info.sensor));
3064         ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
3065 
3066         m_camera_info.sensor.buffers = 0;
3067         ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
3068         cam_int_reqbufs(&(m_camera_info.sensor));
3069         ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
3070         m_camera_info.sensor.status = false;
3071 
3072         ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
3073         isp_int_streamoff(&(m_camera_info.isp));
3074         ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
3075 
3076         m_camera_info.isp.buffers = 0;
3077         ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
3078         cam_int_reqbufs(&(m_camera_info.isp));
3079         ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
3080 
3081         exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
3082 
3083         m_requestManager->releaseSensorQ();
3084         m_requestManager->ResetEntry();
3085         ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3086         selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
3087         return;
3088     }
3089 
3090     if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
3091     {
3092         ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
3093         int targetStreamIndex = 0, i=0;
3094         int matchedFrameCnt = -1, processingReqIndex;
3095         struct camera2_shot_ext *shot_ext;
3096         struct camera2_shot_ext *shot_ext_capture;
3097         bool triggered = false;
3098 
3099         /* dqbuf from sensor */
3100         ALOGV("Sensor DQbuf start");
3101         index = cam_int_dqbuf(&(m_camera_info.sensor));
3102         m_requestManager->pushSensorQ(index);
3103         ALOGV("Sensor DQbuf done(%d)", index);
3104         shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3105 
3106         if (m_nightCaptureCnt != 0) {
3107             matchedFrameCnt = m_nightCaptureFrameCnt;
3108         } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
3109             matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
3110             ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
3111         } else {
3112             matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
3113         }
3114 
3115         if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) {
3116             matchedFrameCnt = m_vdisDupFrame;
3117         }
3118 
3119         if (matchedFrameCnt != -1) {
3120             if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) {
3121                 frameTime = systemTime();
3122                 m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
3123                 m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
3124             } else {
3125                 ALOGV("bubble for vdis: m_vdisDupFrame %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt);
3126             }
3127 
3128             // face af mode setting in case of face priority scene mode
3129             if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) {
3130                 ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode);
3131                 m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3132             }
3133 
3134             m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
3135             float zoomLeft, zoomTop, zoomWidth, zoomHeight;
3136             int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
3137 
3138             m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
3139                            m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
3140                            &crop_x, &crop_y,
3141                            &crop_w, &crop_h,
3142                            0);
3143 
3144             if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
3145                 zoomWidth =  m_camera2->getSensorW() / m_zoomRatio;
3146                 zoomHeight = zoomWidth *
3147                         m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
3148             } else {
3149                 zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
3150                 zoomWidth = zoomHeight *
3151                         m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
3152             }
3153             zoomLeft = (crop_w - zoomWidth) / 2;
3154             zoomTop = (crop_h - zoomHeight) / 2;
3155 
3156             int32_t new_cropRegion[3] = { zoomLeft, zoomTop, zoomWidth };
3157 
3158             int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4);
3159             if (cropCompensation)
3160                 new_cropRegion[2] -= cropCompensation;
3161 
3162             shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
3163             shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
3164             shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
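            // Zoom/crop math above, with hypothetical numbers for illustration:
            // a 3264-pixel-wide sensor with cropRegion[2] = 1632 yields
            // m_zoomRatio = 2.0, so zoomWidth = 3264 / 2.0 = 1632 and zoomHeight
            // follows the stream aspect ratio; zoomLeft/zoomTop center that
            // window inside the ratio-matched crop (crop_w x crop_h), and
            // cropCompensation then adjusts the width so that
            // left * 2 + width == ALIGN(crop_w, 4).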
3165             if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) {
3166                 ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode);
3167                 shot_ext->shot.ctl.aa.afMode = m_afMode;
3168                 if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
3169                     ALOGD("### With Automatic trigger for continuous modes");
3170                     m_afState = HAL_AFSTATE_STARTED;
3171                     shot_ext->shot.ctl.aa.afTrigger = 1;
3172                     triggered = true;
3173                     if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) ||
3174                             (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) {
3175                         switch (m_afMode) {
3176                         case AA_AFMODE_CONTINUOUS_PICTURE:
3177                             shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE_FACE;
3178                             ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode);
3179                             break;
3180                         }
3181                     }
3182                     // reset flash result
3183                     if (m_ctlInfo.flash.m_afFlashDoneFlg) {
3184                         m_ctlInfo.flash.m_flashEnableFlg = false;
3185                         m_ctlInfo.flash.m_afFlashDoneFlg = false;
3186                         m_ctlInfo.flash.m_flashDecisionResult = false;
3187                         m_ctlInfo.flash.m_flashCnt = 0;
3188                     }
3189                     m_ctlInfo.af.m_afTriggerTimeOut = 1;
3190                 }
3191 
3192                 m_IsAfModeUpdateRequired = false;
3193                 // support infinity focus mode
3194                 if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) {
3195                     shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY;
3196                     shot_ext->shot.ctl.aa.afTrigger = 1;
3197                     triggered = true;
3198                 }
3199                 if (m_afMode2 != NO_CHANGE) {
3200                     enum aa_afmode tempAfMode = m_afMode2;
3201                     m_afMode2 = NO_CHANGE;
3202                     SetAfMode(tempAfMode);
3203                 }
3204             }
3205             else {
3206                 shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
3207             }
3208             if (m_IsAfTriggerRequired) {
3209                 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
3210                     // flash case
3211                     if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
3212                         if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
3213                             // Flash is enabled and start AF
3214                             m_afTrigger(shot_ext, 1);
3215                         } else {
3216                             m_afTrigger(shot_ext, 0);
3217                         }
3218                     }
3219                 } else {
3220                     // non-flash case
3221                     m_afTrigger(shot_ext, 0);
3222                 }
3223             } else {
3224                 shot_ext->shot.ctl.aa.afTrigger = 0;
3225             }
3226 
3227             if (m_wideAspect) {
3228                 shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3229             } else {
3230                 shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3231             }
3232             if (triggered)
3233                 shot_ext->shot.ctl.aa.afTrigger = 1;
3234 
3235             // TODO : check collision with AFMode Update
3236             if (m_IsAfLockRequired) {
3237                 shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
3238                 m_IsAfLockRequired = false;
3239             }
3240             ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
3241                 index,
3242                 shot_ext->shot.ctl.request.frameCount,
3243                 shot_ext->request_scp,
3244                 shot_ext->request_scc,
3245                 shot_ext->dis_bypass, sizeof(camera2_shot));
3246 
3247             // update AF region
3248             m_updateAfRegion(shot_ext);
3249 
3250             m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3251             if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
3252                     && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
3253                 shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
3254             if (m_nightCaptureCnt == 0) {
3255                 if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
3256                         && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3257                     shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3258                     shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3259                     shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3260                     m_nightCaptureCnt = 4;
3261                     m_nightCaptureFrameCnt = matchedFrameCnt;
3262                     shot_ext->request_scc = 0;
3263                 }
3264             }
3265             else if (m_nightCaptureCnt == 1) {
3266                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3267                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3268                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3269                 m_nightCaptureCnt--;
3270                 m_nightCaptureFrameCnt = 0;
3271                 shot_ext->request_scc = 1;
3272             }
3273             else if (m_nightCaptureCnt == 2) {
3274                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3275                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3276                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3277                 m_nightCaptureCnt--;
3278                 shot_ext->request_scc = 0;
3279             }
3280             else if (m_nightCaptureCnt == 3) {
3281                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3282                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3283                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3284                 m_nightCaptureCnt--;
3285                 shot_ext->request_scc = 0;
3286             }
3287             else if (m_nightCaptureCnt == 4) {
3288                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3289                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3290                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3291                 m_nightCaptureCnt--;
3292                 shot_ext->request_scc = 0;
3293             }
3294 
3295             switch (shot_ext->shot.ctl.aa.aeTargetFpsRange[1]) {
3296             case 15:
3297                 shot_ext->shot.ctl.sensor.frameDuration = (66666 * 1000);
3298                 break;
3299 
3300             case 24:
3301                 shot_ext->shot.ctl.sensor.frameDuration = (41666 * 1000);
3302                 break;
3303 
3304             case 25:
3305                 shot_ext->shot.ctl.sensor.frameDuration = (40000 * 1000);
3306                 break;
3307 
3308             case 30:
3309             default:
3310                 shot_ext->shot.ctl.sensor.frameDuration = (33333 * 1000);
3311                 break;
3312             }
3313             shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
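            // frameDuration above is simply 10^9 / fps expressed in nanoseconds:
            // 30 fps -> 33333 us, 25 -> 40000 us, 24 -> 41666 us, 15 -> 66666 us,
            // each multiplied by 1000 to convert microseconds to nanoseconds.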
3314 
3315             // Flash mode
3316             // Keep and Skip request_scc = 1 at flash enable mode to operate flash sequence
3317             if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
3318                     && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
3319                     && (m_cameraId == 0)) {
3320                 if (!m_ctlInfo.flash.m_flashDecisionResult) {
3321                     m_ctlInfo.flash.m_flashEnableFlg = false;
3322                     m_ctlInfo.flash.m_afFlashDoneFlg = false;
3323                     m_ctlInfo.flash.m_flashCnt = 0;
3324                 } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) ||
3325                                           (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
3326                     ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
3327                     shot_ext->request_scc = 0;
3328                     m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
3329                     m_ctlInfo.flash.m_flashEnableFlg = true;
3330                     m_ctlInfo.flash.m_afFlashDoneFlg = false;
3331                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
3332                 } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) {
3333                     ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
3334                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3335                     m_ctlInfo.flash.m_flashEnableFlg = false;
3336                     m_ctlInfo.flash.m_afFlashDoneFlg= false;
3337                     m_ctlInfo.flash.m_flashCnt = 0;
3338                 }
3339             } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) {
3340                 m_ctlInfo.flash.m_flashDecisionResult = false;
3341             }
3342 
3343             if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) {
3344                 if (m_ctlInfo.flash.m_flashTorchMode == false) {
3345                     m_ctlInfo.flash.m_flashTorchMode = true;
3346                 }
3347             } else {
3348                 if (m_ctlInfo.flash.m_flashTorchMode == true) {
3349                     shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3350                     shot_ext->shot.ctl.flash.firingPower = 0;
3351                     m_ctlInfo.flash.m_flashTorchMode = false;
3352                 } else {
3353                     shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
3354                 }
3355             }
3356 
3357             if (shot_ext->isReprocessing) {
3358                 ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__);
3359                 m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
3360                 shot_ext->request_scp = 0;
3361                 shot_ext->request_scc = 0;
3362                 m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
3363                 m_ctlInfo.flash.m_flashDecisionResult = false;
3364                 memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt)),
3365                     sizeof(struct camera2_shot_ext));
3366                 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
3367                 m_ctlInfo.flash.m_flashEnableFlg = false;
3368             }
3369 
3370             if (m_ctlInfo.flash.m_flashEnableFlg) {
3371                 m_preCaptureListenerSensor(shot_ext);
3372                 m_preCaptureSetter(shot_ext);
3373             }
3374 
3375             ALOGV("(%s): queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
3376             (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3377             (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3378             (int)(shot_ext->shot.ctl.aa.afTrigger));
3379 
3380             if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) {
3381                 shot_ext->dis_bypass = 1;
3382                 shot_ext->dnr_bypass = 1;
3383                 shot_ext->request_scp = 0;
3384                 shot_ext->request_scc = 0;
3385                 m_vdisBubbleCnt--;
3386                 matchedFrameCnt = -1;
3387             } else {
3388                 m_vdisDupFrame = matchedFrameCnt;
3389             }
3390             if (m_scpForceSuspended)
3391                 shot_ext->request_scc = 0;
3392 
3393             uint32_t current_scp = shot_ext->request_scp;
3394             uint32_t current_scc = shot_ext->request_scc;
3395 
3396             if (shot_ext->shot.dm.request.frameCount == 0) {
3397                 CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
3398             }
3399 
3400             cam_int_qbuf(&(m_camera_info.isp), index);
3401 
3402             ALOGV("### isp DQBUF start");
3403             index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3404 
3405             shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3406 
3407             if (m_ctlInfo.flash.m_flashEnableFlg)
3408                 m_preCaptureListenerISP(shot_ext);
3409 
3410             ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)",
3411                 index,
3412                 shot_ext->shot.ctl.request.frameCount,
3413                 shot_ext->request_scp,
3414                 shot_ext->request_scc,
3415                 shot_ext->dis_bypass,
3416                 shot_ext->dnr_bypass, sizeof(camera2_shot));
3417 
3418             ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
3419                 (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3420                 (int)(shot_ext->shot.dm.aa.awbMode),
3421                 (int)(shot_ext->shot.dm.aa.afMode));
3422 
3423 #ifndef ENABLE_FRAME_SYNC
3424             m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
3425 #endif
3426 
3427             if (!shot_ext->fd_bypass) {
3428                 /* FD orientation axis transformation */
3429                 for (int i=0; i < CAMERA2_MAX_FACES; i++) {
3430                     if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
3431                         shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
3432                                                                                                 * shot_ext->shot.dm.stats.faceRectangles[i][0])
3433                                                                                                 / m_streamThreads[0].get()->m_parameters.width;
3434                     if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
3435                         shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
3436                                                                                                 * shot_ext->shot.dm.stats.faceRectangles[i][1])
3437                                                                                                 / m_streamThreads[0].get()->m_parameters.height;
3438                     if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
3439                         shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
3440                                                                                                 * shot_ext->shot.dm.stats.faceRectangles[i][2])
3441                                                                                                 / m_streamThreads[0].get()->m_parameters.width;
3442                     if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
3443                         shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
3444                                                                                                 * shot_ext->shot.dm.stats.faceRectangles[i][3])
3445                                                                                                 / m_streamThreads[0].get()->m_parameters.height;
3446                 }
3447             }
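            // Example of the face-rectangle rescale above (hypothetical sizes):
            // with a 3264x2448 sensor and a 1280x720 stream, a stream-space
            // x = 640 maps to 3264 * 640 / 1280 = 1632 on the sensor axis and
            // y = 360 maps to 2448 * 360 / 720 = 1224.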
3448             // aeState control
3449             if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT)
3450                 m_preCaptureAeState(shot_ext);
3451 
3452             // At scene mode face priority
3453             if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE)
3454                 shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
3455 
3456             if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
3457                 m_requestManager->ApplyDynamicMetadata(shot_ext);
3458             }
3459 
3460             if (current_scc != shot_ext->request_scc) {
3461                 ALOGD("(%s): scc frame drop1 request_scc(%d to %d)",
3462                                 __FUNCTION__, current_scc, shot_ext->request_scc);
3463                 m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3464             }
3465             if (shot_ext->request_scc) {
3466                 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)");
3467                 if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) {
3468                     if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)
3469                         memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(shot_ext->shot.ctl.request.frameCount)),
3470                             sizeof(struct camera2_shot_ext));
3471                     else
3472                         memcpy(&m_jpegMetadata, (void*)shot_ext, sizeof(struct camera2_shot_ext));
3473                 }
3474                 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3475             }
3476             if (current_scp != shot_ext->request_scp) {
3477                 ALOGD("(%s): scp frame drop1 request_scp(%d to %d)",
3478                                 __FUNCTION__, current_scp, shot_ext->request_scp);
3479                 m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3480             }
3481             if (shot_ext->request_scp) {
3482                 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)");
3483                 m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3484             }
3485 
3486             ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
3487                shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
3488             if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
3489                 ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
3490                 m_scp_closed = true;
3491             }
3492             else
3493                 m_scp_closed = false;
3494 
3495             OnAfNotification(shot_ext->shot.dm.aa.afState);
3496             OnPrecaptureMeteringNotificationISP();
3497         }   else {
3498             memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl));
3499             shot_ext->shot.ctl.request.frameCount = 0xfffffffe;
3500             shot_ext->request_sensor = 1;
3501             shot_ext->dis_bypass = 1;
3502             shot_ext->dnr_bypass = 1;
3503             shot_ext->fd_bypass = 1;
3504             shot_ext->drc_bypass = 1;
3505             shot_ext->request_scc = 0;
3506             shot_ext->request_scp = 0;
3507             if (m_wideAspect) {
3508                 shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3509             } else {
3510                 shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3511             }
3512             shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode;
3513             if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3514                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
3515                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3516             }
3517             shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3518             shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3519             ALOGV("### isp QBUF start (bubble)");
3520             ALOGV("bubble: queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)",
3521                 (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3522                 (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3523                 (int)(shot_ext->shot.ctl.aa.afTrigger));
3524 
3525             cam_int_qbuf(&(m_camera_info.isp), index);
3526             ALOGV("### isp DQBUF start (bubble)");
3527             index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3528             shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3529             ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)",
3530                 (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3531                 (int)(shot_ext->shot.dm.aa.awbMode),
3532                 (int)(shot_ext->shot.dm.aa.afMode));
3533 
3534             OnAfNotification(shot_ext->shot.dm.aa.afState);
3535         }
3536 
3537         index = m_requestManager->popSensorQ();
3538         if(index < 0){
3539             ALOGE("sensorQ is empty");
3540             return;
3541         }
3542 
3543         processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
3544         shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3545         if (m_scp_closing || m_scp_closed) {
3546             ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
3547             shot_ext->request_scc = 0;
3548             shot_ext->request_scp = 0;
3549             shot_ext->request_sensor = 0;
3550         }
3551         cam_int_qbuf(&(m_camera_info.sensor), index);
3552         ALOGV("Sensor Qbuf done(%d)", index);
3553 
3554         if (!m_scp_closing
3555             && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
3556             ALOGV("make bubble shot: matchedFrameCnt(%d) processingReqIndex(%d)",
3557                                     matchedFrameCnt, processingReqIndex);
3558             selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
3559         }
3560     }
3561     return;
3562 }
3563 
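/*
 * One-time buffer setup for a stream thread (summary of the code below): every
 * service-owned buffer is dequeued once so its svcBufStatus entry moves to
 * ON_DRIVER or ON_HAL, and JPEG / preview-callback substreams get their
 * intermediate ION buffers (m_resizeBuf, m_previewCbBuf) allocated here.
 */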
3564 void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
3565 {
3566     uint32_t                currentSignal   = self->GetProcessingSignal();
3567     StreamThread *          selfThread      = ((StreamThread*)self);
3568     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3569     node_info_t             *currentNode    = selfStreamParms->node;
3570     substream_parameters_t  *subParms;
3571     buffer_handle_t * buf = NULL;
3572     status_t res;
3573     void *virtAddr[3];
3574     int i, j;
3575     int index;
3576     nsecs_t timestamp;
3577 
3578     if (!(selfThread->m_isBufferInit))
3579     {
3580         for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3581             res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3582             if (res != NO_ERROR || buf == NULL) {
3583                 ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3584                 return;
3585             }
3586             ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3587                ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3588 
3589             index = selfThread->findBufferIndex(buf);
3590             if (index == -1) {
3591                 ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
3592             }
3593             else {
3594                 ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3595                     __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
3596                 if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
3597                     selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3598                 else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
3599                     selfStreamParms->svcBufStatus[index] = ON_HAL;
3600                 else {
3601                     ALOGV("DBG(%s): buffer status abnormal (%d) "
3602                         , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
3603                 }
3604                 selfStreamParms->numSvcBufsInHal++;
3605             }
3606             selfStreamParms->bufIndex = 0;
3607         }
3608         selfThread->m_isBufferInit = true;
3609     }
3610     for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3611         if (selfThread->m_attachedSubStreams[i].streamId == -1)
3612             continue;
3613 
3614         subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
3615         if (subParms->type && subParms->needBufferInit) {
3616             ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
3617                 __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
3618             int checkingIndex = 0;
3619             bool found = false;
3620             for ( i = 0 ; i < subParms->numSvcBuffers; i++) {
3621                 res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3622                 if (res != NO_ERROR || buf == NULL) {
3623                     ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3624                     return;
3625                 }
3626                 subParms->numSvcBufsInHal++;
3627                 ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3628                    subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3629 
3630                 if (m_grallocHal->lock(m_grallocHal, *buf,
3631                        subParms->usage, 0, 0,
3632                        subParms->width, subParms->height, virtAddr) != 0) {
3633                     ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3634                 }
3635                 else {
3636                       ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3637                         __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3638                 }
3639                 found = false;
3640                 for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3641                     if (subParms->svcBufHandle[checkingIndex] == *buf ) {
3642                         found = true;
3643                         break;
3644                     }
3645                 }
3646                 ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3647                 if (!found) break;
3648 
3649                 index = checkingIndex;
3650 
3651                 if (index == -1) {
3652                     ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3653                 }
3654                 else {
3655                     ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3656                         __FUNCTION__, index, subParms->svcBufStatus[index]);
3657                     if (subParms->svcBufStatus[index]== ON_SERVICE)
3658                         subParms->svcBufStatus[index] = ON_HAL;
3659                     else {
3660                         ALOGV("DBG(%s): buffer status abnormal (%d) "
3661                             , __FUNCTION__, subParms->svcBufStatus[index]);
3662                     }
3663                     if (*buf != subParms->svcBufHandle[index])
3664                         ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3665                     else
3666                         ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3667                 }
3668                 subParms->svcBufIndex = 0;
3669             }
3670             if (subParms->type == SUBSTREAM_TYPE_JPEG) {
3671                 m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
3672                 m_resizeBuf.size.extS[1] = 0;
3673                 m_resizeBuf.size.extS[2] = 0;
3674 
3675                 if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
3676                     ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
3677                 }
3678             }
3679             if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
3680                 m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
3681                 subParms->height, &m_previewCbBuf);
3682 
3683                 if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
3684                     ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
3685                 }
3686             }
3687             subParms->needBufferInit= false;
3688         }
3689     }
3690 }
3691 
3692 void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
3693 {
3694     StreamThread *          selfThread      = ((StreamThread*)self);
3695     ALOGV("DEBUG(%s): ", __FUNCTION__ );
3696     memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
3697     selfThread->m_isBufferInit = false;
3698     for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3699         selfThread->m_attachedSubStreams[i].streamId    = -1;
3700         selfThread->m_attachedSubStreams[i].priority    = 0;
3701     }
3702     return;
3703 }
3704 
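/*
 * Dispatches a finished main-stream buffer to the matching substream handler
 * (JPEG / record / preview callback) by stream id; unknown ids are a no-op
 * (summary of the switch below).
 */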
3705 int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
3706     int stream_id, nsecs_t frameTimeStamp)
3707 {
3708     substream_parameters_t  *subParms = &m_subStreams[stream_id];
3709 
3710     switch (stream_id) {
3711 
3712     case STREAM_ID_JPEG:
3713         return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
3714 
3715     case STREAM_ID_RECORD:
3716         return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
3717 
3718     case STREAM_ID_PRVCB:
3719         return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
3720 
3721     default:
3722         return 0;
3723     }
3724 }
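/*
 * Handler for STREAM_TYPE_DIRECT stream threads (the preview/ZSL paths whose service
 * buffers are queued straight to the driver node). Three signals are processed:
 *  - SIGNAL_THREAD_RELEASE: streamoff + reqbufs(0) and, with ENABLE_FRAME_SYNC,
 *    freeing the per-buffer metadata planes.
 *  - SIGNAL_STREAM_REPROCESSING_START: runs the attached substreams on a buffer
 *    acquired from the reprocess stream ops.
 *  - SIGNAL_STREAM_DATA_COMING: DQBUF from the node, fan the frame out to the
 *    attached substreams, enqueue or cancel the gralloc buffer to the service,
 *    then refill the driver queue via QBUF.
 */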
3725 void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
3726 {
3727     uint32_t                currentSignal   = self->GetProcessingSignal();
3728     StreamThread *          selfThread      = ((StreamThread*)self);
3729     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3730     node_info_t             *currentNode    = selfStreamParms->node;
3731     int i = 0;
3732     nsecs_t frameTimeStamp;
3733 
3734     if (currentSignal & SIGNAL_THREAD_RELEASE) {
3735         CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3736 
3737         if (selfThread->m_isBufferInit) {
3738             if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) {
3739                 ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
3740                     selfThread->m_index, currentNode->fd);
3741                 if (cam_int_streamoff(currentNode) < 0 ) {
3742                     ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3743                 }
3744                 ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
3745                         selfThread->m_index, currentNode->fd);
3746                 currentNode->buffers = 0;
3747                 cam_int_reqbufs(currentNode);
3748                 ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
3749                         selfThread->m_index, currentNode->fd);
3750             }
3751         }
3752 #ifdef ENABLE_FRAME_SYNC
3753         // free metabuffers
3754         for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
3755             if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
3756                 freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
3757                 selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
3758                 selfStreamParms->metaBuffers[i].size.extS[0] = 0;
3759             }
3760 #endif
3761         selfThread->m_isBufferInit = false;
3762         selfThread->m_releasing = false;
3763         selfThread->m_activated = false;
3764         ALOGV("(%s): [%d] END  SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3765         return;
3766     }
3767     if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
3768         status_t    res;
3769         buffer_handle_t * buf = NULL;
3770         bool found = false;
3771         ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
3772             __FUNCTION__, selfThread->m_index);
3773         res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
3774         if (res != NO_ERROR || buf == NULL) {
3775             ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
3776             return;
3777         }
3778         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3779         int checkingIndex = 0;
3780         for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3781             if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3782                 found = true;
3783                 break;
3784             }
3785         }
3786         ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
3787             __FUNCTION__, (unsigned int)buf, found, checkingIndex);
3788 
3789         if (!found) return;
3790 
3791         for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3792             if (selfThread->m_attachedSubStreams[i].streamId == -1)
3793                 continue;
3794 
3795 #ifdef ENABLE_FRAME_SYNC
3796             frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt);
3797             m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
3798 #else
3799             frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3800 #endif
3801             if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
3802                 m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
3803                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3804         }
3805 
3806         res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
3807         if (res != NO_ERROR) {
3808             ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
3809             return;
3810         }
3811         ALOGV("(%s): streamthread[%d] END   SIGNAL_STREAM_REPROCESSING_START",
3812             __FUNCTION__,selfThread->m_index);
3813 
3814         return;
3815     }
3816     if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3817         buffer_handle_t * buf = NULL;
3818         status_t res = 0;
3819         int i, j;
3820         int index;
3821         nsecs_t timestamp;
3822 #ifdef ENABLE_FRAME_SYNC
3823         camera2_stream *frame;
3824         uint8_t currentOutputStreams;
3825         bool directOutputEnabled = false;
3826 #endif
3827         int numOfUndqbuf = 0;
3828 
3829         ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3830 
3831         m_streamBufferInit(self);
3832 
3833         do {
3834             ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
3835                 selfThread->m_index, selfThread->streamType);
3836 
3837 #ifdef ENABLE_FRAME_SYNC
3838             selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
3839             frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
3840             frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3841             currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
3842             ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams);
3843             if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)||
3844                  ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) {
3845                 directOutputEnabled = true;
3846             }
3847             if (!directOutputEnabled) {
3848                 if (!m_nightCaptureFrameCnt)
3849                     m_requestManager->NotifyStreamOutput(frame->rcount);
3850             }
3851 #else
3852             selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3853             frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3854 #endif
3855             ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)  sigcnt(%d)",__FUNCTION__,
3856                 selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
3857 
3858             if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] !=  ON_DRIVER)
3859                 ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3860                        __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
3861             selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3862 
3863             for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3864                 if (selfThread->m_attachedSubStreams[i].streamId == -1)
3865                     continue;
3866 #ifdef ENABLE_FRAME_SYNC
3867                 if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3868                     m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3869                         selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3870                 }
3871 #else
3872                 if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3873                     m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3874                         selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3875                 }
3876 #endif
3877             }
3878 
3879             if (m_requestManager->GetSkipCnt() <= 0) {
3880 #ifdef ENABLE_FRAME_SYNC
3881                 if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3882                     ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
3883                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3884                             frameTimeStamp,
3885                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3886                 }
3887                 else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3888                     ALOGV("** SCC output (frameCnt:%d)", frame->rcount);
3889                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3890                                 frameTimeStamp,
3891                                 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3892                 }
3893                 else {
3894                     res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3895                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3896                     ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3897                 }
3898 #else
3899                 if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3900                     ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
3901                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3902                             frameTimeStamp,
3903                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3904                 }
3905                 else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3906                     ALOGV("** SCC output (frameCnt:%d)", m_requestManager->GetFrameIndex());
3907                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3908                                 frameTimeStamp,
3909                                 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3910                 }
3911 #endif
3912                 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3913             }
3914             else {
3915                 res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3916                         &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3917                 ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3918             }
3919 #ifdef ENABLE_FRAME_SYNC
3920             if (directOutputEnabled) {
3921                 if (!m_nightCaptureFrameCnt)
3922                      m_requestManager->NotifyStreamOutput(frame->rcount);
3923             }
3924 #endif
3925             if (res == 0) {
3926                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
3927                 selfStreamParms->numSvcBufsInHal--;
3928             }
3929             else {
3930                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3931             }
3932 
3933         }
3934         while(0);
3935 
3936         while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS))
3937                     < selfStreamParms->minUndequedBuffer) {
3938             res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3939             if (res != NO_ERROR || buf == NULL) {
3940                 ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index,  res, selfStreamParms->numSvcBufsInHal);
3941                 break;
3942             }
3943             selfStreamParms->numSvcBufsInHal++;
3944             ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3945                 selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3946                ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3947             const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3948 
3949             bool found = false;
3950             int checkingIndex = 0;
3951             for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3952                 if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3953                     found = true;
3954                     break;
3955                 }
3956             }
3957             if (!found) break;
3958             selfStreamParms->bufIndex = checkingIndex;
3959             if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
3960                 uint32_t    plane_index = 0;
3961                 ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
3962                 struct v4l2_buffer v4l2_buf;
3963                 struct v4l2_plane  planes[VIDEO_MAX_PLANES];
3964 
3965                 v4l2_buf.m.planes   = planes;
3966                 v4l2_buf.type       = currentNode->type;
3967                 v4l2_buf.memory     = currentNode->memory;
3968                 v4l2_buf.index      = selfStreamParms->bufIndex;
3969                 v4l2_buf.length     = currentNode->planes;
3970 
3971                 v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
3972                 v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
3973                 v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
3974                 for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
3975                     v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
3976                 }
3977 #ifdef ENABLE_FRAME_SYNC
3978                 /* add plane for metadata*/
3979                 v4l2_buf.length += selfStreamParms->metaPlanes;
3980                 v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
3981                 v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
3982 #endif
3983                 if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
3984                     ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
3985                         __FUNCTION__, selfThread->m_index);
3986                     return;
3987                 }
3988                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
3989                 ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
3990                     __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
3991             }
3992         }
3993 
3994         ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3995     }
3996     return;
3997 }
3998 
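/*
 * Handler for STREAM_TYPE_INDIRECT stream threads (capture/SCC node). Frames are not
 * enqueued to the service directly; each dequeued buffer is only handed to the
 * attached substreams (e.g. JPEG) and then immediately re-queued to the driver.
 */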
3999 void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
4000 {
4001     uint32_t                currentSignal   = self->GetProcessingSignal();
4002     StreamThread *          selfThread      = ((StreamThread*)self);
4003     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4004     node_info_t             *currentNode    = selfStreamParms->node;
4005 
4006 
4007     if (currentSignal & SIGNAL_THREAD_RELEASE) {
4008         CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4009 
4010         if (selfThread->m_isBufferInit) {
4011             if (currentNode->fd == m_camera_info.capture.fd) {
4012                 if (m_camera_info.capture.status == true) {
4013                     ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4014                     selfThread->m_index, currentNode->fd);
4015                     if (cam_int_streamoff(currentNode) < 0 ){
4016                         ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4017                     } else {
4018                         m_camera_info.capture.status = false;
4019                     }
4020                 }
4021             } else {
4022                 ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4023                 selfThread->m_index, currentNode->fd);
4024                 if (cam_int_streamoff(currentNode) < 0 ){
4025                     ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4026                 }
4027             }
4028             ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
4029             ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
4030                     selfThread->m_index, currentNode->fd);
4031             currentNode->buffers = 0;
4032             cam_int_reqbufs(currentNode);
4033             ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
4034                     selfThread->m_index, currentNode->fd);
4035         }
4036 
4037         selfThread->m_isBufferInit = false;
4038         selfThread->m_releasing = false;
4039         selfThread->m_activated = false;
4040         ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4041         return;
4042     }
4043 
4044     if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
4045 #ifdef ENABLE_FRAME_SYNC
4046         camera2_stream *frame;
4047         uint8_t currentOutputStreams;
4048 #endif
4049         nsecs_t frameTimeStamp;
4050 
4051         ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
4052             __FUNCTION__,selfThread->m_index);
4053 
4054         m_streamBufferInit(self);
4055 
4056         ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
4057         selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
4058         ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
4059             selfThread->m_index, selfStreamParms->bufIndex);
4060 
4061 #ifdef ENABLE_FRAME_SYNC
4062         frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
4063         frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
4064         currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
4065         ALOGV("frame count(SCC) : %d outputStream(%x)",  frame->rcount, currentOutputStreams);
4066 #else
4067         frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
4068 #endif
4069 
4070         for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
4071             if (selfThread->m_attachedSubStreams[i].streamId == -1)
4072                 continue;
4073 #ifdef ENABLE_FRAME_SYNC
4074             if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4075                 m_requestManager->NotifyStreamOutput(frame->rcount);
4076                 m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4077                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4078             }
4079 #else
4080             if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4081                 m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4082                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4083             }
4084 #endif
4085         }
4086         cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
4087         ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);
4088 
4089 
4090 
4091         ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
4092             __FUNCTION__, selfThread->m_index);
4093     }
4094 
4095 
4096     return;
4097 }
4098 
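/* Top-level stream thread entry: routes the pending signal to the direct or indirect
 * handler according to the thread's streamType. */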
4099 void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
4100 {
4101     uint32_t                currentSignal   = self->GetProcessingSignal();
4102     StreamThread *          selfThread      = ((StreamThread*)self);
4103     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4104     node_info_t             *currentNode    = selfStreamParms->node;
4105 
4106     ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
4107 
4108     // Do something in Child thread handler
4109     // Should change function to class that inherited StreamThread class to support dynamic stream allocation
4110     if (selfThread->streamType == STREAM_TYPE_DIRECT) {
4111         m_streamFunc_direct(self);
4112     } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
4113         m_streamFunc_indirect(self);
4114     }
4115 
4116     return;
4117 }
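/*
 * JPEG substream handler. Picks a free service buffer, crops/scales the source frame
 * into m_resizeBuf with the picture CSC (honoring m_zoomRatio), encodes it with
 * yuv2Jpeg(), writes the camera2_jpeg_blob trailer at the end of the gralloc buffer,
 * and enqueues the result to the JPEG stream. Returns 1 if no free buffer was found.
 */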
4118 int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4119 {
4120     Mutex::Autolock lock(m_jpegEncoderLock);
4121     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4122     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_JPEG];
4123     status_t    res;
4124     ExynosRect jpegRect;
4125     bool found = false;
4126     int srcW, srcH, srcCropX, srcCropY;
4127     int pictureW, pictureH, pictureFramesize = 0;
4128     int pictureFormat;
4129     int cropX, cropY, cropW, cropH = 0;
4130     ExynosBuffer resizeBufInfo;
4131     ExynosRect   m_jpegPictureRect;
4132     buffer_handle_t * buf = NULL;
4133     camera2_jpeg_blob * jpegBlob = NULL;
4134     int jpegBufSize = 0;
4135 
4136     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4137     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4138         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4139             found = true;
4140             break;
4141         }
4142         subParms->svcBufIndex++;
4143         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4144             subParms->svcBufIndex = 0;
4145     }
4146     if (!found) {
4147         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4148         subParms->svcBufIndex++;
4149         return 1;
4150     }
4151 
4152     m_jpegEncodingCount++;
4153 
4154     m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
4155                     m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
4156                     &srcCropX, &srcCropY,
4157                     &srcW, &srcH,
4158                     0);
4159 
4160     m_jpegPictureRect.w = subParms->width;
4161     m_jpegPictureRect.h = subParms->height;
4162 
4163     ALOGV("DEBUG(%s): stream w = %d, h = %d, jpeg w = %d, h = %d",
4164               __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
4165                    m_jpegPictureRect.w, m_jpegPictureRect.h);
4166 
4167     m_getRatioSize(srcW, srcH,
4168                    m_jpegPictureRect.w, m_jpegPictureRect.h,
4169                    &cropX, &cropY,
4170                    &pictureW, &pictureH,
4171                    0);
4172     pictureFormat = V4L2_PIX_FMT_YUYV;
4173     pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
4174 
4175     if (m_exynosPictureCSC) {
4176         float zoom_w = 0, zoom_h = 0;
4177         if (m_zoomRatio == 0)
4178             m_zoomRatio = 1;
4179 
4180         if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
4181             zoom_w =  pictureW / m_zoomRatio;
4182             zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
4183         } else {
4184             zoom_h = pictureH / m_zoomRatio;
4185             zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
4186         }
4187         cropX = (srcW - zoom_w) / 2;
4188         cropY = (srcH - zoom_h) / 2;
4189         cropW = zoom_w;
4190         cropH = zoom_h;
4191 
4192         ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4193               __FUNCTION__, cropX, cropY, cropW, cropH);
4194 
4195         csc_set_src_format(m_exynosPictureCSC,
4196                            ALIGN(srcW, 16), ALIGN(srcH, 16),
4197                            cropX, cropY, cropW, cropH,
4198                            V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
4199                            0);
4200 
4201         csc_set_dst_format(m_exynosPictureCSC,
4202                            m_jpegPictureRect.w, m_jpegPictureRect.h,
4203                            0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
4204                            V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
4205                            0);
4206         for (int i = 0 ; i < 3 ; i++)
4207             ALOGV("DEBUG(%s): srcImageBuf->fd.extFd[%d]=%d ",
4208                 __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
4209         csc_set_src_buffer(m_exynosPictureCSC,
4210                            (void **)&srcImageBuf->fd.fd);
4211 
4212         csc_set_dst_buffer(m_exynosPictureCSC,
4213                            (void **)&m_resizeBuf.fd.fd);
4214         for (int i = 0 ; i < 3 ; i++)
4215             ALOGV("DEBUG(%s): m_resizeBuf.fd.extFd[%d]=%d m_resizeBuf.size.extS[%d]=%d",
4216                 __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
4217 
4218         if (csc_convert(m_exynosPictureCSC) != 0)
4219             ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
4220 
4221     }
4222     else {
4223         ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
4224     }
4225 
4226     resizeBufInfo = m_resizeBuf;
4227 
4228     m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
4229 
4230     for (int i = 1; i < 3; i++) {
4231         if (m_resizeBuf.size.extS[i] != 0)
4232             m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
4233 
4234         ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
4235     }
4236 
4237     jpegRect.w = m_jpegPictureRect.w;
4238     jpegRect.h = m_jpegPictureRect.h;
4239     jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
4240 
4241     for (int j = 0 ; j < 3 ; j++)
4242         ALOGV("DEBUG(%s): dest buf node  fd.extFd[%d]=%d size=%d virt=%x ",
4243             __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
4244             (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
4245             (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
4246 
4247     jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0];
4248     if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) {
4249         ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
4250     } else {
4251         m_resizeBuf = resizeBufInfo;
4252 
4253         int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s;
4254         ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__,
4255             m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize);
4256         char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4257         jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]);
4258 
4259         if (jpegBuffer[jpegSize-1] == 0)
4260             jpegSize--;
4261         jpegBlob->jpeg_size = jpegSize;
4262         jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID;
4263     }
4264     subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize;
4265     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4266 
4267     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4268             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4269     if (res == 0) {
4270         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4271         subParms->numSvcBufsInHal--;
4272     }
4273     else {
4274         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4275     }
4276 
4277     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4278     {
4279         bool found = false;
4280         int checkingIndex = 0;
4281 
4282         ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4283 
4284         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4285         if (res != NO_ERROR || buf == NULL) {
4286             ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4287             break;
4288         }
4289         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4290         subParms->numSvcBufsInHal ++;
4291         ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4292            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4293 
4294 
4295         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4296             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4297                 found = true;
4298                 break;
4299             }
4300         }
4301         ALOGV("DEBUG(%s): jpeg dequeued_buffer found(%d) index(%d)", __FUNCTION__, found, checkingIndex);
4302 
4303         if (!found) {
4304              break;
4305         }
4306 
4307         subParms->svcBufIndex = checkingIndex;
4308         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4309             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4310         }
4311         else {
4312             ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4313                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4314         }
4315     }
4316     m_jpegEncodingCount--;
4317     return 0;
4318 }
4319 
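/*
 * Record substream handler: converts the preview-sized source frame to the recording
 * size/format with the video CSC, enqueues it to the record stream, then tops up the
 * pool of dequeued service buffers.
 */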
4320 int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4321 {
4322     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4323     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_RECORD];
4324     status_t    res;
4325     ExynosRect jpegRect;
4326     bool found = false;
4327     int cropX, cropY, cropW, cropH = 0;
4328     buffer_handle_t * buf = NULL;
4329 
4330     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4331     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4332         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4333             found = true;
4334             break;
4335         }
4336         subParms->svcBufIndex++;
4337         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4338             subParms->svcBufIndex = 0;
4339     }
4340     if (!found) {
4341         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4342         subParms->svcBufIndex++;
4343         return 1;
4344     }
4345 
4346     if (m_exynosVideoCSC) {
4347         int videoW = subParms->width, videoH = subParms->height;
4348         int cropX, cropY, cropW, cropH = 0;
4349         int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4350         m_getRatioSize(previewW, previewH,
4351                        videoW, videoH,
4352                        &cropX, &cropY,
4353                        &cropW, &cropH,
4354                        0);
4355 
4356         ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4357                  __FUNCTION__, cropX, cropY, cropW, cropH);
4358 
4359         csc_set_src_format(m_exynosVideoCSC,
4360                            ALIGN(previewW, 32), previewH,
4361                            cropX, cropY, cropW, cropH,
4362                            selfStreamParms->format,
4363                            0);
4364 
4365         csc_set_dst_format(m_exynosVideoCSC,
4366                            videoW, videoH,
4367                            0, 0, videoW, videoH,
4368                            subParms->format,
4369                            1);
4370 
4371         csc_set_src_buffer(m_exynosVideoCSC,
4372                         (void **)&srcImageBuf->fd.fd);
4373 
4374         csc_set_dst_buffer(m_exynosVideoCSC,
4375             (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
4376 
4377         if (csc_convert(m_exynosVideoCSC) != 0) {
4378             ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
4379         }
4380         else {
4381             ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
4382         }
4383     }
4384     else {
4385         ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4386     }
4387 
4388     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4389 
4390     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4391             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4392     if (res == 0) {
4393         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4394         subParms->numSvcBufsInHal--;
4395     }
4396     else {
4397         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4398     }
4399 
4400     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4401     {
4402         bool found = false;
4403         int checkingIndex = 0;
4404 
4405         ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4406 
4407         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4408         if (res != NO_ERROR || buf == NULL) {
4409             ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4410             break;
4411         }
4412         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4413         subParms->numSvcBufsInHal ++;
4414         ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4415            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4416 
4417         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4418             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4419                 found = true;
4420                 break;
4421             }
4422         }
4423         ALOGV("DEBUG(%s): record dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4424 
4425         if (!found) {
4426              break;
4427         }
4428 
4429         subParms->svcBufIndex = checkingIndex;
4430         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4431             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4432         }
4433         else {
4434             ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4435                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4436         }
4437     }
4438     return 0;
4439 }
4440 
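/*
 * Preview-callback substream handler. For YCrCb_420_SP the frame is converted with
 * the video CSC into m_previewCbBuf and then packed into the service buffer; for
 * YV12 the planes are copied directly, falling back to a line-by-line copy when the
 * stride does not match the expected alignment.
 */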
4441 int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4442 {
4443     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4444     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_PRVCB];
4445     status_t    res;
4446     bool found = false;
4447     int cropX, cropY, cropW, cropH = 0;
4448     buffer_handle_t * buf = NULL;
4449 
4450     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4451     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4452         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4453             found = true;
4454             break;
4455         }
4456         subParms->svcBufIndex++;
4457         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4458             subParms->svcBufIndex = 0;
4459     }
4460     if (!found) {
4461         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4462         subParms->svcBufIndex++;
4463         return 1;
4464     }
4465 
4466     if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
4467         if (m_exynosVideoCSC) {
4468             int previewCbW = subParms->width, previewCbH = subParms->height;
4469             int cropX, cropY, cropW, cropH = 0;
4470             int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4471             m_getRatioSize(previewW, previewH,
4472                            previewCbW, previewCbH,
4473                            &cropX, &cropY,
4474                            &cropW, &cropH,
4475                            0);
4476 
4477             ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4478                      __FUNCTION__, cropX, cropY, cropW, cropH);
4479             csc_set_src_format(m_exynosVideoCSC,
4480                                ALIGN(previewW, 32), previewH,
4481                                cropX, cropY, cropW, cropH,
4482                                selfStreamParms->format,
4483                                0);
4484 
4485             csc_set_dst_format(m_exynosVideoCSC,
4486                                previewCbW, previewCbH,
4487                                0, 0, previewCbW, previewCbH,
4488                                subParms->internalFormat,
4489                                1);
4490 
4491             csc_set_src_buffer(m_exynosVideoCSC,
4492                         (void **)&srcImageBuf->fd.fd);
4493 
4494             csc_set_dst_buffer(m_exynosVideoCSC,
4495                 (void **)(&(m_previewCbBuf.fd.fd)));
4496 
4497             if (csc_convert(m_exynosVideoCSC) != 0) {
4498                 ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
4499             }
4500             else {
4501                 ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
4502             }
4503             if (previewCbW == ALIGN(previewCbW, 16)) {
4504                 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4505                     m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
4506                 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
4507                     m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
4508             }
4509             else {
4510                 // TODO : copy line by line ?
4511             }
4512         }
4513         else {
4514             ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4515         }
4516     }
4517     else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
4518         int previewCbW = subParms->width, previewCbH = subParms->height;
4519         int stride = ALIGN(previewCbW, 16);
4520         int uv_stride = ALIGN(previewCbW/2, 16);
4521         int c_stride = ALIGN(stride / 2, 16);
4522 
4523         if (previewCbW == ALIGN(previewCbW, 32)) {
4524             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4525                 srcImageBuf->virt.extP[0], stride * previewCbH);
4526             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
4527                 srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
4528             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
4529                 srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
4530         } else {
4531             char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4532             char * srcAddr = (char *)(srcImageBuf->virt.extP[0]);
4533             for (int i = 0 ; i < previewCbH ; i++) {
4534                 memcpy(dstAddr, srcAddr, previewCbW);
4535                 dstAddr += stride;
4536                 srcAddr += ALIGN(stride, 32);
4537             }
4538             dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH);
4539             srcAddr = (char *)(srcImageBuf->virt.extP[1]);
4540             for (int i = 0 ; i < previewCbH/2 ; i++) {
4541                 memcpy(dstAddr, srcAddr, previewCbW/2);
4542                 dstAddr += c_stride;
4543                 srcAddr += uv_stride;
4544             }
4545             srcAddr = (char *)(srcImageBuf->virt.extP[2]);
4546             for (int i = 0 ; i < previewCbH/2 ; i++) {
4547                 memcpy(dstAddr, srcAddr, previewCbW/2);
4548                 dstAddr += c_stride;
4549                 srcAddr += uv_stride;
4550             }
4551         }
4552     }
4553     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4554 
4555     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4556             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4557     if (res == 0) {
4558         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4559         subParms->numSvcBufsInHal--;
4560     }
4561     else {
4562         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4563     }
4564 
4565     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4566     {
4567         bool found = false;
4568         int checkingIndex = 0;
4569 
4570         ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4571 
4572         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4573         if (res != NO_ERROR || buf == NULL) {
4574             ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4575             break;
4576         }
4577         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4578         subParms->numSvcBufsInHal ++;
4579         ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4580            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4581 
4582 
4583         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4584             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4585                 found = true;
4586                 break;
4587             }
4588         }
4589         ALOGV("DEBUG(%s): prvcb dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4590 
4591         if (!found) {
4592              break;
4593         }
4594 
4595         subParms->svcBufIndex = checkingIndex;
4596         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4597             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4598         }
4599         else {
4600             ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4601                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4602         }
4603     }
4604     return 0;
4605 }
4606 
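/* Returns true if (w, h) appears in the supported thumbnail size table for the
 * current sensor (rear camera for id 0, front camera otherwise). */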
4607 bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
4608 {
4609     int sizeOfSupportList;
4610 
4611     //REAR Camera
4612     if(this->getCameraId() == 0) {
4613         sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2);
4614 
4615         for(int i = 0; i < sizeOfSupportList; i++) {
4616             if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
4617                 return true;
4618         }
4619 
4620     }
4621     else {
4622         sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2);
4623 
4624         for(int i = 0; i < sizeOfSupportList; i++) {
4625             if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
4626                 return true;
4627         }
4628     }
4629 
4630     return false;
4631 }
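/*
 * Encodes one YUV buffer to JPEG with ExynosJpegEncoderForCamera, applying the
 * quality, size and thumbnail settings from m_jpegMetadata and the EXIF attributes
 * built by m_setExifChangedAttribute(). Returns true on success.
 */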
4632 bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
4633                             ExynosBuffer *jpegBuf,
4634                             ExynosRect *rect)
4635 {
4636     unsigned char *addr;
4637 
4638     ExynosJpegEncoderForCamera jpegEnc;
4639     bool ret = false;
4640     int res = 0;
4641 
4642     unsigned int *yuvSize = yuvBuf->size.extS;
4643 
4644     if (jpegEnc.create()) {
4645         ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
4646         goto jpeg_encode_done;
4647     }
4648 
4649     if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) {
4650         ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
4651         goto jpeg_encode_done;
4652     }
4653 
4654     if (jpegEnc.setSize(rect->w, rect->h)) {
4655         ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
4656         goto jpeg_encode_done;
4657     }
4658     ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
4659 
4660     if (jpegEnc.setColorFormat(rect->colorFormat)) {
4661         ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
4662         goto jpeg_encode_done;
4663     }
4664 
4665     if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
4666         ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
4667         goto jpeg_encode_done;
4668     }
4669 
4670     if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) {
4671         mExifInfo.enableThumb = true;
4672         if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) {
4673             // in the case of unsupported parameter, disable thumbnail
4674             mExifInfo.enableThumb = false;
4675         } else {
4676             m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0];
4677             m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1];
4678         }
4679 
4680         ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4681 
4682     } else {
4683         mExifInfo.enableThumb = false;
4684     }
4685 
4686     if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
4687         ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4688         goto jpeg_encode_done;
4689     }
4690 
4691     ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4692     if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) {
4693         ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
4694         goto jpeg_encode_done;
4695     }
4696 
4697     m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
4698     ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
4699     if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
4700         ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
4701         goto jpeg_encode_done;
4702     }
4703     if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
4704         ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
4705         goto jpeg_encode_done;
4706     }
4707 
4708     if (jpegEnc.updateConfig()) {
4709         ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
4710         goto jpeg_encode_done;
4711     }
4712 
4713     if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) != 0) {
4714         ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
4715         goto jpeg_encode_done;
4716     }
4717 
4718     ret = true;
4719 
4720 jpeg_encode_done:
4721 
4722     if (jpegEnc.flagCreate() == true)
4723         jpegEnc.destroy();
4724 
4725     return ret;
4726 }
4727 
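/*
 * AE precapture trigger. When a flash-capable AE mode is active on the rear camera,
 * this arms the full flash sequence (unless an AF flash sequence already completed);
 * otherwise precapture is effectively skipped and only the AE/AWB notifications are
 * sent out.
 */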
4728 void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
4729 {
4730     m_ctlInfo.flash.m_precaptureTriggerId = id;
4731     m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
4732     if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
4733         // flash is required
4734         switch (m_ctlInfo.flash.m_flashCnt) {
4735         case IS_FLASH_STATE_AUTO_DONE:
4736         case IS_FLASH_STATE_AUTO_OFF:
4737             // Flash capture sequence, AF flash was executed before
4738             break;
4739         default:
4740             // Full flash sequence
4741             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4742             m_ctlInfo.flash.m_flashEnableFlg = true;
4743             m_ctlInfo.flash.m_flashTimeOut = 0;
4744         }
4745     } else {
4746         // Skip pre-capture in case of non-flash.
4747         ALOGV("[PreCap] Flash OFF mode ");
4748         m_ctlInfo.flash.m_flashEnableFlg = false;
4749         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
4750     }
4751     ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
4752     OnPrecaptureMeteringNotificationSensor();
4753 }
4754 
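/*
 * AF trigger entry point: records the trigger id and forwards to the state-machine
 * helper for the current AF mode. In AUTO/MACRO/MANUAL an AF pre-flash is started
 * first when a flash AE mode is set on the rear camera.
 */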
4755 void ExynosCameraHWInterface2::OnAfTrigger(int id)
4756 {
4757     m_afTriggerId = id;
4758 
4759     switch (m_afMode) {
4760     case AA_AFMODE_AUTO:
4761     case AA_AFMODE_MACRO:
4762     case AA_AFMODE_MANUAL:
4763         ALOGV("[AF] OnAfTrigger - AUTO,MACRO,MANUAL (Mode %d) ", m_afMode);
4764         // If flash is enabled, the flash operation is executed before triggering AF
4765         if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4766                 && (m_ctlInfo.flash.m_flashEnableFlg == false)
4767                 && (m_cameraId == 0)) {
4768             ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode);
4769             m_ctlInfo.flash.m_flashEnableFlg = true;
4770             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4771             m_ctlInfo.flash.m_flashDecisionResult = false;
4772             m_ctlInfo.flash.m_afFlashDoneFlg = true;
4773         }
4774         OnAfTriggerAutoMacro(id);
4775         break;
4776     case AA_AFMODE_CONTINUOUS_VIDEO:
4777         ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
4778         OnAfTriggerCAFVideo(id);
4779         break;
4780     case AA_AFMODE_CONTINUOUS_PICTURE:
4781         ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
4782         OnAfTriggerCAFPicture(id);
4783         break;
4784 
4785     case AA_AFMODE_OFF:
4786     default:
4787         break;
4788     }
4789 }
4790 
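/* AF trigger handling for AUTO/MACRO/MANUAL modes: (re)arms an AF scan from the
 * inactive, passive, locked or failed states by moving to HAL_AFSTATE_NEEDS_COMMAND. */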
4791 void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
4792 {
4793     int nextState = NO_TRANSITION;
4794 
4795     switch (m_afState) {
4796     case HAL_AFSTATE_INACTIVE:
4797     case HAL_AFSTATE_PASSIVE_FOCUSED:
4798     case HAL_AFSTATE_SCANNING:
4799         nextState = HAL_AFSTATE_NEEDS_COMMAND;
4800         m_IsAfTriggerRequired = true;
4801         break;
4802     case HAL_AFSTATE_NEEDS_COMMAND:
4803         nextState = NO_TRANSITION;
4804         break;
4805     case HAL_AFSTATE_STARTED:
4806         nextState = NO_TRANSITION;
4807         break;
4808     case HAL_AFSTATE_LOCKED:
4809         nextState = HAL_AFSTATE_NEEDS_COMMAND;
4810         m_IsAfTriggerRequired = true;
4811         break;
4812     case HAL_AFSTATE_FAILED:
4813         nextState = HAL_AFSTATE_NEEDS_COMMAND;
4814         m_IsAfTriggerRequired = true;
4815         break;
4816     default:
4817         break;
4818     }
4819     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4820     if (nextState != NO_TRANSITION)
4821         m_afState = nextState;
4822 }
4823 
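/* AF trigger handling for continuous-picture mode: an ongoing scan is pushed to
 * NEEDS_DETERMINATION (optionally starting the AF pre-flash), while a passively
 * focused state is locked immediately and reported as focused or not focused. */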
4824 void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
4825 {
4826     int nextState = NO_TRANSITION;
4827 
4828     switch (m_afState) {
4829     case HAL_AFSTATE_INACTIVE:
4830         nextState = HAL_AFSTATE_FAILED;
4831         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4832         break;
4833     case HAL_AFSTATE_NEEDS_COMMAND:
4834         // not used
4835         break;
4836     case HAL_AFSTATE_STARTED:
4837         nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4838         m_AfHwStateFailed = false;
4839         break;
4840     case HAL_AFSTATE_SCANNING:
4841         nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4842         m_AfHwStateFailed = false;
4843         // If flash is enabled, the flash operation is executed before triggering AF
4844         if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4845                 && (m_ctlInfo.flash.m_flashEnableFlg == false)
4846                 && (m_cameraId == 0)) {
4847             ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4848             m_ctlInfo.flash.m_flashEnableFlg = true;
4849             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4850             m_ctlInfo.flash.m_flashDecisionResult = false;
4851             m_ctlInfo.flash.m_afFlashDoneFlg = true;
4852         }
4853         break;
4854     case HAL_AFSTATE_NEEDS_DETERMINATION:
4855         nextState = NO_TRANSITION;
4856         break;
4857     case HAL_AFSTATE_PASSIVE_FOCUSED:
4858         m_IsAfLockRequired = true;
4859         if (m_AfHwStateFailed) {
4860             ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
4861             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4862             nextState = HAL_AFSTATE_FAILED;
4863         }
4864         else {
4865             ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
4866             SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4867             nextState = HAL_AFSTATE_LOCKED;
4868         }
4869         m_AfHwStateFailed = false;
4870         break;
4871     case HAL_AFSTATE_LOCKED:
4872         nextState = NO_TRANSITION;
4873         break;
4874     case HAL_AFSTATE_FAILED:
4875         nextState = NO_TRANSITION;
4876         break;
4877     default:
4878         break;
4879     }
4880     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4881     if (nextState != NO_TRANSITION)
4882         m_afState = nextState;
4883 }
4884 
4885 
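/* AF trigger handling for continuous-video mode: started/scanning states are locked
 * and reported as NOT_FOCUSED_LOCKED, a passively focused state as FOCUSED_LOCKED. */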
4886 void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
4887 {
4888     int nextState = NO_TRANSITION;
4889 
4890     switch (m_afState) {
4891     case HAL_AFSTATE_INACTIVE:
4892         nextState = HAL_AFSTATE_FAILED;
4893         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4894         break;
4895     case HAL_AFSTATE_NEEDS_COMMAND:
4896         // not used
4897         break;
4898     case HAL_AFSTATE_STARTED:
4899         m_IsAfLockRequired = true;
4900         nextState = HAL_AFSTATE_FAILED;
4901         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4902         break;
4903     case HAL_AFSTATE_SCANNING:
4904         m_IsAfLockRequired = true;
4905         nextState = HAL_AFSTATE_FAILED;
4906         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4907         break;
4908     case HAL_AFSTATE_NEEDS_DETERMINATION:
4909         // not used
4910         break;
4911     case HAL_AFSTATE_PASSIVE_FOCUSED:
4912         m_IsAfLockRequired = true;
4913         SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4914         nextState = HAL_AFSTATE_LOCKED;
4915         break;
4916     case HAL_AFSTATE_LOCKED:
4917         nextState = NO_TRANSITION;
4918         break;
4919     case HAL_AFSTATE_FAILED:
4920         nextState = NO_TRANSITION;
4921         break;
4922     default:
4923         break;
4924     }
4925     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4926     if (nextState != NO_TRANSITION)
4927         m_afState = nextState;
4928 }
4929 
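// Precapture (AE) handling, presumably on the sensor-side notification path: once a
// precapture trigger is pending, send the AE_STATE_PRECAPTURE and AWB_STATE_CONVERGED
// notifications exactly once, guarded by aeStateNoti.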
4930 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor()
4931 {
4932     if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4933         // Just notify the start of pre-capture metering
4934         if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) {
4935             m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4936                         ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4937                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4938             ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4939             m_notifyCb(CAMERA2_MSG_AUTOWB,
4940                         ANDROID_CONTROL_AWB_STATE_CONVERGED,
4941                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4942             m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
4943         }
4944     }
4945 }
4946 
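// Precapture handling, ISP-side path. With flash enabled this waits for the flash
// state machine to reach AUTO_DONE/AUTO_OFF before reporting AE_STATE_CONVERGED and
// clearing the trigger id, and recovers from unexpected CAPTURE_* states by forcing
// AUTO_DONE. Without flash it converges as soon as the PRECAPTURE notification has
// already been sent.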
4947 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
4948 {
4949     if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4950         if (m_ctlInfo.flash.m_flashEnableFlg) {
4951             // flash case
4952             switch (m_ctlInfo.flash.m_flashCnt) {
4953             case IS_FLASH_STATE_AUTO_DONE:
4954             case IS_FLASH_STATE_AUTO_OFF:
4955                 if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
4956                     // End notification
4957                     m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4958                                     ANDROID_CONTROL_AE_STATE_CONVERGED,
4959                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4960                     ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4961                     m_notifyCb(CAMERA2_MSG_AUTOWB,
4962                                     ANDROID_CONTROL_AWB_STATE_CONVERGED,
4963                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4964                     m_ctlInfo.flash.m_precaptureTriggerId = 0;
4965                 } else {
4966                     m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4967                                     ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4968                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4969                     ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4970                     m_notifyCb(CAMERA2_MSG_AUTOWB,
4971                                     ANDROID_CONTROL_AWB_STATE_CONVERGED,
4972                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4973                     m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
4974                 }
4975                 break;
4976             case IS_FLASH_STATE_CAPTURE:
4977             case IS_FLASH_STATE_CAPTURE_WAIT:
4978             case IS_FLASH_STATE_CAPTURE_JPEG:
4979             case IS_FLASH_STATE_CAPTURE_END:
4980                 ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
4981                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
4982                 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4983                         ANDROID_CONTROL_AE_STATE_CONVERGED,
4984                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4985                 m_notifyCb(CAMERA2_MSG_AUTOWB,
4986                         ANDROID_CONTROL_AWB_STATE_CONVERGED,
4987                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4988                 m_ctlInfo.flash.m_precaptureTriggerId = 0;
4989                 break;
4990             }
4991         } else {
4992             // non-flash case
4993             if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
4994                 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4995                                 ANDROID_CONTROL_AE_STATE_CONVERGED,
4996                                 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4997                 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4998                 m_notifyCb(CAMERA2_MSG_AUTOWB,
4999                                 ANDROID_CONTROL_AWB_STATE_CONVERGED,
5000                                 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5001                 m_ctlInfo.flash.m_precaptureTriggerId = 0;
5002             }
5003         }
5004     }
5005 }
5006 
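// Dispatches an AF state notification coming from the ISP (aa_afstate) to the
// handler that matches the currently configured AF mode.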
5007 void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
5008 {
5009     switch (m_afMode) {
5010     case AA_AFMODE_AUTO:
5011     case AA_AFMODE_MACRO:
5012         OnAfNotificationAutoMacro(noti);
5013         break;
5014     case AA_AFMODE_CONTINUOUS_VIDEO:
5015         OnAfNotificationCAFVideo(noti);
5016         break;
5017     case AA_AFMODE_CONTINUOUS_PICTURE:
5018         OnAfNotificationCAFPicture(noti);
5019         break;
5020     case AA_AFMODE_OFF:
5021     default:
5022         break;
5023     }
5024 }
5025 
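// Single-shot (AUTO/MACRO) AF state machine, driven by ISP notifications.
// Rough sketch of the transitions implemented below:
//   STARTED --ACTIVE_SCAN--> SCANNING --ACQUIRED_FOCUS--> LOCKED (FOCUSED_LOCKED)
//                                     --FAILED_FOCUS----> FAILED (NOT_FOCUSED_LOCKED)
// When the AF pre-flash sequence is running, the final lock/fail decision is
// deferred until the flash state machine reaches IS_FLASH_STATE_AUTO_DONE.
// Anything else is logged as a wrong transition and ignored.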
5026 void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
5027 {
5028     int nextState = NO_TRANSITION;
5029     bool bWrongTransition = false;
5030 
5031     if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
5032         switch (noti) {
5033         case AA_AFSTATE_INACTIVE:
5034         case AA_AFSTATE_ACTIVE_SCAN:
5035         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5036         case AA_AFSTATE_AF_FAILED_FOCUS:
5037         default:
5038             nextState = NO_TRANSITION;
5039             break;
5040         }
5041     }
5042     else if (m_afState == HAL_AFSTATE_STARTED) {
5043         switch (noti) {
5044         case AA_AFSTATE_INACTIVE:
5045             nextState = NO_TRANSITION;
5046             break;
5047         case AA_AFSTATE_ACTIVE_SCAN:
5048             nextState = HAL_AFSTATE_SCANNING;
5049             SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
5050             break;
5051         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5052             nextState = NO_TRANSITION;
5053             break;
5054         case AA_AFSTATE_AF_FAILED_FOCUS:
5055             nextState = NO_TRANSITION;
5056             break;
5057         default:
5058             bWrongTransition = true;
5059             break;
5060         }
5061     }
5062     else if (m_afState == HAL_AFSTATE_SCANNING) {
5063         switch (noti) {
5064         case AA_AFSTATE_INACTIVE:
5065             bWrongTransition = true;
5066             break;
5067         case AA_AFSTATE_ACTIVE_SCAN:
5068             nextState = NO_TRANSITION;
5069             break;
5070         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5071             // If flash is enabled, pre-capture metering is executed after AF
5072             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5073                 switch (m_ctlInfo.flash.m_flashCnt) {
5074                 case IS_FLASH_STATE_ON_DONE:
5075                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5076                     nextState = NO_TRANSITION;
5077                     break;
5078                 case IS_FLASH_STATE_AUTO_DONE:
5079                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5080                     nextState = HAL_AFSTATE_LOCKED;
5081                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5082                     break;
5083                 default:
5084                     nextState = NO_TRANSITION;
5085                 }
5086             } else {
5087                 nextState = HAL_AFSTATE_LOCKED;
5088                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5089             }
5090             break;
5091         case AA_AFSTATE_AF_FAILED_FOCUS:
5092             // If flash is enabled, pre-capture metering is executed after AF
5093             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5094                 switch (m_ctlInfo.flash.m_flashCnt) {
5095                 case IS_FLASH_STATE_ON_DONE:
5096                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5097                     nextState = NO_TRANSITION;
5098                     break;
5099                 case IS_FLASH_STATE_AUTO_DONE:
5100                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5101                     nextState = HAL_AFSTATE_FAILED;
5102                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5103                     break;
5104                 default:
5105                     nextState = NO_TRANSITION;
5106                 }
5107             } else {
5108                 nextState = HAL_AFSTATE_FAILED;
5109                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5110             }
5111             break;
5112         default:
5113             bWrongTransition = true;
5114             break;
5115         }
5116     }
5117     else if (m_afState == HAL_AFSTATE_LOCKED) {
5118         switch (noti) {
5119             case AA_AFSTATE_INACTIVE:
5120             case AA_AFSTATE_ACTIVE_SCAN:
5121                 bWrongTransition = true;
5122                 break;
5123             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5124                 nextState = NO_TRANSITION;
5125                 break;
5126             case AA_AFSTATE_AF_FAILED_FOCUS:
5127             default:
5128                 bWrongTransition = true;
5129                 break;
5130         }
5131     }
5132     else if (m_afState == HAL_AFSTATE_FAILED) {
5133         switch (noti) {
5134             case AA_AFSTATE_INACTIVE:
5135             case AA_AFSTATE_ACTIVE_SCAN:
5136             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5137                 bWrongTransition = true;
5138                 break;
5139             case AA_AFSTATE_AF_FAILED_FOCUS:
5140                 nextState = NO_TRANSITION;
5141                 break;
5142             default:
5143                 bWrongTransition = true;
5144                 break;
5145         }
5146     }
5147     if (bWrongTransition) {
5148         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5149         return;
5150     }
5151     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5152     if (nextState != NO_TRANSITION)
5153         m_afState = nextState;
5154 }
5155 
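// Continuous-picture AF state machine. Passive scans are reported to the framework
// as PASSIVE_SCAN / PASSIVE_FOCUSED; a failed passive focus is only remembered in
// m_AfHwStateFailed. After an AF trigger (NEEDS_DETERMINATION) the result becomes
// LOCKED or FAILED, going through the AF pre-flash sequence when it is active.
// In INACTIVE, m_afTriggerTimeOut acts as a small watchdog that appears intended to
// re-apply the AF mode if no notification follows a trigger.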
5156 void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
5157 {
5158     int nextState = NO_TRANSITION;
5159     bool bWrongTransition = false;
5160 
5161     if (m_afState == HAL_AFSTATE_INACTIVE) {
5162         switch (noti) {
5163         case AA_AFSTATE_INACTIVE:
5164         case AA_AFSTATE_ACTIVE_SCAN:
5165         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5166         case AA_AFSTATE_AF_FAILED_FOCUS:
5167         default:
5168             nextState = NO_TRANSITION;
5169             break;
5170         }
5171         // Check AF notification after triggering
5172         if (m_ctlInfo.af.m_afTriggerTimeOut > 0) {
5173             if (m_ctlInfo.af.m_afTriggerTimeOut > 5) {
5174                 ALOGE("(%s) AF notification error - try to re-trigger mode (%d)", __FUNCTION__, m_afMode);
5175                 SetAfMode(AA_AFMODE_OFF);
5176                 SetAfMode(m_afMode);
5177                 m_ctlInfo.af.m_afTriggerTimeOut = 0;
5178             } else {
5179                 m_ctlInfo.af.m_afTriggerTimeOut++;
5180             }
5181         }
5182     }
5183     else if (m_afState == HAL_AFSTATE_STARTED) {
5184         switch (noti) {
5185         case AA_AFSTATE_INACTIVE:
5186             nextState = NO_TRANSITION;
5187             break;
5188         case AA_AFSTATE_ACTIVE_SCAN:
5189             nextState = HAL_AFSTATE_SCANNING;
5190             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5191             m_ctlInfo.af.m_afTriggerTimeOut = 0;
5192             break;
5193         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5194             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5195             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5196             m_ctlInfo.af.m_afTriggerTimeOut = 0;
5197             break;
5198         case AA_AFSTATE_AF_FAILED_FOCUS:
5199             //nextState = HAL_AFSTATE_FAILED;
5200             //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5201             nextState = NO_TRANSITION;
5202             break;
5203         default:
5204             bWrongTransition = true;
5205             break;
5206         }
5207     }
5208     else if (m_afState == HAL_AFSTATE_SCANNING) {
5209         switch (noti) {
5210         case AA_AFSTATE_INACTIVE:
5211             nextState = NO_TRANSITION;
5212             break;
5213         case AA_AFSTATE_ACTIVE_SCAN:
5214             nextState = NO_TRANSITION;
5215             m_AfHwStateFailed = false;
5216             break;
5217         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5218             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5219             m_AfHwStateFailed = false;
5220             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5221             break;
5222         case AA_AFSTATE_AF_FAILED_FOCUS:
5223             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5224             m_AfHwStateFailed = true;
5225             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5226             break;
5227         default:
5228             bWrongTransition = true;
5229             break;
5230         }
5231     }
5232     else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5233         switch (noti) {
5234         case AA_AFSTATE_INACTIVE:
5235             nextState = NO_TRANSITION;
5236             break;
5237         case AA_AFSTATE_ACTIVE_SCAN:
5238             nextState = HAL_AFSTATE_SCANNING;
5239             m_AfHwStateFailed = false;
5240             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5241             break;
5242         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5243             nextState = NO_TRANSITION;
5244             m_AfHwStateFailed = false;
5245             break;
5246         case AA_AFSTATE_AF_FAILED_FOCUS:
5247             nextState = NO_TRANSITION;
5248             m_AfHwStateFailed = true;
5249             break;
5250         default:
5251             bWrongTransition = true;
5252             break;
5253         }
5254     }
5255     else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5256         // Flash case: skip this notification and wait until the flash-on sequence completes
5257         if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5258             if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE)
5259                 return;
5260         }
5261         switch (noti) {
5262         case AA_AFSTATE_INACTIVE:
5263             nextState = NO_TRANSITION;
5264             break;
5265         case AA_AFSTATE_ACTIVE_SCAN:
5266             nextState = NO_TRANSITION;
5267             break;
5268         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5269             // If flash is enabled, pre-capture metering is executed after AF
5270             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5271                 switch (m_ctlInfo.flash.m_flashCnt) {
5272                 case IS_FLASH_STATE_ON_DONE:
5273                     ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5274                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5275                     nextState = NO_TRANSITION;
5276                     break;
5277                 case IS_FLASH_STATE_AUTO_DONE:
5278                     ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5279                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5280                     m_IsAfLockRequired = true;
5281                     nextState = HAL_AFSTATE_LOCKED;
5282                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5283                     break;
5284                 default:
5285                     nextState = NO_TRANSITION;
5286                 }
5287             } else {
5288                 m_IsAfLockRequired = true;
5289                 nextState = HAL_AFSTATE_LOCKED;
5290                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5291             }
5292             break;
5293         case AA_AFSTATE_AF_FAILED_FOCUS:
5294             // If flash is enabled, pre-capture metering is executed after AF
5295             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5296                 switch (m_ctlInfo.flash.m_flashCnt) {
5297                 case IS_FLASH_STATE_ON_DONE:
5298                     ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5299                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5300                     nextState = NO_TRANSITION;
5301                     break;
5302                 case IS_FLASH_STATE_AUTO_DONE:
5303                     ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5304                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5305                     m_IsAfLockRequired = true;
5306                     nextState = HAL_AFSTATE_FAILED;
5307                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5308                     break;
5309                 default:
5310                     nextState = NO_TRANSITION;
5311                 }
5312             } else {
5313                 m_IsAfLockRequired = true;
5314                 nextState = HAL_AFSTATE_FAILED;
5315                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5316             }
5317             break;
5318         default:
5319             bWrongTransition = true;
5320             break;
5321         }
5322     }
5323     else if (m_afState == HAL_AFSTATE_LOCKED) {
5324         switch (noti) {
5325             case AA_AFSTATE_INACTIVE:
5326                 nextState = NO_TRANSITION;
5327                 break;
5328             case AA_AFSTATE_ACTIVE_SCAN:
5329                 bWrongTransition = true;
5330                 break;
5331             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5332                 nextState = NO_TRANSITION;
5333                 break;
5334             case AA_AFSTATE_AF_FAILED_FOCUS:
5335             default:
5336                 bWrongTransition = true;
5337                 break;
5338         }
5339     }
5340     else if (m_afState == HAL_AFSTATE_FAILED) {
5341         switch (noti) {
5342             case AA_AFSTATE_INACTIVE:
5343                 bWrongTransition = true;
5344                 break;
5345             case AA_AFSTATE_ACTIVE_SCAN:
5346                 nextState = HAL_AFSTATE_SCANNING;
5347                 break;
5348             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5349                 bWrongTransition = true;
5350                 break;
5351             case AA_AFSTATE_AF_FAILED_FOCUS:
5352                 nextState = NO_TRANSITION;
5353                 break;
5354             default:
5355                 bWrongTransition = true;
5356                 break;
5357         }
5358     }
5359     if (bWrongTransition) {
5360         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5361         return;
5362     }
5363     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5364     if (nextState != NO_TRANSITION)
5365         m_afState = nextState;
5366 }
5367 
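// Continuous-video AF state machine. Similar to the picture variant but simpler:
// there is no pre-flash handling, and a FAILED_FOCUS notification received in
// STARTED, PASSIVE_FOCUSED or NEEDS_DETERMINATION fails immediately with
// NOT_FOCUSED_LOCKED instead of being deferred.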
5368 void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
5369 {
5370     int nextState = NO_TRANSITION;
5371     bool bWrongTransition = false;
5372 
5373     if (m_afState == HAL_AFSTATE_INACTIVE) {
5374         switch (noti) {
5375         case AA_AFSTATE_INACTIVE:
5376         case AA_AFSTATE_ACTIVE_SCAN:
5377         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5378         case AA_AFSTATE_AF_FAILED_FOCUS:
5379         default:
5380             nextState = NO_TRANSITION;
5381             break;
5382         }
5383     }
5384     else if (m_afState == HAL_AFSTATE_STARTED) {
5385         switch (noti) {
5386         case AA_AFSTATE_INACTIVE:
5387             nextState = NO_TRANSITION;
5388             break;
5389         case AA_AFSTATE_ACTIVE_SCAN:
5390             nextState = HAL_AFSTATE_SCANNING;
5391             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5392             break;
5393         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5394             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5395             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5396             break;
5397         case AA_AFSTATE_AF_FAILED_FOCUS:
5398             nextState = HAL_AFSTATE_FAILED;
5399             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5400             break;
5401         default:
5402             bWrongTransition = true;
5403             break;
5404         }
5405     }
5406     else if (m_afState == HAL_AFSTATE_SCANNING) {
5407         switch (noti) {
5408         case AA_AFSTATE_INACTIVE:
5409             bWrongTransition = true;
5410             break;
5411         case AA_AFSTATE_ACTIVE_SCAN:
5412             nextState = NO_TRANSITION;
5413             break;
5414         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5415             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5416             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5417             break;
5418         case AA_AFSTATE_AF_FAILED_FOCUS:
5419             nextState = NO_TRANSITION;
5420             break;
5421         default:
5422             bWrongTransition = true;
5423             break;
5424         }
5425     }
5426     else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5427         switch (noti) {
5428         case AA_AFSTATE_INACTIVE:
5429             bWrongTransition = true;
5430             break;
5431         case AA_AFSTATE_ACTIVE_SCAN:
5432             nextState = HAL_AFSTATE_SCANNING;
5433             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5434             break;
5435         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5436             nextState = NO_TRANSITION;
5437             break;
5438         case AA_AFSTATE_AF_FAILED_FOCUS:
5439             nextState = HAL_AFSTATE_FAILED;
5440             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5441             // TODO : needs NO_TRANSITION ?
5442             break;
5443         default:
5444             bWrongTransition = true;
5445             break;
5446         }
5447     }
5448     else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5449         switch (noti) {
5450         case AA_AFSTATE_INACTIVE:
5451             bWrongTransition = true;
5452             break;
5453         case AA_AFSTATE_ACTIVE_SCAN:
5454             nextState = NO_TRANSITION;
5455             break;
5456         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5457             m_IsAfLockRequired = true;
5458             nextState = HAL_AFSTATE_LOCKED;
5459             SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5460             break;
5461         case AA_AFSTATE_AF_FAILED_FOCUS:
5462             nextState = HAL_AFSTATE_FAILED;
5463             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5464             break;
5465         default:
5466             bWrongTransition = true;
5467             break;
5468         }
5469     }
5470     else if (m_afState == HAL_AFSTATE_LOCKED) {
5471         switch (noti) {
5472             case AA_AFSTATE_INACTIVE:
5473                 nextState = NO_TRANSITION;
5474                 break;
5475             case AA_AFSTATE_ACTIVE_SCAN:
5476                 bWrongTransition = true;
5477                 break;
5478             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5479                 nextState = NO_TRANSITION;
5480                 break;
5481             case AA_AFSTATE_AF_FAILED_FOCUS:
5482             default:
5483                 bWrongTransition = true;
5484                 break;
5485         }
5486     }
5487     else if (m_afState == HAL_AFSTATE_FAILED) {
5488         switch (noti) {
5489             case AA_AFSTATE_INACTIVE:
5490             case AA_AFSTATE_ACTIVE_SCAN:
5491             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5492                 bWrongTransition = true;
5493                 break;
5494             case AA_AFSTATE_AF_FAILED_FOCUS:
5495                 nextState = NO_TRANSITION;
5496                 break;
5497             default:
5498                 bWrongTransition = true;
5499                 break;
5500         }
5501     }
5502     if (bWrongTransition) {
5503         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5504         return;
5505     }
5506     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5507     if (nextState != NO_TRANSITION)
5508         m_afState = nextState;
5509 }
5510 
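// AF_CANCEL trigger entry point: remembers the trigger id used for subsequent
// CAMERA2_MSG_AUTOFOCUS notifications and routes the cancel to the handler for the
// current AF mode (the CAF variants re-arm their mode after resetting to INACTIVE).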
5511 void ExynosCameraHWInterface2::OnAfCancel(int id)
5512 {
5513     m_afTriggerId = id;
5514 
5515     switch (m_afMode) {
5516     case AA_AFMODE_AUTO:
5517     case AA_AFMODE_MACRO:
5518     case AA_AFMODE_OFF:
5519     case AA_AFMODE_MANUAL:
5520         OnAfCancelAutoMacro(id);
5521         break;
5522     case AA_AFMODE_CONTINUOUS_VIDEO:
5523         OnAfCancelCAFVideo(id);
5524         break;
5525     case AA_AFMODE_CONTINUOUS_PICTURE:
5526         OnAfCancelCAFPicture(id);
5527         break;
5528     default:
5529         break;
5530     }
5531 }
5532 
5533 void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
5534 {
5535     int nextState = NO_TRANSITION;
5536 
5537     if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5538         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5539     }
5540     switch (m_afState) {
5541     case HAL_AFSTATE_INACTIVE:
5542         nextState = NO_TRANSITION;
5543         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5544         break;
5545     case HAL_AFSTATE_NEEDS_COMMAND:
5546     case HAL_AFSTATE_STARTED:
5547     case HAL_AFSTATE_SCANNING:
5548     case HAL_AFSTATE_LOCKED:
5549     case HAL_AFSTATE_FAILED:
5550         SetAfMode(AA_AFMODE_OFF);
5551         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5552         nextState = HAL_AFSTATE_INACTIVE;
5553         break;
5554     default:
5555         break;
5556     }
5557     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5558     if (nextState != NO_TRANSITION)
5559         m_afState = nextState;
5560 }
5561 
5562 void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
5563 {
5564     int nextState = NO_TRANSITION;
5565 
5566     switch (m_afState) {
5567     case HAL_AFSTATE_INACTIVE:
5568         nextState = NO_TRANSITION;
5569         break;
5570     case HAL_AFSTATE_NEEDS_COMMAND:
5571     case HAL_AFSTATE_STARTED:
5572     case HAL_AFSTATE_SCANNING:
5573     case HAL_AFSTATE_LOCKED:
5574     case HAL_AFSTATE_FAILED:
5575     case HAL_AFSTATE_NEEDS_DETERMINATION:
5576     case HAL_AFSTATE_PASSIVE_FOCUSED:
5577         SetAfMode(AA_AFMODE_OFF);
5578         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5579         SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
5580         nextState = HAL_AFSTATE_INACTIVE;
5581         break;
5582     default:
5583         break;
5584     }
5585     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5586     if (nextState != NO_TRANSITION)
5587         m_afState = nextState;
5588 }
5589 
5590 void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
5591 {
5592     int nextState = NO_TRANSITION;
5593 
5594     switch (m_afState) {
5595     case HAL_AFSTATE_INACTIVE:
5596         nextState = NO_TRANSITION;
5597         break;
5598     case HAL_AFSTATE_NEEDS_COMMAND:
5599     case HAL_AFSTATE_STARTED:
5600     case HAL_AFSTATE_SCANNING:
5601     case HAL_AFSTATE_LOCKED:
5602     case HAL_AFSTATE_FAILED:
5603     case HAL_AFSTATE_NEEDS_DETERMINATION:
5604     case HAL_AFSTATE_PASSIVE_FOCUSED:
5605         SetAfMode(AA_AFMODE_OFF);
5606         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5607         SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
5608         nextState = HAL_AFSTATE_INACTIVE;
5609         break;
5610     default:
5611         break;
5612     }
5613     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5614     if (nextState != NO_TRANSITION)
5615         m_afState = nextState;
5616 }
5617 
5618 void ExynosCameraHWInterface2::SetAfStateForService(int newState)
5619 {
5620     if (m_serviceAfState != newState || newState == 0)
5621         m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
5622     m_serviceAfState = newState;
5623 }
5624 
5625 int ExynosCameraHWInterface2::GetAfStateForService()
5626 {
5627    return m_serviceAfState;
5628 }
5629 
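// Deferred AF mode switching: if a previous mode change has not been pushed to the
// ISP yet (m_IsAfModeUpdateRequired) and the current mode is not OFF, the new mode
// is parked in m_afMode2; otherwise the mode is applied now and the AF state is
// reset to INACTIVE.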
5630 void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
5631 {
5632     if (m_afMode != afMode) {
5633         if (m_IsAfModeUpdateRequired && m_afMode != AA_AFMODE_OFF) {
5634             m_afMode2 = afMode;
5635             ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
5636         }
5637         else {
5638             ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
5639             m_IsAfModeUpdateRequired = true;
5640             m_afMode = afMode;
5641             SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5642             m_afState = HAL_AFSTATE_INACTIVE;
5643         }
5644     }
5645 }
5646 
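// Fills the EXIF fields that do not change from shot to shot: maker/model/software
// from system properties, f-number and focal length from the static camera info,
// and the aperture in APEX units. (Standard APEX: AV = 2 * log2(f-number), so for
// example f/2.2 gives AV ~= 2.28; the APEX_FNUM_TO_APERTURE macro is assumed to
// implement this conversion.)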
5647 void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
5648 {
5649     char property[PROPERTY_VALUE_MAX];
5650 
5651     //2 0th IFD TIFF Tags
5652     //3 Maker
5653     property_get("ro.product.brand", property, EXIF_DEF_MAKER);
5654     strncpy((char *)mExifInfo.maker, property,
5655                 sizeof(mExifInfo.maker) - 1);
5656     mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
5657     //3 Model
5658     property_get("ro.product.model", property, EXIF_DEF_MODEL);
5659     strncpy((char *)mExifInfo.model, property,
5660                 sizeof(mExifInfo.model) - 1);
5661     mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
5662     //3 Software
5663     property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
5664     strncpy((char *)mExifInfo.software, property,
5665                 sizeof(mExifInfo.software) - 1);
5666     mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
5667 
5668     //3 YCbCr Positioning
5669     mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
5670 
5671     //2 0th IFD Exif Private Tags
5672     //3 F Number
5673     mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
5674     mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
5675     //3 Exposure Program
5676     mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
5677     //3 Exif Version
5678     memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
5679     //3 Aperture
5680     double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
5681     mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
5682     mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
5683     //3 Maximum lens aperture
5684     mExifInfo.max_aperture.num = mExifInfo.aperture.num;
5685     mExifInfo.max_aperture.den = mExifInfo.aperture.den;
5686     //3 Lens Focal Length
5687     mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
5688 
5689     mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
5690     //3 User Comments
5691     strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
5692     //3 Color Space information
5693     mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
5694     //3 Exposure Mode
5695     mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
5696 
5697     //2 0th IFD GPS Info Tags
5698     unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
5699     memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
5700 
5701     //2 1st IFD TIFF Tags
5702     mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
5703     mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5704     mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5705     mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5706     mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5707     mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
5708 }
5709 
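// Fills the per-capture EXIF fields from the shot metadata: image size and JPEG
// orientation, capture date/time, exposure time and ISO from the dynamic metadata,
// the APEX-derived shutter/brightness values (the code below uses BV = AV + TV - SV
// and EV = AV + TV), flash/white-balance/scene-type flags, and the GPS IFD when
// coordinates are present. GPS coordinates are split into degree/minute/second
// rationals, e.g. latitude 37.7749 becomes 37 deg, 46 min, 30 sec (rounded).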
5710 void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
5711 	camera2_shot_ext *currentEntry)
5712 {
5713     camera2_dm *dm = &(currentEntry->shot.dm);
5714     camera2_ctl *ctl = &(currentEntry->shot.ctl);
5715 
5716     ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime, dm->aa.isoValue);
5717     if (!ctl->request.frameCount)
5718        return;
5719     //2 0th IFD TIFF Tags
5720     //3 Width
5721     exifInfo->width = rect->w;
5722     //3 Height
5723     exifInfo->height = rect->h;
5724     //3 Orientation
5725     switch (ctl->jpeg.orientation) {
5726     case 90:
5727         exifInfo->orientation = EXIF_ORIENTATION_90;
5728         break;
5729     case 180:
5730         exifInfo->orientation = EXIF_ORIENTATION_180;
5731         break;
5732     case 270:
5733         exifInfo->orientation = EXIF_ORIENTATION_270;
5734         break;
5735     case 0:
5736     default:
5737         exifInfo->orientation = EXIF_ORIENTATION_UP;
5738         break;
5739     }
5740 
5741     //3 Date time
5742     time_t rawtime;
5743     struct tm *timeinfo;
5744     time(&rawtime);
5745     timeinfo = localtime(&rawtime);
5746     strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
5747 
5748     //2 0th IFD Exif Private Tags
5749     //3 Exposure Time
5750     int shutterSpeed = (dm->sensor.exposureTime/1000);
5751 
5752     // To display exposure time just above 500ms as 1/2sec, not 1 sec.
5753     if (shutterSpeed > 500000)
5754         shutterSpeed -=  100000;
5755 
5756     if (shutterSpeed < 0) {
5757         shutterSpeed = 100;
5758     }
5759 
5760     exifInfo->exposure_time.num = 1;
5761     // x us -> 1/x s
5762     //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
5763     exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
5764 
5765     //3 ISO Speed Rating
5766     exifInfo->iso_speed_rating = dm->aa.isoValue;
5767 
5768     uint32_t av, tv, bv, sv, ev;
5769     av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
5770     tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
5771     sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
5772     bv = av + tv - sv;
5773     ev = av + tv;
5774     //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
5775     ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv);
5776 
5777     //3 Shutter Speed
5778     exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
5779     exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
5780     //3 Brightness
5781     exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
5782     exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
5783     //3 Exposure Bias
5784     if (ctl->aa.sceneMode == AA_SCENE_MODE_BEACH ||
5785         ctl->aa.sceneMode == AA_SCENE_MODE_SNOW) {
5786         exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
5787         exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
5788     } else {
5789         exifInfo->exposure_bias.num = 0;
5790         exifInfo->exposure_bias.den = 0;
5791     }
5792     //3 Metering Mode
5793     /*switch (m_curCameraInfo->metering) {
5794     case METERING_MODE_CENTER:
5795         exifInfo->metering_mode = EXIF_METERING_CENTER;
5796         break;
5797     case METERING_MODE_MATRIX:
5798         exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
5799         break;
5800     case METERING_MODE_SPOT:
5801         exifInfo->metering_mode = EXIF_METERING_SPOT;
5802         break;
5803     case METERING_MODE_AVERAGE:
5804     default:
5805         exifInfo->metering_mode = EXIF_METERING_AVERAGE;
5806         break;
5807     }*/
5808     exifInfo->metering_mode = EXIF_METERING_CENTER;
5809 
5810     //3 Flash
5811     if (m_ctlInfo.flash.m_flashDecisionResult)
5812         exifInfo->flash = 1;
5813     else
5814         exifInfo->flash = EXIF_DEF_FLASH;
5815 
5816     //3 White Balance
5817     if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO)
5818         exifInfo->white_balance = EXIF_WB_AUTO;
5819     else
5820         exifInfo->white_balance = EXIF_WB_MANUAL;
5821 
5822     //3 Scene Capture Type
5823     switch (ctl->aa.sceneMode) {
5824     case AA_SCENE_MODE_PORTRAIT:
5825         exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
5826         break;
5827     case AA_SCENE_MODE_LANDSCAPE:
5828         exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
5829         break;
5830     case AA_SCENE_MODE_NIGHT_PORTRAIT:
5831         exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
5832         break;
5833     default:
5834         exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
5835         break;
5836     }
5837 
5838     //2 0th IFD GPS Info Tags
5839     if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
5840 
5841         if (ctl->jpeg.gpsCoordinates[0] > 0)
5842             strcpy((char *)exifInfo->gps_latitude_ref, "N");
5843         else
5844             strcpy((char *)exifInfo->gps_latitude_ref, "S");
5845 
5846         if (ctl->jpeg.gpsCoordinates[1] > 0)
5847             strcpy((char *)exifInfo->gps_longitude_ref, "E");
5848         else
5849             strcpy((char *)exifInfo->gps_longitude_ref, "W");
5850 
5851         if (ctl->jpeg.gpsCoordinates[2] > 0)
5852             exifInfo->gps_altitude_ref = 0;
5853         else
5854             exifInfo->gps_altitude_ref = 1;
5855 
5856         double latitude = fabs(ctl->jpeg.gpsCoordinates[0]);
5857         double longitude = fabs(ctl->jpeg.gpsCoordinates[1]);
5858         double altitude = fabs(ctl->jpeg.gpsCoordinates[2]);
5859 
5860         exifInfo->gps_latitude[0].num = (uint32_t)latitude;
5861         exifInfo->gps_latitude[0].den = 1;
5862         exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
5863         exifInfo->gps_latitude[1].den = 1;
5864         exifInfo->gps_latitude[2].num = (uint32_t)round((((latitude - exifInfo->gps_latitude[0].num) * 60)
5865                                         - exifInfo->gps_latitude[1].num) * 60);
5866         exifInfo->gps_latitude[2].den = 1;
5867 
5868         exifInfo->gps_longitude[0].num = (uint32_t)longitude;
5869         exifInfo->gps_longitude[0].den = 1;
5870         exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
5871         exifInfo->gps_longitude[1].den = 1;
5872         exifInfo->gps_longitude[2].num = (uint32_t)round((((longitude - exifInfo->gps_longitude[0].num) * 60)
5873                                         - exifInfo->gps_longitude[1].num) * 60);
5874         exifInfo->gps_longitude[2].den = 1;
5875 
5876         exifInfo->gps_altitude.num = (uint32_t)round(altitude);
5877         exifInfo->gps_altitude.den = 1;
5878 
5879         struct tm tm_data;
5880         long timestamp;
5881         timestamp = (long)ctl->jpeg.gpsTimestamp;
5882         gmtime_r(&timestamp, &tm_data);
5883         exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
5884         exifInfo->gps_timestamp[0].den = 1;
5885         exifInfo->gps_timestamp[1].num = tm_data.tm_min;
5886         exifInfo->gps_timestamp[1].den = 1;
5887         exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
5888         exifInfo->gps_timestamp[2].den = 1;
5889         snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
5890                 "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
5891 
5892         memset(exifInfo->gps_processing_method, 0, 100);
5893         memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32);
5894         exifInfo->enableGps = true;
5895     } else {
5896         exifInfo->enableGps = false;
5897     }
5898 
5899     //2 1st IFD TIFF Tags
5900     exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
5901     exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
5902 }
5903 
5904 ExynosCameraHWInterface2::MainThread::~MainThread()
5905 {
5906     ALOGV("(%s):", __FUNCTION__);
5907 }
5908 
5909 void ExynosCameraHWInterface2::MainThread::release()
5910 {
5911     ALOGV("(%s):", __func__);
5912     SetSignal(SIGNAL_THREAD_RELEASE);
5913 }
5914 
5915 ExynosCameraHWInterface2::SensorThread::~SensorThread()
5916 {
5917     ALOGV("(%s):", __FUNCTION__);
5918 }
5919 
5920 void ExynosCameraHWInterface2::SensorThread::release()
5921 {
5922     ALOGV("(%s):", __func__);
5923     SetSignal(SIGNAL_THREAD_RELEASE);
5924 }
5925 
5926 ExynosCameraHWInterface2::StreamThread::~StreamThread()
5927 {
5928     ALOGV("(%s):", __FUNCTION__);
5929 }
5930 
5931 void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
5932 {
5933     ALOGV("DEBUG(%s):", __FUNCTION__);
5934     memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
5935 }
5936 
5937 void ExynosCameraHWInterface2::StreamThread::release()
5938 {
5939     ALOGV("(%s):", __func__);
5940     SetSignal(SIGNAL_THREAD_RELEASE);
5941 }
5942 
5943 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
5944 {
5945     int index;
5946     for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5947         if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
5948             return index;
5949     }
5950     return -1;
5951 }
5952 
5953 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
5954 {
5955     int index;
5956     for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5957         if (m_parameters.svcBufHandle[index] == *bufHandle)
5958             return index;
5959     }
5960     return -1;
5961 }
5962 
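// Bookkeeping for substreams that share this stream thread's output: up to
// NUM_MAX_SUBSTREAM ids are tracked in m_attachedSubStreams. attach returns
// BAD_VALUE for a duplicate id and NO_MEMORY when the table is full; detach
// returns BAD_VALUE if the id is not found.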
5963 status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
5964 {
5965     ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
5966     int index, vacantIndex;
5967     bool vacancy = false;
5968 
5969     for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
5970         if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
5971             vacancy = true;
5972             vacantIndex = index;
5973         } else if (m_attachedSubStreams[index].streamId == stream_id) {
5974             return BAD_VALUE;
5975         }
5976     }
5977     if (!vacancy)
5978         return NO_MEMORY;
5979     m_attachedSubStreams[vacantIndex].streamId = stream_id;
5980     m_attachedSubStreams[vacantIndex].priority = priority;
5981     m_numRegisteredStream++;
5982     return NO_ERROR;
5983 }
5984 
5985 status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
5986 {
5987     ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
5988     int index;
5989     bool found = false;
5990 
5991     for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
5992         if (m_attachedSubStreams[index].streamId == stream_id) {
5993             found = true;
5994             break;
5995         }
5996     }
5997     if (!found)
5998         return BAD_VALUE;
5999     m_attachedSubStreams[index].streamId = -1;
6000     m_attachedSubStreams[index].priority = 0;
6001     m_numRegisteredStream--;
6002     return NO_ERROR;
6003 }
6004 
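// ION memory helpers. allocCameraMemory() allocates one ION buffer per plane,
// stopping at the first zero-sized entry, from ION_HEAP_EXYNOS_MASK, optionally
// cached according to the per-plane bits in cacheFlag, and maps it into
// buf->virt.extP[i]; any failure releases everything allocated so far via
// freeCameraMemory().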
6005 int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
6006 {
6007     if (ionClient == 0) {
6008         ionClient = ion_client_create();
6009         if (ionClient < 0) {
6010             ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
6011             return 0;
6012         }
6013     }
6014     return ionClient;
6015 }
6016 
6017 int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
6018 {
6019     if (ionClient != 0) {
6020         if (ionClient > 0) {
6021             ion_client_destroy(ionClient);
6022         }
6023         ionClient = 0;
6024     }
6025     return ionClient;
6026 }
6027 
6028 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
6029 {
6030     return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
6031 }
6032 
6033 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
6034 {
6035     int ret = 0;
6036     int i = 0;
6037     int flag = 0;
6038 
6039     if (ionClient == 0) {
6040         ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
6041         return -1;
6042     }
6043 
6044     for (i = 0 ; i < iMemoryNum ; i++) {
6045         if (buf->size.extS[i] == 0) {
6046             break;
6047         }
6048         if ((1 << i) & cacheFlag)
6049             flag = ION_FLAG_CACHED;
6050         else
6051             flag = 0;
6052         buf->fd.extFd[i] = ion_alloc(ionClient, \
6053                                       buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK, flag);
6054         if ((buf->fd.extFd[i] == -1) || (buf->fd.extFd[i] == 0)) {
6055             ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
6056             buf->fd.extFd[i] = -1;
6057             freeCameraMemory(buf, iMemoryNum);
6058             return -1;
6059         }
6060 
6061         buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
6062                                         buf->size.extS[i], 0);
6063         if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
6064             ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
6065             buf->virt.extP[i] = (char *)MAP_FAILED;
6066             freeCameraMemory(buf, iMemoryNum);
6067             return -1;
6068         }
6069         ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
6070     }
6071 
6072     return ret;
6073 }
6074 
6075 void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6076 {
6077 
6078     int i = 0 ;
6079     int ret = 0;
6080 
6081     for (i = 0; i < iMemoryNum; i++) {
6082         if (buf->fd.extFd[i] != -1) {
6083             if (buf->virt.extP[i] != (char *)MAP_FAILED) {
6084                 ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
6085                 if (ret < 0)
6086                     ALOGE("ERR(%s): ion_unmap failed", __FUNCTION__);
6087             }
6088             ion_free(buf->fd.extFd[i]);
6089             ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
6090         }
6091         buf->fd.extFd[i] = -1;
6092         buf->virt.extP[i] = (char *)MAP_FAILED;
6093         buf->size.extS[i] = 0;
6094     }
6095 }
6096 
6097 void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6098 {
6099     int i = 0;
6100     for (i = 0; i < iMemoryNum; i++) {
6101         buf->virt.extP[i] = (char *)MAP_FAILED;
6102         buf->fd.extFd[i] = -1;
6103         buf->size.extS[i] = 0;
6104     }
6105 }
6106 
6107 
6108 
6109 
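// Module-scope state shared by the HAL entry points below: the single open camera2
// device, a validity flag checked by the release/trigger calls, a mutex serializing
// open/close, and the per-camera ExynosCamera2 info objects.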
6110 static camera2_device_t *g_cam2_device = NULL;
6111 static bool g_camera_vaild = false;
6112 static Mutex g_camera_mutex;
6113 ExynosCamera2 * g_camera2[2] = { NULL, NULL };
6114 
6115 static int HAL2_camera_device_close(struct hw_device_t* device)
6116 {
6117     Mutex::Autolock lock(g_camera_mutex);
6118     ALOGD("(%s): ENTER", __FUNCTION__);
6119     if (device) {
6120 
6121         camera2_device_t *cam_device = (camera2_device_t *)device;
6122         ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
6123         ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
6124         delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
6125         free(cam_device);
6126         g_camera_vaild = false;
6127         g_cam2_device = NULL;
6128     }
6129 
6130     ALOGD("(%s): EXIT", __FUNCTION__);
6131     return 0;
6132 }
6133 
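// obj() recovers the ExynosCameraHWInterface2 instance stored in
// camera2_device_t::priv; the HAL2_device_* functions below are thin trampolines
// that forward each camera2_device_ops entry point to the matching member function.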
6134 static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
6135 {
6136     return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
6137 }
6138 
6139 static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
6140             const camera2_request_queue_src_ops_t *request_src_ops)
6141 {
6142     ALOGV("DEBUG(%s):", __FUNCTION__);
6143     return obj(dev)->setRequestQueueSrcOps(request_src_ops);
6144 }
6145 
6146 static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
6147 {
6148     ALOGV("DEBUG(%s):", __FUNCTION__);
6149     return obj(dev)->notifyRequestQueueNotEmpty();
6150 }
6151 
6152 static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
6153             const camera2_frame_queue_dst_ops_t *frame_dst_ops)
6154 {
6155     ALOGV("DEBUG(%s):", __FUNCTION__);
6156     return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
6157 }
6158 
6159 static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
6160 {
6161     ALOGV("DEBUG(%s):", __FUNCTION__);
6162     return obj(dev)->getInProgressCount();
6163 }
6164 
6165 static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
6166 {
6167     ALOGV("DEBUG(%s):", __FUNCTION__);
6168     return obj(dev)->flushCapturesInProgress();
6169 }
6170 
6171 static int HAL2_device_construct_default_request(const struct camera2_device *dev,
6172             int request_template, camera_metadata_t **request)
6173 {
6174     ALOGV("DEBUG(%s):", __FUNCTION__);
6175     return obj(dev)->constructDefaultRequest(request_template, request);
6176 }
6177 
6178 static int HAL2_device_allocate_stream(
6179             const struct camera2_device *dev,
6180             // inputs
6181             uint32_t width,
6182             uint32_t height,
6183             int      format,
6184             const camera2_stream_ops_t *stream_ops,
6185             // outputs
6186             uint32_t *stream_id,
6187             uint32_t *format_actual,
6188             uint32_t *usage,
6189             uint32_t *max_buffers)
6190 {
6191     ALOGV("(%s): ", __FUNCTION__);
6192     return obj(dev)->allocateStream(width, height, format, stream_ops,
6193                                     stream_id, format_actual, usage, max_buffers);
6194 }
6195 
6196 static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
6197             uint32_t stream_id,
6198             int num_buffers,
6199             buffer_handle_t *buffers)
6200 {
6201     ALOGV("DEBUG(%s):", __FUNCTION__);
6202     return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
6203 }
6204 
6205 static int HAL2_device_release_stream(
6206         const struct camera2_device *dev,
6207             uint32_t stream_id)
6208 {
6209     ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
6210     if (!g_camera_vaild)
6211         return 0;
6212     return obj(dev)->releaseStream(stream_id);
6213 }
6214 
6215 static int HAL2_device_allocate_reprocess_stream(
6216            const struct camera2_device *dev,
6217             uint32_t width,
6218             uint32_t height,
6219             uint32_t format,
6220             const camera2_stream_in_ops_t *reprocess_stream_ops,
6221             // outputs
6222             uint32_t *stream_id,
6223             uint32_t *consumer_usage,
6224             uint32_t *max_buffers)
6225 {
6226     ALOGV("DEBUG(%s):", __FUNCTION__);
6227     return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
6228                                     stream_id, consumer_usage, max_buffers);
6229 }
6230 
6231 static int HAL2_device_allocate_reprocess_stream_from_stream(
6232            const struct camera2_device *dev,
6233             uint32_t output_stream_id,
6234             const camera2_stream_in_ops_t *reprocess_stream_ops,
6235             // outputs
6236             uint32_t *stream_id)
6237 {
6238     ALOGV("DEBUG(%s):", __FUNCTION__);
6239     return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
6240                                     reprocess_stream_ops, stream_id);
6241 }
6242 
6243 static int HAL2_device_release_reprocess_stream(
6244         const struct camera2_device *dev,
6245             uint32_t stream_id)
6246 {
6247     ALOGV("DEBUG(%s):", __FUNCTION__);
6248     return obj(dev)->releaseReprocessStream(stream_id);
6249 }
6250 
6251 static int HAL2_device_trigger_action(const struct camera2_device *dev,
6252            uint32_t trigger_id,
6253             int ext1,
6254             int ext2)
6255 {
6256     ALOGV("DEBUG(%s):", __FUNCTION__);
6257     if (!g_camera_vaild)
6258         return 0;
6259     return obj(dev)->triggerAction(trigger_id, ext1, ext2);
6260 }
6261 
HAL2_device_set_notify_callback(const struct camera2_device * dev,camera2_notify_callback notify_cb,void * user)6262 static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
6263             camera2_notify_callback notify_cb,
6264             void *user)
6265 {
6266     ALOGV("DEBUG(%s):", __FUNCTION__);
6267     return obj(dev)->setNotifyCallback(notify_cb, user);
6268 }
6269 
HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device * dev,vendor_tag_query_ops_t ** ops)6270 static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
6271             vendor_tag_query_ops_t **ops)
6272 {
6273     ALOGV("DEBUG(%s):", __FUNCTION__);
6274     return obj(dev)->getMetadataVendorTagOps(ops);
6275 }
6276 
HAL2_device_dump(const struct camera2_device * dev,int fd)6277 static int HAL2_device_dump(const struct camera2_device *dev, int fd)
6278 {
6279     ALOGV("DEBUG(%s):", __FUNCTION__);
6280     return obj(dev)->dump(fd);
6281 }
6282 
6283 
6284 
6285 
6286 
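/*
 * Module-level entry points: camera enumeration and static metadata.
 * This HAL reports exactly two cameras; id 0 is treated as the back
 * camera and id 1 as the front camera in HAL2_getCameraInfo() below.
 */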
static int HAL2_getNumberOfCameras()
{
    ALOGV("(%s): returning 2", __FUNCTION__);
    return 2;
}

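/*
 * The static camera characteristics are built once per camera id and cached
 * in mCameraInfo[]. constructStaticInfo() is called twice: the boolean
 * argument appears to select an allocation/sizing pass (true) followed by a
 * pass that fills in the metadata entries (false).
 */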
static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
{
    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
    static camera_metadata_t *mCameraInfo[2] = {NULL, NULL};

    status_t res;

    if (cameraId == 0) {
        info->facing = CAMERA_FACING_BACK;
        if (!g_camera2[0])
            g_camera2[0] = new ExynosCamera2(0);
    }
    else if (cameraId == 1) {
        info->facing = CAMERA_FACING_FRONT;
        if (!g_camera2[1])
            g_camera2[1] = new ExynosCamera2(1);
    }
    else
        return BAD_VALUE;

    info->orientation = 0;
    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
    if (mCameraInfo[cameraId] == NULL) {
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
        if (res != OK) {
            ALOGE("%s: Unable to allocate static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
        if (res != OK) {
            ALOGE("%s: Unable to fill in static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }
    info->static_camera_characteristics = mCameraInfo[cameraId];
    return NO_ERROR;
}

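/*
 * SET_METHOD(m) expands to the GCC-style designated initializer
 * "m : HAL2_device_m", binding each camera2_device_ops entry to the
 * matching static wrapper above, e.g. SET_METHOD(dump) becomes
 * "dump : HAL2_device_dump".
 */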
#define SET_METHOD(m) m : HAL2_device_##m

static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(allocate_reprocess_stream_from_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
};

#undef SET_METHOD

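/*
 * Device open: serialized by g_camera_mutex. A single camera2_device_t
 * (g_cam2_device) is kept per process; its hw_device_t header is filled in,
 * the ops table above is attached, and the real work is delegated to an
 * ExynosCameraHWInterface2 instance stored in the priv field. Despite its
 * name, the openInvalid out-parameter is set non-zero by the constructor on
 * success, which is what the check below relies on.
 */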
static int HAL2_camera_device_open(const struct hw_module_t* module,
                                  const char *id,
                                  struct hw_device_t** device)
{
    int cameraId = atoi(id);
    int openInvalid = 0;

    Mutex::Autolock lock(g_camera_mutex);
    if (g_camera_vaild) {
        ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__);
        return -EBUSY;
    }
    g_camera_vaild = false;
    ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
        return -EINVAL;
    }

    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
    if (g_cam2_device) {
        if (obj(g_cam2_device)->getCameraId() == cameraId) {
            ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
            goto done;
        } else {
            ALOGD("(%s): START waiting for cam device free", __FUNCTION__);
            while (g_cam2_device)
                usleep(SIG_WAITING_TICK);
            ALOGD("(%s): END   waiting for cam device free", __FUNCTION__);
        }
    }

    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);

    if (!g_cam2_device)
        return -ENOMEM;

    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
    g_cam2_device->common.close   = HAL2_camera_device_close;

    g_cam2_device->ops = &camera2_device_ops;

    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);

    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
    if (!openInvalid) {
        ALOGE("ERR(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
        // Release the half-initialized device so a later open() does not
        // reuse it or spin forever waiting on the stale g_cam2_device pointer.
        delete static_cast<ExynosCameraHWInterface2 *>(g_cam2_device->priv);
        free(g_cam2_device);
        g_cam2_device = NULL;
        return -ENODEV;
    }
done:
    *device = (hw_device_t *)g_cam2_device;
    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
    g_camera_vaild = true;

    return 0;
}

static hw_module_methods_t camera_module_methods = {
            open : HAL2_camera_device_open
};

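/*
 * HAL_MODULE_INFO_SYM is the well-known symbol the Android HAL loader
 * (hw_get_module) resolves when this library is loaded; it advertises the
 * module API version and routes open() calls through camera_module_methods
 * to HAL2_camera_device_open above.
 */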
extern "C" {
    struct camera_module HAL_MODULE_INFO_SYM = {
      common : {
          tag                : HARDWARE_MODULE_TAG,
          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
          hal_api_version    : HARDWARE_HAL_API_VERSION,
          id                 : CAMERA_HARDWARE_MODULE_ID,
          name               : "Exynos Camera HAL2",
          author             : "Samsung Corporation",
          methods            : &camera_module_methods,
          dso                : NULL,
          reserved           : {0},
      },
      get_number_of_cameras : HAL2_getNumberOfCameras,
      get_camera_info       : HAL2_getCameraInfo
    };
}
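/*
 * For reference only (not compiled here): a minimal sketch of how a client
 * on the framework side typically reaches this module through the standard
 * libhardware hw_get_module()/hw_module_t API. Illustrative, not part of
 * this HAL.
 *
 *   const hw_module_t *module;
 *   if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, &module) == 0) {
 *       hw_device_t *device;
 *       // "0" selects the back camera; lands in HAL2_camera_device_open()
 *       module->methods->open(module, "0", &device);
 *       camera2_device_t *cam2 = (camera2_device_t *)device;
 *       cam2->ops->dump(cam2, 1);      // dispatches to HAL2_device_dump()
 *       device->close(device);         // HAL2_camera_device_close()
 *   }
 */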

}; // namespace android