1 /*
2 **
3 ** Copyright 2008, The Android Open Source Project
4 ** Copyright 2012, Samsung Electronics Co. LTD
5 **
6 ** Licensed under the Apache License, Version 2.0 (the "License");
7 ** you may not use this file except in compliance with the License.
8 ** You may obtain a copy of the License at
9 **
10 **     http://www.apache.org/licenses/LICENSE-2.0
11 **
12 ** Unless required by applicable law or agreed to in writing, software
13 ** distributed under the License is distributed on an "AS IS" BASIS,
14 ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 ** See the License for the specific language governing permissions and
16 ** limitations under the License.
17 */
18 
19 /*!
20  * \file      ExynosCameraHWInterface2.cpp
21  * \brief     source file for Android Camera API 2.0 HAL
22  * \author    Sungjoong Kang(sj3.kang@samsung.com)
23  * \date      2012/07/10
24  *
25  * <b>Revision History: </b>
26  * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
27  *   Initial Release
28  *
29  * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
30  *   2nd Release
31  *
32  */
33 
34 //#define LOG_NDEBUG 0
35 #define LOG_TAG "ExynosCameraHAL2"
36 #include <sys/time.h>
37 #include <utils/Log.h>
38 #include <math.h>
39 
40 #include "ExynosCameraHWInterface2.h"
41 #include "exynos_format.h"
42 
43 namespace android {
44 
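// Debug helper: writes 'size' bytes from 'buf' to the file 'fname', looping until the whole
// buffer has been written or a write fails.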
45 void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
46 {
47     int nw;
48     int cnt = 0;
49     uint32_t written = 0;
50 
51     ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
52     int fd = open(fname, O_RDWR | O_CREAT, 0644);
53     if (fd < 0) {
54         ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
55         return;
56     }
57 
58     ALOGV("writing %d bytes to file [%s]", size, fname);
59     while (written < size) {
60         nw = ::write(fd, buf + written, size - written);
61         if (nw < 0) {
62             ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
63             break;
64         }
65         written += nw;
66         cnt++;
67     }
68     ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
69     ::close(fd);
70 }
71 
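// Returns the effective bits-per-pixel for a V4L2 pixel format (0, with an error log, for unknown formats).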
72 int get_pixel_depth(uint32_t fmt)
73 {
74     int depth = 0;
75 
76     switch (fmt) {
77     case V4L2_PIX_FMT_JPEG:
78         depth = 8;
79         break;
80 
81     case V4L2_PIX_FMT_NV12:
82     case V4L2_PIX_FMT_NV21:
83     case V4L2_PIX_FMT_YUV420:
84     case V4L2_PIX_FMT_YVU420M:
85     case V4L2_PIX_FMT_NV12M:
86     case V4L2_PIX_FMT_NV12MT:
87         depth = 12;
88         break;
89 
90     case V4L2_PIX_FMT_RGB565:
91     case V4L2_PIX_FMT_YUYV:
92     case V4L2_PIX_FMT_YVYU:
93     case V4L2_PIX_FMT_UYVY:
94     case V4L2_PIX_FMT_VYUY:
95     case V4L2_PIX_FMT_NV16:
96     case V4L2_PIX_FMT_NV61:
97     case V4L2_PIX_FMT_YUV422P:
98     case V4L2_PIX_FMT_SBGGR10:
99     case V4L2_PIX_FMT_SBGGR12:
100     case V4L2_PIX_FMT_SBGGR16:
101         depth = 16;
102         break;
103 
104     case V4L2_PIX_FMT_RGB32:
105         depth = 32;
106         break;
107     default:
108         ALOGE("Get depth failed(format : %d)", fmt);
109         break;
110     }
111 
112     return depth;
113 }
114 
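// Applies the node's width/height/format to the video device via VIDIOC_S_FMT (multi-plane API).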
115 int cam_int_s_fmt(node_info_t *node)
116 {
117     struct v4l2_format v4l2_fmt;
118     unsigned int framesize;
119     int ret;
120 
121     memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
122 
123     v4l2_fmt.type = node->type;
124     framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
125 
126     if (node->planes >= 1) {
127         v4l2_fmt.fmt.pix_mp.width       = node->width;
128         v4l2_fmt.fmt.pix_mp.height      = node->height;
129         v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
130         v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
131     } else {
132         ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
133     }
134 
135     /* Set up for capture */
136     ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
137 
138     if (ret < 0)
139         ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
140 
141 
142     return ret;
143 }
144 
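// Requests node->buffers buffers with VIDIOC_REQBUFS and returns the count actually granted by the driver.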
145 int cam_int_reqbufs(node_info_t *node)
146 {
147     struct v4l2_requestbuffers req;
148     int ret;
149 
150     req.count = node->buffers;
151     req.type = node->type;
152     req.memory = node->memory;
153 
154     ret = exynos_v4l2_reqbufs(node->fd, &req);
155 
156     if (ret < 0)
157         ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
158 
159     return req.count;
160 }
161 
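// Queues buffer 'index' to the node, passing the per-plane DMABUF fds and sizes.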
162 int cam_int_qbuf(node_info_t *node, int index)
163 {
164     struct v4l2_buffer v4l2_buf;
165     struct v4l2_plane planes[VIDEO_MAX_PLANES];
166     int i;
167     int ret = 0;
168 
169     v4l2_buf.m.planes   = planes;
170     v4l2_buf.type       = node->type;
171     v4l2_buf.memory     = node->memory;
172     v4l2_buf.index      = index;
173     v4l2_buf.length     = node->planes;
174 
175     for(i = 0; i < node->planes; i++){
176         v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
177         v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
178     }
179 
180     ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
181 
182     if (ret < 0)
183         ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
184 
185     return ret;
186 }
187 
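// Starts streaming on the node (VIDIOC_STREAMON).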
188 int cam_int_streamon(node_info_t *node)
189 {
190     enum v4l2_buf_type type = node->type;
191     int ret;
192 
193 
194     ret = exynos_v4l2_streamon(node->fd, type);
195 
196     if (ret < 0)
197         ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
198 
199     ALOGV("On streaming I/O... ... fd(%d)", node->fd);
200 
201     return ret;
202 }
203 
204 int cam_int_streamoff(node_info_t *node)
205 {
206     enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
207     int ret;
208 
209 
210     ALOGV("Off streaming I/O... fd(%d)", node->fd);
211     ret = exynos_v4l2_streamoff(node->fd, type);
212 
213     if (ret < 0)
214         ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
215 
216     return ret;
217 }
218 
219 int isp_int_streamoff(node_info_t *node)
220 {
221     enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
222     int ret;
223 
224     ALOGV("Off streaming I/O... fd(%d)", node->fd);
225     ret = exynos_v4l2_streamoff(node->fd, type);
226 
227     if (ret < 0)
228         ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
229 
230     return ret;
231 }
232 
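// Dequeues the next filled buffer from the node (VIDIOC_DQBUF) and returns its index.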
233 int cam_int_dqbuf(node_info_t *node)
234 {
235     struct v4l2_buffer v4l2_buf;
236     struct v4l2_plane planes[VIDEO_MAX_PLANES];
237     int ret;
238 
239     v4l2_buf.type       = node->type;
240     v4l2_buf.memory     = node->memory;
241     v4l2_buf.m.planes   = planes;
242     v4l2_buf.length     = node->planes;
243 
244     ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
245     if (ret < 0)
246         ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
247 
248     return v4l2_buf.index;
249 }
250 
251 int cam_int_dqbuf(node_info_t *node, int num_plane)
252 {
253     struct v4l2_buffer v4l2_buf;
254     struct v4l2_plane planes[VIDEO_MAX_PLANES];
255     int ret;
256 
257     v4l2_buf.type       = node->type;
258     v4l2_buf.memory     = node->memory;
259     v4l2_buf.m.planes   = planes;
260     v4l2_buf.length     = num_plane;
261 
262     ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
263     if (ret < 0)
264         ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
265 
266     return v4l2_buf.index;
267 }
268 
269 int cam_int_s_input(node_info_t *node, int index)
270 {
271     int ret;
272 
273     ret = exynos_v4l2_s_input(node->fd, index);
274     if (ret < 0)
275         ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
276 
277     return ret;
278 }
279 
280 
281 gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
282 
283 RequestManager::RequestManager(SignalDrivenThread* main_thread):
284     m_vdisEnable(false),
285     m_lastCompletedFrameCnt(-1),
286     m_lastAeMode(0),
287     m_lastAaMode(0),
288     m_lastAwbMode(0),
289     m_lastAeComp(0),
290     m_vdisBubbleEn(false)
291 {
292     m_metadataConverter = new MetadataConverter;
293     m_mainThread = main_thread;
294     ResetEntry();
295     m_sensorPipelineSkipCnt = 0;
296     return;
297 }
298 
299 RequestManager::~RequestManager()
300 {
301     ALOGV("%s", __FUNCTION__);
302     if (m_metadataConverter != NULL) {
303         delete m_metadataConverter;
304         m_metadataConverter = NULL;
305     }
306 
307     releaseSensorQ();
308     return;
309 }
310 
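// Clears every slot of the circular request table and resets the insertion/processing/output indices.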
311 void RequestManager::ResetEntry()
312 {
313     Mutex::Autolock lock(m_requestMutex);
314     Mutex::Autolock lock2(m_numOfEntriesLock);
315     for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
316         memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
317         entries[i].internal_shot.shot.ctl.request.frameCount = -1;
318     }
319     m_numOfEntries = 0;
320     m_entryInsertionIndex = -1;
321     m_entryProcessingIndex = -1;
322     m_entryFrameOutputIndex = -1;
323 }
324 
325 int RequestManager::GetNumEntries()
326 {
327     Mutex::Autolock lock(m_numOfEntriesLock);
328     return m_numOfEntries;
329 }
330 
331 void RequestManager::SetDefaultParameters(int cropX)
332 {
333     m_cropX = cropX;
334 }
335 
336 bool RequestManager::IsRequestQueueFull()
337 {
338     Mutex::Autolock lock(m_requestMutex);
339     Mutex::Autolock lock2(m_numOfEntriesLock);
340     if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
341         return true;
342     else
343         return false;
344 }
345 
346 void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion)
347 {
348     ALOGV("DEBUG(%s):", __FUNCTION__);
349 
350     Mutex::Autolock lock(m_requestMutex);
351     Mutex::Autolock lock2(m_numOfEntriesLock);
352 
353     request_manager_entry * newEntry = NULL;
354     int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
355     ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries );
356 
357 
358     newEntry = &(entries[newInsertionIndex]);
359 
360     if (newEntry->status!=EMPTY) {
361         ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
362         return;
363     }
364     newEntry->status = REGISTERED;
365     newEntry->original_request = new_request;
366     memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
367     m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
368     newEntry->output_stream_count = 0;
369     if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
370         newEntry->output_stream_count++;
371 
372     if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
373         newEntry->output_stream_count++;
374 
375     m_numOfEntries++;
376     m_entryInsertionIndex = newInsertionIndex;
377 
378 
379     *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
380     afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0];
381     afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1];
382     afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2];
383     afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3];
384     ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
385     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
386 }
387 
388 void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
389 {
390     ALOGV("DEBUG(%s):", __FUNCTION__);
391     int frame_index;
392     request_manager_entry * currentEntry;
393 
394     Mutex::Autolock lock(m_requestMutex);
395     Mutex::Autolock lock2(m_numOfEntriesLock);
396 
397     frame_index = GetCompletedIndex();
398     currentEntry =  &(entries[frame_index]);
399     if (currentEntry->status != COMPLETED) {
400         CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
401                        m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
402         return;
403     }
404     if (deregistered_request)  *deregistered_request = currentEntry->original_request;
405 
406     m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;
407 
408     currentEntry->status = EMPTY;
409     currentEntry->original_request = NULL;
410     memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
411     currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
412     currentEntry->output_stream_count = 0;
413     m_numOfEntries--;
414     ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
415      m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
416 
417     CheckCompleted(GetNextIndex(frame_index));
418     return;
419 }
420 
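// Converts the internal shot of the most recently completed entry into a camera_metadata frame
// that can be handed back to the framework.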
421 bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
422                 camera_metadata_t ** prepared_frame, int afState)
423 {
424     ALOGV("DEBUG(%s):", __FUNCTION__);
425     Mutex::Autolock lock(m_requestMutex);
426     status_t res = NO_ERROR;
427     int tempFrameOutputIndex = GetCompletedIndex();
428     request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
429     ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
430         m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
431 
432     if (currentEntry->status != COMPLETED) {
433         ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
434 
435         return false;
436     }
437     m_entryFrameOutputIndex = tempFrameOutputIndex;
438     m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated
439     add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
440     res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
441                 m_tempFrameMetadata);
442     if (res!=NO_ERROR) {
443         ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
444         return false;
445     }
446     *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
447     *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
448     *prepared_frame = m_tempFrameMetadata;
449     ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
450         currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
451     // Dump();
452     return true;
453 }
454 
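// Moves the next REGISTERED entry to REQUESTED and fills in the per-frame shot_ext embedded
// in the sensor buffer's metadata plane.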
455 int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
456 {
457     struct camera2_shot_ext * shot_ext;
458     struct camera2_shot_ext * request_shot;
459     int targetStreamIndex = 0;
460     request_manager_entry * newEntry = NULL;
461     static int count = 0;
462 
463     Mutex::Autolock lock(m_requestMutex);
464     Mutex::Autolock lock2(m_numOfEntriesLock);
465     if (m_numOfEntries == 0)  {
466         CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
467         return -1;
468     }
469 
470     if ((m_entryProcessingIndex == m_entryInsertionIndex)
471         && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
472         ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
473          m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
474         return -1;
475     }
476 
477     int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
478     ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
479 
480     newEntry = &(entries[newProcessingIndex]);
481     request_shot = &(newEntry->internal_shot);
482     if (newEntry->status != REGISTERED) {
483         CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
484         for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
485                 CAM_LOGD("DBG: entries[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
486         }
487         return -1;
488     }
489 
490     newEntry->status = REQUESTED;
491 
492     shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
493 
494     memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
495     shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
496     shot_ext->request_sensor = 1;
497     shot_ext->dis_bypass = 1;
498     shot_ext->dnr_bypass = 1;
499     shot_ext->fd_bypass = 1;
500     shot_ext->setfile = 0;
501 
502     targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
503     shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
504     if (targetStreamIndex & MASK_OUTPUT_SCP)
505         shot_ext->request_scp = 1;
506 
507     if (targetStreamIndex & MASK_OUTPUT_SCC)
508         shot_ext->request_scc = 1;
509 
510     if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
511         shot_ext->fd_bypass = 0;
512 
513     if (count == 0){
514         shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
515     } else
516         shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
517 
518     count++;
519     shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
520     shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
521     shot_ext->shot.magicNumber = 0x23456789;
522     shot_ext->shot.ctl.sensor.exposureTime = 0;
523     shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
524     shot_ext->shot.ctl.sensor.sensitivity = 0;
525 
526 
527     shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
528     shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
529     shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];
530 
531     m_entryProcessingIndex = newProcessingIndex;
532     return newProcessingIndex;
533 }
534 
535 void RequestManager::NotifyStreamOutput(int frameCnt)
536 {
537     int index;
538 
539     Mutex::Autolock lock(m_requestMutex);
540     ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
541 
542     index = FindEntryIndexByFrameCnt(frameCnt);
543     if (index == -1) {
544         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
545         return;
546     }
547     ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt,   entries[index].output_stream_count);
548 
549     entries[index].output_stream_count--;  //TODO : match stream id also
550     CheckCompleted(index);
551 }
552 
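// Marks an entry COMPLETED once its metadata is done and all stream outputs have been delivered,
// signaling the main thread when frames complete in order.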
553 void RequestManager::CheckCompleted(int index)
554 {
555     if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
556         && (entries[index].output_stream_count <= 0)){
557         ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
558                 index, entries[index].internal_shot.shot.ctl.request.frameCount );
559         entries[index].status = COMPLETED;
560         if (m_lastCompletedFrameCnt + 1 == (int)entries[index].internal_shot.shot.ctl.request.frameCount)
561             m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
562     }
563 }
564 
565 int RequestManager::GetCompletedIndex()
566 {
567     return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
568 }
569 
570 void  RequestManager::pushSensorQ(int index)
571 {
572     Mutex::Autolock lock(m_requestMutex);
573     m_sensorQ.push_back(index);
574 }
575 
576 int RequestManager::popSensorQ()
577 {
578    List<int>::iterator sensor_token;
579    int index;
580 
581     Mutex::Autolock lock(m_requestMutex);
582 
583     if(m_sensorQ.size() == 0)
584         return -1;
585 
586     sensor_token = m_sensorQ.begin()++;
587     index = *sensor_token;
588     m_sensorQ.erase(sensor_token);
589 
590     return (index);
591 }
592 
593 void RequestManager::releaseSensorQ()
594 {
595     List<int>::iterator r;
596 
597     Mutex::Autolock lock(m_requestMutex);
598     ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());
599 
600     while(m_sensorQ.size() > 0){
601         r  = m_sensorQ.begin()++;
602         m_sensorQ.erase(r);
603     }
604     return;
605 }
606 
607 void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
608 {
609     int index;
610     struct camera2_shot_ext * request_shot;
611     nsecs_t timeStamp;
612     int i;
613 
614     Mutex::Autolock lock(m_requestMutex);
615     ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
616 
617     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
618         if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
619             && (entries[i].status == CAPTURED)){
620             entries[i].status = METADONE;
621             break;
622         }
623     }
624 
625     if (i == NUM_MAX_REQUEST_MGR_ENTRY){
626         ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
627         return;
628     }
629 
630     request_manager_entry * newEntry = &(entries[i]);
631     request_shot = &(newEntry->internal_shot);
632 
633     timeStamp = request_shot->shot.dm.sensor.timeStamp;
634     memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
635     request_shot->shot.dm.sensor.timeStamp = timeStamp;
636     m_lastTimeStamp = timeStamp;
637     CheckCompleted(i);
638 }
639 
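// Copies the controls of the request matching 'frameCnt' into the ISP shot, forwarding 3A modes
// only when they changed since the previous frame and setting the bypass/request flags accordingly.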
640 void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
641 {
642     int index, targetStreamIndex;
643     struct camera2_shot_ext * request_shot;
644 
645     ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
646     if (frameCnt < 0)
647         return;
648 
649     index = FindEntryIndexByFrameCnt(frameCnt);
650     if (index == -1) {
651         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
652         return;
653     }
654 
655     request_manager_entry * newEntry = &(entries[index]);
656     request_shot = &(newEntry->internal_shot);
657     memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
658     shot_ext->shot.ctl.request.frameCount = frameCnt;
659     shot_ext->request_sensor = 1;
660     shot_ext->dis_bypass = 1;
661     shot_ext->dnr_bypass = 1;
662     shot_ext->fd_bypass = 1;
663     shot_ext->drc_bypass = 1;
664     shot_ext->setfile = 0;
665 
666     shot_ext->request_scc = 0;
667     shot_ext->request_scp = 0;
668 
669     shot_ext->isReprocessing = request_shot->isReprocessing;
670     shot_ext->reprocessInput = request_shot->reprocessInput;
671     shot_ext->shot.ctl.request.outputStreams[0] = 0;
672 
673     shot_ext->awb_mode_dm = request_shot->awb_mode_dm;
674 
675     shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
676     shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
677     shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];
678 
679     // mapping flash UI mode from aeMode
680     if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
681         if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
682             ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
683         else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD)
684             ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
685         request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
686     }
687 
688     // Apply ae/awb lock or unlock
689     if (request_shot->ae_lock == AEMODE_LOCK_ON)
690             request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
691     if (request_shot->awb_lock == AWBMODE_LOCK_ON)
692             request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
693 
694     if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
695         shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
696     }
697     else {
698         shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
699         m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
700     }
701     if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
702         shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
703     }
704     else {
705         shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
706         m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
707     }
708     if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
709         shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
710     }
711     else {
712         shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
713         m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
714     }
715     if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
716         shot_ext->shot.ctl.aa.aeExpCompensation = 0;
717     }
718     else {
719         shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
720         m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
721     }
722 
723     if (request_shot->shot.ctl.aa.videoStabilizationMode && m_vdisEnable) {
724         m_vdisBubbleEn = true;
725         shot_ext->dis_bypass = 0;
726         shot_ext->dnr_bypass = 0;
727     } else {
728         m_vdisBubbleEn = false;
729         shot_ext->dis_bypass = 1;
730         shot_ext->dnr_bypass = 1;
731     }
732 
733     shot_ext->shot.ctl.aa.afTrigger = 0;
734 
735     targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
736     shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
737     if (targetStreamIndex & MASK_OUTPUT_SCP)
738         shot_ext->request_scp = 1;
739 
740     if (targetStreamIndex & MASK_OUTPUT_SCC)
741         shot_ext->request_scc = 1;
742 
743     if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
744         shot_ext->fd_bypass = 0;
745 
746     shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0];
747     shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1];
748 
749     ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
750     (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
751     (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
752     (int)(shot_ext->shot.ctl.aa.afMode));
753 }
754 
755 bool    RequestManager::IsVdisEnable(void)
756 {
757         return m_vdisBubbleEn;
758 }
759 
760 int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
761 {
762     for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
763         if ((int)entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
764             return i;
765     }
766     return -1;
767 }
768 
769 void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
770 {
771     int index = FindEntryIndexByFrameCnt(frameCnt);
772     if (index == -1) {
773         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
774         return;
775     }
776 
777     request_manager_entry * currentEntry = &(entries[index]);
778     if (currentEntry->internal_shot.isReprocessing == 1) {
779         ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
780         index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
781     } else {
782         currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
783         ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
784             index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
785     }
786 }
787 
788 
789 nsecs_t  RequestManager::GetTimestampByFrameCnt(int frameCnt)
790 {
791     int index = FindEntryIndexByFrameCnt(frameCnt);
792     if (index == -1) {
793         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
794         return m_lastTimeStamp;
795     }
796     else
797         return GetTimestamp(index);
798 }
799 
800 nsecs_t  RequestManager::GetTimestamp(int index)
801 {
802     Mutex::Autolock lock(m_requestMutex);
803     if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
804         ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
805         return 0;
806     }
807 
808     request_manager_entry * currentEntry = &(entries[index]);
809     nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
810     if (frameTime == 0) {
811         ALOGV("DEBUG(%s): timestamp null,  returning saved value", __FUNCTION__);
812         frameTime = m_lastTimeStamp;
813     }
814     ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
815     return frameTime;
816 }
817 
818 uint8_t  RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
819 {
820     int index = FindEntryIndexByFrameCnt(frameCnt);
821     if (index == -1) {
822         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
823         return 0;
824     }
825     else
826         return GetOutputStream(index);
827 }
828 
829 uint8_t  RequestManager::GetOutputStream(int index)
830 {
831     Mutex::Autolock lock(m_requestMutex);
832     if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
833         ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
834         return 0;
835     }
836 
837     request_manager_entry * currentEntry = &(entries[index]);
838     return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
839 }
840 
841 camera2_shot_ext *  RequestManager::GetInternalShotExtByFrameCnt(int frameCnt)
842 {
843     int index = FindEntryIndexByFrameCnt(frameCnt);
844     if (index == -1) {
845         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
846         return 0;
847     }
848     else
849         return GetInternalShotExt(index);
850 }
851 
852 camera2_shot_ext *  RequestManager::GetInternalShotExt(int index)
853 {
854     Mutex::Autolock lock(m_requestMutex);
855     if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
856         ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
857         return 0;
858     }
859 
860     request_manager_entry * currentEntry = &(entries[index]);
861     return &currentEntry->internal_shot;
862 }
863 
864 int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
865 {
866     Mutex::Autolock lock(m_requestMutex);
867     int i;
868 
869     if (m_numOfEntries == 0) {
870         CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
871         return -1;
872     }
873 
874     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
875         if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
876             continue;
877 
878         if (entries[i].status == REQUESTED) {
879             entries[i].status = CAPTURED;
880             return entries[i].internal_shot.shot.ctl.request.frameCount;
881         }
882         CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);
883 
884     }
885     CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
886 
887     return -1;
888 }
889 
890 void     RequestManager::SetInitialSkip(int count)
891 {
892     ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
893     if (count > m_sensorPipelineSkipCnt)
894         m_sensorPipelineSkipCnt = count;
895 }
896 
897 int     RequestManager::GetSkipCnt()
898 {
899     ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
900     if (m_sensorPipelineSkipCnt == 0)
901         return m_sensorPipelineSkipCnt;
902     else
903         return --m_sensorPipelineSkipCnt;
904 }
905 
906 void RequestManager::Dump(void)
907 {
908     int i = 0;
909     request_manager_entry * currentEntry;
910     Mutex::Autolock lock(m_numOfEntriesLock);
911     ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
912     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
913 
914     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
915         currentEntry =  &(entries[i]);
916         ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
917         currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
918             currentEntry->output_stream_count,
919             currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
920     }
921 }
922 
923 int     RequestManager::GetNextIndex(int index)
924 {
925     index++;
926     if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
927         index = 0;
928 
929     return index;
930 }
931 
932 int     RequestManager::GetPrevIndex(int index)
933 {
934     index--;
935     if (index < 0)
936         index = NUM_MAX_REQUEST_MGR_ENTRY-1;
937 
938     return index;
939 }
940 
941 ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
942             m_requestQueueOps(NULL),
943             m_frameQueueOps(NULL),
944             m_callbackCookie(NULL),
945             m_numOfRemainingReqInSvc(0),
946             m_isRequestQueuePending(false),
947             m_isRequestQueueNull(true),
948             m_halDevice(dev),
949             m_ionCameraClient(0),
950             m_isIspStarted(false),
951             m_sccLocalBufferValid(false),
952             m_cameraId(cameraId),
953             m_scp_closing(false),
954             m_scp_closed(false),
955             m_wideAspect(false),
956             m_zoomRatio(1),
957             m_vdisBubbleCnt(0),
958             m_vdisDupFrame(0),
959             m_jpegEncodingCount(0),
960             m_scpForceSuspended(false),
961             m_afState(HAL_AFSTATE_INACTIVE),
962             m_afTriggerId(0),
963             m_afMode(NO_CHANGE),
964             m_afMode2(NO_CHANGE),
965             m_IsAfModeUpdateRequired(false),
966             m_IsAfTriggerRequired(false),
967             m_IsAfLockRequired(false),
968             m_serviceAfState(ANDROID_CONTROL_AF_STATE_INACTIVE),
969             m_afPendingTriggerId(0),
970             m_afModeWaitingCnt(0),
971             m_scpOutputSignalCnt(0),
972             m_scpOutputImageCnt(0),
973             m_nightCaptureCnt(0),
974             m_nightCaptureFrameCnt(0),
975             m_lastSceneMode(0),
976             m_thumbNailW(160),
977             m_thumbNailH(120)
978 {
979     ALOGD("(%s): ENTER", __FUNCTION__);
980     int ret = 0;
981     int res = 0;
982 
983     m_exynosPictureCSC = NULL;
984     m_exynosVideoCSC = NULL;
985 
986     if (!m_grallocHal) {
987         ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
988         if (ret)
989             ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
990     }
991 
992     m_camera2 = camera;
993     m_ionCameraClient = createIonClient(m_ionCameraClient);
994     if(m_ionCameraClient == 0)
995         ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
996 
997 
998     m_BayerManager = new BayerBufManager();
999     m_mainThread    = new MainThread(this);
1000     m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
1001     *openInvalid = InitializeISPChain();
1002     if (*openInvalid < 0) {
1003         ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
1004         // clean process
1005         // 1. close video nodes
1006         // SCP
1007         res = exynos_v4l2_close(m_camera_info.scp.fd);
1008         if (res != NO_ERROR ) {
1009             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1010         }
1011         // SCC
1012         res = exynos_v4l2_close(m_camera_info.capture.fd);
1013         if (res != NO_ERROR ) {
1014             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1015         }
1016         // Sensor
1017         res = exynos_v4l2_close(m_camera_info.sensor.fd);
1018         if (res != NO_ERROR ) {
1019             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1020         }
1021         // ISP
1022         res = exynos_v4l2_close(m_camera_info.isp.fd);
1023         if (res != NO_ERROR ) {
1024             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1025         }
1026     } else {
1027         m_sensorThread  = new SensorThread(this);
1028         m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
1029         m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1030         ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);
1031 
1032         for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
1033             m_subStreams[i].type =  SUBSTREAM_TYPE_NONE;
1034         CSC_METHOD cscMethod = CSC_METHOD_HW;
1035         m_exynosPictureCSC = csc_init(cscMethod);
1036         if (m_exynosPictureCSC == NULL)
1037             ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
1038         csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
1039         csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);
1040 
1041         m_exynosVideoCSC = csc_init(cscMethod);
1042         if (m_exynosVideoCSC == NULL)
1043             ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
1044         csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
1045         csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);
1046 
1047         m_setExifFixedAttribute();
1048 
1049         // control information clear
1050         // flash
1051         m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
1052         m_ctlInfo.flash.m_afFlashDoneFlg= false;
1053         m_ctlInfo.flash.m_flashEnableFlg = false;
1054         m_ctlInfo.flash.m_flashFrameCount = 0;
1055         m_ctlInfo.flash.m_flashCnt = 0;
1056         m_ctlInfo.flash.m_flashTimeOut = 0;
1057         m_ctlInfo.flash.m_flashDecisionResult = false;
1058         m_ctlInfo.flash.m_flashTorchMode = false;
1059         m_ctlInfo.flash.m_precaptureState = 0;
1060         m_ctlInfo.flash.m_precaptureTriggerId = 0;
1061         // ae
1062         m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
1063         // af
1064         m_ctlInfo.af.m_afTriggerTimeOut = 0;
1065         // scene
1066         m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
1067     }
1068     ALOGD("(%s): EXIT", __FUNCTION__);
1069 }
1070 
1071 ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
1072 {
1073     ALOGD("(%s): ENTER", __FUNCTION__);
1074     this->release();
1075     ALOGD("(%s): EXIT", __FUNCTION__);
1076 }
1077 
1078 void ExynosCameraHWInterface2::release()
1079 {
1080     int i, res;
1081     ALOGD("(HAL2::release): ENTER");
1082 
1083     if (m_streamThreads[1] != NULL) {
1084         m_streamThreads[1]->release();
1085         m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
1086     }
1087 
1088     if (m_streamThreads[0] != NULL) {
1089         m_streamThreads[0]->release();
1090         m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
1091     }
1092 
1093     if (m_sensorThread != NULL) {
1094         m_sensorThread->release();
1095     }
1096 
1097     if (m_mainThread != NULL) {
1098         m_mainThread->release();
1099     }
1100 
1101     if (m_exynosPictureCSC)
1102         csc_deinit(m_exynosPictureCSC);
1103     m_exynosPictureCSC = NULL;
1104 
1105     if (m_exynosVideoCSC)
1106         csc_deinit(m_exynosVideoCSC);
1107     m_exynosVideoCSC = NULL;
1108 
1109     if (m_streamThreads[1] != NULL) {
1110         ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
1111         while (!m_streamThreads[1]->IsTerminated())
1112             usleep(SIG_WAITING_TICK);
1113         ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 1 termination");
1114         m_streamThreads[1] = NULL;
1115     }
1116 
1117     if (m_streamThreads[0] != NULL) {
1118         ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
1119         while (!m_streamThreads[0]->IsTerminated())
1120             usleep(SIG_WAITING_TICK);
1121         ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 0 termination");
1122         m_streamThreads[0] = NULL;
1123     }
1124 
1125     if (m_sensorThread != NULL) {
1126         ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
1127         while (!m_sensorThread->IsTerminated())
1128             usleep(SIG_WAITING_TICK);
1129         ALOGD("(HAL2::release): END   Waiting for (indirect) sensor thread termination");
1130         m_sensorThread = NULL;
1131     }
1132 
1133     if (m_mainThread != NULL) {
1134         ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
1135         while (!m_mainThread->IsTerminated())
1136             usleep(SIG_WAITING_TICK);
1137         ALOGD("(HAL2::release): END   Waiting for (indirect) main thread termination");
1138         m_mainThread = NULL;
1139     }
1140 
1141     if (m_requestManager != NULL) {
1142         delete m_requestManager;
1143         m_requestManager = NULL;
1144     }
1145 
1146     if (m_BayerManager != NULL) {
1147         delete m_BayerManager;
1148         m_BayerManager = NULL;
1149     }
1150     for (i = 0; i < NUM_BAYER_BUFFERS; i++)
1151         freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1152 
1153     if (m_sccLocalBufferValid) {
1154         for (i = 0; i < NUM_SCC_BUFFERS; i++)
1155 #ifdef ENABLE_FRAME_SYNC
1156             freeCameraMemory(&m_sccLocalBuffer[i], 2);
1157 #else
1158             freeCameraMemory(&m_sccLocalBuffer[i], 1);
1159 #endif
1160     }
1161     else {
1162         for (i = 0; i < NUM_SCC_BUFFERS; i++)
1163             freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1164     }
1165 
1166     ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
1167     res = exynos_v4l2_close(m_camera_info.sensor.fd);
1168     if (res != NO_ERROR ) {
1169         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1170     }
1171 
1172     ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
1173     res = exynos_v4l2_close(m_camera_info.isp.fd);
1174     if (res != NO_ERROR ) {
1175         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1176     }
1177 
1178     ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
1179     res = exynos_v4l2_close(m_camera_info.capture.fd);
1180     if (res != NO_ERROR ) {
1181         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1182     }
1183 
1184     ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
1185     res = exynos_v4l2_close(m_camera_info.scp.fd);
1186     if (res != NO_ERROR ) {
1187         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1188     }
1189     ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
1190     deleteIonClient(m_ionCameraClient);
1191 
1192     ALOGD("(HAL2::release): EXIT");
1193 }
1194 
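// Opens the sensor/ISP/capture(SCC)/preview(SCP) video nodes, allocates the Bayer buffers shared
// between sensor and ISP, primes the sensor queue, and starts sensor streaming.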
1195 int ExynosCameraHWInterface2::InitializeISPChain()
1196 {
1197     char node_name[30];
1198     int fd = 0;
1199     int i;
1200     int ret = 0;
1201 
1202     /* Open Sensor */
1203     memset(&node_name, 0x00, sizeof(char[30]));
1204     sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1205     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1206 
1207     if (fd < 0) {
1208         ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1209     }
1210     else {
1211         ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1212     }
1213     m_camera_info.sensor.fd = fd;
1214 
1215     /* Open ISP */
1216     memset(&node_name, 0x00, sizeof(char[30]));
1217     sprintf(node_name, "%s%d", NODE_PREFIX, 41);
1218     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1219 
1220     if (fd < 0) {
1221         ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1222     }
1223     else {
1224         ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1225     }
1226     m_camera_info.isp.fd = fd;
1227 
1228     /* Open ScalerC */
1229     memset(&node_name, 0x00, sizeof(char[30]));
1230     sprintf(node_name, "%s%d", NODE_PREFIX, 42);
1231     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1232 
1233     if (fd < 0) {
1234         ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1235     }
1236     else {
1237         ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1238     }
1239     m_camera_info.capture.fd = fd;
1240 
1241     /* Open ScalerP */
1242     memset(&node_name, 0x00, sizeof(char[30]));
1243     sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1244     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1245     if (fd < 0) {
1246         ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1247     }
1248     else {
1249         ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1250     }
1251     m_camera_info.scp.fd = fd;
1252 
1253     if(m_cameraId == 0)
1254         m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1255     else
1256         m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1257 
1258     memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1259     m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1260     m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1261 
1262     m_camera_info.dummy_shot.dis_bypass = 1;
1263     m_camera_info.dummy_shot.dnr_bypass = 1;
1264     m_camera_info.dummy_shot.fd_bypass = 1;
1265 
1266     /*sensor setting*/
1267     m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1268     m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1269     m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1270 
1271     m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1272     m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1273 
1274     /*request setting*/
1275     m_camera_info.dummy_shot.request_sensor = 1;
1276     m_camera_info.dummy_shot.request_scc = 0;
1277     m_camera_info.dummy_shot.request_scp = 0;
1278     m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1279 
1280     m_camera_info.sensor.width = m_camera2->getSensorRawW();
1281     m_camera_info.sensor.height = m_camera2->getSensorRawH();
1282 
1283     m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1284     m_camera_info.sensor.planes = 2;
1285     m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1286     m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1287     m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1288 
1289     for(i = 0; i < m_camera_info.sensor.buffers; i++){
1290         int res;
1291         initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1292         m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
1293         m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: driver uses 8*1024; should use a predefined value
1294         res = allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
1295         if (res) {
1296             ALOGE("ERROR(%s): failed to allocateCameraMemory for sensor buffer %d", __FUNCTION__, i);
1297             // Free allocated sensor buffers
1298             for (int j = 0; j < i; j++) {
1299                 freeCameraMemory(&m_camera_info.sensor.buffer[j], m_camera_info.sensor.planes);
1300             }
1301             return false;
1302         }
1303     }
1304 
1305     m_camera_info.isp.width = m_camera_info.sensor.width;
1306     m_camera_info.isp.height = m_camera_info.sensor.height;
1307     m_camera_info.isp.format = m_camera_info.sensor.format;
1308     m_camera_info.isp.planes = m_camera_info.sensor.planes;
1309     m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1310     m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1311     m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
1312 
1313     for(i = 0; i < m_camera_info.isp.buffers; i++){
1314         initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
1315         m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
1316         m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
1317         m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
1318         m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
1319         m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
1320         m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
1321     };
1322 
1323     /* init ISP */
1324     ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
1325     if (ret < 0) {
1326         ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
1327         return false;
1328     }
1329     cam_int_s_fmt(&(m_camera_info.isp));
1330     ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
1331     cam_int_reqbufs(&(m_camera_info.isp));
1332     ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
1333     ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
1334 
1335     /* init Sensor */
1336     cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
1337     ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
1338     if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
1339         ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
1340     }
1341     ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
1342     cam_int_reqbufs(&(m_camera_info.sensor));
1343     ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
1344     for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1345         ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1346         m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1347         m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1348         memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1349                 sizeof(struct camera2_shot_ext));
1350     }
1351 
1352     for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1353         cam_int_qbuf(&(m_camera_info.sensor), i);
1354 
1355     for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1356         m_requestManager->pushSensorQ(i);
1357 
1358     ALOGV("== stream_on :: sensor");
1359     cam_int_streamon(&(m_camera_info.sensor));
1360     m_camera_info.sensor.status = true;
1361 
1362     /* init Capture */
1363     m_camera_info.capture.width = m_camera2->getSensorW();
1364     m_camera_info.capture.height = m_camera2->getSensorH();
1365     m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
1366 #ifdef ENABLE_FRAME_SYNC
1367     m_camera_info.capture.planes = 2;
1368 #else
1369     m_camera_info.capture.planes = 1;
1370 #endif
1371     m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1372     m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1373     m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
1374 
1375     m_camera_info.capture.status = false;
1376 
1377     return true;
1378 }
1379 
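// Starts (or restarts) the SCC capture stream thread, allocating or reusing its local buffers,
// queueing them to the driver, and turning streaming on.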
1380 void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
1381 {
1382     ALOGV("(%s)", __FUNCTION__);
1383     StreamThread *AllocatedStream;
1384     stream_parameters_t newParameters;
1385     uint32_t format_actual;
1386 
1387 
1388     if (!threadExists) {
1389         m_streamThreads[1]  = new StreamThread(this, 1);
1390     }
1391     AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1392     if (!threadExists) {
1393         AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1394         m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1395         AllocatedStream->m_numRegisteredStream = 1;
1396     }
1397     AllocatedStream->m_index        = 1;
1398 
1399     format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1400 
1401     newParameters.width             = m_camera2->getSensorW();
1402     newParameters.height            = m_camera2->getSensorH();
1403     newParameters.format            = format_actual;
1404     newParameters.streamOps         = NULL;
1405     newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
1406 #ifdef ENABLE_FRAME_SYNC
1407     newParameters.planes            = 2;
1408 #else
1409     newParameters.planes            = 1;
1410 #endif
1411 
1412     newParameters.numSvcBufsInHal   = 0;
1413 
1414     newParameters.node              = &m_camera_info.capture;
1415 
1416     AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
1417     ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1418 
1419     if (!threadExists) {
1420         if (!m_sccLocalBufferValid) {
1421             for (int i = 0; i < m_camera_info.capture.buffers; i++){
1422                 initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
1423                 m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1424 #ifdef ENABLE_FRAME_SYNC
1425                 m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK: driver uses 4*1024; should use a predefined value
1426                 allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
1427 #else
1428                 allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1429 #endif
1430                 m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
1431             }
1432             m_sccLocalBufferValid = true;
1433         }
1434     } else {
1435         if (m_sccLocalBufferValid) {
1436              for (int i = 0; i < m_camera_info.capture.buffers; i++)
1437                 m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
1438         } else {
1439             ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
1440         }
1441     }
1442     cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
1443     m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1444     cam_int_s_fmt(newParameters.node);
1445     ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1446     cam_int_reqbufs(newParameters.node);
1447     ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1448 
1449     for (int i = 0; i < newParameters.node->buffers; i++) {
1450         ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1451         cam_int_qbuf(newParameters.node, i);
1452         newParameters.svcBufStatus[i] = ON_DRIVER;
1453     }
1454 
1455     ALOGV("== stream_on :: capture");
1456     if (cam_int_streamon(newParameters.node) < 0) {
1457         ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1458     } else {
1459         m_camera_info.capture.status = true;
1460     }
1461 
1462     AllocatedStream->setParameter(&newParameters);
1463     AllocatedStream->m_activated    = true;
1464     AllocatedStream->m_isBufferInit = true;
1465 }
1466 
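/* Turn on streaming for the ISP node and tell the sensor subdev to start the
 * IS pipeline via V4L2_CID_IS_S_STREAM. */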
1467 void ExynosCameraHWInterface2::StartISP()
1468 {
1469     ALOGV("== stream_on :: isp");
1470     cam_int_streamon(&(m_camera_info.isp));
1471     exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1472 }
1473 
1474 int ExynosCameraHWInterface2::getCameraId() const
1475 {
1476     return m_cameraId;
1477 }
1478 
1479 int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1480 {
1481     ALOGV("DEBUG(%s):", __FUNCTION__);
1482     if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1483             && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1484         m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1485         return 0;
1486     }
1487     else {
1488         ALOGE("ERR(%s): setRequestQueueSrcOps: NULL arguments", __FUNCTION__);
1489         return 1;
1490     }
1491 }
1492 
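/* Framework callback invoked when a new capture request is queued. Lazily
 * (re)initializes the ISP, sensor and capture nodes on first use, restarts
 * the SCC stream thread if it was suspended, and wakes the main thread with
 * SIGNAL_MAIN_REQ_Q_NOT_EMPTY. */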
1493 int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1494 {
1495     int i = 0;
1496 
1497     ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1498     if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1499         ALOGE("ERR(%s): queue ops NULL; ignoring request", __FUNCTION__);
1500         return 0;
1501     }
1502     m_isRequestQueueNull = false;
1503     if (m_requestManager->GetNumEntries() == 0)
1504         m_requestManager->SetInitialSkip(0);
1505 
1506     if (m_isIspStarted == false) {
1507         /* isp */
1508         m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1509         m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1510         cam_int_s_fmt(&(m_camera_info.isp));
1511         cam_int_reqbufs(&(m_camera_info.isp));
1512 
1513         /* sensor */
1514         if (m_camera_info.sensor.status == false) {
1515             cam_int_s_fmt(&(m_camera_info.sensor));
1516             cam_int_reqbufs(&(m_camera_info.sensor));
1517 
1518             for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1519                 ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1520                 m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1521                 m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1522                 memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1523                         sizeof(struct camera2_shot_ext));
1524             }
1525             for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1526                 cam_int_qbuf(&(m_camera_info.sensor), i);
1527 
1528             for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1529                 m_requestManager->pushSensorQ(i);
1530             ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
1531             cam_int_streamon(&(m_camera_info.sensor));
1532             m_camera_info.sensor.status = true;
1533         }
1534     }
1535     if (!(m_streamThreads[1].get())) {
1536         ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
1537         StartSCCThread(false);
1538     } else {
1539         if (m_streamThreads[1]->m_activated ==  false) {
1540             ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1541             StartSCCThread(true);
1542         } else {
1543             if (m_camera_info.capture.status == false) {
1544                 m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1545                 cam_int_s_fmt(&(m_camera_info.capture));
1546                 ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1547                 cam_int_reqbufs(&(m_camera_info.capture));
1548                 ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1549 
1550                 if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
1551                     StreamThread *          targetStream = m_streamThreads[1].get();
1552                     stream_parameters_t     *targetStreamParms = &(targetStream->m_parameters);
1553                     node_info_t             *currentNode = targetStreamParms->node;
1554 
1555                     struct v4l2_buffer v4l2_buf;
1556                     struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1557 
1558                     for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
1559                         v4l2_buf.m.planes   = planes;
1560                         v4l2_buf.type       = currentNode->type;
1561                         v4l2_buf.memory     = currentNode->memory;
1562 
1563                         v4l2_buf.length     = currentNode->planes;
1564                         v4l2_buf.index      = i;
1565                         ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];
1566 
1567                         if (i < currentNode->buffers) {
1568 #ifdef ENABLE_FRAME_SYNC
1569                             v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
1570                             v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
1571                             v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
1572                             v4l2_buf.length += targetStreamParms->metaPlanes;
1573                             v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
1574                             v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
1575 
1576                             ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
1577 #endif
1578                             if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1579                                 ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
1580                             }
1581                             ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
1582                             targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
1583                         }
1584                         else {
1585                             targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1586                         }
1587 
1588                     }
1589 
1590                 } else {
1591                     for (int i = 0; i < m_camera_info.capture.buffers; i++) {
1592                         ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1593                         cam_int_qbuf(&(m_camera_info.capture), i);
1594                     }
1595                 }
1596                 ALOGV("== stream_on :: capture");
1597                 if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1598                     ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1599                 } else {
1600                     m_camera_info.capture.status = true;
1601                 }
1602             }
1603             if (m_scpForceSuspended) {
1604                 m_scpForceSuspended = false;
1605             }
1606         }
1607     }
1608     if (m_isIspStarted == false) {
1609         StartISP();
1610         ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
1611         m_requestManager->SetInitialSkip(6);
1612         m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1613         m_isIspStarted = true;
1614     }
1615     m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1616     return 0;
1617 }
1618 
1619 int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1620 {
1621     ALOGV("DEBUG(%s):", __FUNCTION__);
1622     if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1623             && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1624         m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1625         return 0;
1626     }
1627     else {
1628         ALOGE("ERR(%s): setFrameQueueDstOps: NULL arguments", __FUNCTION__);
1629         return 1;
1630     }
1631 }
1632 
1633 int ExynosCameraHWInterface2::getInProgressCount()
1634 {
1635     int inProgressJpeg;
1636     int inProgressCount;
1637 
1638     {
1639         Mutex::Autolock lock(m_jpegEncoderLock);
1640         inProgressJpeg = m_jpegEncodingCount;
1641         inProgressCount = m_requestManager->GetNumEntries();
1642     }
1643     ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__,
1644         inProgressCount, inProgressJpeg, (inProgressCount + inProgressJpeg));
1645     return (inProgressCount + inProgressJpeg);
1646 }
1647 
1648 int ExynosCameraHWInterface2::flushCapturesInProgress()
1649 {
1650     return 0;
1651 }
1652 
1653 int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1654 {
1655     ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1656 
1657     if (request == NULL) return BAD_VALUE;
1658     if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1659         return BAD_VALUE;
1660     }
1661     status_t res;
1662     // Pass 1, calculate size and allocate
1663     res = m_camera2->constructDefaultRequest(request_template,
1664             request,
1665             true);
1666     if (res != OK) {
1667         return res;
1668     }
1669     // Pass 2, build request
1670     res = m_camera2->constructDefaultRequest(request_template,
1671             request,
1672             false);
1673     if (res != OK) {
1674         ALOGE("Unable to populate new request for template %d",
1675                 request_template);
1676     }
1677 
1678     return res;
1679 }
1680 
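/* Allocate an output stream for the given size/format. Preview (SCP) and ZSL
 * run on their own StreamThread, while record, preview-callback and JPEG are
 * attached as substreams of an existing thread. */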
1681 int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1682                                     uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1683 {
1684     ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
1685     bool useDirectOutput = false;
1686     StreamThread *AllocatedStream;
1687     stream_parameters_t newParameters;
1688     substream_parameters_t *subParameters;
1689     StreamThread *parentStream;
1690     status_t res;
1691     int allocCase = 0;
1692 
1693     if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)  &&
1694             m_camera2->isSupportedResolution(width, height)) {
1695         if (!(m_streamThreads[0].get())) {
1696             ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
1697             allocCase = 0;
1698         }
1699         else {
1700             if ((m_streamThreads[0].get())->m_activated == true) {
1701                 ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1702                 allocCase = 1;
1703             }
1704             else {
1705                 ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1706                 allocCase = 2;
1707             }
1708         }
1709 
1710         // TODO: instead of this fixed resolution list, calculate the aspect ratio and select based on it.
1711         if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
1712                     || (width == 720 && height == 480) || (width == 1440 && height == 960)
1713                     || (width == 1344 && height == 896)) {
1714             m_wideAspect = true;
1715         } else {
1716             m_wideAspect = false;
1717         }
1718         ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1719 
1720         if (allocCase == 0 || allocCase == 2) {
1721             *stream_id = STREAM_ID_PREVIEW;
1722 
1723             m_streamThreads[0]  = new StreamThread(this, *stream_id);
1724 
1725             AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1726             AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1727             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1728 
1729             *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1730             *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
1731             if (m_wideAspect)
1732                 *usage                         |= GRALLOC_USAGE_PRIVATE_CHROMA;
1733             *max_buffers                        = 7;
1734 
1735             newParameters.width                 = width;
1736             newParameters.height                = height;
1737             newParameters.format                = *format_actual;
1738             newParameters.streamOps             = stream_ops;
1739             newParameters.usage                 = *usage;
1740             newParameters.numHwBuffers          = NUM_SCP_BUFFERS;
1741             newParameters.numOwnSvcBuffers      = *max_buffers;
1742             newParameters.planes                = NUM_PLANES(*format_actual);
1743             newParameters.metaPlanes            = 1;
1744             newParameters.numSvcBufsInHal       = 0;
1745             newParameters.minUndequedBuffer     = 3;
1746             newParameters.needsIonMap           = true;
1747 
1748             newParameters.node                  = &m_camera_info.scp;
1749             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1750             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1751 
1752             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1753             AllocatedStream->m_index            = 0;
1754             AllocatedStream->setParameter(&newParameters);
1755             AllocatedStream->m_activated = true;
1756             AllocatedStream->m_numRegisteredStream = 1;
1757             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1758             m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1759             m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1760             if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
1761                 AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
1762             if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
1763                 AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
1764 
1765             // enable video stabilization (VDIS) only for resolutions larger than CIF (352x288)
1766             m_requestManager->m_vdisEnable = width > 352 && height > 288;
1767 
1768             return 0;
1769         } else if (allocCase == 1) {
1770             *stream_id = STREAM_ID_RECORD;
1771 
1772             subParameters = &m_subStreams[STREAM_ID_RECORD];
1773             memset(subParameters, 0, sizeof(substream_parameters_t));
1774 
1775             parentStream = (StreamThread*)(m_streamThreads[0].get());
1776             if (!parentStream) {
1777                 return 1;
1778             }
1779 
1780             *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1781             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1782             if (m_wideAspect)
1783                 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1784             *max_buffers = 7;
1785 
1786             subParameters->type         = SUBSTREAM_TYPE_RECORD;
1787             subParameters->width        = width;
1788             subParameters->height       = height;
1789             subParameters->format       = *format_actual;
1790             subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1791             subParameters->streamOps     = stream_ops;
1792             subParameters->usage         = *usage;
1793             subParameters->numOwnSvcBuffers = *max_buffers;
1794             subParameters->numSvcBufsInHal  = 0;
1795             subParameters->needBufferInit    = false;
1796             subParameters->minUndequedBuffer = 2;
1797 
1798             res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
1799             if (res != NO_ERROR) {
1800                 ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1801                 return 1;
1802             }
1803             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1804             ALOGV("(%s): Enabling Record", __FUNCTION__);
1805             return 0;
1806         }
1807     }
1808     else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
1809             && ((int32_t)width == m_camera2->getSensorW()) && ((int32_t)height == m_camera2->getSensorH())) {
1810 
1811         if (!(m_streamThreads[1].get())) {
1812             ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
1813             useDirectOutput = true;
1814         }
1815         else {
1816             ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
1817             useDirectOutput = false;
1818         }
1819         if (useDirectOutput) {
1820             *stream_id = STREAM_ID_ZSL;
1821 
1822             m_streamThreads[1]  = new StreamThread(this, *stream_id);
1823             AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1824             AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1825             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1826 
1829             *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1830             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1831             if (m_wideAspect)
1832                 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1833             *max_buffers = 7;
1834 
1835             newParameters.width                 = width;
1836             newParameters.height                = height;
1837             newParameters.format                = *format_actual;
1838             newParameters.streamOps             = stream_ops;
1839             newParameters.usage                 = *usage;
1840             newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1841             newParameters.numOwnSvcBuffers      = *max_buffers;
1842             newParameters.planes                = NUM_PLANES(*format_actual);
1843             newParameters.metaPlanes            = 1;
1844 
1845             newParameters.numSvcBufsInHal       = 0;
1846             newParameters.minUndequedBuffer     = 2;
1847             newParameters.needsIonMap           = false;
1848 
1849             newParameters.node                  = &m_camera_info.capture;
1850             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1851             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1852 
1853             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1854             AllocatedStream->m_index            = 1;
1855             AllocatedStream->setParameter(&newParameters);
1856             AllocatedStream->m_activated = true;
1857             AllocatedStream->m_numRegisteredStream = 1;
1858             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1859             return 0;
1860         } else {
1861             bool bJpegExists = false;
1862             AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1863             subParameters = &m_subStreams[STREAM_ID_JPEG];
1864             if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
1865                 ALOGD("(%s): jpeg stream exists", __FUNCTION__);
1866                 bJpegExists = true;
1867                 AllocatedStream->detachSubStream(STREAM_ID_JPEG);
1868             }
1869             AllocatedStream->m_releasing = true;
1870             ALOGD("START stream thread 1 release %d", __LINE__);
1871             do {
1872                 AllocatedStream->release();
1873                 usleep(SIG_WAITING_TICK);
1874             } while (AllocatedStream->m_releasing);
1875             ALOGD("END   stream thread 1 release %d", __LINE__);
1876 
1877             *stream_id = STREAM_ID_ZSL;
1878 
1879             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1880 
1883             *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1884             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1885             if (m_wideAspect)
1886                 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1887             *max_buffers = 7;
1888 
1889             newParameters.width                 = width;
1890             newParameters.height                = height;
1891             newParameters.format                = *format_actual;
1892             newParameters.streamOps             = stream_ops;
1893             newParameters.usage                 = *usage;
1894             newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1895             newParameters.numOwnSvcBuffers      = *max_buffers;
1896             newParameters.planes                = NUM_PLANES(*format_actual);
1897             newParameters.metaPlanes            = 1;
1898 
1899             newParameters.numSvcBufsInHal       = 0;
1900             newParameters.minUndequedBuffer     = 2;
1901             newParameters.needsIonMap           = false;
1902 
1903             newParameters.node                  = &m_camera_info.capture;
1904             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1905             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1906 
1907             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1908             AllocatedStream->m_index            = 1;
1909             AllocatedStream->setParameter(&newParameters);
1910             AllocatedStream->m_activated = true;
1911             AllocatedStream->m_numRegisteredStream = 1;
1912             if (bJpegExists) {
1913                 AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
1914             }
1915             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1916             return 0;
1917 
1918         }
1919     }
1920     else if (format == HAL_PIXEL_FORMAT_BLOB
1921             && m_camera2->isSupportedJpegResolution(width, height)) {
1922         *stream_id = STREAM_ID_JPEG;
1923 
1924         subParameters = &m_subStreams[*stream_id];
1925         memset(subParameters, 0, sizeof(substream_parameters_t));
1926 
1927         if (!(m_streamThreads[1].get())) {
1928             ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
1929             StartSCCThread(false);
1930         }
1931         else if (m_streamThreads[1]->m_activated ==  false) {
1932             ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1933             StartSCCThread(true);
1934         }
1935         parentStream = (StreamThread*)(m_streamThreads[1].get());
1936 
1937         *format_actual = HAL_PIXEL_FORMAT_BLOB;
1938         *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1939         if (m_wideAspect)
1940             *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1941         *max_buffers = 5;
1942 
1943         subParameters->type          = SUBSTREAM_TYPE_JPEG;
1944         subParameters->width         = width;
1945         subParameters->height        = height;
1946         subParameters->format        = *format_actual;
1947         subParameters->svcPlanes     = 1;
1948         subParameters->streamOps     = stream_ops;
1949         subParameters->usage         = *usage;
1950         subParameters->numOwnSvcBuffers = *max_buffers;
1951         subParameters->numSvcBufsInHal  = 0;
1952         subParameters->needBufferInit    = false;
1953         subParameters->minUndequedBuffer = 2;
1954 
1955         res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
1956         if (res != NO_ERROR) {
1957             ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1958             return 1;
1959         }
1960         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1961         ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
1962         return 0;
1963     }
1964     else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
1965         *stream_id = STREAM_ID_PRVCB;
1966 
1967         subParameters = &m_subStreams[STREAM_ID_PRVCB];
1968         memset(subParameters, 0, sizeof(substream_parameters_t));
1969 
1970         parentStream = (StreamThread*)(m_streamThreads[0].get());
1971         if (!parentStream) {
1972             return 1;
1973         }
1974 
1975         *format_actual = format;
1976         *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1977         if (m_wideAspect)
1978             *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1979         *max_buffers = 7;
1980 
1981         subParameters->type         = SUBSTREAM_TYPE_PRVCB;
1982         subParameters->width        = width;
1983         subParameters->height       = height;
1984         subParameters->format       = *format_actual;
1985         subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1986         subParameters->streamOps     = stream_ops;
1987         subParameters->usage         = *usage;
1988         subParameters->numOwnSvcBuffers = *max_buffers;
1989         subParameters->numSvcBufsInHal  = 0;
1990         subParameters->needBufferInit    = false;
1991         subParameters->minUndequedBuffer = 2;
1992 
1993         if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
1994             subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
1995             subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
1996         }
1997         else {
1998             subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1999             subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
2000         }
2001 
2002         res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
2003         if (res != NO_ERROR) {
2004             ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
2005             return 1;
2006         }
2007         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
2008         ALOGV("(%s): Enabling previewcb", __FUNCTION__);
2009         return 0;
2010     }
2011     ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
2012     return 1;
2013 }
2014 
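/* Register the gralloc buffers provided by the camera service. Substream
 * buffers are only locked and remembered; direct-stream buffers are also
 * mapped (via ION when requested) and queued to the V4L2 node before the
 * node is streamed on. */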
2015 int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
2016         int num_buffers, buffer_handle_t *registeringBuffers)
2017 {
2018     int                     i,j;
2019     void                    *virtAddr[3];
2020     int                     plane_index = 0;
2021     StreamThread *          targetStream;
2022     stream_parameters_t     *targetStreamParms;
2023     node_info_t             *currentNode;
2024 
2025     struct v4l2_buffer v4l2_buf;
2026     struct v4l2_plane  planes[VIDEO_MAX_PLANES];
2027 
2028     ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__,
2029         stream_id, num_buffers, (uint32_t)registeringBuffers);
2030 
2031     if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
2032         targetStream = m_streamThreads[0].get();
2033         targetStreamParms = &(m_streamThreads[0]->m_parameters);
2034 
2035     }
2036     else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
2037         substream_parameters_t  *targetParms;
2038         targetParms = &m_subStreams[stream_id];
2039 
2040         targetParms->numSvcBuffers = num_buffers;
2041 
2042         for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
2043             ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
2044                 stream_id, i, (uint32_t)(registeringBuffers[i]));
2045             if (m_grallocHal) {
2046                 if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
2047                        targetParms->usage, 0, 0,
2048                        targetParms->width, targetParms->height, virtAddr) != 0) {
2049                     ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2050                 }
2051                 else {
2052                     ExynosBuffer currentBuf;
2053                     const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2054                     if (targetParms->svcPlanes == 1) {
2055                         currentBuf.fd.extFd[0] = priv_handle->fd;
2056                         currentBuf.size.extS[0] = priv_handle->size;
2057                         currentBuf.size.extS[1] = 0;
2058                         currentBuf.size.extS[2] = 0;
2059                     } else if (targetParms->svcPlanes == 2) {
2060                         currentBuf.fd.extFd[0] = priv_handle->fd;
2061                         currentBuf.fd.extFd[1] = priv_handle->fd1;
2062 
2063                     } else if (targetParms->svcPlanes == 3) {
2064                         currentBuf.fd.extFd[0] = priv_handle->fd;
2065                         currentBuf.fd.extFd[1] = priv_handle->fd1;
2066                         currentBuf.fd.extFd[2] = priv_handle->fd2;
2067                     }
2068                     for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
2069                         currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
2070                         CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
2071                              __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
2072                              (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
2073                     }
2074                     targetParms->svcBufStatus[i]  = ON_SERVICE;
2075                     targetParms->svcBuffers[i]    = currentBuf;
2076                     targetParms->svcBufHandle[i]  = registeringBuffers[i];
2077                 }
2078             }
2079         }
2080         targetParms->needBufferInit = true;
2081         return 0;
2082     }
2083     else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
2084         targetStream = m_streamThreads[1].get();
2085         targetStreamParms = &(m_streamThreads[1]->m_parameters);
2086     }
2087     else {
2088         ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
2089         return 1;
2090     }
2091 
2092     if (targetStream->streamType == STREAM_TYPE_DIRECT) {
2093         if (num_buffers < targetStreamParms->numHwBuffers) {
2094             ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
2095                 __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
2096             return 1;
2097         }
2098     }
2099     CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) planes(%d)",
2100             __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
2101             targetStreamParms->height, targetStreamParms->planes);
2102     targetStreamParms->numSvcBuffers = num_buffers;
2103     currentNode = targetStreamParms->node;
2104     currentNode->width      = targetStreamParms->width;
2105     currentNode->height     = targetStreamParms->height;
2106     currentNode->format     = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
2107     currentNode->planes     = targetStreamParms->planes;
2108     currentNode->buffers    = targetStreamParms->numHwBuffers;
2109     cam_int_s_input(currentNode, m_camera_info.sensor_id);
2110     cam_int_s_fmt(currentNode);
2111     cam_int_reqbufs(currentNode);
2112     for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
2113         ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
2114             i, (uint32_t)(registeringBuffers[i]));
2115                 v4l2_buf.m.planes   = planes;
2116                 v4l2_buf.type       = currentNode->type;
2117                 v4l2_buf.memory     = currentNode->memory;
2118                 v4l2_buf.index      = i;
2119                 v4l2_buf.length     = currentNode->planes;
2120 
2121                 ExynosBuffer currentBuf;
2122                 ExynosBuffer metaBuf;
2123                 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2124 
2125                 m_getAlignedYUVSize(currentNode->format,
2126                     currentNode->width, currentNode->height, &currentBuf);
2127 
2128                 ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
2129                 if (currentNode->planes == 1) {
2130                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2131                     currentBuf.fd.extFd[0] = priv_handle->fd;
2132                     currentBuf.size.extS[0] = priv_handle->size;
2133                     currentBuf.size.extS[1] = 0;
2134                     currentBuf.size.extS[2] = 0;
2135                 } else if (currentNode->planes == 2) {
2136                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2137                     v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
2138                     currentBuf.fd.extFd[0] = priv_handle->fd;
2139                     currentBuf.fd.extFd[1] = priv_handle->fd1;
2140 
2141                 } else if (currentNode->planes == 3) {
2142                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2143                     v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
2144                     v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
2145                     currentBuf.fd.extFd[0] = priv_handle->fd;
2146                     currentBuf.fd.extFd[2] = priv_handle->fd1;
2147                     currentBuf.fd.extFd[1] = priv_handle->fd2;
2148                 }
2149 
2150                 for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
2151                     if (targetStreamParms->needsIonMap)
2152                         currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
2153                     v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
2154                     ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)",
2155                          __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
2156                          (unsigned int)currentBuf.virt.extP[plane_index],
2157                          v4l2_buf.m.planes[plane_index].length);
2158                 }
2159 
2160                 if (i < currentNode->buffers) {
2161 
2162 
2163 #ifdef ENABLE_FRAME_SYNC
2164                     /* add plane for metadata*/
2165                     metaBuf.size.extS[0] = 4*1024;
2166                     allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0);
2167 
2168                     v4l2_buf.length += targetStreamParms->metaPlanes;
2169                     v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
2170                     v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
2171 
2172                     ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
2173 #endif
2174                     if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
2175                         ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
2176                             __FUNCTION__, stream_id, currentNode->fd);
2177                     }
2178                     ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
2179                             __FUNCTION__, stream_id, currentNode->fd);
2180                     targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
2181                 }
2182                 else {
2183                     targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
2184                 }
2185 
2186                 targetStreamParms->svcBuffers[i]       = currentBuf;
2187                 targetStreamParms->metaBuffers[i] = metaBuf;
2188                 targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
2189             }
2190 
2191     ALOGV("DEBUG(%s): calling  streamon stream id = %d", __FUNCTION__, stream_id);
2192     cam_int_streamon(targetStreamParms->node);
2193     ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
2194     currentNode->status = true;
2195     ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
2196 
2197     return 0;
2198 }
2199 
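/* Release the stream identified by stream_id: detach substreams, unmap ION
 * mappings where needed, and stop the sensor and stream threads once the
 * last registered stream is gone. */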
2200 int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
2201 {
2202     StreamThread *targetStream;
2203     status_t res = NO_ERROR;
2204     ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2205     bool releasingScpMain = false;
2206 
2207     if (stream_id == STREAM_ID_PREVIEW) {
2208         targetStream = (StreamThread*)(m_streamThreads[0].get());
2209         if (!targetStream) {
2210             ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2211             return NO_ERROR;
2212         }
2213         targetStream->m_numRegisteredStream--;
2214         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2215         releasingScpMain = true;
2216         if (targetStream->m_parameters.needsIonMap) {
2217             for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2218                 for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2219                     ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2220                                     targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2221                     ALOGV("(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2222                                   targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
2223                 }
2224             }
2225         }
2226     } else if (stream_id == STREAM_ID_JPEG) {
2227         if (m_resizeBuf.size.s != 0) {
2228             freeCameraMemory(&m_resizeBuf, 1);
2229         }
2230         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2231 
2232         targetStream = (StreamThread*)(m_streamThreads[1].get());
2233         if (!targetStream) {
2234             ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2235             return NO_ERROR;
2236         }
2237 
2238         if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
2239             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2240             return 1;
2241         }
2242         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2243         return 0;
2244     } else if (stream_id == STREAM_ID_RECORD) {
2245         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2246 
2247         targetStream = (StreamThread*)(m_streamThreads[0].get());
2248         if (!targetStream) {
2249             ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2250             return NO_ERROR;
2251         }
2252 
2253         if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
2254             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2255             return 1;
2256         }
2257 
2258         if (targetStream->m_numRegisteredStream != 0)
2259             return 0;
2260     } else if (stream_id == STREAM_ID_PRVCB) {
2261         if (m_previewCbBuf.size.s != 0) {
2262             freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
2263         }
2264         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2265 
2266         targetStream = (StreamThread*)(m_streamThreads[0].get());
2267         if (!targetStream) {
2268             ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2269             return NO_ERROR;
2270         }
2271 
2272         if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
2273             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2274             return 1;
2275         }
2276 
2277         if (targetStream->m_numRegisteredStream != 0)
2278             return 0;
2279     } else if (stream_id == STREAM_ID_ZSL) {
2280         targetStream = (StreamThread*)(m_streamThreads[1].get());
2281         if (!targetStream) {
2282             ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2283             return NO_ERROR;
2284         }
2285 
2286         targetStream->m_numRegisteredStream--;
2287         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2288         if (targetStream->m_parameters.needsIonMap) {
2289             for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2290                 for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2291                     ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2292                                     targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2293                     ALOGV("(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2294                                   targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
2295                 }
2296             }
2297         }
2298     } else {
2299         ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
2300         return 1;
2301     }
2302 
2303     if (m_sensorThread != NULL && releasingScpMain) {
2304         m_sensorThread->release();
2305         ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
2306         while (!m_sensorThread->IsTerminated())
2307             usleep(SIG_WAITING_TICK);
2308         ALOGD("(%s): END   Waiting for (indirect) sensor thread termination", __FUNCTION__);
2309     }
2310 
2311     if (m_streamThreads[1].get() != NULL && m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
2312         ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
2313         targetStream = (StreamThread*)(m_streamThreads[1].get());
2314         targetStream->m_releasing = true;
2315         ALOGD("START stream thread release %d", __LINE__);
2316         do {
2317             targetStream->release();
2318             usleep(SIG_WAITING_TICK);
2319         } while (targetStream->m_releasing);
2320         m_camera_info.capture.status = false;
2321         ALOGD("END   stream thread release %d", __LINE__);
2322     }
2323 
2324     if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
2325         ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
2326         targetStream = (StreamThread*)(m_streamThreads[0].get());
2327         targetStream->m_releasing = true;
2328         ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2329         do {
2330             targetStream->release();
2331             usleep(SIG_WAITING_TICK);
2332         } while (targetStream->m_releasing);
2333         ALOGD("(%s): END   Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2334         targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
2335 
2336         if (targetStream != NULL) {
2337             ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
2338             while (!targetStream->IsTerminated())
2339                 usleep(SIG_WAITING_TICK);
2340             ALOGD("(%s): END   Waiting for (indirect) stream thread termination", __FUNCTION__);
2341             m_streamThreads[0] = NULL;
2342         }
2343         if (m_camera_info.capture.status == true) {
2344             m_scpForceSuspended = true;
2345         }
2346         m_isIspStarted = false;
2347     }
2348     ALOGV("(%s): END", __FUNCTION__);
2349     return 0;
2350 }
2351 
2352 int ExynosCameraHWInterface2::allocateReprocessStream(
2353     uint32_t /*width*/, uint32_t /*height*/, uint32_t /*format*/,
2354     const camera2_stream_in_ops_t* /*reprocess_stream_ops*/,
2355     uint32_t* /*stream_id*/, uint32_t* /*consumer_usage*/, uint32_t* /*max_buffers*/)
2356 {
2357     ALOGV("DEBUG(%s):", __FUNCTION__);
2358     return 0;
2359 }
2360 
2361 int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
2362             uint32_t output_stream_id,
2363             const camera2_stream_in_ops_t *reprocess_stream_ops,
2364             // outputs
2365             uint32_t *stream_id)
2366 {
2367     ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
2368     *stream_id = STREAM_ID_JPEG_REPROCESS;
2369 
2370     m_reprocessStreamId = *stream_id;
2371     m_reprocessOps = reprocess_stream_ops;
2372     m_reprocessOutputStreamId = output_stream_id;
2373     return 0;
2374 }
2375 
2376 int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
2377 {
2378     ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2379     if (stream_id == STREAM_ID_JPEG_REPROCESS) {
2380         m_reprocessStreamId = 0;
2381         m_reprocessOps = NULL;
2382         m_reprocessOutputStreamId = 0;
2383         return 0;
2384     }
2385     return 1;
2386 }
2387 
2388 int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
2389 {
2390     Mutex::Autolock lock(m_afModeTriggerLock);
2391     ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
2392 
2393     switch (trigger_id) {
2394     case CAMERA2_TRIGGER_AUTOFOCUS:
2395         ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2396         OnAfTrigger(ext1);
2397         break;
2398 
2399     case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
2400         ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2401         OnAfCancel(ext1);
2402         break;
2403     case CAMERA2_TRIGGER_PRECAPTURE_METERING:
2404         ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
2405         OnPrecaptureMeteringTriggerStart(ext1);
2406         break;
2407     default:
2408         break;
2409     }
2410     return 0;
2411 }
2412 
2413 int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
2414 {
2415     ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
2416     m_notifyCb = notify_cb;
2417     m_callbackCookie = user;
2418     return 0;
2419 }
2420 
2421 int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
2422 {
2423     ALOGV("DEBUG(%s):", __FUNCTION__);
2424     *ops = NULL;
2425     return 0;
2426 }
2427 
2428 int ExynosCameraHWInterface2::dump(int /*fd*/)
2429 {
2430     ALOGV("DEBUG(%s):", __FUNCTION__);
2431     return 0;
2432 }
2433 
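/* Fill buf->size.extS[] with the per-plane sizes required by the given V4L2
 * color format, applying the alignment each format expects. */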
2434 void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
2435 {
2436     switch (colorFormat) {
2437     // 1p
2438     case V4L2_PIX_FMT_RGB565 :
2439     case V4L2_PIX_FMT_YUYV :
2440     case V4L2_PIX_FMT_UYVY :
2441     case V4L2_PIX_FMT_VYUY :
2442     case V4L2_PIX_FMT_YVYU :
2443         buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
2444         buf->size.extS[1] = 0;
2445         buf->size.extS[2] = 0;
2446         break;
2447     // 2p
2448     case V4L2_PIX_FMT_NV12 :
2449     case V4L2_PIX_FMT_NV12T :
2450     case V4L2_PIX_FMT_NV21 :
2451         buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
2452         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
2453         buf->size.extS[2] = 0;
2454         break;
2455     case V4L2_PIX_FMT_NV12M :
2456     case V4L2_PIX_FMT_NV12MT_16X16 :
2457     case V4L2_PIX_FMT_NV21M:
2458         buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
2459         buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
2460         buf->size.extS[2] = 0;
2461         break;
2462     case V4L2_PIX_FMT_NV16 :
2463     case V4L2_PIX_FMT_NV61 :
2464         buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2465         buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
2466         buf->size.extS[2] = 0;
2467         break;
2468      // 3p
2469     case V4L2_PIX_FMT_YUV420 :
2470     case V4L2_PIX_FMT_YVU420 :
2471         buf->size.extS[0] = (w * h);
2472         buf->size.extS[1] = (w * h) >> 2;
2473         buf->size.extS[2] = (w * h) >> 2;
2474         break;
2475     case V4L2_PIX_FMT_YUV420M:
2476     case V4L2_PIX_FMT_YVU420M :
2477         buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
2478         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2479         buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2480         break;
2481     case V4L2_PIX_FMT_YUV422P :
2482         buf->size.extS[0] = ALIGN(w,  16) * ALIGN(h,  16);
2483         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2484         buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2485         break;
2486     default:
2487         ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
2488         return;
2490     }
2491 }
2492 
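/* Compute a centered crop of the source that matches the destination aspect
 * ratio, shrink it further for the requested zoom level, and keep the crop
 * width/height and offsets even. */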
2493 bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
2494                                              int  dst_w,  int   dst_h,
2495                                              int *crop_x, int *crop_y,
2496                                              int *crop_w, int *crop_h,
2497                                              int zoom)
2498 {
2499     *crop_w = src_w;
2500     *crop_h = src_h;
2501 
2502     if (   src_w != dst_w
2503         || src_h != dst_h) {
2504         float src_ratio = 1.0f;
2505         float dst_ratio = 1.0f;
2506 
2507         // ex : 1024 / 768
2508         src_ratio = (float)src_w / (float)src_h;
2509 
2510         // ex : 352  / 288
2511         dst_ratio = (float)dst_w / (float)dst_h;
2512 
2513         if (dst_w * dst_h < src_w * src_h) {
2514             if (dst_ratio <= src_ratio) {
2515                 // shrink w
2516                 *crop_w = src_h * dst_ratio;
2517                 *crop_h = src_h;
2518             } else {
2519                 // shrink h
2520                 *crop_w = src_w;
2521                 *crop_h = src_w / dst_ratio;
2522             }
2523         } else {
2524             if (dst_ratio <= src_ratio) {
2525                 // shrink w
2526                 *crop_w = src_h * dst_ratio;
2527                 *crop_h = src_h;
2528             } else {
2529                 // shrink h
2530                 *crop_w = src_w;
2531                 *crop_h = src_w / dst_ratio;
2532             }
2533         }
2534     }
2535 
2536     if (zoom != 0) {
2537         float zoomLevel = ((float)zoom + 10.0) / 10.0;
2538         *crop_w = (int)((float)*crop_w / zoomLevel);
2539         *crop_h = (int)((float)*crop_h / zoomLevel);
2540     }
2541 
2542     #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
2543     unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
2544     if (w_align != 0) {
2545         if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
2546             && (int)(*crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align)) <= dst_w) {
2547             *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
2548         }
2549         else
2550             *crop_w -= w_align;
2551     }
2552 
2553     #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
2554     unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
2555     if (h_align != 0) {
2556         if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
2557             && (int)(*crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align)) <= dst_h) {
2558             *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
2559         }
2560         else
2561             *crop_h -= h_align;
2562     }
2563 
2564     *crop_x = (src_w - *crop_w) >> 1;
2565     *crop_y = (src_h - *crop_h) >> 1;
2566 
2567     if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
2568         *crop_x -= 1;
2569 
2570     if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
2571         *crop_y -= 1;
2572 
2573     return true;
2574 }
2575 
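/* BayerBufManager tracks each bayer buffer as it moves between the HAL
 * (empty or filled), the sensor and the ISP; the per-state counters below
 * always sum to NUM_BAYER_BUFFERS. */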
2576 BayerBufManager::BayerBufManager()
2577 {
2578     ALOGV("DEBUG(%s): ", __FUNCTION__);
2579     for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
2580         entries[i].status = BAYER_ON_HAL_EMPTY;
2581         entries[i].reqFrameCnt = 0;
2582     }
2583     sensorEnqueueHead = 0;
2584     sensorDequeueHead = 0;
2585     ispEnqueueHead = 0;
2586     ispDequeueHead = 0;
2587     numOnSensor = 0;
2588     numOnIsp = 0;
2589     numOnHalFilled = 0;
2590     numOnHalEmpty = NUM_BAYER_BUFFERS;
2591 }
2592 
2593 BayerBufManager::~BayerBufManager()
2594 {
2595     ALOGV("%s", __FUNCTION__);
2596 }
2597 
2598 int     BayerBufManager::GetIndexForSensorEnqueue()
2599 {
2600     int ret = 0;
2601     if (numOnHalEmpty == 0)
2602         ret = -1;
2603     else
2604         ret = sensorEnqueueHead;
2605     ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
2606     return ret;
2607 }
2608 
2609 int    BayerBufManager::MarkSensorEnqueue(int index)
2610 {
2611     ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2612 
2613     // sanity check
2614     if (index != sensorEnqueueHead) {
2615         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
2616         return -1;
2617     }
2618     if (entries[index].status != BAYER_ON_HAL_EMPTY) {
2619         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2620             index, entries[index].status, BAYER_ON_HAL_EMPTY);
2621         return -1;
2622     }
2623 
2624     entries[index].status = BAYER_ON_SENSOR;
2625     entries[index].reqFrameCnt = 0;
2626     numOnHalEmpty--;
2627     numOnSensor++;
2628     sensorEnqueueHead = GetNextIndex(index);
2629     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2630         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2631     return 0;
2632 }
2633 
2634 int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t* /*timeStamp*/)
2635 {
2636     ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2637 
2638     if (entries[index].status != BAYER_ON_SENSOR) {
2639         ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2640             index, entries[index].status, BAYER_ON_SENSOR);
2641         return -1;
2642     }
2643 
2644     entries[index].status = BAYER_ON_HAL_FILLED;
2645     numOnHalFilled++;
2646     numOnSensor--;
2647 
2648     return 0;
2649 }
2650 
2651 int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2652 {
2653     int ret = 0;
2654     if (numOnHalFilled == 0)
2655         ret = -1;
2656     else {
2657         *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2658         ret = ispEnqueueHead;
2659     }
2660     ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2661     return ret;
2662 }
2663 
2664 int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2665 {
2666     int ret = 0;
2667     if (numOnIsp == 0)
2668         ret = -1;
2669     else {
2670         *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2671         ret = ispDequeueHead;
2672     }
2673     ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2674     return ret;
2675 }
2676 
2677 int    BayerBufManager::MarkIspEnqueue(int index)
2678 {
2679     ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2680 
2681     // sanity check
2682     if (index != ispEnqueueHead) {
2683         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2684         return -1;
2685     }
2686     if (entries[index].status != BAYER_ON_HAL_FILLED) {
2687         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2688             index, entries[index].status, BAYER_ON_HAL_FILLED);
2689         return -1;
2690     }
2691 
2692     entries[index].status = BAYER_ON_ISP;
2693     numOnHalFilled--;
2694     numOnIsp++;
2695     ispEnqueueHead = GetNextIndex(index);
2696     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2697         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2698     return 0;
2699 }
2700 
2701 int    BayerBufManager::MarkIspDequeue(int index)
2702 {
2703     ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2704 
2705     // sanity check
2706     if (index != ispDequeueHead) {
2707         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2708         return -1;
2709     }
2710     if (entries[index].status != BAYER_ON_ISP) {
2711         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2712             index, entries[index].status, BAYER_ON_ISP);
2713         return -1;
2714     }
2715 
2716     entries[index].status = BAYER_ON_HAL_EMPTY;
2717     entries[index].reqFrameCnt = 0;
2718     numOnHalEmpty++;
2719     numOnIsp--;
2720     ispDequeueHead = GetNextIndex(index);
2721     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2722         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2723     return 0;
2724 }
2725 
2726 int BayerBufManager::GetNumOnSensor()
2727 {
2728     return numOnSensor;
2729 }
2730 
2731 int BayerBufManager::GetNumOnHalFilled()
2732 {
2733     return numOnHalFilled;
2734 }
2735 
2736 int BayerBufManager::GetNumOnIsp()
2737 {
2738     return numOnIsp;
2739 }
2740 
2741 int     BayerBufManager::GetNextIndex(int index)
2742 {
2743     index++;
2744     if (index >= NUM_BAYER_BUFFERS)
2745         index = 0;
2746 
2747     return index;
2748 }
2749 
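/*
 * Main thread handler. Two signals are serviced here:
 *  - SIGNAL_MAIN_REQ_Q_NOT_EMPTY: dequeue the next request from the service
 *    request queue, register it (including AF mode/region) and wake the
 *    sensor thread to start processing it.
 *  - SIGNAL_MAIN_STREAM_OUTPUT_DONE: assemble the result metadata for a
 *    completed request, return the request to the service and enqueue the
 *    frame on the frame queue.
 */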
2750 void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2751 {
2752     camera_metadata_t *currentRequest = NULL;
2753     camera_metadata_t *currentFrame = NULL;
2754     size_t numEntries = 0;
2755     size_t frameSize = 0;
2756     camera_metadata_t * preparedFrame = NULL;
2757     camera_metadata_t *deregisteredRequest = NULL;
2758     uint32_t currentSignal = self->GetProcessingSignal();
2759     MainThread *  selfThread      = ((MainThread*)self);
2760     int res = 0;
2761 
2762     int ret;
2763     int afMode;
2764     uint32_t afRegion[4];
2765 
2766     ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2767 
2768     if (currentSignal & SIGNAL_THREAD_RELEASE) {
2769         ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2770 
2771         ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2772         selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2773         return;
2774     }
2775 
2776     if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2777         ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2778         if (m_requestManager->IsRequestQueueFull()==false) {
2779             Mutex::Autolock lock(m_afModeTriggerLock);
2780             m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2781             if (NULL == currentRequest) {
2782                 ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
2783                 m_isRequestQueueNull = true;
2784                 if (m_requestManager->IsVdisEnable())
2785                     m_vdisBubbleCnt = 1;
2786             }
2787             else {
2788                 m_requestManager->RegisterRequest(currentRequest, &afMode, afRegion);
2789 
2790                 SetAfMode((enum aa_afmode)afMode);
2791                 SetAfRegion(afRegion);
2792 
2793                 m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2794                 ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2795                 if (m_requestManager->IsRequestQueueFull()==false)
2796                     selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2797 
2798                 m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2799             }
2800         }
2801         else {
2802             m_isRequestQueuePending = true;
2803         }
2804     }
2805 
2806     if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2807         ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2808         /*while (1)*/ {
2809             ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2810             if (ret == false)
2811                 CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
2812 
2813             m_requestManager->DeregisterRequest(&deregisteredRequest);
2814 
2815             ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2816             if (ret < 0)
2817                 CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
2818 
2819             ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2820             if (ret < 0)
2821                 CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
2822 
2823             if (currentFrame==NULL) {
2824                 ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
2825             }
2826             else {
2827                 ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2828             }
2829             res = append_camera_metadata(currentFrame, preparedFrame);
2830             if (res==0) {
2831                 ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2832                 m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2833             }
2834             else {
2835                 ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2836             }
2837         }
2838         if (!m_isRequestQueueNull) {
2839             selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2840         }
2841 
2842         if (getInProgressCount()>0) {
2843             ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2844             m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2845         }
2846     }
2847     ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2848     return;
2849 }
2850 
2851 void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
2852 {
2853     ALOGD("####  common Section");
2854     ALOGD("####                 magic(%x) ",
2855         shot_ext->shot.magicNumber);
2856     ALOGD("####  ctl Section");
2857     ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2858         shot_ext->shot.ctl.request.metadataMode,
2859         shot_ext->shot.ctl.lens.aperture,
2860         shot_ext->shot.ctl.sensor.exposureTime,
2861         shot_ext->shot.ctl.sensor.frameDuration,
2862         shot_ext->shot.ctl.sensor.sensitivity,
2863         shot_ext->shot.ctl.aa.awbMode);
2864 
2865     ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
2866         shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
2867         shot_ext->shot.ctl.request.outputStreams[0]);
2868 
2869     ALOGD("####  DM Section");
2870     ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2871         shot_ext->shot.dm.request.metadataMode,
2872         shot_ext->shot.dm.lens.aperture,
2873         shot_ext->shot.dm.sensor.exposureTime,
2874         shot_ext->shot.dm.sensor.frameDuration,
2875         shot_ext->shot.dm.sensor.sensitivity,
2876         shot_ext->shot.dm.sensor.timeStamp,
2877         shot_ext->shot.dm.aa.awbMode,
2878         shot_ext->shot.dm.request.frameCount );
2879 }
2880 
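/*
 * Per-frame flash control for capture: advances m_ctlInfo.flash.m_flashCnt
 * through the IS_FLASH_STATE_... sequence and patches the outgoing shot
 * accordingly (pre-flash on, AE/AWB lock, main flash capture, flash off).
 */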
2881 void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
2882 {
2883     // Flash
2884     switch (m_ctlInfo.flash.m_flashCnt) {
2885     case IS_FLASH_STATE_ON:
2886         ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2887         // check AF locked
2888         if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
2889             if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2890                 if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2891                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2892                     m_ctlInfo.flash.m_flashTimeOut = 5;
2893                 } else
2894                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2895                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2896             } else {
2897                 m_ctlInfo.flash.m_flashTimeOut--;
2898             }
2899         } else {
2900             if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2901                 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2902                 m_ctlInfo.flash.m_flashTimeOut = 5;
2903             } else
2904                 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2905             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2906         }
2907         break;
2908     case IS_FLASH_STATE_ON_WAIT:
2909         break;
2910     case IS_FLASH_STATE_ON_DONE:
2911         if (!m_ctlInfo.flash.m_afFlashDoneFlg)
2912             // auto transition at pre-capture trigger
2913             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
2914         break;
2915     case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
2916         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2917         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
2918         //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
2919         shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
2920         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
2921         break;
2922     case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2923     case IS_FLASH_STATE_AUTO_WAIT:
2924         shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
2925         shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
2926         break;
2927     case IS_FLASH_STATE_AUTO_DONE:
2928         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2929         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2930         break;
2931     case IS_FLASH_STATE_AUTO_OFF:
2932         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2933         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2934         m_ctlInfo.flash.m_flashEnableFlg = false;
2935         break;
2936     case IS_FLASH_STATE_CAPTURE:
2937         ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2938         m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
2939         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
2940         shot_ext->request_scc = 0;
2941         shot_ext->request_scp = 0;
2942         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
2943         break;
2944     case IS_FLASH_STATE_CAPTURE_WAIT:
2945         shot_ext->request_scc = 0;
2946         shot_ext->request_scp = 0;
2947         break;
2948     case IS_FLASH_STATE_CAPTURE_JPEG:
2949         ALOGV("(%s): [Flash] Flash Capture  (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
2950         shot_ext->request_scc = 1;
2951         shot_ext->request_scp = 1;
2952         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
2953         break;
2954     case IS_FLASH_STATE_CAPTURE_END:
2955         ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2956         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2957         shot_ext->request_scc = 0;
2958         shot_ext->request_scp = 0;
2959         m_ctlInfo.flash.m_flashEnableFlg = false;
2960         m_ctlInfo.flash.m_flashCnt = 0;
2961         m_ctlInfo.flash.m_afFlashDoneFlg= false;
2962         break;
2963     case IS_FLASH_STATE_NONE:
2964         break;
2965     default:
2966         ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
2967     }
2968 }
2969 
2970 void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
2971 {
2972     // Flash
2973     switch (m_ctlInfo.flash.m_flashCnt) {
2974     case IS_FLASH_STATE_AUTO_WAIT:
2975         if (m_ctlInfo.flash.m_flashDecisionResult) {
2976             if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
2977                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2978                 ALOGV("(%s): [Flash] Lis :  AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
2979             } else {
2980                 ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
2981             }
2982         } else {
2983             // If the flash was not actually fired in auto mode, skip the flash auto control sequence
2984             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2985             ALOGV("(%s): [Flash] Skip :  AUTO -> OFF", __FUNCTION__);
2986         }
2987         break;
2988     }
2989 }
2990 
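/*
 * Flash listener on ISP-side dynamic metadata: consumes the pre-flash
 * decision (with a time-out fallback), waits for AWB lock during the auto
 * sequence, and waits for firingStable (again with a time-out) before the
 * flash capture JPEG frame is requested.
 */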
2991 void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
2992 {
2993     // Flash
2994     switch (m_ctlInfo.flash.m_flashCnt) {
2995     case IS_FLASH_STATE_ON_WAIT:
2996         if (shot_ext->shot.dm.flash.decision > 0) {
2997             // store the decision result so the flash capture sequence can be skipped when not needed
2998             ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
2999             if (shot_ext->shot.dm.flash.decision == 2)
3000                 m_ctlInfo.flash.m_flashDecisionResult = false;
3001             else
3002                 m_ctlInfo.flash.m_flashDecisionResult = true;
3003             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
3004         } else {
3005             if (m_ctlInfo.flash.m_flashTimeOut == 0) {
3006                 ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
3007                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
3008                 m_ctlInfo.flash.m_flashDecisionResult = false;
3009             } else {
3010                 m_ctlInfo.flash.m_flashTimeOut--;
3011             }
3012         }
3013         break;
3014     case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
3015         if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
3016             ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
3017             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
3018         } else {
3019             ALOGV("(%s):  [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
3020         }
3021         break;
3022     case IS_FLASH_STATE_CAPTURE_WAIT:
3023         if (m_ctlInfo.flash.m_flashDecisionResult) {
3024             if (shot_ext->shot.dm.flash.firingStable) {
3025                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3026             } else {
3027                 if (m_ctlInfo.flash.m_flashTimeOut == 0) {
3028                     ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
3029                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3030                 } else {
3031                     ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
3032                     m_ctlInfo.flash.m_flashTimeOut--;
3033                 }
3034             }
3035         } else {
3036             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3037         }
3038         break;
3039     }
3040 }
3041 
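/*
 * Reports the AE state the framework expects for a capture: CONVERGED when
 * the flash is off (plain ZSL capture), or FLASH_REQUIRED in auto-flash mode
 * when the pre-flash decided that the main flash must fire.
 */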
3042 void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext)
3043 {
3044     switch (m_ctlInfo.flash.i_flashMode) {
3045     case AA_AEMODE_ON:
3046         // With the flash off, the capture can be taken as a ZSL capture
3047         shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED;
3048         break;
3049     case AA_AEMODE_ON_AUTO_FLASH:
3050         // In flash auto mode, the main flash has to fire if the pre-flash was performed.
3051         if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg)
3052             shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED;
3053         // FALLTHRU
3054     default:
3055         break;
3056     }
3057 }
3058 
3059 void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext)
3060 {
3061     shot_ext->shot.ctl.aa.afRegions[0] = currentAfRegion[0];
3062     shot_ext->shot.ctl.aa.afRegions[1] = currentAfRegion[1];
3063     shot_ext->shot.ctl.aa.afRegions[2] = currentAfRegion[2];
3064     shot_ext->shot.ctl.aa.afRegions[3] = currentAfRegion[3];
3065 }
3066 
3067 void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion)
3068 {
3069     currentAfRegion[0] = afRegion[0];
3070     currentAfRegion[1] = afRegion[1];
3071     currentAfRegion[2] = afRegion[2];
3072     currentAfRegion[3] = afRegion[3];
3073 }
3074 
3075 void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode)
3076 {
3077     if (m_afState == HAL_AFSTATE_SCANNING) {
3078         ALOGD("(%s): restarting trigger ", __FUNCTION__);
3079     } else if (!mode) {
3080         if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
3081             ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
3082         else
3083             m_afState = HAL_AFSTATE_STARTED;
3084     }
3085     ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState);
3086     shot_ext->shot.ctl.aa.afTrigger = 1;
3087     shot_ext->shot.ctl.aa.afMode = m_afMode;
3088     m_IsAfTriggerRequired = false;
3089 }
3090 
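/*
 * Sensor thread handler. For SIGNAL_SENSOR_START_REQ_PROCESSING it dequeues a
 * bayer buffer from the sensor node, matches it to a registered request (or
 * builds a "bubble" shot when no request matches), patches the per-frame
 * controls (crop/zoom region, AF mode and trigger, scene and night-capture
 * handling, frame duration, flash), pushes the buffer through the ISP node,
 * and finally signals the stream threads when SCP or SCC output is expected.
 * SIGNAL_THREAD_RELEASE streams off and releases the sensor and ISP nodes.
 */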
3091 void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
3092 {
3093     uint32_t        currentSignal = self->GetProcessingSignal();
3094     SensorThread *  selfThread      = ((SensorThread*)self);
3095     int index;
3096     int index_isp;
3097     status_t res;
3098     nsecs_t frameTime;
3099     int bayersOnSensor = 0, bayersOnIsp = 0;
3100     int j = 0;
3101     bool isCapture = false;
3102     ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
3103 
3104     if (currentSignal & SIGNAL_THREAD_RELEASE) {
3105         CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3106 
3107         ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
3108         cam_int_streamoff(&(m_camera_info.sensor));
3109         ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
3110 
3111         m_camera_info.sensor.buffers = 0;
3112         ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
3113         cam_int_reqbufs(&(m_camera_info.sensor));
3114         ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
3115         m_camera_info.sensor.status = false;
3116 
3117         ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
3118         isp_int_streamoff(&(m_camera_info.isp));
3119         ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
3120 
3121         m_camera_info.isp.buffers = 0;
3122         ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
3123         cam_int_reqbufs(&(m_camera_info.isp));
3124         ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
3125 
3126         exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
3127 
3128         m_requestManager->releaseSensorQ();
3129         m_requestManager->ResetEntry();
3130         ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3131         selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
3132         return;
3133     }
3134 
3135     if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
3136     {
3137         ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
3138         int targetStreamIndex = 0, i=0;
3139         int matchedFrameCnt = -1, processingReqIndex;
3140         struct camera2_shot_ext *shot_ext;
3141         struct camera2_shot_ext *shot_ext_capture;
3142         bool triggered = false;
3143 
3144         /* dqbuf from sensor */
3145         ALOGV("Sensor DQbuf start");
3146         index = cam_int_dqbuf(&(m_camera_info.sensor));
3147         m_requestManager->pushSensorQ(index);
3148         ALOGV("Sensor DQbuf done(%d)", index);
3149         shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3150 
3151         if (m_nightCaptureCnt != 0) {
3152             matchedFrameCnt = m_nightCaptureFrameCnt;
3153         } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
3154             matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
3155             ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
3156         } else {
3157             matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
3158         }
3159 
3160         if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) {
3161             matchedFrameCnt = m_vdisDupFrame;
3162         }
3163 
3164         if (matchedFrameCnt != -1) {
3165             if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) {
3166                 frameTime = systemTime();
3167                 m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
3168                 m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
3169             } else {
3170                 ALOGV("bubble for vids: m_vdisBubbleCnt %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt);
3171             }
3172 
3173             // track scene mode changes (used for face-priority AF mode handling below)
3174             if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) {
3175                 ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode);
3176                 m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3177             }
3178 
3179             m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
3180             float zoomLeft, zoomTop, zoomWidth, zoomHeight;
3181             int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
3182 
3183             m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
3184                            m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
3185                            &crop_x, &crop_y,
3186                            &crop_w, &crop_h,
3187                            0);
3188 
3189             if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
3190                 zoomWidth =  m_camera2->getSensorW() / m_zoomRatio;
3191                 zoomHeight = zoomWidth *
3192                         m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
3193             } else {
3194                 zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
3195                 zoomWidth = zoomHeight *
3196                         m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
3197             }
3198             zoomLeft = (crop_w - zoomWidth) / 2;
3199             zoomTop = (crop_h - zoomHeight) / 2;
3200 
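            // Center the zoomed region inside the aspect-corrected crop; the
            // width is trimmed just below so the region stays inside the
            // 4-pixel-aligned crop width.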
3201             int32_t new_cropRegion[3] = { (int32_t)zoomLeft, (int32_t)zoomTop, (int32_t)zoomWidth };
3202 
3203             int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4);
3204             if (cropCompensation)
3205                 new_cropRegion[2] -= cropCompensation;
3206 
3207             shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
3208             shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
3209             shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
3210             if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) {
3211                 ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode);
3212                 shot_ext->shot.ctl.aa.afMode = m_afMode;
3213                 if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
3214                     ALOGD("### With Automatic triger for continuous modes");
3215                     m_afState = HAL_AFSTATE_STARTED;
3216                     shot_ext->shot.ctl.aa.afTrigger = 1;
3217                     triggered = true;
3218                     if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) ||
3219                             (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) {
3220                         switch (m_afMode) {
3221                         case AA_AFMODE_CONTINUOUS_PICTURE:
3222                             shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
3223                             ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode);
3224                             // FALLTHRU
3225                         default:
3226                             break;
3227                         }
3228                     }
3229                     // reset flash result
3230                     if (m_ctlInfo.flash.m_afFlashDoneFlg) {
3231                         m_ctlInfo.flash.m_flashEnableFlg = false;
3232                         m_ctlInfo.flash.m_afFlashDoneFlg = false;
3233                         m_ctlInfo.flash.m_flashDecisionResult = false;
3234                         m_ctlInfo.flash.m_flashCnt = 0;
3235                     }
3236                     m_ctlInfo.af.m_afTriggerTimeOut = 1;
3237                 }
3238 
3239                 m_IsAfModeUpdateRequired = false;
3240                 // support infinity focus mode
3241                 if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) {
3242                     shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY;
3243                     shot_ext->shot.ctl.aa.afTrigger = 1;
3244                     triggered = true;
3245                 }
3246                 if (m_afMode2 != NO_CHANGE) {
3247                     enum aa_afmode tempAfMode = m_afMode2;
3248                     m_afMode2 = NO_CHANGE;
3249                     SetAfMode(tempAfMode);
3250                 }
3251             }
3252             else {
3253                 shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
3254             }
3255             if (m_IsAfTriggerRequired) {
3256                 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
3257                     // flash case
3258                     if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
3259                         if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
3260                             // Flash is enabled and start AF
3261                             m_afTrigger(shot_ext, 1);
3262                         } else {
3263                             m_afTrigger(shot_ext, 0);
3264                         }
3265                     }
3266                 } else {
3267                     // non-flash case
3268                     m_afTrigger(shot_ext, 0);
3269                 }
3270             } else {
3271                 shot_ext->shot.ctl.aa.afTrigger = 0;
3272             }
3273 
3274             if (m_wideAspect) {
3275                 shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3276             } else {
3277                 shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3278             }
3279             if (triggered)
3280                 shot_ext->shot.ctl.aa.afTrigger = 1;
3281 
3282             // TODO : check collision with AFMode Update
3283             if (m_IsAfLockRequired) {
3284                 shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
3285                 m_IsAfLockRequired = false;
3286             }
3287             ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
3288                 index,
3289                 shot_ext->shot.ctl.request.frameCount,
3290                 shot_ext->request_scp,
3291                 shot_ext->request_scc,
3292                 shot_ext->dis_bypass, sizeof(camera2_shot));
3293 
3294             // update AF region
3295             m_updateAfRegion(shot_ext);
3296 
3297             m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3298             if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
3299                     && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
3300                 shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
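            // Night-scene still capture runs as a short burst: the first
            // matching request arms m_nightCaptureCnt = 4, the following
            // frames use a low minimum fps with SCC output suppressed, and
            // only the last frame (cnt == 1) requests SCC at the full rate.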
3301             if (m_nightCaptureCnt == 0) {
3302                 if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
3303                         && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3304                     shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3305                     shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3306                     shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3307                     m_nightCaptureCnt = 4;
3308                     m_nightCaptureFrameCnt = matchedFrameCnt;
3309                     shot_ext->request_scc = 0;
3310                 }
3311             }
3312             else if (m_nightCaptureCnt == 1) {
3313                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3314                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3315                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3316                 m_nightCaptureCnt--;
3317                 m_nightCaptureFrameCnt = 0;
3318                 shot_ext->request_scc = 1;
3319             }
3320             else if (m_nightCaptureCnt == 2) {
3321                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3322                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3323                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3324                 m_nightCaptureCnt--;
3325                 shot_ext->request_scc = 0;
3326             }
3327             else if (m_nightCaptureCnt == 3) {
3328                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3329                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3330                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3331                 m_nightCaptureCnt--;
3332                 shot_ext->request_scc = 0;
3333             }
3334             else if (m_nightCaptureCnt == 4) {
3335                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3336                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3337                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3338                 m_nightCaptureCnt--;
3339                 shot_ext->request_scc = 0;
3340             }
3341 
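            // Map the requested maximum fps to a fixed frame duration in ns
            // (frameDuration ~= 1e9 / fps, e.g. 30 fps -> 33.333 ms);
            // unknown values fall back to the 30 fps duration.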
3342             switch (shot_ext->shot.ctl.aa.aeTargetFpsRange[1]) {
3343             case 15:
3344                 shot_ext->shot.ctl.sensor.frameDuration = (66666 * 1000);
3345                 break;
3346 
3347             case 24:
3348                 shot_ext->shot.ctl.sensor.frameDuration = (41666 * 1000);
3349                 break;
3350 
3351             case 25:
3352                 shot_ext->shot.ctl.sensor.frameDuration = (40000 * 1000);
3353                 break;
3354 
3355             case 30:
3356             default:
3357                 shot_ext->shot.ctl.sensor.frameDuration = (33333 * 1000);
3358                 break;
3359             }
3360             shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3361 
3362             // Flash mode
3363             // While flash is enabled for a still capture, skip request_scc = 1 here and let the flash capture sequence drive the SCC output
3364             if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
3365                     && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
3366                     && (m_cameraId == 0)) {
3367                 if (!m_ctlInfo.flash.m_flashDecisionResult) {
3368                     m_ctlInfo.flash.m_flashEnableFlg = false;
3369                     m_ctlInfo.flash.m_afFlashDoneFlg = false;
3370                     m_ctlInfo.flash.m_flashCnt = 0;
3371                 } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) ||
3372                                           (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
3373                     ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
3374                     shot_ext->request_scc = 0;
3375                     m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
3376                     m_ctlInfo.flash.m_flashEnableFlg = true;
3377                     m_ctlInfo.flash.m_afFlashDoneFlg = false;
3378                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
3379                 } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) {
3380                     ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
3381                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3382                     m_ctlInfo.flash.m_flashEnableFlg = false;
3383                     m_ctlInfo.flash.m_afFlashDoneFlg= false;
3384                     m_ctlInfo.flash.m_flashCnt = 0;
3385                 }
3386             } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) {
3387                 m_ctlInfo.flash.m_flashDecisionResult = false;
3388             }
3389 
3390             if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) {
3391                 if (m_ctlInfo.flash.m_flashTorchMode == false) {
3392                     m_ctlInfo.flash.m_flashTorchMode = true;
3393                 }
3394             } else {
3395                 if (m_ctlInfo.flash.m_flashTorchMode == true) {
3396                     shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3397                     shot_ext->shot.ctl.flash.firingPower = 0;
3398                     m_ctlInfo.flash.m_flashTorchMode = false;
3399                 } else {
3400                     shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
3401                 }
3402             }
3403 
3404             if (shot_ext->isReprocessing) {
3405                 ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__);
3406                 m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
3407                 shot_ext->request_scp = 0;
3408                 shot_ext->request_scc = 0;
3409                 m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
3410                 m_ctlInfo.flash.m_flashDecisionResult = false;
3411                 memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt)),
3412                     sizeof(struct camera2_shot_ext));
3413                 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
3414                 m_ctlInfo.flash.m_flashEnableFlg = false;
3415             }
3416 
3417             if (m_ctlInfo.flash.m_flashEnableFlg) {
3418                 m_preCaptureListenerSensor(shot_ext);
3419                 m_preCaptureSetter(shot_ext);
3420             }
3421 
3422             ALOGV("(%s): queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
3423             (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3424             (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3425             (int)(shot_ext->shot.ctl.aa.afTrigger));
3426 
3427             if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) {
3428                 shot_ext->dis_bypass = 1;
3429                 shot_ext->dnr_bypass = 1;
3430                 shot_ext->request_scp = 0;
3431                 shot_ext->request_scc = 0;
3432                 m_vdisBubbleCnt--;
3433                 matchedFrameCnt = -1;
3434             } else {
3435                 m_vdisDupFrame = matchedFrameCnt;
3436             }
3437             if (m_scpForceSuspended)
3438                 shot_ext->request_scc = 0;
3439 
3440             uint32_t current_scp = shot_ext->request_scp;
3441             uint32_t current_scc = shot_ext->request_scc;
3442 
3443             if (shot_ext->shot.dm.request.frameCount == 0) {
3444                 CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
3445             }
3446 
3447             cam_int_qbuf(&(m_camera_info.isp), index);
3448 
3449             ALOGV("### isp DQBUF start");
3450             index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3451 
3452             shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3453 
3454             if (m_ctlInfo.flash.m_flashEnableFlg)
3455                 m_preCaptureListenerISP(shot_ext);
3456 
3457             ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)",
3458                 index,
3459                 shot_ext->shot.ctl.request.frameCount,
3460                 shot_ext->request_scp,
3461                 shot_ext->request_scc,
3462                 shot_ext->dis_bypass,
3463                 shot_ext->dnr_bypass, sizeof(camera2_shot));
3464 
3465             ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
3466                 (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3467                 (int)(shot_ext->shot.dm.aa.awbMode),
3468                 (int)(shot_ext->shot.dm.aa.afMode));
3469 
3470 #ifndef ENABLE_FRAME_SYNC
3471             m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
3472 #endif
3473 
3474             if (!shot_ext->fd_bypass) {
3475                 /* FD orientation axis transformation */
3476                 for (int i=0; i < CAMERA2_MAX_FACES; i++) {
3477                     if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
3478                         shot_ext->shot.dm.stats.faceRectangles[i][0] =
3479                             (m_camera2->m_curCameraInfo->sensorW * shot_ext->shot.dm.stats.faceRectangles[i][0])
3480                                 / m_streamThreads[0].get()->m_parameters.width;
3481                     if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
3482                         shot_ext->shot.dm.stats.faceRectangles[i][1] =
3483                             (m_camera2->m_curCameraInfo->sensorH * shot_ext->shot.dm.stats.faceRectangles[i][1])
3484                                 / m_streamThreads[0].get()->m_parameters.height;
3485                     if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
3486                         shot_ext->shot.dm.stats.faceRectangles[i][2] =
3487                             (m_camera2->m_curCameraInfo->sensorW * shot_ext->shot.dm.stats.faceRectangles[i][2])
3488                                 / m_streamThreads[0].get()->m_parameters.width;
3489                     if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
3490                         shot_ext->shot.dm.stats.faceRectangles[i][3] =
3491                             (m_camera2->m_curCameraInfo->sensorH * shot_ext->shot.dm.stats.faceRectangles[i][3])
3492                                 / m_streamThreads[0].get()->m_parameters.height;
3493                 }
3494             }
3495             // aeState control
3496             if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT)
3497                 m_preCaptureAeState(shot_ext);
3498 
3499             // At scene mode face priority
3500             if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE)
3501                 shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
3502 
3503             if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
3504                 m_requestManager->ApplyDynamicMetadata(shot_ext);
3505             }
3506 
3507             if (current_scc != shot_ext->request_scc) {
3508                 ALOGD("(%s): scc frame drop1 request_scc(%d to %d)",
3509                                 __FUNCTION__, current_scc, shot_ext->request_scc);
3510                 m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3511             }
3512             if (shot_ext->request_scc) {
3513                 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)");
3514                 if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) {
3515                     if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)
3516                         memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(shot_ext->shot.ctl.request.frameCount)),
3517                             sizeof(struct camera2_shot_ext));
3518                     else
3519                         memcpy(&m_jpegMetadata, (void*)shot_ext, sizeof(struct camera2_shot_ext));
3520                 }
3521                 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3522             }
3523             if (current_scp != shot_ext->request_scp) {
3524                 ALOGD("(%s): scp frame drop1 request_scp(%d to %d)",
3525                                 __FUNCTION__, current_scp, shot_ext->request_scp);
3526                 m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3527             }
3528             if (shot_ext->request_scp) {
3529                 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)");
3530                 m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3531             }
3532 
3533             ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
3534                shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
3535             if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
3536                 ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
3537                 m_scp_closed = true;
3538             }
3539             else
3540                 m_scp_closed = false;
3541 
3542             OnAfNotification(shot_ext->shot.dm.aa.afState);
3543             OnPrecaptureMeteringNotificationISP();
3544         }   else {
3545             memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl));
3546             shot_ext->shot.ctl.request.frameCount = 0xfffffffe;
3547             shot_ext->request_sensor = 1;
3548             shot_ext->dis_bypass = 1;
3549             shot_ext->dnr_bypass = 1;
3550             shot_ext->fd_bypass = 1;
3551             shot_ext->drc_bypass = 1;
3552             shot_ext->request_scc = 0;
3553             shot_ext->request_scp = 0;
3554             if (m_wideAspect) {
3555                 shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3556             } else {
3557                 shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3558             }
3559             shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode;
3560             if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3561                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
3562                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3563             }
3564             shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3565             shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3566             ALOGV("### isp QBUF start (bubble)");
3567             ALOGV("bubble: queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)",
3568                 (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3569                 (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3570                 (int)(shot_ext->shot.ctl.aa.afTrigger));
3571 
3572             cam_int_qbuf(&(m_camera_info.isp), index);
3573             ALOGV("### isp DQBUF start (bubble)");
3574             index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3575             shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3576             ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)",
3577                 (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3578                 (int)(shot_ext->shot.dm.aa.awbMode),
3579                 (int)(shot_ext->shot.dm.aa.afMode));
3580 
3581             OnAfNotification(shot_ext->shot.dm.aa.afState);
3582         }
3583 
3584         index = m_requestManager->popSensorQ();
3585         if(index < 0){
3586             ALOGE("sensorQ is empty");
3587             return;
3588         }
3589 
3590         processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
3591         shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3592         if (m_scp_closing || m_scp_closed) {
3593             ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
3594             shot_ext->request_scc = 0;
3595             shot_ext->request_scp = 0;
3596             shot_ext->request_sensor = 0;
3597         }
3598         cam_int_qbuf(&(m_camera_info.sensor), index);
3599         ALOGV("Sensor Qbuf done(%d)", index);
3600 
3601         if (!m_scp_closing
3602             && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
3603             ALOGV("make bubble shot: matchedFramcnt(%d) processingReqIndex(%d)",
3604                                     matchedFrameCnt, processingReqIndex);
3605             selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
3606         }
3607     }
3608     return;
3609 }
3610 
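/*
 * One-time buffer setup for a stream thread: dequeue every service buffer of
 * the main stream so it is owned by the HAL, then do the same for each
 * attached substream (locking the gralloc handles), and allocate the ION
 * scratch buffers used by the JPEG resize and preview-callback paths.
 */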
3611 void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
3612 {
3613     uint32_t                currentSignal   = self->GetProcessingSignal();
3614     StreamThread *          selfThread      = ((StreamThread*)self);
3615     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3616     node_info_t             *currentNode    = selfStreamParms->node;
3617     substream_parameters_t  *subParms;
3618     buffer_handle_t * buf = NULL;
3619     status_t res;
3620     void *virtAddr[3];
3621     int i, j;
3622     int index;
3623     nsecs_t timestamp;
3624 
3625     if (!(selfThread->m_isBufferInit))
3626     {
3627         for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3628             res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3629             if (res != NO_ERROR || buf == NULL) {
3630                 ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3631                 return;
3632             }
3633             ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3634                ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3635 
3636             index = selfThread->findBufferIndex(buf);
3637             if (index == -1) {
3638                 ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
3639             }
3640             else {
3641                 ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3642                     __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
3643                 if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
3644                     selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3645                 else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
3646                     selfStreamParms->svcBufStatus[index] = ON_HAL;
3647                 else {
3648                     ALOGV("DBG(%s): buffer status abnormal (%d) "
3649                         , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
3650                 }
3651                 selfStreamParms->numSvcBufsInHal++;
3652             }
3653             selfStreamParms->bufIndex = 0;
3654         }
3655         selfThread->m_isBufferInit = true;
3656     }
3657     for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3658         if (selfThread->m_attachedSubStreams[i].streamId == -1)
3659             continue;
3660 
3661         subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
3662         if (subParms->type && subParms->needBufferInit) {
3663             ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
3664                 __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
3665             int checkingIndex = 0;
3666             bool found = false;
3667             for (j = 0; j < subParms->numSvcBuffers; j++) {   // j is used so the outer substream loop index i is left untouched
3668                 res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3669                 if (res != NO_ERROR || buf == NULL) {
3670                     ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3671                     return;
3672                 }
3673                 subParms->numSvcBufsInHal++;
3674                 ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3675                    subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3676 
3677                 if (m_grallocHal->lock(m_grallocHal, *buf,
3678                        subParms->usage, 0, 0,
3679                        subParms->width, subParms->height, virtAddr) != 0) {
3680                     ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3681                 }
3682                 else {
3683                       ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3684                         __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3685                 }
3686                 found = false;
3687                 for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3688                     if (subParms->svcBufHandle[checkingIndex] == *buf ) {
3689                         found = true;
3690                         break;
3691                     }
3692                 }
3693                 ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3694                 if (!found) break;
3695 
3696                 index = checkingIndex;
3697 
3698                 if (index == -1) {
3699                     ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3700                 }
3701                 else {
3702                     ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3703                         __FUNCTION__, index, subParms->svcBufStatus[index]);
3704                     if (subParms->svcBufStatus[index]== ON_SERVICE)
3705                         subParms->svcBufStatus[index] = ON_HAL;
3706                     else {
3707                         ALOGV("DBG(%s): buffer status abnormal (%d) "
3708                             , __FUNCTION__, subParms->svcBufStatus[index]);
3709                     }
3710                     if (*buf != subParms->svcBufHandle[index])
3711                         ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3712                     else
3713                         ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3714                 }
3715                 subParms->svcBufIndex = 0;
3716             }
3717             if (subParms->type == SUBSTREAM_TYPE_JPEG) {
3718                 m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
3719                 m_resizeBuf.size.extS[1] = 0;
3720                 m_resizeBuf.size.extS[2] = 0;
3721 
3722                 if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
3723                     ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
3724                 }
3725             }
3726             if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
3727                 m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
3728                 subParms->height, &m_previewCbBuf);
3729 
3730                 if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
3731                     ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
3732                 }
3733             }
3734             subParms->needBufferInit= false;
3735         }
3736     }
3737 }
3738 
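/*
 * Resets a stream thread to its initial state: clears the stream parameters,
 * marks the buffers as uninitialized and detaches every substream slot.
 */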
3739 void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
3740 {
3741     StreamThread *          selfThread      = ((StreamThread*)self);
3742     ALOGV("DEBUG(%s): ", __FUNCTION__ );
3743     memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
3744     selfThread->m_isBufferInit = false;
3745     for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3746         selfThread->m_attachedSubStreams[i].streamId    = -1;
3747         selfThread->m_attachedSubStreams[i].priority    = 0;
3748     }
3749     return;
3750 }
3751 
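/*
 * Dispatches one source frame to the handler of the given substream
 * (JPEG capture, video record or preview callback); unknown stream ids
 * are ignored.
 */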
3752 int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
3753     int stream_id, nsecs_t frameTimeStamp)
3754 {
3755     substream_parameters_t  *subParms = &m_subStreams[stream_id];
3756 
3757     switch (stream_id) {
3758 
3759     case STREAM_ID_JPEG:
3760         return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
3761 
3762     case STREAM_ID_RECORD:
3763         return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
3764 
3765     case STREAM_ID_PRVCB:
3766         return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
3767 
3768     default:
3769         return 0;
3770     }
3771 }
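/*
 * Signal handler for STREAM_TYPE_DIRECT streams (preview / ZSL). It services
 * thread release (streamoff + reqbufs(0), and freeing the per-buffer metadata
 * planes when ENABLE_FRAME_SYNC is set), the reprocessing path, and the normal
 * data path: dequeue a frame from the driver node, fan it out to the attached
 * substreams, enqueue or cancel it back to the service, then refill the driver
 * queue with buffers dequeued from the service.
 */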
3772 void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
3773 {
3774     uint32_t                currentSignal   = self->GetProcessingSignal();
3775     StreamThread *          selfThread      = ((StreamThread*)self);
3776     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3777     node_info_t             *currentNode    = selfStreamParms->node;
3778     int i = 0;
3779     nsecs_t frameTimeStamp;
3780 
3781     if (currentSignal & SIGNAL_THREAD_RELEASE) {
3782         CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3783 
3784         if (selfThread->m_isBufferInit) {
3785             if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) {
3786                 ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
3787                     selfThread->m_index, currentNode->fd);
3788                 if (cam_int_streamoff(currentNode) < 0 ) {
3789                     ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3790                 }
3791                 ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
3792                         selfThread->m_index, currentNode->fd);
3793                 currentNode->buffers = 0;
3794                 cam_int_reqbufs(currentNode);
3795                 ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
3796                         selfThread->m_index, currentNode->fd);
3797             }
3798         }
3799 #ifdef ENABLE_FRAME_SYNC
3800         // free metabuffers
3801         for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
3802             if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
3803                 freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
3804                 selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
3805                 selfStreamParms->metaBuffers[i].size.extS[0] = 0;
3806             }
3807 #endif
3808         selfThread->m_isBufferInit = false;
3809         selfThread->m_releasing = false;
3810         selfThread->m_activated = false;
3811         ALOGV("(%s): [%d] END  SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3812         return;
3813     }
3814     if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
3815         status_t    res;
3816         buffer_handle_t * buf = NULL;
3817         bool found = false;
3818         ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
3819             __FUNCTION__, selfThread->m_index);
3820         res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
3821         if (res != NO_ERROR || buf == NULL) {
3822             ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
3823             return;
3824         }
3825         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3826         int checkingIndex = 0;
3827         for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3828             if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3829                 found = true;
3830                 break;
3831             }
3832         }
3833         ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
3834             __FUNCTION__, (unsigned int)buf, found, checkingIndex);
3835 
3836         if (!found) return;
3837 
3838         for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3839             if (selfThread->m_attachedSubStreams[i].streamId == -1)
3840                 continue;
3841 
3842 #ifdef ENABLE_FRAME_SYNC
3843             frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt);
3844             m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
3845 #else
3846             frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3847 #endif
3848             if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
3849                 m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
3850                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3851         }
3852 
3853         res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
3854         if (res != NO_ERROR) {
3855             ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
3856             return;
3857         }
3858         ALOGV("(%s): streamthread[%d] END   SIGNAL_STREAM_REPROCESSING_START",
3859             __FUNCTION__,selfThread->m_index);
3860 
3861         return;
3862     }
3863     if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3864         buffer_handle_t * buf = NULL;
3865         status_t res = 0;
3866         int i, j;
3867         int index;
3868         nsecs_t timestamp;
3869 #ifdef ENABLE_FRAME_SYNC
3870         camera2_stream *frame;
3871         uint8_t currentOutputStreams;
3872         bool directOutputEnabled = false;
3873 #endif
3874         int numOfUndqbuf = 0;
3875 
3876         ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3877 
3878         m_streamBufferInit(self);
3879 
3880         do {
3881             ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
3882                 selfThread->m_index, selfThread->streamType);
3883 
3884 #ifdef ENABLE_FRAME_SYNC
3885             selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
3886             frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
3887             frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3888             currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
3889             ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams);
3890             if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)||
3891                  ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) {
3892                 directOutputEnabled = true;
3893             }
3894             if (!directOutputEnabled) {
3895                 if (!m_nightCaptureFrameCnt)
3896                     m_requestManager->NotifyStreamOutput(frame->rcount);
3897             }
3898 #else
3899             selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3900             frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3901 #endif
3902             ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)  sigcnt(%d)",__FUNCTION__,
3903                 selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
3904 
3905             if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] !=  ON_DRIVER)
3906                 ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3907                        __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
3908             selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3909 
3910             for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3911                 if (selfThread->m_attachedSubStreams[i].streamId == -1)
3912                     continue;
3913 #ifdef ENABLE_FRAME_SYNC
3914                 if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3915                     m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3916                         selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3917                 }
3918 #else
3919                 if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3920                     m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3921                         selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3922                 }
3923 #endif
3924             }
3925 
3926             if (m_requestManager->GetSkipCnt() <= 0) {
3927 #ifdef ENABLE_FRAME_SYNC
3928                 if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3929                     ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
3930                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3931                             frameTimeStamp,
3932                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3933                 }
3934                 else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3935                     ALOGV("** SCC output (frameCnt:%d)", frame->rcount);
3936                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3937                                 frameTimeStamp,
3938                                 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3939                 }
3940                 else {
3941                     res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3942                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3943                     ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3944                 }
3945 #else
3946                 if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3947                     ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
3948                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3949                             frameTimeStamp,
3950                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3951                 }
3952                 else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3953                     ALOGV("** SCC output (frameCnt:%d)", m_requestManager->GetFrameIndex());
3954                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3955                                 frameTimeStamp,
3956                                 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3957                 }
3958 #endif
3959                 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3960             }
3961             else {
3962                 res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3963                         &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3964                 ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3965             }
3966 #ifdef ENABLE_FRAME_SYNC
3967             if (directOutputEnabled) {
3968                 if (!m_nightCaptureFrameCnt)
3969                      m_requestManager->NotifyStreamOutput(frame->rcount);
3970             }
3971 #endif
3972             if (res == 0) {
3973                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
3974                 selfStreamParms->numSvcBufsInHal--;
3975             }
3976             else {
3977                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3978             }
3979 
3980         }
3981         while(0);
3982 
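        /* Refill path: keep dequeuing buffers back from the service and queueing
         * them to the driver node while the HAL holds fewer than
         * (numSvcBuffers - NUM_SCP_BUFFERS) + minUndequedBuffer of them. */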
3983         while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS))
3984                     < selfStreamParms->minUndequedBuffer) {
3985             res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3986             if (res != NO_ERROR || buf == NULL) {
3987                 ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index,  res, selfStreamParms->numSvcBufsInHal);
3988                 break;
3989             }
3990             selfStreamParms->numSvcBufsInHal++;
3991             ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3992                 selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3993                ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3994             const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3995 
3996             bool found = false;
3997             int checkingIndex = 0;
3998             for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3999                 if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4000                     found = true;
4001                     break;
4002                 }
4003             }
4004             if (!found) break;
4005             selfStreamParms->bufIndex = checkingIndex;
4006             if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
4007                 uint32_t    plane_index = 0;
4008                 ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
4009                 struct v4l2_buffer v4l2_buf;
4010                 struct v4l2_plane  planes[VIDEO_MAX_PLANES];
4011 
4012                 v4l2_buf.m.planes   = planes;
4013                 v4l2_buf.type       = currentNode->type;
4014                 v4l2_buf.memory     = currentNode->memory;
4015                 v4l2_buf.index      = selfStreamParms->bufIndex;
4016                 v4l2_buf.length     = currentNode->planes;
4017 
4018                 v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
4019                 v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
4020                 v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
4021                 for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
4022                     v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
4023                 }
4024 #ifdef ENABLE_FRAME_SYNC
4025                 /* add plane for metadata*/
4026                 v4l2_buf.length += selfStreamParms->metaPlanes;
4027                 v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
4028                 v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
4029 #endif
4030                 if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
4031                     ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
4032                         __FUNCTION__, selfThread->m_index);
4033                     return;
4034                 }
4035                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
4036                 ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
4037                     __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
4038             }
4039         }
4040 
4041         ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
4042     }
4043     return;
4044 }
4045 
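/*
 * Signal handler for STREAM_TYPE_INDIRECT streams (typically the capture/SCC
 * node). On release it stops streaming and returns the buffers; on data
 * arrival it dequeues a frame, runs the attached substream handlers on it and
 * queues the buffer straight back to the driver, so nothing is handed to the
 * service from this path directly.
 */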
4046 void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
4047 {
4048     uint32_t                currentSignal   = self->GetProcessingSignal();
4049     StreamThread *          selfThread      = ((StreamThread*)self);
4050     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4051     node_info_t             *currentNode    = selfStreamParms->node;
4052 
4053 
4054     if (currentSignal & SIGNAL_THREAD_RELEASE) {
4055         CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4056 
4057         if (selfThread->m_isBufferInit) {
4058             if (currentNode->fd == m_camera_info.capture.fd) {
4059                 if (m_camera_info.capture.status == true) {
4060                     ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4061                     selfThread->m_index, currentNode->fd);
4062                     if (cam_int_streamoff(currentNode) < 0 ){
4063                         ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4064                     } else {
4065                         m_camera_info.capture.status = false;
4066                     }
4067                 }
4068             } else {
4069                 ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4070                 selfThread->m_index, currentNode->fd);
4071                 if (cam_int_streamoff(currentNode) < 0 ){
4072                     ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4073                 }
4074             }
4075             ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
4076             ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
4077                     selfThread->m_index, currentNode->fd);
4078             currentNode->buffers = 0;
4079             cam_int_reqbufs(currentNode);
4080             ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
4081                     selfThread->m_index, currentNode->fd);
4082         }
4083 
4084         selfThread->m_isBufferInit = false;
4085         selfThread->m_releasing = false;
4086         selfThread->m_activated = false;
4087         ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4088         return;
4089     }
4090 
4091     if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
4092 #ifdef ENABLE_FRAME_SYNC
4093         camera2_stream *frame;
4094         uint8_t currentOutputStreams;
4095 #endif
4096         nsecs_t frameTimeStamp;
4097 
4098         ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
4099             __FUNCTION__,selfThread->m_index);
4100 
4101         m_streamBufferInit(self);
4102 
4103         ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
4104         selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
4105         ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
4106             selfThread->m_index, selfStreamParms->bufIndex);
4107 
4108 #ifdef ENABLE_FRAME_SYNC
4109         frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
4110         frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
4111         currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
4112         ALOGV("frame count(SCC) : %d outputStream(%x)",  frame->rcount, currentOutputStreams);
4113 #else
4114         frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
4115 #endif
4116 
4117         for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
4118             if (selfThread->m_attachedSubStreams[i].streamId == -1)
4119                 continue;
4120 #ifdef ENABLE_FRAME_SYNC
4121             if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4122                 m_requestManager->NotifyStreamOutput(frame->rcount);
4123                 m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4124                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4125             }
4126 #else
4127             if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4128                 m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4129                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4130             }
4131 #endif
4132         }
4133         cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
4134         ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);
4135 
4136 
4137 
4138         ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
4139             __FUNCTION__, selfThread->m_index);
4140     }
4141 
4142 
4143     return;
4144 }
4145 
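/*
 * Common entry point for all stream thread signals; routes the signal to the
 * direct or indirect handler according to the thread's stream type.
 */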
4146 void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
4147 {
4148     uint32_t                currentSignal   = self->GetProcessingSignal();
4149     StreamThread *          selfThread      = ((StreamThread*)self);
4150     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4151     node_info_t             *currentNode    = selfStreamParms->node;
4152 
4153     ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
4154 
4155     // Do something in Child thread handler
4156     // Should change function to class that inherited StreamThread class to support dynamic stream allocation
4157     if (selfThread->streamType == STREAM_TYPE_DIRECT) {
4158         m_streamFunc_direct(self);
4159     } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
4160         m_streamFunc_indirect(self);
4161     }
4162 
4163     return;
4164 }
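/*
 * JPEG substream handler: picks a free (ON_HAL) service buffer, crops/scales
 * the source image into m_resizeBuf through the picture CSC (applying the
 * current zoom ratio), encodes it with yuv2Jpeg(), writes the camera2_jpeg_blob
 * trailer at the end of the gralloc buffer, enqueues the result to the service
 * and finally tops the substream back up with buffers dequeued from the service.
 */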
4165 int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4166 {
4167     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4168     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_JPEG];
4169     status_t    res;
4170     ExynosRect jpegRect;
4171     bool found = false;
4172     int srcW, srcH, srcCropX, srcCropY;
4173     int pictureW, pictureH, pictureFramesize = 0;
4174     int pictureFormat;
4175     int cropX, cropY, cropW, cropH = 0;
4176     ExynosBuffer resizeBufInfo;
4177     ExynosRect   m_jpegPictureRect;
4178     buffer_handle_t * buf = NULL;
4179     camera2_jpeg_blob * jpegBlob = NULL;
4180     int jpegBufSize = 0;
4181 
4182     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4183     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4184         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4185             found = true;
4186             break;
4187         }
4188         subParms->svcBufIndex++;
4189         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4190             subParms->svcBufIndex = 0;
4191     }
4192     if (!found) {
4193         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4194         subParms->svcBufIndex++;
4195         return 1;
4196     }
4197 
4198     {
4199         Mutex::Autolock lock(m_jpegEncoderLock);
4200         m_jpegEncodingCount++;
4201     }
4202 
4203     m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
4204                     m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
4205                     &srcCropX, &srcCropY,
4206                     &srcW, &srcH,
4207                     0);
4208 
4209     m_jpegPictureRect.w = subParms->width;
4210     m_jpegPictureRect.h = subParms->height;
4211 
4212      ALOGV("DEBUG(%s): stream w = %d, h = %d, jpeg w = %d, h = %d",
4213               __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
4214                    m_jpegPictureRect.w, m_jpegPictureRect.h);
4215 
4216     m_getRatioSize(srcW, srcH,
4217                    m_jpegPictureRect.w, m_jpegPictureRect.h,
4218                    &cropX, &cropY,
4219                    &pictureW, &pictureH,
4220                    0);
4221     pictureFormat = V4L2_PIX_FMT_YUYV;
4222     pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
4223 
4224     if (m_exynosPictureCSC) {
4225         float zoom_w = 0, zoom_h = 0;
4226         if (m_zoomRatio == 0)
4227             m_zoomRatio = 1;
4228 
4229         if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
4230             zoom_w =  pictureW / m_zoomRatio;
4231             zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
4232         } else {
4233             zoom_h = pictureH / m_zoomRatio;
4234             zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
4235         }
4236         cropX = (srcW - zoom_w) / 2;
4237         cropY = (srcH - zoom_h) / 2;
4238         cropW = zoom_w;
4239         cropH = zoom_h;
4240 
4241         ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4242               __FUNCTION__, cropX, cropY, cropW, cropH);
4243 
4244         csc_set_src_format(m_exynosPictureCSC,
4245                            ALIGN(srcW, 16), ALIGN(srcH, 16),
4246                            cropX, cropY, cropW, cropH,
4247                            V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
4248                            0);
4249 
4250         csc_set_dst_format(m_exynosPictureCSC,
4251                            m_jpegPictureRect.w, m_jpegPictureRect.h,
4252                            0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
4253                            V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
4254                            0);
4255         for (int i = 0 ; i < 3 ; i++)
4256             ALOGV("DEBUG(%s): srcImageBuf->fd.extFd[%d]=%d ",
4257                 __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
4258         csc_set_src_buffer(m_exynosPictureCSC,
4259                            (void **)&srcImageBuf->fd.fd);
4260 
4261         csc_set_dst_buffer(m_exynosPictureCSC,
4262                            (void **)&m_resizeBuf.fd.fd);
4263         for (int i = 0 ; i < 3 ; i++)
4264             ALOGV("DEBUG(%s): m_resizeBuf.fd.extFd[%d]=%d m_resizeBuf.size.extS[%d]=%d",
4265                 __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
4266 
4267         if (csc_convert(m_exynosPictureCSC) != 0)
4268             ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
4269 
4270     }
4271     else {
4272         ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
4273     }
4274 
4275     resizeBufInfo = m_resizeBuf;
4276 
4277     m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
4278 
4279     for (int i = 1; i < 3; i++) {
4280         if (m_resizeBuf.size.extS[i] != 0)
4281             m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
4282 
4283         ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
4284     }
4285 
4286     jpegRect.w = m_jpegPictureRect.w;
4287     jpegRect.h = m_jpegPictureRect.h;
4288     jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
4289 
4290     for (int j = 0 ; j < 3 ; j++)
4291         ALOGV("DEBUG(%s): dest buf node  fd.extFd[%d]=%d size=%d virt=%x ",
4292             __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
4293             (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
4294             (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
4295 
4296     jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0];
4297     if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) {
4298         ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
4299     } else {
4300         m_resizeBuf = resizeBufInfo;
4301 
4302         int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s;
4303         ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__,
4304             m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize);
4305         char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4306         jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]);
4307 
4308         if (jpegBuffer[jpegSize-1] == 0)
4309             jpegSize--;
4310         jpegBlob->jpeg_size = jpegSize;
4311         jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID;
4312     }
4313     subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize;
4314     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4315 
4316     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4317             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4318     if (res == 0) {
4319         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4320         subParms->numSvcBufsInHal--;
4321     }
4322     else {
4323         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4324     }
4325 
4326     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4327     {
4328         bool found = false;
4329         int checkingIndex = 0;
4330 
4331         ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4332 
4333         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4334         if (res != NO_ERROR || buf == NULL) {
4335             ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4336             break;
4337         }
4338         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4339         subParms->numSvcBufsInHal ++;
4340         ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4341            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4342 
4343 
4344         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4345             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4346                 found = true;
4347                 break;
4348             }
4349         }
4350         ALOGV("DEBUG(%s): jpeg dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4351 
4352         if (!found) {
4353              break;
4354         }
4355 
4356         subParms->svcBufIndex = checkingIndex;
4357         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4358             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4359         }
4360         else {
4361             ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4362                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4363         }
4364     }
4365     {
4366         Mutex::Autolock lock(m_jpegEncoderLock);
4367         m_jpegEncodingCount--;
4368     }
4369     return 0;
4370 }
4371 
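/*
 * Record substream handler: converts the source frame to the video stream's
 * size and format with the video CSC, enqueues the result to the service and
 * refills the substream with buffers dequeued back from the service.
 */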
4372 int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4373 {
4374     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4375     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_RECORD];
4376     status_t    res;
4377     ExynosRect jpegRect;
4378     bool found = false;
4379     int cropX, cropY, cropW, cropH = 0;
4380     buffer_handle_t * buf = NULL;
4381 
4382     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4383     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4384         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4385             found = true;
4386             break;
4387         }
4388         subParms->svcBufIndex++;
4389         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4390             subParms->svcBufIndex = 0;
4391     }
4392     if (!found) {
4393         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4394         subParms->svcBufIndex++;
4395         return 1;
4396     }
4397 
4398     if (m_exynosVideoCSC) {
4399         int videoW = subParms->width, videoH = subParms->height;
4400         int cropX, cropY, cropW, cropH = 0;
4401         int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4402         m_getRatioSize(previewW, previewH,
4403                        videoW, videoH,
4404                        &cropX, &cropY,
4405                        &cropW, &cropH,
4406                        0);
4407 
4408         ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4409                  __FUNCTION__, cropX, cropY, cropW, cropH);
4410 
4411         csc_set_src_format(m_exynosVideoCSC,
4412                            ALIGN(previewW, 32), previewH,
4413                            cropX, cropY, cropW, cropH,
4414                            selfStreamParms->format,
4415                            0);
4416 
4417         csc_set_dst_format(m_exynosVideoCSC,
4418                            videoW, videoH,
4419                            0, 0, videoW, videoH,
4420                            subParms->format,
4421                            1);
4422 
4423         csc_set_src_buffer(m_exynosVideoCSC,
4424                         (void **)&srcImageBuf->fd.fd);
4425 
4426         csc_set_dst_buffer(m_exynosVideoCSC,
4427             (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
4428 
4429         if (csc_convert(m_exynosVideoCSC) != 0) {
4430             ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
4431         }
4432         else {
4433             ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
4434         }
4435     }
4436     else {
4437         ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4438     }
4439 
4440     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4441 
4442     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4443             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4444     if (res == 0) {
4445         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4446         subParms->numSvcBufsInHal--;
4447     }
4448     else {
4449         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4450     }
4451 
4452     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4453     {
4454         bool found = false;
4455         int checkingIndex = 0;
4456 
4457         ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4458 
4459         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4460         if (res != NO_ERROR || buf == NULL) {
4461             ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4462             break;
4463         }
4464         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4465         subParms->numSvcBufsInHal ++;
4466         ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4467            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4468 
4469         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4470             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4471                 found = true;
4472                 break;
4473             }
4474         }
4475         ALOGV("DEBUG(%s): record dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4476 
4477         if (!found) {
4478              break;
4479         }
4480 
4481         subParms->svcBufIndex = checkingIndex;
4482         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4483             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4484         }
4485         else {
4486             ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4487                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4488         }
4489     }
4490     return 0;
4491 }
4492 
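/*
 * Preview-callback substream handler. For HAL_PIXEL_FORMAT_YCrCb_420_SP the
 * frame is converted with the video CSC into m_previewCbBuf and then copied
 * into the service buffer; for HAL_PIXEL_FORMAT_YV12 the planes are copied
 * directly using the YV12 stride rules below (stride = ALIGN(w,16),
 * chroma stride = ALIGN(stride/2,16)). As an illustrative example, a
 * hypothetical 640x480 callback buffer gives stride = 640 and chroma
 * stride = 320, i.e. 640*480 + 2 * (320*240) bytes copied.
 */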
4493 int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4494 {
4495     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4496     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_PRVCB];
4497     status_t    res;
4498     bool found = false;
4499     int cropX, cropY, cropW, cropH = 0;
4500     buffer_handle_t * buf = NULL;
4501 
4502     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4503     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4504         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4505             found = true;
4506             break;
4507         }
4508         subParms->svcBufIndex++;
4509         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4510             subParms->svcBufIndex = 0;
4511     }
4512     if (!found) {
4513         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4514         subParms->svcBufIndex++;
4515         return 1;
4516     }
4517 
4518     if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
4519         if (m_exynosVideoCSC) {
4520             int previewCbW = subParms->width, previewCbH = subParms->height;
4521             int cropX, cropY, cropW, cropH = 0;
4522             int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4523             m_getRatioSize(previewW, previewH,
4524                            previewCbW, previewCbH,
4525                            &cropX, &cropY,
4526                            &cropW, &cropH,
4527                            0);
4528 
4529             ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4530                      __FUNCTION__, cropX, cropY, cropW, cropH);
4531             csc_set_src_format(m_exynosVideoCSC,
4532                                ALIGN(previewW, 32), previewH,
4533                                cropX, cropY, cropW, cropH,
4534                                selfStreamParms->format,
4535                                0);
4536 
4537             csc_set_dst_format(m_exynosVideoCSC,
4538                                previewCbW, previewCbH,
4539                                0, 0, previewCbW, previewCbH,
4540                                subParms->internalFormat,
4541                                1);
4542 
4543             csc_set_src_buffer(m_exynosVideoCSC,
4544                         (void **)&srcImageBuf->fd.fd);
4545 
4546             csc_set_dst_buffer(m_exynosVideoCSC,
4547                 (void **)(&(m_previewCbBuf.fd.fd)));
4548 
4549             if (csc_convert(m_exynosVideoCSC) != 0) {
4550                 ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
4551             }
4552             else {
4553                 ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
4554             }
4555             if (previewCbW == ALIGN(previewCbW, 16)) {
4556                 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4557                     m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
4558                 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
4559                     m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
4560             }
4561             else {
4562                 // TODO : copy line by line ?
4563             }
4564         }
4565         else {
4566             ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4567         }
4568     }
4569     else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
4570         int previewCbW = subParms->width, previewCbH = subParms->height;
4571         int stride = ALIGN(previewCbW, 16);
4572         int uv_stride = ALIGN(previewCbW/2, 16);
4573         int c_stride = ALIGN(stride / 2, 16);
4574 
4575         if (previewCbW == ALIGN(previewCbW, 32)) {
4576             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4577                 srcImageBuf->virt.extP[0], stride * previewCbH);
4578             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
4579                 srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
4580             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
4581                 srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
4582         } else {
4583             char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4584             char * srcAddr = (char *)(srcImageBuf->virt.extP[0]);
4585             for (int i = 0 ; i < previewCbH ; i++) {
4586                 memcpy(dstAddr, srcAddr, previewCbW);
4587                 dstAddr += stride;
4588                 srcAddr += ALIGN(stride, 32);
4589             }
4590             dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH);
4591             srcAddr = (char *)(srcImageBuf->virt.extP[1]);
4592             for (int i = 0 ; i < previewCbH/2 ; i++) {
4593                 memcpy(dstAddr, srcAddr, previewCbW/2);
4594                 dstAddr += c_stride;
4595                 srcAddr += uv_stride;
4596             }
4597             srcAddr = (char *)(srcImageBuf->virt.extP[2]);
4598             for (int i = 0 ; i < previewCbH/2 ; i++) {
4599                 memcpy(dstAddr, srcAddr, previewCbW/2);
4600                 dstAddr += c_stride;
4601                 srcAddr += uv_stride;
4602             }
4603         }
4604     }
4605     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4606 
4607     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4608             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4609     if (res == 0) {
4610         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4611         subParms->numSvcBufsInHal--;
4612     }
4613     else {
4614         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4615     }
4616 
4617     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4618     {
4619         bool found = false;
4620         int checkingIndex = 0;
4621 
4622         ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4623 
4624         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4625         if (res != NO_ERROR || buf == NULL) {
4626             ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4627             break;
4628         }
4629         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4630         subParms->numSvcBufsInHal ++;
4631         ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4632            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4633 
4634 
4635         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4636             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4637                 found = true;
4638                 break;
4639             }
4640         }
4641         ALOGV("DEBUG(%s): prvcb dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4642 
4643         if (!found) {
4644              break;
4645         }
4646 
4647         subParms->svcBufIndex = checkingIndex;
4648         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4649             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4650         }
4651         else {
4652             ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4653                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4654         }
4655     }
4656     return 0;
4657 }
4658 
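/*
 * Returns true if (w, h) matches one of the supported thumbnail sizes for the
 * current camera (rear or front support table), false otherwise.
 */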
4659 bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
4660 {
4661     int sizeOfSupportList;
4662 
4663     //REAR Camera
4664     if(this->getCameraId() == 0) {
4665         sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int32_t)*2);
4666 
4667         for(int i = 0; i < sizeOfSupportList; i++) {
4668             if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
4669                 return true;
4670         }
4671 
4672     }
4673     else {
4674         sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int32_t)*2);
4675 
4676         for(int i = 0; i < sizeOfSupportList; i++) {
4677             if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
4678                 return true;
4679         }
4680     }
4681 
4682     return false;
4683 }
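/*
 * Encodes the YUV image in yuvBuf into jpegBuf with ExynosJpegEncoderForCamera,
 * applying the quality, size, thumbnail and EXIF settings carried in the current
 * JPEG metadata. Returns true on success, false on any encoder error.
 */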
4684 bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
4685                             ExynosBuffer *jpegBuf,
4686                             ExynosRect *rect)
4687 {
4688     unsigned char *addr;
4689 
4690     ExynosJpegEncoderForCamera jpegEnc;
4691     bool ret = false;
4692     int res = 0;
4693 
4694     unsigned int *yuvSize = yuvBuf->size.extS;
4695 
4696     if (jpegEnc.create()) {
4697         ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
4698         goto jpeg_encode_done;
4699     }
4700 
4701     if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) {
4702         ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
4703         goto jpeg_encode_done;
4704     }
4705 
4706     if (jpegEnc.setSize(rect->w, rect->h)) {
4707         ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
4708         goto jpeg_encode_done;
4709     }
4710     ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
4711 
4712     if (jpegEnc.setColorFormat(rect->colorFormat)) {
4713         ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
4714         goto jpeg_encode_done;
4715     }
4716 
4717     if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
4718         ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
4719         goto jpeg_encode_done;
4720     }
4721 
4722     if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) {
4723         mExifInfo.enableThumb = true;
4724         if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) {
4725             // in the case of unsupported parameter, disable thumbnail
4726             mExifInfo.enableThumb = false;
4727         } else {
4728             m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0];
4729             m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1];
4730         }
4731 
4732         ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4733 
4734     } else {
4735         mExifInfo.enableThumb = false;
4736     }
4737 
4738     if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
4739         ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4740         goto jpeg_encode_done;
4741     }
4742 
4743     ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4744     if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) {
4745         ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
4746         goto jpeg_encode_done;
4747     }
4748 
4749     m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
4750     ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
4751     if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
4752         ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
4753         goto jpeg_encode_done;
4754     }
4755     if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
4756         ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
4757         goto jpeg_encode_done;
4758     }
4759 
4760     if (jpegEnc.updateConfig()) {
4761         ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
4762         goto jpeg_encode_done;
4763     }
4764 
4765     if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo))) {
4766         ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
4767         goto jpeg_encode_done;
4768     }
4769 
4770     ret = true;
4771 
4772 jpeg_encode_done:
4773 
4774     if (jpegEnc.flagCreate() == true)
4775         jpegEnc.destroy();
4776 
4777     return ret;
4778 }
4779 
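/*
 * AE precapture trigger handler. On the rear camera with an auto-flash AE mode
 * it arms the full flash sequence unless an AF flash sequence has already run;
 * otherwise flash is disabled and precapture is effectively skipped. The current
 * AE state is then reported through OnPrecaptureMeteringNotificationSensor().
 */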
4780 void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
4781 {
4782     m_ctlInfo.flash.m_precaptureTriggerId = id;
4783     m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
4784     if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
4785         // flash is required
4786         switch (m_ctlInfo.flash.m_flashCnt) {
4787         case IS_FLASH_STATE_AUTO_DONE:
4788         case IS_FLASH_STATE_AUTO_OFF:
4789             // Flash capture sequence, AF flash was executed before
4790             break;
4791         default:
4792             // Full flash sequence
4793             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4794             m_ctlInfo.flash.m_flashEnableFlg = true;
4795             m_ctlInfo.flash.m_flashTimeOut = 0;
4796         }
4797     } else {
4798         // Skip pre-capture in case of non-flash.
4799         ALOGV("[PreCap] Flash OFF mode ");
4800         m_ctlInfo.flash.m_flashEnableFlg = false;
4801         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
4802     }
4803     ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
4804     OnPrecaptureMeteringNotificationSensor();
4805 }
4806 
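/*
 * AF trigger handler: for AUTO/MACRO/MANUAL it may start an AF flash sequence
 * first (rear camera with an auto-flash AE mode) and then runs the auto/macro
 * state machine; continuous-video and continuous-picture modes are routed to
 * their own state machines, and AF_MODE_OFF is ignored.
 */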
4807 void ExynosCameraHWInterface2::OnAfTrigger(int id)
4808 {
4809     m_afTriggerId = id;
4810 
4811     switch (m_afMode) {
4812     case AA_AFMODE_AUTO:
4813     case AA_AFMODE_MACRO:
4814     case AA_AFMODE_MANUAL:
4815         ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
4816         // If flash is enable, Flash operation is executed before triggering AF
4817         if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4818                 && (m_ctlInfo.flash.m_flashEnableFlg == false)
4819                 && (m_cameraId == 0)) {
4820             ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode);
4821             m_ctlInfo.flash.m_flashEnableFlg = true;
4822             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4823             m_ctlInfo.flash.m_flashDecisionResult = false;
4824             m_ctlInfo.flash.m_afFlashDoneFlg = true;
4825         }
4826         OnAfTriggerAutoMacro(id);
4827         break;
4828     case AA_AFMODE_CONTINUOUS_VIDEO:
4829         ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
4830         OnAfTriggerCAFVideo(id);
4831         break;
4832     case AA_AFMODE_CONTINUOUS_PICTURE:
4833         ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
4834         OnAfTriggerCAFPicture(id);
4835         break;
4836 
4837     case AA_AFMODE_OFF:
4838     default:
4839         break;
4840     }
4841 }
4842 
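/*
 * AF trigger state transitions for AUTO/MACRO/MANUAL mode: from INACTIVE,
 * PASSIVE_FOCUSED, SCANNING, LOCKED or FAILED the state moves to NEEDS_COMMAND
 * and m_IsAfTriggerRequired is set so an AF command is issued; a trigger while
 * a command is already pending (NEEDS_COMMAND) or AF has just started (STARTED)
 * leaves the state unchanged.
 */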
4843 void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int /*id*/)
4844 {
4845     int nextState = NO_TRANSITION;
4846 
4847     switch (m_afState) {
4848     case HAL_AFSTATE_INACTIVE:
4849     case HAL_AFSTATE_PASSIVE_FOCUSED:
4850     case HAL_AFSTATE_SCANNING:
4851         nextState = HAL_AFSTATE_NEEDS_COMMAND;
4852         m_IsAfTriggerRequired = true;
4853         break;
4854     case HAL_AFSTATE_NEEDS_COMMAND:
4855         nextState = NO_TRANSITION;
4856         break;
4857     case HAL_AFSTATE_STARTED:
4858         nextState = NO_TRANSITION;
4859         break;
4860     case HAL_AFSTATE_LOCKED:
4861         nextState = HAL_AFSTATE_NEEDS_COMMAND;
4862         m_IsAfTriggerRequired = true;
4863         break;
4864     case HAL_AFSTATE_FAILED:
4865         nextState = HAL_AFSTATE_NEEDS_COMMAND;
4866         m_IsAfTriggerRequired = true;
4867         break;
4868     default:
4869         break;
4870     }
4871     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4872     if (nextState != NO_TRANSITION)
4873         m_afState = nextState;
4874 }
4875 
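/*
 * AF trigger state transitions for continuous-picture AF: a trigger during
 * STARTED/SCANNING requests a determination (in SCANNING, AF flash may also be
 * armed on the rear camera); in PASSIVE_FOCUSED the lens is locked and
 * FOCUSED_LOCKED or NOT_FOCUSED_LOCKED is reported depending on the last scan
 * result; INACTIVE reports NOT_FOCUSED_LOCKED, while LOCKED and FAILED are
 * no-ops.
 */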
4876 void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
4877 {
4878     int nextState = NO_TRANSITION;
4879 
4880     switch (m_afState) {
4881     case HAL_AFSTATE_INACTIVE:
4882         nextState = HAL_AFSTATE_FAILED;
4883         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4884         break;
4885     case HAL_AFSTATE_NEEDS_COMMAND:
4886         // not used
4887         break;
4888     case HAL_AFSTATE_STARTED:
4889         nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4890         m_AfHwStateFailed = false;
4891         break;
4892     case HAL_AFSTATE_SCANNING:
4893         nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4894         m_AfHwStateFailed = false;
4895         // If flash is enable, Flash operation is executed before triggering AF
4896         if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4897                 && (m_ctlInfo.flash.m_flashEnableFlg == false)
4898                 && (m_cameraId == 0)) {
4899             ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4900             m_ctlInfo.flash.m_flashEnableFlg = true;
4901             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4902             m_ctlInfo.flash.m_flashDecisionResult = false;
4903             m_ctlInfo.flash.m_afFlashDoneFlg = true;
4904         }
4905         break;
4906     case HAL_AFSTATE_NEEDS_DETERMINATION:
4907         nextState = NO_TRANSITION;
4908         break;
4909     case HAL_AFSTATE_PASSIVE_FOCUSED:
4910         m_IsAfLockRequired = true;
4911         if (m_AfHwStateFailed) {
4912             ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
4913             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4914             nextState = HAL_AFSTATE_FAILED;
4915         }
4916         else {
4917             ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
4918             SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4919             nextState = HAL_AFSTATE_LOCKED;
4920         }
4921         m_AfHwStateFailed = false;
4922         break;
4923     case HAL_AFSTATE_LOCKED:
4924         nextState = NO_TRANSITION;
4925         break;
4926     case HAL_AFSTATE_FAILED:
4927         nextState = NO_TRANSITION;
4928         break;
4929     default:
4930         break;
4931     }
4932     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4933     if (nextState != NO_TRANSITION)
4934         m_afState = nextState;
4935 }
4936 
4937 
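// AF trigger handler for CONTINUOUS_VIDEO mode: an ongoing scan is aborted and reported
// as NOT_FOCUSED_LOCKED, while a passively focused state is locked as-is.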
4938 void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int /*id*/)
4939 {
4940     int nextState = NO_TRANSITION;
4941 
4942     switch (m_afState) {
4943     case HAL_AFSTATE_INACTIVE:
4944         nextState = HAL_AFSTATE_FAILED;
4945         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4946         break;
4947     case HAL_AFSTATE_NEEDS_COMMAND:
4948         // not used
4949         break;
4950     case HAL_AFSTATE_STARTED:
4951         m_IsAfLockRequired = true;
4952         nextState = HAL_AFSTATE_FAILED;
4953         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4954         break;
4955     case HAL_AFSTATE_SCANNING:
4956         m_IsAfLockRequired = true;
4957         nextState = HAL_AFSTATE_FAILED;
4958         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4959         break;
4960     case HAL_AFSTATE_NEEDS_DETERMINATION:
4961         // not used
4962         break;
4963     case HAL_AFSTATE_PASSIVE_FOCUSED:
4964         m_IsAfLockRequired = true;
4965         SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4966         nextState = HAL_AFSTATE_LOCKED;
4967         break;
4968     case HAL_AFSTATE_LOCKED:
4969         nextState = NO_TRANSITION;
4970         break;
4971     case HAL_AFSTATE_FAILED:
4972         nextState = NO_TRANSITION;
4973         break;
4974     default:
4975         break;
4976     }
4977     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4978     if (nextState != NO_TRANSITION)
4979         m_afState = nextState;
4980 }
4981 
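// Sensor-side precapture metering notification: on the first frame after a precapture
// trigger, report AE as PRECAPTURE (and AWB as converged) to the framework.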
4982 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor()
4983 {
4984     if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4985         // Just notify the start of pre-capture
4986         if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) {
4987             m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4988                         ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4989                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4990             ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4991             m_notifyCb(CAMERA2_MSG_AUTOWB,
4992                         ANDROID_CONTROL_AWB_STATE_CONVERGED,
4993                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4994             m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
4995         }
4996     }
4997 }
4998 
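// ISP-side precapture metering notification: once the flash metering sequence finishes
// (or immediately in the non-flash case) report AE as CONVERGED and clear the trigger id.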
4999 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
5000 {
5001     if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
5002         if (m_ctlInfo.flash.m_flashEnableFlg) {
5003             // flash case
5004             switch (m_ctlInfo.flash.m_flashCnt) {
5005             case IS_FLASH_STATE_AUTO_DONE:
5006             case IS_FLASH_STATE_AUTO_OFF:
5007                 if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
5008                     // End notification
5009                     m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5010                                     ANDROID_CONTROL_AE_STATE_CONVERGED,
5011                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5012                     ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5013                     m_notifyCb(CAMERA2_MSG_AUTOWB,
5014                                     ANDROID_CONTROL_AWB_STATE_CONVERGED,
5015                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5016                     m_ctlInfo.flash.m_precaptureTriggerId = 0;
5017                 } else {
5018                     m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5019                                     ANDROID_CONTROL_AE_STATE_PRECAPTURE,
5020                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5021                     ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5022                     m_notifyCb(CAMERA2_MSG_AUTOWB,
5023                                     ANDROID_CONTROL_AWB_STATE_CONVERGED,
5024                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5025                     m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
5026                 }
5027                 break;
5028             case IS_FLASH_STATE_CAPTURE:
5029             case IS_FLASH_STATE_CAPTURE_WAIT:
5030             case IS_FLASH_STATE_CAPTURE_JPEG:
5031             case IS_FLASH_STATE_CAPTURE_END:
5032                 ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
5033                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
5034                 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5035                         ANDROID_CONTROL_AE_STATE_CONVERGED,
5036                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5037                 m_notifyCb(CAMERA2_MSG_AUTOWB,
5038                         ANDROID_CONTROL_AWB_STATE_CONVERGED,
5039                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5040                 m_ctlInfo.flash.m_precaptureTriggerId = 0;
5041                 break;
5042             }
5043         } else {
5044             // non-flash case
5045             if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
5046                 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5047                                 ANDROID_CONTROL_AE_STATE_CONVERGED,
5048                                 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5049                 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5050                 m_notifyCb(CAMERA2_MSG_AUTOWB,
5051                                 ANDROID_CONTROL_AWB_STATE_CONVERGED,
5052                                 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5053                 m_ctlInfo.flash.m_precaptureTriggerId = 0;
5054             }
5055         }
5056     }
5057 }
5058 
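// Dispatch an AF state notification from the ISP to the handler for the current AF mode.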
5059 void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
5060 {
5061     switch (m_afMode) {
5062     case AA_AFMODE_AUTO:
5063     case AA_AFMODE_MACRO:
5064         OnAfNotificationAutoMacro(noti);
5065         break;
5066     case AA_AFMODE_CONTINUOUS_VIDEO:
5067         OnAfNotificationCAFVideo(noti);
5068         break;
5069     case AA_AFMODE_CONTINUOUS_PICTURE:
5070         OnAfNotificationCAFPicture(noti);
5071         break;
5072     case AA_AFMODE_OFF:
5073     default:
5074         break;
5075     }
5076 }
5077 
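// AF state machine update for AUTO/MACRO modes, driven by ISP notifications. When AF
// flash is in use, the final lock/fail decision is deferred until the flash sequence
// (m_flashCnt) reaches IS_FLASH_STATE_AUTO_DONE.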
5078 void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
5079 {
5080     int nextState = NO_TRANSITION;
5081     bool bWrongTransition = false;
5082 
5083     if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
5084         switch (noti) {
5085         case AA_AFSTATE_INACTIVE:
5086         case AA_AFSTATE_ACTIVE_SCAN:
5087         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5088         case AA_AFSTATE_AF_FAILED_FOCUS:
5089         default:
5090             nextState = NO_TRANSITION;
5091             break;
5092         }
5093     }
5094     else if (m_afState == HAL_AFSTATE_STARTED) {
5095         switch (noti) {
5096         case AA_AFSTATE_INACTIVE:
5097             nextState = NO_TRANSITION;
5098             break;
5099         case AA_AFSTATE_ACTIVE_SCAN:
5100             nextState = HAL_AFSTATE_SCANNING;
5101             SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
5102             break;
5103         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5104             nextState = NO_TRANSITION;
5105             break;
5106         case AA_AFSTATE_AF_FAILED_FOCUS:
5107             nextState = NO_TRANSITION;
5108             break;
5109         default:
5110             bWrongTransition = true;
5111             break;
5112         }
5113     }
5114     else if (m_afState == HAL_AFSTATE_SCANNING) {
5115         switch (noti) {
5116         case AA_AFSTATE_INACTIVE:
5117             bWrongTransition = true;
5118             break;
5119         case AA_AFSTATE_ACTIVE_SCAN:
5120             nextState = NO_TRANSITION;
5121             break;
5122         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5123             // If flash is enabled, execute pre-capture metering after AF
5124             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5125                 switch (m_ctlInfo.flash.m_flashCnt) {
5126                 case IS_FLASH_STATE_ON_DONE:
5127                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5128                     nextState = NO_TRANSITION;
5129                     break;
5130                 case IS_FLASH_STATE_AUTO_DONE:
5131                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5132                     nextState = HAL_AFSTATE_LOCKED;
5133                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5134                     break;
5135                 default:
5136                     nextState = NO_TRANSITION;
5137                 }
5138             } else {
5139                 nextState = HAL_AFSTATE_LOCKED;
5140                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5141             }
5142             break;
5143         case AA_AFSTATE_AF_FAILED_FOCUS:
5144             // If flash is enabled, execute pre-capture metering after AF
5145             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5146                 switch (m_ctlInfo.flash.m_flashCnt) {
5147                 case IS_FLASH_STATE_ON_DONE:
5148                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5149                     nextState = NO_TRANSITION;
5150                     break;
5151                 case IS_FLASH_STATE_AUTO_DONE:
5152                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5153                     nextState = HAL_AFSTATE_FAILED;
5154                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5155                     break;
5156                 default:
5157                     nextState = NO_TRANSITION;
5158                 }
5159             } else {
5160                 nextState = HAL_AFSTATE_FAILED;
5161                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5162             }
5163             break;
5164         default:
5165             bWrongTransition = true;
5166             break;
5167         }
5168     }
5169     else if (m_afState == HAL_AFSTATE_LOCKED) {
5170         switch (noti) {
5171             case AA_AFSTATE_INACTIVE:
5172             case AA_AFSTATE_ACTIVE_SCAN:
5173                 bWrongTransition = true;
5174                 break;
5175             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5176                 nextState = NO_TRANSITION;
5177                 break;
5178             case AA_AFSTATE_AF_FAILED_FOCUS:
5179             default:
5180                 bWrongTransition = true;
5181                 break;
5182         }
5183     }
5184     else if (m_afState == HAL_AFSTATE_FAILED) {
5185         switch (noti) {
5186             case AA_AFSTATE_INACTIVE:
5187             case AA_AFSTATE_ACTIVE_SCAN:
5188             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5189                 bWrongTransition = true;
5190                 break;
5191             case AA_AFSTATE_AF_FAILED_FOCUS:
5192                 nextState = NO_TRANSITION;
5193                 break;
5194             default:
5195                 bWrongTransition = true;
5196                 break;
5197         }
5198     }
5199     if (bWrongTransition) {
5200         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5201         return;
5202     }
5203     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5204     if (nextState != NO_TRANSITION)
5205         m_afState = nextState;
5206 }
5207 
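// AF state machine update for CONTINUOUS_PICTURE mode. Scan results are reported as
// PASSIVE_* states; a pending determination is resolved here into FOCUSED_LOCKED or
// NOT_FOCUSED_LOCKED, and a trigger stuck in INACTIVE is re-issued after a few frames.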
5208 void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
5209 {
5210     int nextState = NO_TRANSITION;
5211     bool bWrongTransition = false;
5212 
5213     if (m_afState == HAL_AFSTATE_INACTIVE) {
5214         switch (noti) {
5215         case AA_AFSTATE_INACTIVE:
5216         case AA_AFSTATE_ACTIVE_SCAN:
5217         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5218         case AA_AFSTATE_AF_FAILED_FOCUS:
5219         default:
5220             nextState = NO_TRANSITION;
5221             break;
5222         }
5223         // Check AF notification after triggering
5224         if (m_ctlInfo.af.m_afTriggerTimeOut > 0) {
5225             if (m_ctlInfo.af.m_afTriggerTimeOut > 5) {
5226                 ALOGE("(%s) AF notification error - try to re-trigger mode (%d)", __FUNCTION__, m_afMode);
5227                 SetAfMode(AA_AFMODE_OFF);
5228                 SetAfMode(m_afMode);
5229                 m_ctlInfo.af.m_afTriggerTimeOut = 0;
5230             } else {
5231                 m_ctlInfo.af.m_afTriggerTimeOut++;
5232             }
5233         }
5234     }
5235     else if (m_afState == HAL_AFSTATE_STARTED) {
5236         switch (noti) {
5237         case AA_AFSTATE_INACTIVE:
5238             nextState = NO_TRANSITION;
5239             break;
5240         case AA_AFSTATE_ACTIVE_SCAN:
5241             nextState = HAL_AFSTATE_SCANNING;
5242             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5243             m_ctlInfo.af.m_afTriggerTimeOut = 0;
5244             break;
5245         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5246             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5247             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5248             m_ctlInfo.af.m_afTriggerTimeOut = 0;
5249             break;
5250         case AA_AFSTATE_AF_FAILED_FOCUS:
5251             //nextState = HAL_AFSTATE_FAILED;
5252             //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5253             nextState = NO_TRANSITION;
5254             break;
5255         default:
5256             bWrongTransition = true;
5257             break;
5258         }
5259     }
5260     else if (m_afState == HAL_AFSTATE_SCANNING) {
5261         switch (noti) {
5262         case AA_AFSTATE_INACTIVE:
5263             nextState = NO_TRANSITION;
5264             break;
5265         case AA_AFSTATE_ACTIVE_SCAN:
5266             nextState = NO_TRANSITION;
5267             m_AfHwStateFailed = false;
5268             break;
5269         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5270             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5271             m_AfHwStateFailed = false;
5272             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5273             break;
5274         case AA_AFSTATE_AF_FAILED_FOCUS:
5275             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5276             m_AfHwStateFailed = true;
5277             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5278             break;
5279         default:
5280             bWrongTransition = true;
5281             break;
5282         }
5283     }
5284     else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5285         switch (noti) {
5286         case AA_AFSTATE_INACTIVE:
5287             nextState = NO_TRANSITION;
5288             break;
5289         case AA_AFSTATE_ACTIVE_SCAN:
5290             nextState = HAL_AFSTATE_SCANNING;
5291             m_AfHwStateFailed = false;
5292             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5293             break;
5294         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5295             nextState = NO_TRANSITION;
5296             m_AfHwStateFailed = false;
5297             break;
5298         case AA_AFSTATE_AF_FAILED_FOCUS:
5299             nextState = NO_TRANSITION;
5300             m_AfHwStateFailed = true;
5301             break;
5302         default:
5303             bWrongTransition = true;
5304             break;
5305         }
5306     }
5307     else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5308         // Skip notification in the flash case; wait until the flash-on sequence completes
5309         if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5310             if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE)
5311                 return;
5312         }
5313         switch (noti) {
5314         case AA_AFSTATE_INACTIVE:
5315             nextState = NO_TRANSITION;
5316             break;
5317         case AA_AFSTATE_ACTIVE_SCAN:
5318             nextState = NO_TRANSITION;
5319             break;
5320         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5321             // If flash is enabled, execute pre-capture metering after AF
5322             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5323                 switch (m_ctlInfo.flash.m_flashCnt) {
5324                 case IS_FLASH_STATE_ON_DONE:
5325                     ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5326                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5327                     nextState = NO_TRANSITION;
5328                     break;
5329                 case IS_FLASH_STATE_AUTO_DONE:
5330                     ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5331                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5332                     m_IsAfLockRequired = true;
5333                     nextState = HAL_AFSTATE_LOCKED;
5334                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5335                     break;
5336                 default:
5337                     nextState = NO_TRANSITION;
5338                 }
5339             } else {
5340                 m_IsAfLockRequired = true;
5341                 nextState = HAL_AFSTATE_LOCKED;
5342                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5343             }
5344             break;
5345         case AA_AFSTATE_AF_FAILED_FOCUS:
5346             // If flash is enabled, execute pre-capture metering after AF
5347             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5348                 switch (m_ctlInfo.flash.m_flashCnt) {
5349                 case IS_FLASH_STATE_ON_DONE:
5350                     ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5351                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5352                     nextState = NO_TRANSITION;
5353                     break;
5354                 case IS_FLASH_STATE_AUTO_DONE:
5355                     ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5356                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5357                     m_IsAfLockRequired = true;
5358                     nextState = HAL_AFSTATE_FAILED;
5359                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5360                     break;
5361                 default:
5362                     nextState = NO_TRANSITION;
5363                 }
5364             } else {
5365                 m_IsAfLockRequired = true;
5366                 nextState = HAL_AFSTATE_FAILED;
5367                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5368             }
5369             break;
5370         default:
5371             bWrongTransition = true;
5372             break;
5373         }
5374     }
5375     else if (m_afState == HAL_AFSTATE_LOCKED) {
5376         switch (noti) {
5377             case AA_AFSTATE_INACTIVE:
5378                 nextState = NO_TRANSITION;
5379                 break;
5380             case AA_AFSTATE_ACTIVE_SCAN:
5381                 bWrongTransition = true;
5382                 break;
5383             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5384                 nextState = NO_TRANSITION;
5385                 break;
5386             case AA_AFSTATE_AF_FAILED_FOCUS:
5387             default:
5388                 bWrongTransition = true;
5389                 break;
5390         }
5391     }
5392     else if (m_afState == HAL_AFSTATE_FAILED) {
5393         switch (noti) {
5394             case AA_AFSTATE_INACTIVE:
5395                 bWrongTransition = true;
5396                 break;
5397             case AA_AFSTATE_ACTIVE_SCAN:
5398                 nextState = HAL_AFSTATE_SCANNING;
5399                 break;
5400             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5401                 bWrongTransition = true;
5402                 break;
5403             case AA_AFSTATE_AF_FAILED_FOCUS:
5404                 nextState = NO_TRANSITION;
5405                 break;
5406             default:
5407                 bWrongTransition = true;
5408                 break;
5409         }
5410     }
5411     if (bWrongTransition) {
5412         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5413         return;
5414     }
5415     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5416     if (nextState != NO_TRANSITION)
5417         m_afState = nextState;
5418 }
5419 
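// AF state machine update for CONTINUOUS_VIDEO mode, driven by ISP notifications.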
5420 void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
5421 {
5422     int nextState = NO_TRANSITION;
5423     bool bWrongTransition = false;
5424 
5425     if (m_afState == HAL_AFSTATE_INACTIVE) {
5426         switch (noti) {
5427         case AA_AFSTATE_INACTIVE:
5428         case AA_AFSTATE_ACTIVE_SCAN:
5429         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5430         case AA_AFSTATE_AF_FAILED_FOCUS:
5431         default:
5432             nextState = NO_TRANSITION;
5433             break;
5434         }
5435     }
5436     else if (m_afState == HAL_AFSTATE_STARTED) {
5437         switch (noti) {
5438         case AA_AFSTATE_INACTIVE:
5439             nextState = NO_TRANSITION;
5440             break;
5441         case AA_AFSTATE_ACTIVE_SCAN:
5442             nextState = HAL_AFSTATE_SCANNING;
5443             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5444             break;
5445         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5446             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5447             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5448             break;
5449         case AA_AFSTATE_AF_FAILED_FOCUS:
5450             nextState = HAL_AFSTATE_FAILED;
5451             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5452             break;
5453         default:
5454             bWrongTransition = true;
5455             break;
5456         }
5457     }
5458     else if (m_afState == HAL_AFSTATE_SCANNING) {
5459         switch (noti) {
5460         case AA_AFSTATE_INACTIVE:
5461             bWrongTransition = true;
5462             break;
5463         case AA_AFSTATE_ACTIVE_SCAN:
5464             nextState = NO_TRANSITION;
5465             break;
5466         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5467             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5468             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5469             break;
5470         case AA_AFSTATE_AF_FAILED_FOCUS:
5471             nextState = NO_TRANSITION;
5472             break;
5473         default:
5474             bWrongTransition = true;
5475             break;
5476         }
5477     }
5478     else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5479         switch (noti) {
5480         case AA_AFSTATE_INACTIVE:
5481             bWrongTransition = true;
5482             break;
5483         case AA_AFSTATE_ACTIVE_SCAN:
5484             nextState = HAL_AFSTATE_SCANNING;
5485             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5486             break;
5487         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5488             nextState = NO_TRANSITION;
5489             break;
5490         case AA_AFSTATE_AF_FAILED_FOCUS:
5491             nextState = HAL_AFSTATE_FAILED;
5492             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5493             // TODO : needs NO_TRANSITION ?
5494             break;
5495         default:
5496             bWrongTransition = true;
5497             break;
5498         }
5499     }
5500     else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5501         switch (noti) {
5502         case AA_AFSTATE_INACTIVE:
5503             bWrongTransition = true;
5504             break;
5505         case AA_AFSTATE_ACTIVE_SCAN:
5506             nextState = NO_TRANSITION;
5507             break;
5508         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5509             m_IsAfLockRequired = true;
5510             nextState = HAL_AFSTATE_LOCKED;
5511             SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5512             break;
5513         case AA_AFSTATE_AF_FAILED_FOCUS:
5514             nextState = HAL_AFSTATE_FAILED;
5515             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5516             break;
5517         default:
5518             bWrongTransition = true;
5519             break;
5520         }
5521     }
5522     else if (m_afState == HAL_AFSTATE_LOCKED) {
5523         switch (noti) {
5524             case AA_AFSTATE_INACTIVE:
5525                 nextState = NO_TRANSITION;
5526                 break;
5527             case AA_AFSTATE_ACTIVE_SCAN:
5528                 bWrongTransition = true;
5529                 break;
5530             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5531                 nextState = NO_TRANSITION;
5532                 break;
5533             case AA_AFSTATE_AF_FAILED_FOCUS:
5534             default:
5535                 bWrongTransition = true;
5536                 break;
5537         }
5538     }
5539     else if (m_afState == HAL_AFSTATE_FAILED) {
5540         switch (noti) {
5541             case AA_AFSTATE_INACTIVE:
5542             case AA_AFSTATE_ACTIVE_SCAN:
5543             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5544                 bWrongTransition = true;
5545                 break;
5546             case AA_AFSTATE_AF_FAILED_FOCUS:
5547                 nextState = NO_TRANSITION;
5548                 break;
5549             default:
5550                 bWrongTransition = true;
5551                 break;
5552         }
5553     }
5554     if (bWrongTransition) {
5555         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5556         return;
5557     }
5558     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5559     if (nextState != NO_TRANSITION)
5560         m_afState = nextState;
5561 }
5562 
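// AF cancel entry point: record the trigger id and dispatch to the per-mode cancel handler.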
5563 void ExynosCameraHWInterface2::OnAfCancel(int id)
5564 {
5565     m_afTriggerId = id;
5566 
5567     switch (m_afMode) {
5568     case AA_AFMODE_AUTO:
5569     case AA_AFMODE_MACRO:
5570     case AA_AFMODE_OFF:
5571     case AA_AFMODE_MANUAL:
5572         OnAfCancelAutoMacro(id);
5573         break;
5574     case AA_AFMODE_CONTINUOUS_VIDEO:
5575         OnAfCancelCAFVideo(id);
5576         break;
5577     case AA_AFMODE_CONTINUOUS_PICTURE:
5578         OnAfCancelCAFPicture(id);
5579         break;
5580     default:
5581         break;
5582     }
5583 }
5584 
5585 void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int /*id*/)
5586 {
5587     int nextState = NO_TRANSITION;
5588 
5589     if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5590         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5591     }
5592     switch (m_afState) {
5593     case HAL_AFSTATE_INACTIVE:
5594         nextState = NO_TRANSITION;
5595         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5596         break;
5597     case HAL_AFSTATE_NEEDS_COMMAND:
5598     case HAL_AFSTATE_STARTED:
5599     case HAL_AFSTATE_SCANNING:
5600     case HAL_AFSTATE_LOCKED:
5601     case HAL_AFSTATE_FAILED:
5602         SetAfMode(AA_AFMODE_OFF);
5603         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5604         nextState = HAL_AFSTATE_INACTIVE;
5605         break;
5606     default:
5607         break;
5608     }
5609     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5610     if (nextState != NO_TRANSITION)
5611         m_afState = nextState;
5612 }
5613 
5614 void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int /*id*/)
5615 {
5616     int nextState = NO_TRANSITION;
5617 
5618     switch (m_afState) {
5619     case HAL_AFSTATE_INACTIVE:
5620         nextState = NO_TRANSITION;
5621         break;
5622     case HAL_AFSTATE_NEEDS_COMMAND:
5623     case HAL_AFSTATE_STARTED:
5624     case HAL_AFSTATE_SCANNING:
5625     case HAL_AFSTATE_LOCKED:
5626     case HAL_AFSTATE_FAILED:
5627     case HAL_AFSTATE_NEEDS_DETERMINATION:
5628     case HAL_AFSTATE_PASSIVE_FOCUSED:
5629         SetAfMode(AA_AFMODE_OFF);
5630         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5631         SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
5632         nextState = HAL_AFSTATE_INACTIVE;
5633         break;
5634     default:
5635         break;
5636     }
5637     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5638     if (nextState != NO_TRANSITION)
5639         m_afState = nextState;
5640 }
5641 
5642 void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int /*id*/)
5643 {
5644     int nextState = NO_TRANSITION;
5645 
5646     switch (m_afState) {
5647     case HAL_AFSTATE_INACTIVE:
5648         nextState = NO_TRANSITION;
5649         break;
5650     case HAL_AFSTATE_NEEDS_COMMAND:
5651     case HAL_AFSTATE_STARTED:
5652     case HAL_AFSTATE_SCANNING:
5653     case HAL_AFSTATE_LOCKED:
5654     case HAL_AFSTATE_FAILED:
5655     case HAL_AFSTATE_NEEDS_DETERMINATION:
5656     case HAL_AFSTATE_PASSIVE_FOCUSED:
5657         SetAfMode(AA_AFMODE_OFF);
5658         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5659         SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
5660         nextState = HAL_AFSTATE_INACTIVE;
5661         break;
5662     default:
5663         break;
5664     }
5665     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5666     if (nextState != NO_TRANSITION)
5667         m_afState = nextState;
5668 }
5669 
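// Report an AF state change to the camera service, skipping duplicate notifications
// (a state of 0 is always forwarded).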
5670 void ExynosCameraHWInterface2::SetAfStateForService(int newState)
5671 {
5672     if (m_serviceAfState != newState || newState == 0)
5673         m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
5674     m_serviceAfState = newState;
5675 }
5676 
5677 int ExynosCameraHWInterface2::GetAfStateForService()
5678 {
5679    return m_serviceAfState;
5680 }
5681 
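// Request a new AF mode. If a previous mode change has not been applied yet, the new
// mode is parked in m_afMode2; otherwise the mode is taken over immediately and the
// HAL AF state is reset to INACTIVE.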
5682 void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
5683 {
5684     if (m_afMode != afMode) {
5685         if (m_IsAfModeUpdateRequired && m_afMode != AA_AFMODE_OFF) {
5686             m_afMode2 = afMode;
5687             ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
5688         }
5689         else {
5690             ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
5691             m_IsAfModeUpdateRequired = true;
5692             m_afMode = afMode;
5693             SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5694             m_afState = HAL_AFSTATE_INACTIVE;
5695         }
5696     }
5697 }
5698 
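// Fill in the EXIF attributes that do not change per capture: maker/model/software from
// system properties, F-number, aperture, focal length and the default resolution tags.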
5699 void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
5700 {
5701     char property[PROPERTY_VALUE_MAX];
5702 
5703     //2 0th IFD TIFF Tags
5704     //3 Maker
5705     property_get("ro.product.brand", property, EXIF_DEF_MAKER);
5706     strncpy((char *)mExifInfo.maker, property,
5707                 sizeof(mExifInfo.maker) - 1);
5708     mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
5709     //3 Model
5710     property_get("ro.product.model", property, EXIF_DEF_MODEL);
5711     strncpy((char *)mExifInfo.model, property,
5712                 sizeof(mExifInfo.model) - 1);
5713     mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
5714     //3 Software
5715     property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
5716     strncpy((char *)mExifInfo.software, property,
5717                 sizeof(mExifInfo.software) - 1);
5718     mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
5719 
5720     //3 YCbCr Positioning
5721     mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
5722 
5723     //2 0th IFD Exif Private Tags
5724     //3 F Number
5725     mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
5726     mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
5727     //3 Exposure Program
5728     mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
5729     //3 Exif Version
5730     memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
5731     //3 Aperture
5732     double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
5733     mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
5734     mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
5735     //3 Maximum lens aperture
5736     mExifInfo.max_aperture.num = mExifInfo.aperture.num;
5737     mExifInfo.max_aperture.den = mExifInfo.aperture.den;
5738     //3 Lens Focal Length
5739     mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
5740 
5741     mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
5742     //3 User Comments
5743     strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
5744     //3 Color Space information
5745     mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
5746     //3 Exposure Mode
5747     mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
5748 
5749     //2 0th IFD GPS Info Tags
5750     unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
5751     memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
5752 
5753     //2 1st IFD TIFF Tags
5754     mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
5755     mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5756     mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5757     mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5758     mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5759     mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
5760 }
5761 
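// Fill in the per-capture EXIF attributes (orientation, timestamps, exposure, ISO, APEX
// values, flash/white balance/scene tags and optional GPS data) from the shot metadata.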
5762 void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
5763         camera2_shot_ext *currentEntry)
5764 {
5765     camera2_dm *dm = &(currentEntry->shot.dm);
5766     camera2_ctl *ctl = &(currentEntry->shot.ctl);
5767 
5768     ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime, dm->aa.isoValue);
5769     if (!ctl->request.frameCount)
5770        return;
5771     //2 0th IFD TIFF Tags
5772     //3 Width
5773     exifInfo->width = rect->w;
5774     //3 Height
5775     exifInfo->height = rect->h;
5776     //3 Orientation
5777     switch (ctl->jpeg.orientation) {
5778     case 90:
5779         exifInfo->orientation = EXIF_ORIENTATION_90;
5780         break;
5781     case 180:
5782         exifInfo->orientation = EXIF_ORIENTATION_180;
5783         break;
5784     case 270:
5785         exifInfo->orientation = EXIF_ORIENTATION_270;
5786         break;
5787     case 0:
5788     default:
5789         exifInfo->orientation = EXIF_ORIENTATION_UP;
5790         break;
5791     }
5792 
5793     //3 Date time
5794     struct timeval rawtime;
5795     struct tm *timeinfo;
5796     gettimeofday(&rawtime, NULL);
5797     timeinfo = localtime(&rawtime.tv_sec);
5798     strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
5799     snprintf((char *)exifInfo->sub_sec, sizeof(exifInfo->sub_sec), "%03lu",
5800         (unsigned long)rawtime.tv_usec / 1000UL);
5801 
5802     //2 0th IFD Exif Private Tags
5803     //3 Exposure Time
5804     int shutterSpeed = (dm->sensor.exposureTime/1000);
5805 
5806     // To display exposure time just above 500ms as 1/2sec, not 1 sec.
5807     if (shutterSpeed > 500000)
5808         shutterSpeed -=  100000;
5809 
5810     if (shutterSpeed < 0) {
5811         shutterSpeed = 100;
5812     }
5813 
5814     exifInfo->exposure_time.num = 1;
5815     // x us -> 1/x s
5816     //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
5817     exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
5818 
5819     //3 ISO Speed Rating
5820     exifInfo->iso_speed_rating = dm->aa.isoValue;
5821 
5822     uint32_t av, tv, bv, sv, ev;
5823     av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
5824     tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
5825     sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
5826     bv = av + tv - sv;
5827     ev = av + tv;
5828     //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
5829     ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv);
5830 
5831     //3 Shutter Speed
5832     exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
5833     exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
5834     //3 Brightness
5835     exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
5836     exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
5837     //3 Exposure Bias
5838     if (ctl->aa.sceneMode == AA_SCENE_MODE_BEACH ||
5839         ctl->aa.sceneMode == AA_SCENE_MODE_SNOW) {
5840         exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
5841         exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
5842     } else {
5843         exifInfo->exposure_bias.num = 0;
5844         exifInfo->exposure_bias.den = 0;
5845     }
5846     //3 Metering Mode
5847     /*switch (m_curCameraInfo->metering) {
5848     case METERING_MODE_CENTER:
5849         exifInfo->metering_mode = EXIF_METERING_CENTER;
5850         break;
5851     case METERING_MODE_MATRIX:
5852         exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
5853         break;
5854     case METERING_MODE_SPOT:
5855         exifInfo->metering_mode = EXIF_METERING_SPOT;
5856         break;
5857     case METERING_MODE_AVERAGE:
5858     default:
5859         exifInfo->metering_mode = EXIF_METERING_AVERAGE;
5860         break;
5861     }*/
5862     exifInfo->metering_mode = EXIF_METERING_CENTER;
5863 
5864     //3 Flash
5865     if (m_ctlInfo.flash.m_flashDecisionResult)
5866         exifInfo->flash = 1;
5867     else
5868         exifInfo->flash = EXIF_DEF_FLASH;
5869 
5870     //3 White Balance
5871     if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO)
5872         exifInfo->white_balance = EXIF_WB_AUTO;
5873     else
5874         exifInfo->white_balance = EXIF_WB_MANUAL;
5875 
5876     //3 Scene Capture Type
5877     switch (ctl->aa.sceneMode) {
5878     case AA_SCENE_MODE_PORTRAIT:
5879         exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
5880         break;
5881     case AA_SCENE_MODE_LANDSCAPE:
5882         exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
5883         break;
5884     case AA_SCENE_MODE_NIGHT_PORTRAIT:
5885         exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
5886         break;
5887     default:
5888         exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
5889         break;
5890     }
5891 
5892     //2 0th IFD GPS Info Tags
5893     if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
5894 
5895         if (ctl->jpeg.gpsCoordinates[0] > 0)
5896             strcpy((char *)exifInfo->gps_latitude_ref, "N");
5897         else
5898             strcpy((char *)exifInfo->gps_latitude_ref, "S");
5899 
5900         if (ctl->jpeg.gpsCoordinates[1] > 0)
5901             strcpy((char *)exifInfo->gps_longitude_ref, "E");
5902         else
5903             strcpy((char *)exifInfo->gps_longitude_ref, "W");
5904 
5905         if (ctl->jpeg.gpsCoordinates[2] > 0)
5906             exifInfo->gps_altitude_ref = 0;
5907         else
5908             exifInfo->gps_altitude_ref = 1;
5909 
5910         double latitude = fabs(ctl->jpeg.gpsCoordinates[0]);
5911         double longitude = fabs(ctl->jpeg.gpsCoordinates[1]);
5912         double altitude = fabs(ctl->jpeg.gpsCoordinates[2]);
5913 
5914         exifInfo->gps_latitude[0].num = (uint32_t)latitude;
5915         exifInfo->gps_latitude[0].den = 1;
5916         exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
5917         exifInfo->gps_latitude[1].den = 1;
5918         exifInfo->gps_latitude[2].num = (uint32_t)round((((latitude - exifInfo->gps_latitude[0].num) * 60)
5919                                         - exifInfo->gps_latitude[1].num) * 60);
5920         exifInfo->gps_latitude[2].den = 1;
5921 
5922         exifInfo->gps_longitude[0].num = (uint32_t)longitude;
5923         exifInfo->gps_longitude[0].den = 1;
5924         exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
5925         exifInfo->gps_longitude[1].den = 1;
5926         exifInfo->gps_longitude[2].num = (uint32_t)round((((longitude - exifInfo->gps_longitude[0].num) * 60)
5927                                         - exifInfo->gps_longitude[1].num) * 60);
5928         exifInfo->gps_longitude[2].den = 1;
5929 
5930         exifInfo->gps_altitude.num = (uint32_t)round(altitude);
5931         exifInfo->gps_altitude.den = 1;
5932 
5933         struct tm tm_data;
5934         long timestamp;
5935         timestamp = (long)ctl->jpeg.gpsTimestamp;
5936         gmtime_r(&timestamp, &tm_data);
5937         exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
5938         exifInfo->gps_timestamp[0].den = 1;
5939         exifInfo->gps_timestamp[1].num = tm_data.tm_min;
5940         exifInfo->gps_timestamp[1].den = 1;
5941         exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
5942         exifInfo->gps_timestamp[2].den = 1;
5943         snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
5944                 "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
5945 
5946         memset(exifInfo->gps_processing_method, 0, 100);
5947         memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32);
5948         exifInfo->enableGps = true;
5949     } else {
5950         exifInfo->enableGps = false;
5951     }
5952 
5953     //2 1st IFD TIFF Tags
5954     exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
5955     exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
5956 }
5957 
5958 ExynosCameraHWInterface2::MainThread::~MainThread()
5959 {
5960     ALOGV("(%s):", __FUNCTION__);
5961 }
5962 
5963 void ExynosCameraHWInterface2::MainThread::release()
5964 {
5965     ALOGV("(%s):", __func__);
5966     SetSignal(SIGNAL_THREAD_RELEASE);
5967 }
5968 
5969 ExynosCameraHWInterface2::SensorThread::~SensorThread()
5970 {
5971     ALOGV("(%s):", __FUNCTION__);
5972 }
5973 
5974 void ExynosCameraHWInterface2::SensorThread::release()
5975 {
5976     ALOGV("(%s):", __func__);
5977     SetSignal(SIGNAL_THREAD_RELEASE);
5978 }
5979 
5980 ExynosCameraHWInterface2::StreamThread::~StreamThread()
5981 {
5982     ALOGV("(%s):", __FUNCTION__);
5983 }
5984 
5985 void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
5986 {
5987     ALOGV("DEBUG(%s):", __FUNCTION__);
5988     memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
5989 }
5990 
5991 void ExynosCameraHWInterface2::StreamThread::release()
5992 {
5993     ALOGV("(%s):", __func__);
5994     SetSignal(SIGNAL_THREAD_RELEASE);
5995 }
5996 
5997 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
5998 {
5999     int index;
6000     for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
6001         if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
6002             return index;
6003     }
6004     return -1;
6005 }
6006 
6007 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
6008 {
6009     int index;
6010     for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
6011         if (m_parameters.svcBufHandle[index] == *bufHandle)
6012             return index;
6013     }
6014     return -1;
6015 }
6016 
6017 status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
6018 {
6019     ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
6020     int index, vacantIndex;
6021     bool vacancy = false;
6022 
6023     for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
6024         if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
6025             vacancy = true;
6026             vacantIndex = index;
6027         } else if (m_attachedSubStreams[index].streamId == stream_id) {
6028             return BAD_VALUE;
6029         }
6030     }
6031     if (!vacancy)
6032         return NO_MEMORY;
6033     m_attachedSubStreams[vacantIndex].streamId = stream_id;
6034     m_attachedSubStreams[vacantIndex].priority = priority;
6035     m_numRegisteredStream++;
6036     return NO_ERROR;
6037 }
6038 
6039 status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
6040 {
6041     ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
6042     int index;
6043     bool found = false;
6044 
6045     for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
6046         if (m_attachedSubStreams[index].streamId == stream_id) {
6047             found = true;
6048             break;
6049         }
6050     }
6051     if (!found)
6052         return BAD_VALUE;
6053     m_attachedSubStreams[index].streamId = -1;
6054     m_attachedSubStreams[index].priority = 0;
6055     m_numRegisteredStream--;
6056     return NO_ERROR;
6057 }
6058 
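// Lazily create an ION client: returns the client passed in if it already exists,
// the newly created one otherwise, or 0 on failure.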
6059 int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
6060 {
6061     if (ionClient == 0) {
6062         ionClient = ion_client_create();
6063         if (ionClient < 0) {
6064             ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
6065             return 0;
6066         }
6067     }
6068     return ionClient;
6069 }
6070 
6071 int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
6072 {
6073     if (ionClient != 0) {
6074         if (ionClient > 0) {
6075             ion_client_destroy(ionClient);
6076         }
6077         ionClient = 0;
6078     }
6079     return ionClient;
6080 }
6081 
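// Allocate and map up to iMemoryNum ION planes sized by buf->size.extS[]; bit i of
// cacheFlag requests a cached allocation for plane i. On any failure the planes
// allocated so far are freed and -1 is returned.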
6082 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
6083 {
6084     return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
6085 }
6086 
6087 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
6088 {
6089     int ret = 0;
6090     int i = 0;
6091     int flag = 0;
6092 
6093     if (ionClient == 0) {
6094         ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
6095         return -1;
6096     }
6097 
6098     for (i = 0 ; i < iMemoryNum ; i++) {
6099         if (buf->size.extS[i] == 0) {
6100             break;
6101         }
6102         if (1 << i & cacheFlag)
6103             flag = ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC;
6104         else
6105             flag = 0;
6106         buf->fd.extFd[i] = ion_alloc(ionClient, \
6107                                       buf->size.extS[i], 0, ION_HEAP_SYSTEM_MASK, flag);
6108         if ((buf->fd.extFd[i] == -1) || (buf->fd.extFd[i] == 0)) {
6109             ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
6110             buf->fd.extFd[i] = -1;
6111             freeCameraMemory(buf, iMemoryNum);
6112             return -1;
6113         }
6114 
6115         buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
6116                                         buf->size.extS[i], 0);
6117         if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
6118             ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
6119             buf->virt.extP[i] = (char *)MAP_FAILED;
6120             freeCameraMemory(buf, iMemoryNum);
6121             return -1;
6122         }
6123         ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
6124     }
6125 
6126     return ret;
6127 }
6128 
6129 void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6130 {
6131 
6132     int i = 0;
6133     int ret = 0;
6134 
6135     for (i = 0; i < iMemoryNum; i++) {
6136         if (buf->fd.extFd[i] != -1) {
6137             if (buf->virt.extP[i] != (char *)MAP_FAILED) {
6138                 ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
6139                 if (ret < 0)
6140                     ALOGE("ERR(%s)", __FUNCTION__);
6141             }
6142             ion_free(buf->fd.extFd[i]);
6143             ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
6144         }
6145         buf->fd.extFd[i] = -1;
6146         buf->virt.extP[i] = (char *)MAP_FAILED;
6147         buf->size.extS[i] = 0;
6148     }
6149 }
6150 
6151 void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6152 {
6153     int i = 0;
6154     for (i = 0; i < iMemoryNum; i++) {
6155         buf->virt.extP[i] = (char *)MAP_FAILED;
6156         buf->fd.extFd[i] = -1;
6157         buf->size.extS[i] = 0;
6158     }
6159 }
6160 
6161 
6162 
6163 
6164 static camera2_device_t *g_cam2_device = NULL;
6165 static bool g_camera_vaild = false;
6166 static Mutex g_camera_mutex;
6167 ExynosCamera2 * g_camera2[2] = { NULL, NULL };
6168 
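// HAL module glue: the static HAL2_* functions below forward each camera2_device_t call
// to the ExynosCameraHWInterface2 instance stored in dev->priv. Closing the device
// destroys that instance and invalidates the global handles above.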
6169 static int HAL2_camera_device_close(struct hw_device_t* device)
6170 {
6171     Mutex::Autolock lock(g_camera_mutex);
6172     ALOGD("(%s): ENTER", __FUNCTION__);
6173     if (device) {
6174 
6175         camera2_device_t *cam_device = (camera2_device_t *)device;
6176         ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
6177         ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
6178         delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
6179         free(cam_device);
6180         g_camera_vaild = false;
6181         g_cam2_device = NULL;
6182     }
6183 
6184     ALOGD("(%s): EXIT", __FUNCTION__);
6185     return 0;
6186 }
6187 
6188 static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
6189 {
6190     return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
6191 }
6192 
6193 static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
6194             const camera2_request_queue_src_ops_t *request_src_ops)
6195 {
6196     ALOGV("DEBUG(%s):", __FUNCTION__);
6197     return obj(dev)->setRequestQueueSrcOps(request_src_ops);
6198 }
6199 
6200 static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
6201 {
6202     ALOGV("DEBUG(%s):", __FUNCTION__);
6203     return obj(dev)->notifyRequestQueueNotEmpty();
6204 }
6205 
6206 static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
6207             const camera2_frame_queue_dst_ops_t *frame_dst_ops)
6208 {
6209     ALOGV("DEBUG(%s):", __FUNCTION__);
6210     return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
6211 }
6212 
6213 static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
6214 {
6215     ALOGV("DEBUG(%s):", __FUNCTION__);
6216     return obj(dev)->getInProgressCount();
6217 }
6218 
6219 static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
6220 {
6221     ALOGV("DEBUG(%s):", __FUNCTION__);
6222     return obj(dev)->flushCapturesInProgress();
6223 }
6224 
6225 static int HAL2_device_construct_default_request(const struct camera2_device *dev,
6226             int request_template, camera_metadata_t **request)
6227 {
6228     ALOGV("DEBUG(%s):", __FUNCTION__);
6229     return obj(dev)->constructDefaultRequest(request_template, request);
6230 }
6231 
6232 static int HAL2_device_allocate_stream(
6233             const struct camera2_device *dev,
6234             // inputs
6235             uint32_t width,
6236             uint32_t height,
6237             int      format,
6238             const camera2_stream_ops_t *stream_ops,
6239             // outputs
6240             uint32_t *stream_id,
6241             uint32_t *format_actual,
6242             uint32_t *usage,
6243             uint32_t *max_buffers)
6244 {
6245     ALOGV("(%s): ", __FUNCTION__);
6246     return obj(dev)->allocateStream(width, height, format, stream_ops,
6247                                     stream_id, format_actual, usage, max_buffers);
6248 }
6249 
6250 static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
6251             uint32_t stream_id,
6252             int num_buffers,
6253             buffer_handle_t *buffers)
6254 {
6255     ALOGV("DEBUG(%s):", __FUNCTION__);
6256     return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
6257 }
6258 
6259 static int HAL2_device_release_stream(
6260         const struct camera2_device *dev,
6261             uint32_t stream_id)
6262 {
6263     ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
6264     if (!g_camera_vaild)
6265         return 0;
6266     return obj(dev)->releaseStream(stream_id);
6267 }
6268 
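/*
 * Reprocess streams feed previously captured image data back into the HAL
 * (e.g. for ZSL-style still capture). One variant allocates a standalone
 * input stream; the other reuses the buffers of an existing output stream.
 */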
static int HAL2_device_allocate_reprocess_stream(
            const struct camera2_device *dev,
            uint32_t width,
            uint32_t height,
            uint32_t format,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *consumer_usage,
            uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
                                    stream_id, consumer_usage, max_buffers);
}

static int HAL2_device_allocate_reprocess_stream_from_stream(
            const struct camera2_device *dev,
            uint32_t output_stream_id,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
                                    reprocess_stream_ops, stream_id);
}

static int HAL2_device_release_reprocess_stream(
            const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->releaseReprocessStream(stream_id);
}
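/*
 * trigger_action: asynchronous triggers from the framework (such as autofocus
 * start/cancel and precapture metering), identified by trigger_id with two
 * integer arguments. Calls are silently dropped while no camera is validly
 * open (g_camera_vaild is false).
 */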
static int HAL2_device_trigger_action(const struct camera2_device *dev,
            uint32_t trigger_id,
            int ext1,
            int ext2)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
}

static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
            camera2_notify_callback notify_cb,
            void *user)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setNotifyCallback(notify_cb, user);
}

static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device *dev,
            vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getMetadataVendorTagOps(ops);
}

static int HAL2_device_dump(const struct camera2_device *dev, int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->dump(fd);
}

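/* Module-scope entry points. This HAL statically reports two cameras:
 * id 0 is the back sensor, id 1 is the front sensor (see HAL2_getCameraInfo). */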
static int HAL2_getNumberOfCameras()
{
    ALOGV("(%s): returning 2", __FUNCTION__);
    return 2;
}
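/*
 * HAL2_getCameraInfo: fills in facing/orientation/API version for the
 * requested camera and lazily builds the static metadata once per camera,
 * caching it in mCameraInfo[] for every later query.
 */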
static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
{
    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
    static camera_metadata_t *mCameraInfo[2] = {NULL, NULL};

    status_t res;

    if (cameraId == 0) {
        info->facing = CAMERA_FACING_BACK;
        if (!g_camera2[0])
            g_camera2[0] = new ExynosCamera2(0);
    }
    else if (cameraId == 1) {
        info->facing = CAMERA_FACING_FRONT;
        if (!g_camera2[1])
            g_camera2[1] = new ExynosCamera2(1);
    }
    else
        return BAD_VALUE;

    info->orientation = 0;
    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
    if (mCameraInfo[cameraId] == NULL) {
        // First pass: size and allocate the static metadata buffer.
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
        if (res != OK) {
            ALOGE("%s: Unable to allocate static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
        // Second pass: fill in the allocated entries.
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
        if (res != OK) {
            ALOGE("%s: Unable to fill in static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }
    info->static_camera_characteristics = mCameraInfo[cameraId];
    return NO_ERROR;
}
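/*
 * SET_METHOD() expands to a designated initializer (old GCC "name : value"
 * syntax) so each camera2_device_ops slot is wired to the matching
 * HAL2_device_* wrapper above. get_instance_metadata is not implemented.
 */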
#define SET_METHOD(m) m : HAL2_device_##m

static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(allocate_reprocess_stream_from_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
        get_instance_metadata : NULL
};

#undef SET_METHOD
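/*
 * HAL2_camera_device_open: only one camera may be open at a time. Under
 * g_camera_mutex the function rejects a second open with -EUSERS, waits for
 * any previous device instance to be freed, allocates the camera2_device_t,
 * wires in camera2_device_ops, and creates the ExynosCameraHWInterface2
 * object that the wrappers above dispatch to via dev->priv.
 */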
static int HAL2_camera_device_open(const struct hw_module_t* module,
                                  const char *id,
                                  struct hw_device_t** device)
{
    int cameraId = atoi(id);
    int openInvalid = 0;

    Mutex::Autolock lock(g_camera_mutex);
    if (g_camera_vaild) {
        ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__);
        return -EUSERS;
    }
    // Not valid yet; set true again at 'done' once the open has succeeded.
    g_camera_vaild = false;
    ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
        ALOGE("ERR(%s): Invalid camera ID %s", __FUNCTION__, id);
        return -EINVAL;
    }

    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
    if (g_cam2_device) {
        if (obj(g_cam2_device)->getCameraId() == cameraId) {
            ALOGD("DEBUG(%s): returning existing camera ID %s", __FUNCTION__, id);
            goto done;
        } else {
            // A different camera is still shutting down; wait for its close to free the device.
            ALOGD("(%s): START waiting for cam device free", __FUNCTION__);
            while (g_cam2_device)
                usleep(SIG_WAITING_TICK);
            ALOGD("(%s): END   waiting for cam device free", __FUNCTION__);
        }
    }

    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);

    if (!g_cam2_device)
        return -ENOMEM;

    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
    g_cam2_device->common.close   = HAL2_camera_device_close;

    g_cam2_device->ops = &camera2_device_ops;

    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);

    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
    // Despite its name, openInvalid is set non-zero by the constructor only on success.
    if (!openInvalid) {
        ALOGE("ERR(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
        // Release the half-opened device so a later open can start from a clean state.
        delete static_cast<ExynosCameraHWInterface2 *>(g_cam2_device->priv);
        free(g_cam2_device);
        g_cam2_device = NULL;
        return -ENODEV;
    }
done:
    *device = (hw_device_t *)g_cam2_device;
    ALOGV("DEBUG(%s): opened camera2 %s (%p)", __FUNCTION__, id, *device);
    g_camera_vaild = true;

    return 0;
}
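/*
 * Module glue: HAL_MODULE_INFO_SYM is the symbol the Android hardware module
 * loader looks up in this shared library; it exposes the camera count, the
 * per-camera static info, and the open() entry point defined above.
 */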
static hw_module_methods_t camera_module_methods = {
            open : HAL2_camera_device_open
};

extern "C" {
    struct camera_module HAL_MODULE_INFO_SYM = {
      common : {
          tag                : HARDWARE_MODULE_TAG,
          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
          hal_api_version    : HARDWARE_HAL_API_VERSION,
          id                 : CAMERA_HARDWARE_MODULE_ID,
          name               : "Exynos Camera HAL2",
          author             : "Samsung Corporation",
          methods            : &camera_module_methods,
          dso                : NULL,
          reserved           : {0},
      },
      get_number_of_cameras : HAL2_getNumberOfCameras,
      get_camera_info       : HAL2_getCameraInfo,
      set_callbacks         : NULL,
      get_vendor_tag_ops    : NULL,
      open_legacy           : NULL,
      reserved              : {0}
    };
}

} // namespace android