1 /*
2 * Copyright (c) 2020 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "camera_device.h"
17
#include <fcntl.h>
#include <pthread.h>
#include <sys/io.h>
#include <sys/prctl.h>
#include <sys/select.h>
#include <unistd.h>

#include <cstdlib>
#include <iostream>
#include <string>
#include <thread>

#include "codec_interface.h"
#include "display_layer.h"
#include "hal_camera.h"
#include "media_log.h"
#include "meta_data.h"
#include "securec.h"
36
37 using namespace OHOS;
38 using namespace OHOS::Media;
39 using namespace std;
40
41 /** Indicates that the current frame is an Instantaneous Decoder Refresh (IDR) frame. */
42 const int32_t KEY_IS_SYNC_FRAME = 1;
43 /** Indicates the frame timestamp. */
44 const int32_t KEY_TIME_US = 2;
45
46 const int32_t IMAGE_WIDTH = 3; // "DATA_PIX_FORMAT"
47 const int32_t IMAGE_HEIGHT = 4; // "DATA_PIX_FORMAT"
48 const int32_t IMAGE_SIZE = 5; // "DATA_PIX_FORMAT"
49 const int32_t DELAY_TIME_ONE_FRAME = 30000;
50 const int32_t VIDEO_MAX_NUM = 2; // "video max num"
51 const int32_t INVALID_STREAM_ID = -1;
52
53 namespace OHOS {
54 namespace Media {
55 extern Surface *g_surface;
Convert2CodecSize(int32_t width,int32_t height)56 inline PicSize Convert2CodecSize(int32_t width, int32_t height)
57 {
58 struct SizeMap {
59 PicSize res_;
60 uint32_t width_;
61 uint32_t height_;
62 };
63 static SizeMap sizeMap[] = {
64 {RESOLUTION_CIF, 352, 288}, {RESOLUTION_360P, 640, 360}, {RESOLUTION_D1_PAL, 720, 576},
65 {RESOLUTION_D1_NTSC, 720, 480}, {RESOLUTION_720P, 1280, 720}, {RESOLUTION_1080P, 1920, 1080},
66 {RESOLUTION_2560X1440, 2560, 1440}, {RESOLUTION_2592X1520, 2592, 1520}, {RESOLUTION_2592X1536, 2592, 1536},
67 {RESOLUTION_2592X1944, 2592, 1944}, {RESOLUTION_2688X1536, 2688, 1536}, {RESOLUTION_2716X1524, 2716, 1524},
68 {RESOLUTION_3840X2160, 3840, 2160}, {RESOLUTION_4096X2160, 4096, 2160}, {RESOLUTION_3000X3000, 3000, 3000},
69 {RESOLUTION_4000X3000, 4000, 3000}, {RESOLUTION_7680X4320, 7680, 4320}, {RESOLUTION_3840X8640, 3840, 8640}
70 };
71
72 for (uint32_t i = 0; i < sizeof(sizeMap) / sizeof(SizeMap); i++) {
73 if (sizeMap[i].width_ == width && sizeMap[i].height_ == height) {
74 return sizeMap[i].res_;
75 }
76 }
77 return RESOLUTION_INVALID;
78 }
79
ConverFormat(ImageFormat format)80 AvCodecMime ConverFormat(ImageFormat format)
81 {
82 if (format == FORMAT_JPEG) {
83 return MEDIA_MIMETYPE_IMAGE_JPEG;
84 } else if (format == FORMAT_AVC) {
85 return MEDIA_MIMETYPE_VIDEO_AVC;
86 } else if (format == FORMAT_HEVC) {
87 return MEDIA_MIMETYPE_VIDEO_HEVC;
88 } else {
89 return MEDIA_MIMETYPE_INVALID;
90 }
91 }
92
SetVencSource(CODEC_HANDLETYPE codecHdl,uint32_t deviceId)93 static int32_t SetVencSource(CODEC_HANDLETYPE codecHdl, uint32_t deviceId)
94 {
95 Param param = {.key = KEY_DEVICE_ID, .val = (void *)&deviceId, .size = sizeof(uint32_t)};
96 int32_t ret = CodecSetParameter(codecHdl, ¶m, 1);
97 if (ret != 0) {
98 MEDIA_ERR_LOG("Set enc source failed.(ret=%d)", ret);
99 return ret;
100 }
101 return MEDIA_OK;
102 }
103
GetDefaultBitrate(PicSize size)104 static uint32_t GetDefaultBitrate(PicSize size)
105 {
106 uint32_t rate; /* auto calc bitrate if set 0 */
107 if (size == RESOLUTION_360P) {
108 rate = 0x800; /* 2048kbps */
109 } else if (size == RESOLUTION_720P) {
110 rate = 0x400; /* 1024kbps */
111 } else if (size >= RESOLUTION_2560X1440 && size <= RESOLUTION_2716X1524) {
112 rate = 0x1800; /* 6144kbps */
113 } else if (size == RESOLUTION_3840X2160 || size == RESOLUTION_4096X2160) {
114 rate = 0xa000; /* 40960kbps */
115 } else {
116 rate = 0x0;
117 }
118 return rate;
119 }
120
CameraCreateVideoEnc(FrameConfig & fc,StreamAttr stream,uint32_t srcDev,CODEC_HANDLETYPE * codecHdl)121 static int32_t CameraCreateVideoEnc(FrameConfig &fc,
122 StreamAttr stream,
123 uint32_t srcDev,
124 CODEC_HANDLETYPE *codecHdl)
125 {
126 const char *name = "codec.video.hardware.encoder";
127 const uint32_t maxParamNum = 10;
128 uint32_t paramIndex = 0;
129 Param param[maxParamNum];
130
131 CodecType domainKind = VIDEO_ENCODER;
132 param[paramIndex].key = KEY_CODEC_TYPE;
133 param[paramIndex].val = &domainKind;
134 param[paramIndex].size = sizeof(CodecType);
135 paramIndex++;
136
137 AvCodecMime codecMime = ConverFormat(stream.format);
138 param[paramIndex].key = KEY_MIMETYPE;
139 param[paramIndex].val = &codecMime;
140 param[paramIndex].size = sizeof(AvCodecMime);
141 paramIndex++;
142
143 VenCodeRcMode rcMode = VENCOD_RC_CBR;
144 param[paramIndex].key = KEY_VIDEO_RC_MODE;
145 param[paramIndex].val = &rcMode;
146 param[paramIndex].size = sizeof(VenCodeRcMode);
147 paramIndex++;
148
149 VenCodeGopMode gopMode = VENCOD_GOPMODE_NORMALP;
150 param[paramIndex].key = KEY_VIDEO_GOP_MODE;
151 param[paramIndex].val = &gopMode;
152 param[paramIndex].size = sizeof(VenCodeGopMode);
153 paramIndex++;
154
155 Profile profile = HEVC_MAIN_PROFILE;
156 param[paramIndex].key = KEY_VIDEO_PROFILE;
157 param[paramIndex].val = &profile;
158 param[paramIndex].size = sizeof(Profile);
159 paramIndex++;
160
161 #ifdef __LINUX__
162 PicSize picSize = Convert2CodecSize(g_surface->GetWidth(), g_surface->GetHeight());
163 #else
164 PicSize picSize = Convert2CodecSize(stream.width, stream.height);
165 #endif
166
167 MEDIA_DEBUG_LOG("picSize=%d", picSize);
168 param[paramIndex].key = KEY_VIDEO_PIC_SIZE;
169 param[paramIndex].val = &picSize;
170 param[paramIndex].size = sizeof(PicSize);
171 paramIndex++;
172
173 uint32_t frameRate = stream.fps;
174 MEDIA_DEBUG_LOG("frameRate=%u", frameRate);
175 param[paramIndex].key = KEY_VIDEO_FRAME_RATE;
176 param[paramIndex].val = &frameRate;
177 param[paramIndex].size = sizeof(uint32_t);
178 paramIndex++;
179
180 uint32_t bitRate = GetDefaultBitrate(picSize);
181 MEDIA_DEBUG_LOG("bitRate=%u kbps", bitRate);
182 param[paramIndex].key = KEY_BITRATE;
183 param[paramIndex].val = &bitRate;
184 param[paramIndex].size = sizeof(uint32_t);
185 paramIndex++;
186
187 int32_t ret = CodecCreate(name, param, paramIndex, codecHdl);
188 if (ret != 0) {
189 MEDIA_ERR_LOG("Create video encoder failed.");
190 return MEDIA_ERR;
191 }
192
193 ret = SetVencSource(*codecHdl, srcDev);
194 if (ret != 0) {
195 CodecDestroy(codecHdl);
196 return MEDIA_ERR;
197 }
198
199 return MEDIA_OK;
200 }
201
CameraCreateJpegEnc(FrameConfig & fc,StreamAttr stream,uint32_t srcDev,CODEC_HANDLETYPE * codecHdl)202 static int32_t CameraCreateJpegEnc(FrameConfig &fc, StreamAttr stream, uint32_t srcDev, CODEC_HANDLETYPE *codecHdl)
203 {
204 const char *videoEncName = "codec.jpeg.hardware.encoder";
205 const uint32_t maxParamNum = 5;
206 Param param[maxParamNum];
207 uint32_t paramIndex = 0;
208
209 CodecType domainKind = VIDEO_ENCODER;
210 param[paramIndex].key = KEY_CODEC_TYPE;
211 param[paramIndex].val = &domainKind;
212 param[paramIndex].size = sizeof(CodecType);
213 paramIndex++;
214
215 AvCodecMime codecMime = ConverFormat(stream.format);
216 param[paramIndex].key = KEY_MIMETYPE;
217 param[paramIndex].val = &codecMime;
218 param[paramIndex].size = sizeof(AvCodecMime);
219 paramIndex++;
220
221 auto surfaceList = fc.GetSurfaces();
222 Surface *surface = surfaceList.front();
223
224 std::cout<<"------2: CameraCreateJpegEnc: surface width and height: "
225 <<surface->GetWidth()<<", "<<surface->GetHeight()<<std::endl;
226 PicSize picSize = Convert2CodecSize(surface->GetWidth(), surface->GetHeight());
227 param[paramIndex].key = KEY_VIDEO_PIC_SIZE;
228 param[paramIndex].val = &picSize;
229 param[paramIndex].size = sizeof(PicSize);
230 paramIndex++;
231
232 int32_t ret = CodecCreate(videoEncName, param, paramIndex, codecHdl);
233 if (ret != 0) {
234 return MEDIA_ERR;
235 }
236 int32_t qfactor = -1;
237 fc.GetParameter(PARAM_KEY_IMAGE_ENCODE_QFACTOR, qfactor);
238 if (qfactor != -1) {
239 Param jpegParam = {
240 .key = KEY_IMAGE_Q_FACTOR,
241 .val = &qfactor,
242 .size = sizeof(qfactor)
243 };
244 ret = CodecSetParameter(*codecHdl, &jpegParam, 1);
245 if (ret != 0) {
246 MEDIA_ERR_LOG("CodecSetParameter set jpeg qfactor failed.(ret=%u)", ret);
247 }
248 }
249
250 ret = SetVencSource(*codecHdl, srcDev);
251 if (ret != 0) {
252 MEDIA_ERR_LOG("Set video encoder source failed.");
253 CodecDestroy(*codecHdl);
254 return MEDIA_ERR;
255 }
256
257 return MEDIA_OK;
258 }
259
CopyCodecOutput(void * dst,uint32_t * size,OutputInfo * buffer)260 static int32_t CopyCodecOutput(void *dst, uint32_t *size, OutputInfo *buffer)
261 {
262 char *dstBuf = reinterpret_cast<char *>(dst);
263 for (uint32_t i = 0; i < buffer->bufferCnt; i++) {
264 uint32_t packSize = buffer->buffers[i].length - buffer->buffers[i].offset;
265 errno_t ret = memcpy_s(dstBuf, *size, buffer->buffers[i].addr + buffer->buffers[i].offset, packSize);
266 if (ret != EOK) {
267 return MEDIA_ERR;
268 }
269 *size -= packSize;
270 dstBuf += packSize;
271 }
272 return MEDIA_OK;
273 }
274
StreamAttrInitialize(StreamAttr * streamAttr,Surface * surface,StreamType streamType,FrameConfig & fc)275 static void StreamAttrInitialize(StreamAttr *streamAttr, Surface *surface,
276 StreamType streamType, FrameConfig &fc)
277 {
278 if (streamAttr == nullptr || surface == nullptr) {
279 return;
280 }
281 memset_s(streamAttr, sizeof(StreamAttr), 0, sizeof(StreamAttr));
282 streamAttr->type = streamType;
283 fc.GetParameter(CAM_IMAGE_FORMAT, streamAttr->format);
284 streamAttr->width = surface->GetWidth();
285 streamAttr->height = surface->GetHeight();
286 fc.GetParameter(CAM_FRAME_FPS, streamAttr->fps);
287 fc.GetParameter(CAM_IMAGE_INVERT_MODE, streamAttr->invertMode);
288 fc.GetParameter(CAM_IMAGE_CROP_RECT, streamAttr->crop);
289 }
290
Convert2HalImageFormat(uint32_t format)291 static ImageFormat Convert2HalImageFormat(uint32_t format)
292 {
293 if (format == CAM_IMAGE_RAW12) {
294 return FORMAT_RGB_BAYER_12BPP;
295 }
296 return FORMAT_YVU420;
297 }
298
OnVencBufferAvailble(UINTPTR hComponent,UINTPTR dataIn,OutputInfo * buffer)299 int32_t RecordAssistant::OnVencBufferAvailble(UINTPTR hComponent, UINTPTR dataIn, OutputInfo *buffer)
300 {
301 CODEC_HANDLETYPE hdl = reinterpret_cast<CODEC_HANDLETYPE>(hComponent);
302 RecordAssistant *assistant = reinterpret_cast<RecordAssistant *>(dataIn);
303 list<Surface *> *surfaceList = nullptr;
304 for (uint32_t idx = 0; idx < assistant->vencHdls_.size(); idx++) {
305 if (assistant->vencHdls_[idx] == hdl) {
306 surfaceList = &(assistant->vencSurfaces_[idx]);
307 break;
308 }
309 }
310 if (surfaceList == nullptr || surfaceList->empty()) {
311 MEDIA_ERR_LOG("Encoder handle is illegal.");
312 return MEDIA_ERR;
313 }
314 int32_t ret = -1;
315 for (auto &surface : *surfaceList) {
316 #ifdef __LINUX__
317 SurfaceBuffer *surfaceBuf = g_surface->RequestBuffer();
318 #else
319 SurfaceBuffer *surfaceBuf = surface->RequestBuffer();
320 #endif
321 if (surfaceBuf == nullptr) {
322 MEDIA_ERR_LOG("No available buffer in surface.");
323 break;
324 }
325 #ifdef __LINUX__
326 uint32_t size = g_surface->GetSize();
327 #else
328 uint32_t size = surface->GetSize();
329 #endif
330 void *buf = surfaceBuf->GetVirAddr();
331 if (buf == nullptr) {
332 MEDIA_ERR_LOG("Invalid buffer address.");
333 break;
334 }
335 ret = CopyCodecOutput(buf, &size, buffer);
336 if (ret != MEDIA_OK) {
337 MEDIA_ERR_LOG("No available buffer in surface.");
338 #ifdef __LINUX__
339 g_surface->CancelBuffer(surfaceBuf);
340 #else
341 surface->CancelBuffer(surfaceBuf);
342 #endif
343 break;
344 }
345 surfaceBuf->SetInt32(KEY_IS_SYNC_FRAME, (((buffer->flag & STREAM_FLAG_KEYFRAME) == 0) ? 0 : 1));
346 surfaceBuf->SetInt64(KEY_TIME_US, buffer->timeStamp);
347 #ifdef __LINUX__
348 surfaceBuf->SetSize(g_surface->GetSize() - size);
349 if (g_surface->FlushBuffer(surfaceBuf) != 0) {
350 MEDIA_ERR_LOG("Flush surface failed.");
351 g_surface->CancelBuffer(surfaceBuf);
352 ret = -1;
353 break;
354 }
355 #else
356 surfaceBuf->SetSize(surface->GetSize() - size);
357 if (surface->FlushBuffer(surfaceBuf) != 0) {
358 MEDIA_ERR_LOG("Flush surface failed.");
359 surface->CancelBuffer(surfaceBuf);
360 ret = -1;
361 break;
362 }
363 #endif
364 }
365 if (CodecQueueOutput(hdl, buffer, 0, -1) != 0) {
366 MEDIA_ERR_LOG("Codec queue output failed.");
367 }
368 return ret;
369 }
370
371 CodecCallback RecordAssistant::recordCodecCb_ = {nullptr, nullptr, RecordAssistant::OnVencBufferAvailble};
372
SetFrameConfig(FrameConfig & fc,uint32_t * streamId)373 int32_t RecordAssistant::SetFrameConfig(FrameConfig &fc, uint32_t *streamId)
374 {
375 fc_ = &fc;
376 auto surfaceList = fc.GetSurfaces();
377 if (surfaceList.size() > VIDEO_MAX_NUM || surfaceList.size() == 0) {
378 MEDIA_ERR_LOG("the number of surface in frame config must 1 or 2 now.\n");
379 return MEDIA_ERR;
380 }
381 uint32_t num = 0;
382 int32_t ret = MEDIA_OK;
383 for (auto &surface : surfaceList) {
384 CODEC_HANDLETYPE codecHdl = nullptr;
385 StreamAttr stream = {};
386 #ifdef __LINUX__
387 StreamAttrInitialize(&stream, g_surface, STREAM_VIDEO, fc);
388 #else
389 StreamAttrInitialize(&stream, surface, STREAM_VIDEO, fc);
390 #endif
391 ret = HalCameraStreamCreate(cameraId_, &stream, streamId);
392 if (ret != MEDIA_OK) {
393 MEDIA_ERR_LOG(" creat recorder stream failed.");
394 return MEDIA_ERR;
395 }
396 streamIdNum_[num] = *streamId;
397 num++;
398
399 StreamInfo streamInfo;
400 streamInfo.type = STERAM_INFO_PRIVATE;
401 fc.GetVendorParameter(streamInfo.u.data, PRIVATE_TAG_LEN);
402 HalCameraStreamSetInfo(cameraId_, *streamId, &streamInfo);
403
404 uint32_t deviceId = 0;
405 HalCameraGetDeviceId(cameraId_, *streamId, &deviceId);
406 ret = CameraCreateVideoEnc(fc, stream, deviceId, &codecHdl);
407 if (ret != MEDIA_OK) {
408 MEDIA_ERR_LOG("Cannot create suitble video encoder.");
409 return MEDIA_ERR;
410 }
411 ret = CodecSetCallback(codecHdl, &recordCodecCb_, reinterpret_cast<UINTPTR>(this));
412 if (ret != 0) {
413 MEDIA_ERR_LOG("Set codec callback failed.(ret=%d)", ret);
414 CodecDestroy(codecHdl);
415 return MEDIA_ERR;
416 }
417 vencHdls_.emplace_back(codecHdl);
418 #ifdef __LINUX__
419 list<Surface*> conList({g_surface});
420 #else
421 list<Surface*> conList({surface});
422 #endif
423 vencSurfaces_.emplace_back(conList);
424 }
425 state_ = LOOP_READY;
426 return MEDIA_OK;
427 }
428
Start(uint32_t streamId)429 int32_t RecordAssistant::Start(uint32_t streamId)
430 {
431 if (state_ != LOOP_READY) {
432 return MEDIA_ERR;
433 }
434 HalCameraStreamOn(cameraId_, streamId);
435 int32_t ret = MEDIA_OK;
436 int32_t i;
437 for (i = 0; static_cast<uint32_t>(i) < vencHdls_.size(); i++) {
438 ret = CodecStart(vencHdls_[i]);
439 if (ret != MEDIA_OK) {
440 MEDIA_ERR_LOG("Video encoder start failed.");
441 ret = MEDIA_ERR;
442 break;
443 }
444 }
445 if (ret == MEDIA_ERR) {
446 /* rollback */
447 for (; i >= 0; i--) {
448 CodecStop(vencHdls_[i]);
449 }
450 return MEDIA_ERR;
451 }
452 state_ = LOOP_LOOPING;
453 MEDIA_INFO_LOG("Start camera recording succeed.");
454 return MEDIA_OK;
455 }
456
Stop()457 int32_t RecordAssistant::Stop()
458 {
459 if (state_ != LOOP_LOOPING) {
460 return MEDIA_ERR;
461 }
462 for (uint32_t i = 0; i < vencHdls_.size(); i++) {
463 CodecStop(vencHdls_[i]);
464 CodecDestroy(vencHdls_[i]);
465 }
466 vencHdls_.clear();
467 vencSurfaces_.clear();
468 for (uint32_t i = 0; i < VIDEO_MAX_NUM; i++) {
469 if (streamIdNum_[i] != INVALID_STREAM_ID) {
470 HalCameraStreamOff(cameraId_, streamIdNum_[i]);
471 HalCameraStreamDestroy(cameraId_, streamIdNum_[i]);
472 }
473 streamIdNum_[i] = INVALID_STREAM_ID;
474 }
475 state_ = LOOP_STOP;
476 return MEDIA_OK;
477 }
478
YuvCopyProcess(void * arg)479 void* PreviewAssistant::YuvCopyProcess(void *arg)
480 {
481 return nullptr;
482 }
483
SetFrameConfig(FrameConfig & fc,uint32_t * streamId)484 int32_t PreviewAssistant::SetFrameConfig(FrameConfig &fc, uint32_t *streamId)
485 {
486 fc_ = &fc;
487 auto surfaceList = fc.GetSurfaces();
488 if (surfaceList.size() != 1) {
489 MEDIA_ERR_LOG("Only support one surface in frame config now.");
490 return MEDIA_ERR;
491 }
492 Surface *surface = surfaceList.front();
493 StreamAttr stream = {};
494 StreamAttrInitialize(&stream, surface, STREAM_PREVIEW, fc);
495 int32_t ret = HalCameraStreamCreate(cameraId_, &stream, streamId);
496 if (ret != MEDIA_OK) {
497 MEDIA_ERR_LOG(" creat Preview stream failed.");
498 return MEDIA_ERR;
499 }
500 StreamInfo streamInfo;
501 streamInfo.type = STREAM_INFO_POS;
502 streamInfo.u.pos.x = std::stoi(surface->GetUserData(string("region_position_x")));
503 streamInfo.u.pos.y = std::stoi(surface->GetUserData(string("region_position_y")));
504
505 HalCameraStreamSetInfo(cameraId_, *streamId, &streamInfo);
506 streamId_ = *streamId;
507 return MEDIA_OK;
508 }
509
Start(uint32_t streamId)510 int32_t PreviewAssistant::Start(uint32_t streamId)
511 {
512 if (state_ == LOOP_LOOPING) {
513 return MEDIA_ERR;
514 }
515 state_ = LOOP_LOOPING;
516
517 int32_t retCode = pthread_create(&threadId, nullptr, YuvCopyProcess, this);
518 if (retCode != 0) {
519 MEDIA_ERR_LOG("fork thread YuvCopyProcess failed: %d.", retCode);
520 }
521
522 int32_t ret = HalCameraStreamOn(cameraId_, streamId);
523 if (ret != MEDIA_OK) {
524 MEDIA_ERR_LOG("Preview start failed. (ret=%d)", ret);
525 Stop();
526 return MEDIA_ERR;
527 }
528 return MEDIA_OK;
529 }
530
Stop()531 int32_t PreviewAssistant::Stop()
532 {
533 if (state_ != LOOP_LOOPING) {
534 return MEDIA_ERR;
535 }
536 state_ = LOOP_STOP;
537 pthread_join(threadId, NULL);
538 HalCameraStreamOff(cameraId_, streamId_);
539 HalCameraStreamDestroy(cameraId_, streamId_);
540 return MEDIA_OK;
541 }
542
SetFrameConfig(FrameConfig & fc,uint32_t * streamId)543 int32_t CaptureAssistant::SetFrameConfig(FrameConfig &fc, uint32_t *streamId)
544 {
545 auto surfaceList = fc.GetSurfaces();
546 if (surfaceList.size() != 1) {
547 MEDIA_ERR_LOG("Only support one surface in frame config now.");
548 return MEDIA_ERR;
549 }
550 if (surfaceList.empty()) {
551 MEDIA_ERR_LOG("Frame config with empty surface list.");
552 return MEDIA_ERR;
553 }
554 if (surfaceList.size() > 1) {
555 MEDIA_WARNING_LOG("Capture only fullfill the first surface in frame config.");
556 }
557 Surface *surface = surfaceList.front();
558
559 StreamAttr stream = {};
560 StreamAttrInitialize(&stream, surface, STREAM_CAPTURE, fc);
561
562 uint32_t deviceId = 0;
563 int32_t ret = HalCameraStreamCreate(cameraId_, &stream, streamId);
564 if (ret != MEDIA_OK) {
565 MEDIA_ERR_LOG(" creat capture stream failed.");
566 return MEDIA_ERR;
567 }
568 streamId_ = *streamId;
569 HalCameraGetDeviceId(cameraId_, *streamId, &deviceId);
570 ret = CameraCreateJpegEnc(fc, stream, deviceId, &vencHdl_);
571 if (ret != MEDIA_OK) {
572 MEDIA_ERR_LOG("Create capture venc failed.");
573 return MEDIA_ERR;
574 }
575
576 capSurface_ = surface;
577 state_ = LOOP_READY;
578 return MEDIA_OK;
579 }
580
581 /* Block method, waiting for capture completed */
Start(uint32_t streamId)582 int32_t CaptureAssistant::Start(uint32_t streamId)
583 {
584 int32_t retCode = MEDIA_ERR;
585 state_ = LOOP_LOOPING;
586 HalCameraStreamOn(cameraId_, streamId);
587 int pictures = capSurface_->GetQueueSize();
588 int32_t ret = CodecStart(vencHdl_);
589 if (ret != 0) {
590 MEDIA_ERR_LOG("Start capture encoder failed.(ret=%d)", ret);
591 goto FREE_RESOURCE;
592 }
593
594 do {
595 SurfaceBuffer *surfaceBuf = capSurface_->RequestBuffer();
596 if (surfaceBuf == nullptr) {
597 MEDIA_ERR_LOG("No available buffer in surface.");
598 break;
599 }
600
601 OutputInfo outInfo;
602 ret = CodecDequeueOutput(vencHdl_, 0, nullptr, &outInfo);
603 if (ret != 0) {
604 capSurface_->CancelBuffer(surfaceBuf);
605 MEDIA_ERR_LOG("Dequeue capture frame failed.(ret=%d)", ret);
606 break;
607 }
608
609 uint32_t size = capSurface_->GetSize();
610 void *buf = surfaceBuf->GetVirAddr();
611 if (buf == nullptr) {
612 MEDIA_ERR_LOG("Invalid buffer address.");
613 break;
614 }
615 if (CopyCodecOutput(buf, &size, &outInfo) != MEDIA_OK) {
616 MEDIA_ERR_LOG("No available buffer in capSurface_.");
617 capSurface_->CancelBuffer(surfaceBuf);
618 break;
619 }
620 surfaceBuf->SetSize(capSurface_->GetSize() - size);
621
622 if (capSurface_->FlushBuffer(surfaceBuf) != 0) {
623 MEDIA_ERR_LOG("Flush surface buffer failed.");
624 capSurface_->CancelBuffer(surfaceBuf);
625 break;
626 }
627
628 CodecQueueOutput(vencHdl_, &outInfo, 0, -1); // 0:no timeout -1:no fd
629 retCode = MEDIA_OK;
630 } while (--pictures);
631
632 CodecStop(vencHdl_);
633
634 FREE_RESOURCE:
635 CodecDestroy(vencHdl_);
636 HalCameraStreamOff(cameraId_, streamId);
637 HalCameraStreamDestroy(cameraId_, streamId);
638 delete capSurface_;
639 capSurface_ = nullptr;
640 state_ = LOOP_STOP;
641
642 return retCode;
643 }
644
Stop()645 int32_t CaptureAssistant::Stop()
646 {
647 MEDIA_DEBUG_LOG("No support method.");
648 return MEDIA_OK;
649 }
650
SetFrameConfig(FrameConfig & fc,uint32_t * streamId)651 int32_t CallbackAssistant::SetFrameConfig(FrameConfig &fc, uint32_t *streamId)
652 {
653 fc_ = &fc;
654 auto surfaceList = fc.GetSurfaces();
655 if (surfaceList.size() != 1) {
656 MEDIA_ERR_LOG("Only support one surface in frame config now.");
657 return MEDIA_ERR;
658 }
659 uint32_t imageFormat = 0;
660 fc.GetParameter(CAM_IMAGE_FORMAT, imageFormat);
661 ImageFormat halImageFormat = Convert2HalImageFormat(imageFormat);
662 MEDIA_INFO_LOG("Imageformat is %d", imageFormat);
663 Surface *surface = surfaceList.front();
664 StreamAttr stream = {};
665 StreamAttrInitialize(&stream, surface, STREAM_CALLBACK, fc);
666 stream.format = halImageFormat;
667 int32_t ret = HalCameraStreamCreate(cameraId_, &stream, streamId);
668 if (ret != MEDIA_OK) {
669 MEDIA_ERR_LOG(" creat callback stream failed.");
670 return MEDIA_ERR;
671 }
672 streamId_ = *streamId;
673 capSurface_ = surface;
674 state_ = LOOP_READY;
675 return MEDIA_OK;
676 }
677
Start(uint32_t streamId)678 int32_t CallbackAssistant::Start(uint32_t streamId)
679 {
680 if (state_ == LOOP_LOOPING) {
681 return MEDIA_ERR;
682 }
683 state_ = LOOP_LOOPING;
684 int32_t retCode = pthread_create(&threadId, nullptr, StreamCopyProcess, this);
685 if (retCode != 0) {
686 MEDIA_ERR_LOG("fork thread StreamCopyProcess failed: %d.", retCode);
687 }
688 HalCameraStreamOn(cameraId_, streamId);
689 return MEDIA_OK;
690 }
691
StreamCopyProcess(void * arg)692 void* CallbackAssistant::StreamCopyProcess(void *arg)
693 {
694 CallbackAssistant *assistant = (CallbackAssistant *)arg;
695 if (assistant == nullptr) {
696 MEDIA_ERR_LOG("CallbackAssistant create failed.");
697 return nullptr;
698 }
699 if (assistant->capSurface_ == nullptr) {
700 MEDIA_ERR_LOG("capSurface_ is null.\n");
701 return nullptr;
702 }
703
704 int32_t ret;
705 HalBuffer streamBuffer;
706 (void)memset_s(&streamBuffer, sizeof(HalBuffer), 0, sizeof(HalBuffer));
707 while (assistant->state_ == LOOP_LOOPING) {
708 SurfaceBuffer *surfaceBuf = assistant->capSurface_->RequestBuffer();
709 if (surfaceBuf == nullptr) {
710 usleep(DELAY_TIME_ONE_FRAME);
711 continue;
712 }
713
714 if (streamBuffer.size != 0x0) {
715 HalCameraQueueBuf(assistant->cameraId_, assistant->streamId_, &streamBuffer);
716 (void)memset_s(&streamBuffer, sizeof(HalBuffer), 0, sizeof(HalBuffer));
717 }
718 streamBuffer.format = FORMAT_PRIVATE;
719 streamBuffer.size = assistant->capSurface_->GetSize();
720 if (surfaceBuf->GetVirAddr() == NULL) {
721 MEDIA_ERR_LOG("Invalid buffer address.");
722 break;
723 }
724 streamBuffer.virAddr = surfaceBuf->GetVirAddr();
725
726 ret = HalCameraDequeueBuf(assistant->cameraId_, assistant->streamId_, &streamBuffer);
727 if (ret != MEDIA_OK) {
728 usleep(DELAY_TIME_ONE_FRAME);
729 continue;
730 }
731
732 if (assistant->capSurface_->FlushBuffer(surfaceBuf) != 0) {
733 MEDIA_ERR_LOG("Flush surface failed.");
734 assistant->capSurface_->CancelBuffer(surfaceBuf);
735 break;
736 }
737 usleep(DELAY_TIME_ONE_FRAME);
738 }
739 if (streamBuffer.size != 0x0) {
740 HalCameraQueueBuf(assistant->cameraId_, assistant->streamId_, &streamBuffer);
741 }
742 MEDIA_DEBUG_LOG(" yuv thread joined \n");
743 return nullptr;
744 }
745
Stop()746 int32_t CallbackAssistant::Stop()
747 {
748 if (state_ != LOOP_LOOPING) {
749 return MEDIA_ERR;
750 }
751 state_ = LOOP_STOP;
752 pthread_join(threadId, NULL);
753 HalCameraStreamOff(cameraId_, streamId_);
754 HalCameraStreamDestroy(cameraId_, streamId_);
755 return MEDIA_OK;
756 }
757
CameraDevice()758 CameraDevice::CameraDevice() {}
CameraDevice(uint32_t cameraId)759 CameraDevice::CameraDevice(uint32_t cameraId)
760 {
761 this->cameraId = cameraId;
762 }
763
~CameraDevice()764 CameraDevice::~CameraDevice() {}
765
Initialize()766 int32_t CameraDevice::Initialize()
767 {
768 // Need to be Refactored when delete config file
769 int32_t ret = CodecInit();
770 if (ret != 0) {
771 MEDIA_ERR_LOG("Codec module init failed.(ret=%d)", ret);
772 return MEDIA_ERR;
773 }
774 MEDIA_INFO_LOG("Codec module init succeed.");
775 captureAssistant_.state_ = LOOP_READY;
776 previewAssistant_.state_ = LOOP_READY;
777 recordAssistant_.state_ = LOOP_READY;
778 callbackAssistant_.state_ = LOOP_READY;
779 captureAssistant_.cameraId_ = cameraId;
780 previewAssistant_.cameraId_ = cameraId;
781 recordAssistant_.cameraId_ = cameraId;
782 callbackAssistant_.cameraId_ = cameraId;
783 return MEDIA_OK;
784 }
785
UnInitialize()786 int32_t CameraDevice::UnInitialize()
787 {
788 return MEDIA_OK;
789 }
790
TriggerLoopingCapture(FrameConfig & fc,uint32_t * streamId)791 int32_t CameraDevice::TriggerLoopingCapture(FrameConfig &fc, uint32_t *streamId)
792 {
793 MEDIA_DEBUG_LOG("Camera device start looping capture.");
794 DeviceAssistant *assistant = nullptr;
795 int32_t fcType = fc.GetFrameConfigType();
796 switch (fcType) {
797 case FRAME_CONFIG_RECORD:
798 assistant = &recordAssistant_;
799 break;
800 case FRAME_CONFIG_PREVIEW:
801 assistant = &previewAssistant_;
802 break;
803 case FRAME_CONFIG_CAPTURE:
804 assistant = &captureAssistant_;
805 break;
806 case FRAME_CONFIG_CALLBACK:
807 assistant = &callbackAssistant_;
808 break;
809 default:
810 break;
811 }
812 if (assistant == nullptr) {
813 MEDIA_ERR_LOG("Invalid frame config type.(type=%d)", fcType);
814 return MEDIA_ERR;
815 }
816 if (assistant->state_ == LOOP_IDLE || assistant->state_ == LOOP_LOOPING || assistant->state_ == LOOP_ERROR) {
817 MEDIA_ERR_LOG("Device state is %d, cannot start looping capture.", assistant->state_);
818 return MEDIA_ERR;
819 }
820
821 int32_t ret = assistant->SetFrameConfig(fc, streamId);
822 if (ret != MEDIA_OK) {
823 MEDIA_ERR_LOG("Check and set frame config failed.(ret=%d)", ret);
824 return MEDIA_ERR;
825 }
826
827 ret = assistant->Start(*streamId);
828 if (ret != MEDIA_OK) {
829 MEDIA_ERR_LOG("Start looping capture failed.(ret=%d)", ret);
830 return MEDIA_ERR;
831 }
832 return MEDIA_OK;
833 }
834
StopLoopingCapture()835 void CameraDevice::StopLoopingCapture()
836 {
837 MEDIA_INFO_LOG("Stop looping capture in camera_device.cpp");
838 previewAssistant_.Stop();
839 recordAssistant_.Stop();
840 callbackAssistant_.Stop();
841 }
842
TriggerSingleCapture(FrameConfig & fc,uint32_t * streamId)843 int32_t CameraDevice::TriggerSingleCapture(FrameConfig &fc, uint32_t *streamId)
844 {
845 return TriggerLoopingCapture(fc, streamId);
846 }
847
SetCameraConfig()848 int32_t CameraDevice::SetCameraConfig()
849 {
850 return MEDIA_OK;
851 }
852 } // namespace Media
853 } // namespace OHOS
854