1 /*!
2 * \copy
3 * Copyright (c) 2009-2014, Cisco Systems
4 * All rights reserved.
5 *
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
8 * are met:
9 *
10 * * Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 *
13 * * Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
16 * distribution.
17 *
18 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
21 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
22 * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
23 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
24 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
26 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
27 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
28 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
29 * POSSIBILITY OF SUCH DAMAGE.
30 *
31 *
32 *************************************************************************************
33 */
34
35 #include <stdint.h>
36 #include <time.h>
37 #include <cmath>
38 #include <cstdio>
39 #include <cstring>
40 #include <iostream>
41 #include <string>
42 #include <memory>
43 #include <assert.h>
44 #include <limits.h>
45
46 #include "gmp-platform.h"
47 #include "gmp-video-host.h"
48 #include "gmp-video-encode.h"
49 #include "gmp-video-decode.h"
50 #include "gmp-video-frame-i420.h"
51 #include "gmp-video-frame-encoded.h"
52
53 #include "codec_def.h"
54 #include "codec_app_def.h"
55 #include "codec_api.h"
56
#if defined(_MSC_VER)
#define PUBLIC_FUNC __declspec(dllexport)
#else
#define PUBLIC_FUNC
#endif

// This is for supporting older compilers which do not have support for the
// C++11 nullptr keyword; when absent, nullptr is mapped to GCC's __null.
#if defined(nullptr)
# define GMP_HAVE_NULLPTR

#elif defined(__clang__)
// Older clang versions lack __has_extension; __has_feature is an adequate
// substitute for this probe.
# ifndef __has_extension
# define __has_extension __has_feature
# endif

# if __has_extension(cxx_nullptr)
# define GMP_HAVE_NULLPTR
# endif

#elif defined(__GNUC__)
// GCC gained nullptr in 4.6, and only in C++0x/C++11 mode.
// Note: the predefined macros are __GNUC__ / __GNUC_MINOR__ — the previous
// spelling (__GNU_C__ / __GNU_C_MINOR__) is never defined, which silently
// disabled this branch.  The version test also accepts GCC >= 5, whose
// minor version restarts at 0.
# if defined(__GXX_EXPERIMENTAL_CXX0X__) || __cplusplus >= 201103L
# if (__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ >= 6))
# define GMP_HAVE_NULLPTR
# endif
# endif

#elif defined(_MSC_VER)
# define GMP_HAVE_NULLPTR
#endif

#if !defined (GMP_HAVE_NULLPTR)
# define nullptr __null
#endif
92
93 #include "task_utils.h"
94
// Runtime log verbosity; messages with level <= g_log_level are emitted.
static int g_log_level = 0;

// Log to stderr as "<level-name>: <message>" when level l is at or below
// g_log_level.  Levels outside [0, 3] are labelled "unknown".
#define GMPLOG(l, x) do { \
        if (l <= g_log_level) { \
        const char *log_string = "unknown"; \
        if ((l >= 0) && (l <= 3)) { \
        log_string = kLogStrings[l]; \
        } \
        std::cerr << log_string << ": " << x << std::endl; \
        } \
    } while(0)

// Log levels, ordered from most to least severe; indices into kLogStrings.
#define GL_CRIT 0
#define GL_ERROR 1
#define GL_INFO  2
#define GL_DEBUG 3

// Human-readable names for GL_CRIT..GL_DEBUG, used by GMPLOG above.
const char* kLogStrings[] = {
  "Critical",
  "Error",
  "Info",
  "Debug"
};

// Largest frame size, in macroblocks, this plugin will configure OpenH264
// for; larger requests are scaled down in InitEncode.  36864 MBs
// corresponds to the H.264 level 5.2 maximum frame size — TODO confirm.
#define OPENH264_MAX_MB 36864

// Platform services (threads, main-thread dispatch) supplied by the GMP
// host at plugin init time.
GMPPlatformAPI* g_platform_api = nullptr;

class OpenH264VideoEncoder;
124
GMPLogLevelToWelsLogLevel(GMPLogLevel aLevel)125 static uint32_t GMPLogLevelToWelsLogLevel(GMPLogLevel aLevel) {
126 switch (aLevel) {
127 default:
128 case kGMPLogInvalid:
129 case kGMPLogDefault:
130 return WELS_LOG_DEFAULT;
131 case kGMPLogQuiet:
132 return WELS_LOG_QUIET;
133 case kGMPLogError:
134 return WELS_LOG_ERROR;
135 case kGMPLogWarning:
136 return WELS_LOG_WARNING;
137 case kGMPLogInfo:
138 return WELS_LOG_INFO;
139 case kGMPLogDebug:
140 return WELS_LOG_DEBUG;
141 case kGMPLogDetail:
142 return WELS_LOG_DETAIL;
143 }
144 }
145
GMPVideoCodecModeToWelsUsageType(GMPVideoCodecMode aMode)146 static EUsageType GMPVideoCodecModeToWelsUsageType(GMPVideoCodecMode aMode) {
147 switch (aMode) {
148 default:
149 case kGMPRealtimeVideo:
150 return CAMERA_VIDEO_REAL_TIME;
151 case kGMPScreensharing:
152 return SCREEN_CONTENT_REAL_TIME;
153 case kGMPStreamingVideo:
154 return SCREEN_CONTENT_NON_REAL_TIME;
155 case kGMPNonRealtimeVideo:
156 return CAMERA_VIDEO_NON_REAL_TIME;
157 }
158 }
159
GMPRateControlModeToWelsRcModes(GMPRateControlMode aMode)160 static RC_MODES GMPRateControlModeToWelsRcModes(GMPRateControlMode aMode) {
161 switch (aMode) {
162 default:
163 case kGMPRateControlUnknown:
164 case kGMPRateControlBitrate:
165 return RC_BITRATE_MODE;
166 case kGMPRateControlQuality:
167 return RC_QUALITY_MODE;
168 case kGMPRateControlBufferBased:
169 return RC_BUFFERBASED_MODE;
170 case kGMPRateControlTimestamp:
171 return RC_TIMESTAMP_MODE;
172 case kGMPRateControlBitratePostskip:
173 return RC_BITRATE_MODE_POST_SKIP;
174 case kGMPRateControlOff:
175 return RC_OFF_MODE;
176 }
177 }
178
GMPProfileToWelsProfile(GMPProfile aProfile)179 EProfileIdc GMPProfileToWelsProfile(GMPProfile aProfile) {
180 switch (aProfile) {
181 default:
182 case kGMPH264ProfileUnknown:
183 return PRO_UNKNOWN;
184 case kGMPH264ProfileBaseline:
185 return PRO_BASELINE;
186 case kGMPH264ProfileMain:
187 return PRO_MAIN;
188 case kGMPH264ProfileExtended:
189 return PRO_EXTENDED;
190 case kGMPH264ProfileHigh:
191 return PRO_HIGH;
192 case kGMPH264ProfileHigh10:
193 return PRO_HIGH10;
194 case kGMPH264ProfileHigh422:
195 return PRO_HIGH422;
196 case kGMPH264ProfileHigh444:
197 return PRO_HIGH444;
198 case kGMPH264ProfileCavlc444:
199 return PRO_CAVLC444;
200 case kGMPH264ProfileScalableBaseline:
201 return PRO_SCALABLE_BASELINE;
202 case kGMPH264ProfileScalableHigh:
203 return PRO_SCALABLE_HIGH;
204 }
205 }
206
GMPLevelToWelsLevel(GMPLevel aLevel)207 ELevelIdc GMPLevelToWelsLevel(GMPLevel aLevel) {
208 switch (aLevel) {
209 default:
210 case kGMPH264LevelUnknown:
211 return LEVEL_UNKNOWN;
212 case kGMPH264Level1_0:
213 return LEVEL_1_0;
214 case kGMPH264Level1_B:
215 return LEVEL_1_B;
216 case kGMPH264Level1_1:
217 return LEVEL_1_1;
218 case kGMPH264Level1_2:
219 return LEVEL_1_2;
220 case kGMPH264Level1_3:
221 return LEVEL_1_3;
222 case kGMPH264Level2_0:
223 return LEVEL_2_0;
224 case kGMPH264Level2_1:
225 return LEVEL_2_1;
226 case kGMPH264Level2_2:
227 return LEVEL_2_2;
228 case kGMPH264Level3_0:
229 return LEVEL_3_0;
230 case kGMPH264Level3_1:
231 return LEVEL_3_1;
232 case kGMPH264Level3_2:
233 return LEVEL_3_2;
234 case kGMPH264Level4_0:
235 return LEVEL_4_0;
236 case kGMPH264Level4_1:
237 return LEVEL_4_1;
238 case kGMPH264Level4_2:
239 return LEVEL_4_2;
240 case kGMPH264Level5_0:
241 return LEVEL_5_0;
242 case kGMPH264Level5_1:
243 return LEVEL_5_1;
244 case kGMPH264Level5_2:
245 return LEVEL_5_2;
246 }
247 }
248
GMPSliceToWelsSliceMode(GMPSliceMode aMode)249 SliceModeEnum GMPSliceToWelsSliceMode(GMPSliceMode aMode) {
250 switch (aMode) {
251 default:
252 case kGMPSliceUnknown:
253 case kGMPSliceSingle:
254 return SM_SINGLE_SLICE;
255 case kGMPSliceSizeLimited:
256 return SM_SIZELIMITED_SLICE;
257 case kGMPSliceFixedSlcNum:
258 return SM_FIXEDSLCNUM_SLICE;
259 case kGMPSliceRaster:
260 return SM_RASTER_SLICE;
261 }
262 }
263
// Scoped owner for GMP objects, which are released with Destroy() rather
// than delete (so std::unique_ptr does not apply).  Calls t->Destroy() on
// scope exit unless ownership is released with forget().
template <typename T> class SelfDestruct {
 public:
  explicit SelfDestruct (T* t) : t_ (t) {}
  ~SelfDestruct() {
    if (t_) {
      t_->Destroy();
    }
  }

  // Release ownership without destroying; returns the held pointer.
  T* forget() {
    T* t = t_;
    t_ = nullptr;

    return t;
  }

 private:
  // Not implemented: copying an owner would cause a double Destroy().
  // (Declared-private rather than `= delete` because this file still
  // supports pre-C++11 compilers — see the nullptr probe above.)
  SelfDestruct (const SelfDestruct&);
  SelfDestruct& operator= (const SelfDestruct&);

  T* t_;
};
283
284 class FrameStats {
285 public:
FrameStats(const char * type)286 FrameStats (const char* type) :
287 frames_in_ (0),
288 frames_out_ (0),
289 start_time_ (time (0)),
290 last_time_ (start_time_),
291 type_ (type) {}
292
FrameIn()293 void FrameIn() {
294 ++frames_in_;
295 time_t now = time (0);
296
297 if (now == last_time_) {
298 return;
299 }
300
301 if (! (frames_in_ % 10)) {
302 GMPLOG (GL_INFO, type_ << ": " << now << " Frame count "
303 << frames_in_
304 << "(" << (frames_in_ / (now - start_time_)) << "/"
305 << (30 / (now - last_time_)) << ")"
306 << " -- " << frames_out_);
307 last_time_ = now;
308 }
309 }
310
FrameOut()311 void FrameOut() {
312 ++frames_out_;
313 }
314
315 private:
316 uint64_t frames_in_;
317 uint64_t frames_out_;
318 time_t start_time_;
319 time_t last_time_;
320 const std::string type_;
321 };
322
323 class OpenH264VideoEncoder : public GMPVideoEncoder, public RefCounted {
324 public:
OpenH264VideoEncoder(GMPVideoHost * hostAPI)325 OpenH264VideoEncoder (GMPVideoHost* hostAPI) :
326 host_ (hostAPI),
327 worker_thread_ (nullptr),
328 encoder_ (nullptr),
329 max_payload_size_ (0),
330 callback_ (nullptr),
331 stats_ ("Encoder"),
332 gmp_api_version_ (kGMPVersion33),
333 shutting_down(false) {
334 AddRef();
335 }
336
InitEncode(const GMPVideoCodec & codecSettings,const uint8_t * aCodecSpecific,uint32_t aCodecSpecificSize,GMPVideoEncoderCallback * callback,int32_t numberOfCores,uint32_t maxPayloadSize)337 virtual void InitEncode (const GMPVideoCodec& codecSettings,
338 const uint8_t* aCodecSpecific,
339 uint32_t aCodecSpecificSize,
340 GMPVideoEncoderCallback* callback,
341 int32_t numberOfCores,
342 uint32_t maxPayloadSize) {
343 gmp_api_version_ = codecSettings.mGMPApiVersion;
344 callback_ = callback;
345
346 GMPErr err = g_platform_api->createthread (&worker_thread_);
347 if (err != GMPNoErr) {
348 GMPLOG (GL_ERROR, "Couldn't create new thread");
349 Error (GMPGenericErr);
350 return;
351 }
352
353 int rv = WelsCreateSVCEncoder (&encoder_);
354 if (rv) {
355 Error (GMPGenericErr);
356 return;
357 }
358
359 if (gmp_api_version_ >= kGMPVersion34) {
360 uint32_t logLevel = GMPLogLevelToWelsLogLevel(codecSettings.mLogLevel);
361 long rv = encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, &logLevel);
362 if (rv != cmResultSuccess) {
363 GMPLOG (GL_ERROR, "Encoder SetOption OPTION_TRACE_LEVEL failed " << rv);
364 }
365 }
366
367 SEncParamExt param;
368 memset (¶m, 0, sizeof (param));
369 encoder_->GetDefaultParams (¶m);
370
371 GMPLOG (GL_INFO, "Initializing encoder at "
372 << codecSettings.mWidth
373 << "x"
374 << codecSettings.mHeight
375 << "@"
376 << static_cast<int> (codecSettings.mMaxFramerate));
377
378 // Translate parameters.
379 if (gmp_api_version_ >= kGMPVersion35) {
380 param.iUsageType = GMPVideoCodecModeToWelsUsageType(codecSettings.mMode);
381 } else if (codecSettings.mMode == kGMPScreensharing) {
382 param.iUsageType = SCREEN_CONTENT_REAL_TIME;
383 } else {
384 param.iUsageType = CAMERA_VIDEO_REAL_TIME;
385 }
386 param.iPicWidth = codecSettings.mWidth;
387 param.iPicHeight = codecSettings.mHeight;
388 if (gmp_api_version_ >= kGMPVersion35) {
389 param.iRCMode = GMPRateControlModeToWelsRcModes(codecSettings.mRateControlMode);
390 } else {
391 param.iRCMode = RC_BITRATE_MODE;
392 }
393 param.iTargetBitrate = codecSettings.mStartBitrate * 1000;
394 param.iMaxBitrate = codecSettings.mMaxBitrate * 1000;
395 GMPLOG (GL_INFO, "Initializing Bit Rate at: Start: "
396 << codecSettings.mStartBitrate
397 << "; Min: "
398 << codecSettings.mMinBitrate
399 << "; Max: "
400 << codecSettings.mMaxBitrate
401 << "; Max payload size:"
402 << maxPayloadSize);
403
404 param.uiMaxNalSize = maxPayloadSize;
405
406 // TODO(ekr@rtfm.com). Scary conversion from unsigned char to float below.
407 param.fMaxFrameRate = static_cast<float> (codecSettings.mMaxFramerate);
408
409 // Set up layers. Currently we have one layer.
410 SSpatialLayerConfig* layer = ¶m.sSpatialLayers[0];
411
412 // Make sure the output resolution doesn't exceed the Openh264 capability
413 double width_mb = std::ceil(codecSettings.mWidth/16.0);
414 double height_mb = std::ceil(codecSettings.mHeight/16.0);
415 double input_mb = width_mb * height_mb;
416 if (static_cast<uint32_t>(input_mb) > OPENH264_MAX_MB) {
417 double scale = std::sqrt(OPENH264_MAX_MB / input_mb);
418 layer->iVideoWidth = static_cast<uint32_t>(width_mb * 16 * scale);
419 layer->iVideoHeight = static_cast<uint32_t>(height_mb * 16 * scale);
420 GMPLOG (GL_INFO, "InitEncode: the output resolution overflows, w x h = " << codecSettings.mWidth << " x " << codecSettings.mHeight
421 << ", turned to be " << layer->iVideoWidth << " x " << layer->iVideoHeight);
422 } else {
423 layer->iVideoWidth = codecSettings.mWidth;
424 layer->iVideoHeight = codecSettings.mHeight;
425 }
426 if (layer->iVideoWidth < 16) {
427 layer->iVideoWidth = 16;
428 }
429 if (layer->iVideoHeight < 16) {
430 layer->iVideoHeight = 16;
431 }
432
433 layer->fFrameRate = param.fMaxFrameRate;
434 layer->iSpatialBitrate = param.iTargetBitrate;
435 layer->iMaxSpatialBitrate = param.iMaxBitrate;
436
437 if (gmp_api_version_ >= kGMPVersion35) {
438 layer->uiProfileIdc = GMPProfileToWelsProfile(codecSettings.mProfile);
439 layer->uiLevelIdc = GMPLevelToWelsLevel(codecSettings.mLevel);
440 layer->sSliceArgument.uiSliceMode = GMPSliceToWelsSliceMode(codecSettings.mSliceMode);
441 if (codecSettings.mUseThreadedEncode) {
442 param.iMultipleThreadIdc = numberOfCores;
443 } else {
444 param.iMultipleThreadIdc = 1; // disabled
445 }
446 }
447
448 //for controlling the NAL size (normally for packetization-mode=0)
449 if (maxPayloadSize != 0) {
450 if (gmp_api_version_ < kGMPVersion35) {
451 layer->sSliceArgument.uiSliceMode = SM_SIZELIMITED_SLICE;
452 }
453 layer->sSliceArgument.uiSliceSizeConstraint = maxPayloadSize;
454 }
455 rv = encoder_->InitializeExt (¶m);
456 if (rv) {
457 GMPLOG (GL_ERROR, "Couldn't initialize encoder");
458 Error (GMPGenericErr);
459 return;
460 }
461 max_payload_size_ = maxPayloadSize;
462 GMPLOG (GL_INFO, "Initialized encoder");
463 }
464
Encode(GMPVideoi420Frame * inputImage,const uint8_t * aCodecSpecificInfo,uint32_t aCodecSpecificInfoLength,const GMPVideoFrameType * aFrameTypes,uint32_t aFrameTypesLength)465 virtual void Encode (GMPVideoi420Frame* inputImage,
466 const uint8_t* aCodecSpecificInfo,
467 uint32_t aCodecSpecificInfoLength,
468 const GMPVideoFrameType* aFrameTypes,
469 uint32_t aFrameTypesLength) {
470 GMPLOG (GL_DEBUG,
471 __FUNCTION__
472 << " size="
473 << inputImage->Width() << "x" << inputImage->Height());
474
475 stats_.FrameIn();
476
477 assert (aFrameTypesLength != 0);
478
479 worker_thread_->Post (WrapTaskRefCounted (
480 this, &OpenH264VideoEncoder::Encode_w,
481 inputImage,
482 (aFrameTypes)[0]));
483 }
484
SetChannelParameters(uint32_t aPacketLoss,uint32_t aRTT)485 virtual void SetChannelParameters (uint32_t aPacketLoss, uint32_t aRTT) {
486 }
487
SetRates(uint32_t aNewBitRate,uint32_t aFrameRate)488 virtual void SetRates (uint32_t aNewBitRate, uint32_t aFrameRate) {
489 GMPLOG (GL_INFO, "[SetRates] Begin with: "
490 << aNewBitRate << " , " << aFrameRate);
491 //update bitrate if needed
492 const int32_t newBitRate = aNewBitRate * 1000; //kbps->bps
493 SBitrateInfo existEncoderBitRate;
494 existEncoderBitRate.iLayer = SPATIAL_LAYER_ALL;
495 int rv = encoder_->GetOption (ENCODER_OPTION_BITRATE, &existEncoderBitRate);
496 if (rv != cmResultSuccess) {
497 GMPLOG (GL_ERROR, "[SetRates] Error in Getting Bit Rate at Layer:"
498 << rv
499 << " ; Layer = "
500 << existEncoderBitRate.iLayer
501 << " ; BR = "
502 << existEncoderBitRate.iBitrate);
503 Error (GMPGenericErr);
504 return;
505 }
506 if (rv == cmResultSuccess && existEncoderBitRate.iBitrate != newBitRate) {
507 SBitrateInfo newEncoderBitRate;
508 newEncoderBitRate.iLayer = SPATIAL_LAYER_ALL;
509 newEncoderBitRate.iBitrate = newBitRate;
510 rv = encoder_->SetOption (ENCODER_OPTION_BITRATE, &newEncoderBitRate);
511 if (rv == cmResultSuccess) {
512 GMPLOG (GL_INFO, "[SetRates] Update Encoder Bandwidth (AllLayers): ReturnValue: "
513 << rv
514 << "BitRate(kbps): "
515 << aNewBitRate);
516 } else {
517 GMPLOG (GL_ERROR, "[SetRates] Error in Setting Bit Rate at Layer:"
518 << rv
519 << " ; Layer = "
520 << newEncoderBitRate.iLayer
521 << " ; BR = "
522 << newEncoderBitRate.iBitrate);
523 Error (GMPGenericErr);
524 return;
525 }
526 }
527 //update framerate if needed
528 float existFrameRate = 0;
529 rv = encoder_->GetOption (ENCODER_OPTION_FRAME_RATE, &existFrameRate);
530 if (rv != cmResultSuccess) {
531 GMPLOG (GL_ERROR, "[SetRates] Error in Getting Frame Rate:"
532 << rv << " FrameRate: " << existFrameRate);
533 Error (GMPGenericErr);
534 return;
535 }
536 if (rv == cmResultSuccess &&
537 (aFrameRate - existFrameRate > 0.001f ||
538 existFrameRate - aFrameRate > 0.001f)) {
539 float newFrameRate = static_cast<float> (aFrameRate);
540 rv = encoder_->SetOption (ENCODER_OPTION_FRAME_RATE, &newFrameRate);
541 if (rv == cmResultSuccess) {
542 GMPLOG (GL_INFO, "[SetRates] Update Encoder Frame Rate: ReturnValue: "
543 << rv << " FrameRate: " << aFrameRate);
544 } else {
545 GMPLOG (GL_ERROR, "[SetRates] Error in Setting Frame Rate: ReturnValue: "
546 << rv << " FrameRate: " << aFrameRate);
547 Error (GMPGenericErr);
548 return;
549 }
550 }
551 }
552
SetPeriodicKeyFrames(bool aEnable)553 virtual void SetPeriodicKeyFrames (bool aEnable) {
554 }
555
EncodingComplete()556 virtual void EncodingComplete() {
557 shutting_down = true;
558
559 // Release the reference to the external objects, because it is no longer safe to call them
560 host_ = nullptr;
561 callback_ = nullptr;
562 TearDownEncoder();
563
564 Release();
565 }
566
567 private:
~OpenH264VideoEncoder()568 virtual ~OpenH264VideoEncoder() {
569 // Tear down the internal encoder in case of EncodingComplete() not being called
570 TearDownEncoder();
571 }
572
TearDownEncoder()573 void TearDownEncoder() {
574 // Stop the worker thread first
575 if (worker_thread_) {
576 worker_thread_->Join();
577 worker_thread_ = nullptr;
578 }
579
580 // Destroy OpenH264 encoder
581 if (encoder_) {
582 WelsDestroySVCEncoder(encoder_);
583 encoder_ = nullptr;
584 }
585 }
586
TrySyncRunOnMainThread(GMPTask * aTask)587 void TrySyncRunOnMainThread(GMPTask* aTask) {
588 if (!shutting_down && g_platform_api) {
589 g_platform_api->syncrunonmainthread (aTask);
590 }
591 }
592
Error(GMPErr error)593 void Error (GMPErr error) {
594 if (callback_) {
595 callback_->Error (error);
596 }
597 }
598
Encode_w(GMPVideoi420Frame * inputImage,GMPVideoFrameType frame_type)599 void Encode_w (GMPVideoi420Frame* inputImage,
600 GMPVideoFrameType frame_type) {
601 SFrameBSInfo encoded;
602
603 if (frame_type == kGMPKeyFrame) {
604 encoder_->ForceIntraFrame (true);
605 if (!inputImage)
606 return;
607 }
608 if (!inputImage) {
609 GMPLOG (GL_ERROR, "no input image");
610 return;
611 }
612 SSourcePicture src;
613
614 src.iColorFormat = videoFormatI420;
615 src.iStride[0] = inputImage->Stride (kGMPYPlane);
616 src.pData[0] = reinterpret_cast<unsigned char*> (
617 const_cast<uint8_t*> (inputImage->Buffer (kGMPYPlane)));
618 src.iStride[1] = inputImage->Stride (kGMPUPlane);
619 src.pData[1] = reinterpret_cast<unsigned char*> (
620 const_cast<uint8_t*> (inputImage->Buffer (kGMPUPlane)));
621 src.iStride[2] = inputImage->Stride (kGMPVPlane);
622 src.pData[2] = reinterpret_cast<unsigned char*> (
623 const_cast<uint8_t*> (inputImage->Buffer (kGMPVPlane)));
624 src.iStride[3] = 0;
625 src.pData[3] = nullptr;
626 src.iPicWidth = inputImage->Width();
627 src.iPicHeight = inputImage->Height();
628 src.uiTimeStamp = inputImage->Timestamp() / 1000; //encoder needs millisecond
629 const SSourcePicture* pics = &src;
630
631 int result = encoder_->EncodeFrame (pics, &encoded);
632 if (result != cmResultSuccess) {
633 GMPLOG (GL_ERROR, "Couldn't encode frame. Error = " << result);
634 }
635
636
637 // Translate int to enum
638 GMPVideoFrameType encoded_type;
639 bool has_frame = false;
640
641 switch (encoded.eFrameType) {
642 case videoFrameTypeIDR:
643 encoded_type = kGMPKeyFrame;
644 has_frame = true;
645 break;
646 case videoFrameTypeI:
647 encoded_type = kGMPKeyFrame;
648 has_frame = true;
649 break;
650 case videoFrameTypeP:
651 encoded_type = kGMPDeltaFrame;
652 has_frame = true;
653 break;
654 case videoFrameTypeSkip:
655 // Can skip the call back since no actual bitstream will be generated
656 break;
657 case videoFrameTypeIPMixed://this type is currently not suppported
658 case videoFrameTypeInvalid:
659 GMPLOG (GL_ERROR, "Couldn't encode frame. Type = "
660 << encoded.eFrameType);
661 break;
662 default:
663 // The API is defined as returning a type.
664 assert (false);
665 break;
666 }
667
668 if (!has_frame) {
669 // This frame must be destroyed on the main thread.
670 TrySyncRunOnMainThread (WrapTask (
671 this,
672 &OpenH264VideoEncoder::DestroyInputFrame_m,
673 inputImage));
674 return;
675 }
676
677 // Synchronously send this back to the main thread for delivery.
678 TrySyncRunOnMainThread (WrapTask (
679 this,
680 &OpenH264VideoEncoder::Encode_m,
681 inputImage,
682 &encoded,
683 encoded_type));
684 }
685
Encode_m(GMPVideoi420Frame * frame,SFrameBSInfo * encoded,GMPVideoFrameType frame_type)686 void Encode_m (GMPVideoi420Frame* frame, SFrameBSInfo* encoded,
687 GMPVideoFrameType frame_type) {
688 // Attach a self-destructor so that this dies on return.
689 SelfDestruct<GMPVideoi420Frame> ifd (frame);
690
691 if (!host_) {
692 return;
693 }
694
695 // Now return the encoded data back to the parent.
696 GMPVideoFrame* ftmp;
697 GMPErr err = host_->CreateFrame (kGMPEncodedVideoFrame, &ftmp);
698 if (err != GMPNoErr) {
699 GMPLOG (GL_ERROR, "Error creating encoded frame");
700 return;
701 }
702
703 GMPVideoEncodedFrame* f = static_cast<GMPVideoEncodedFrame*> (ftmp);
704 // Buffer up the data.
705 uint32_t length = 0;
706 std::vector<uint32_t> lengths;
707
708 for (int i = 0; i < encoded->iLayerNum; ++i) {
709 lengths.push_back (0);
710 uint8_t* tmp = encoded->sLayerInfo[i].pBsBuf;
711 for (int j = 0; j < encoded->sLayerInfo[i].iNalCount; ++j) {
712 lengths[i] += encoded->sLayerInfo[i].pNalLengthInByte[j];
713 // Convert from 4-byte start codes to GMP_BufferLength32 (NAL lengths)
714 assert (* (reinterpret_cast<uint32_t*> (tmp)) == 0x01000000);
715 // BufferType32 doesn't include the length of the length itself!
716 * (reinterpret_cast<uint32_t*> (tmp)) = encoded->sLayerInfo[i].pNalLengthInByte[j] - sizeof (uint32_t);
717 length += encoded->sLayerInfo[i].pNalLengthInByte[j];
718 tmp += encoded->sLayerInfo[i].pNalLengthInByte[j];
719 }
720 }
721
722 err = f->CreateEmptyFrame (length);
723 if (err != GMPNoErr) {
724 GMPLOG (GL_ERROR, "Error allocating frame data");
725 f->Destroy();
726 return;
727 }
728
729 // Copy the data.
730 // Here we concatenate into one big buffer
731 uint8_t* tmp = f->Buffer();
732 for (int i = 0; i < encoded->iLayerNum; ++i) {
733 memcpy (tmp, encoded->sLayerInfo[i].pBsBuf, lengths[i]);
734 tmp += lengths[i];
735 }
736
737 f->SetEncodedWidth (frame->Width());
738 f->SetEncodedHeight (frame->Height());
739 f->SetTimeStamp (frame->Timestamp());
740 f->SetFrameType (frame_type);
741 f->SetCompleteFrame (true);
742 f->SetBufferType (GMP_BufferLength32);
743
744 GMPLOG (GL_DEBUG, "Encoding complete. type= "
745 << f->FrameType()
746 << " length="
747 << f->Size()
748 << " timestamp="
749 << f->TimeStamp());
750
751 // Return the encoded frame.
752 GMPCodecSpecificInfo info;
753 memset (&info, 0, sizeof (info)); // shouldn't be needed, we init everything
754 info.mCodecType = kGMPVideoCodecH264;
755 info.mBufferType = GMP_BufferLength32;
756 info.mCodecSpecific.mH264.mSimulcastIdx = 0;
757
758 if (callback_) {
759 callback_->Encoded (f, reinterpret_cast<uint8_t*> (&info), sizeof (info));
760 }
761
762 stats_.FrameOut();
763 }
764
765 // These frames must be destroyed on the main thread.
DestroyInputFrame_m(GMPVideoi420Frame * frame)766 void DestroyInputFrame_m (GMPVideoi420Frame* frame) {
767 frame->Destroy();
768 }
769
770
771 private:
772 GMPVideoHost* host_;
773 GMPThread* worker_thread_;
774 ISVCEncoder* encoder_;
775 uint32_t max_payload_size_;
776 GMPVideoEncoderCallback* callback_;
777 FrameStats stats_;
778 uint32_t gmp_api_version_;
779 bool shutting_down;
780 };
781
// Read a big-endian (network order) 16-bit unsigned integer from |in|.
uint16_t readU16BE(const uint8_t* in) {
  const uint16_t hi = in[0];
  const uint16_t lo = in[1];
  return (hi << 8) | lo;
}
785
// Append a 4-byte Annex B start code (00 00 00 01) followed by |size| bytes
// from |in| onto the end of |out|.
void copyWithStartCode(std::vector<uint8_t>& out, const uint8_t* in, size_t size) {
  static const uint8_t kAnnexBStartCode[] = { 0x00, 0x00, 0x00, 0x01 };
  out.insert(out.end(),
             kAnnexBStartCode,
             kAnnexBStartCode + sizeof(kAnnexBStartCode));
  out.insert(out.end(), in, in + size);
}
791
792 class OpenH264VideoDecoder : public GMPVideoDecoder, public RefCounted {
793 public:
  // hostAPI must remain valid until DecodingComplete() is called.  Takes an
  // initial self-reference (AddRef) which DecodingComplete() releases.
  OpenH264VideoDecoder (GMPVideoHost* hostAPI) :
    host_ (hostAPI),
    worker_thread_ (nullptr),
    callback_ (nullptr),
    decoder_ (nullptr),
    stats_ ("Decoder"),
    gmp_api_version_ (kGMPVersion33),
    shutting_down(false) {
    AddRef();
  }
804
InitDecode(const GMPVideoCodec & codecSettings,const uint8_t * aCodecSpecific,uint32_t aCodecSpecificSize,GMPVideoDecoderCallback * callback,int32_t coreCount)805 virtual void InitDecode (const GMPVideoCodec& codecSettings,
806 const uint8_t* aCodecSpecific,
807 uint32_t aCodecSpecificSize,
808 GMPVideoDecoderCallback* callback,
809 int32_t coreCount) {
810 gmp_api_version_ = codecSettings.mGMPApiVersion;
811 callback_ = callback;
812
813 GMPLOG (GL_INFO, "InitDecode");
814
815 GMPErr err = g_platform_api->createthread (&worker_thread_);
816 if (err != GMPNoErr) {
817 GMPLOG (GL_ERROR, "Couldn't create new thread");
818 Error (GMPGenericErr);
819 return;
820 }
821
822 if (WelsCreateDecoder (&decoder_)) {
823 GMPLOG (GL_ERROR, "Couldn't create decoder");
824 Error (GMPGenericErr);
825 return;
826 }
827
828 if (!decoder_) {
829 GMPLOG (GL_ERROR, "Couldn't create decoder");
830 Error (GMPGenericErr);
831 return;
832 }
833
834 if (gmp_api_version_ >= kGMPVersion34) {
835 if (codecSettings.mUseThreadedDecode) {
836 long rv = decoder_->SetOption(DECODER_OPTION_NUM_OF_THREADS, &coreCount);
837 if (rv != cmResultSuccess) {
838 GMPLOG (GL_ERROR, "Decoder SetOption NUM_OF_THREADS failed " << rv);
839 }
840 }
841
842 uint32_t logLevel = GMPLogLevelToWelsLogLevel(codecSettings.mLogLevel);
843 long rv = decoder_->SetOption(DECODER_OPTION_TRACE_LEVEL, &logLevel);
844 if (rv != cmResultSuccess) {
845 GMPLOG (GL_ERROR, "Decoder SetOption OPTION_TRACE_LEVEL failed " << rv);
846 }
847 }
848
849 SDecodingParam param;
850 memset (¶m, 0, sizeof (param));
851 param.uiTargetDqLayer = UCHAR_MAX; // Default value
852 param.eEcActiveIdc = ERROR_CON_SLICE_MV_COPY_CROSS_IDR_FREEZE_RES_CHANGE; // Error concealment on.
853 param.sVideoProperty.size = sizeof(param.sVideoProperty);
854 param.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;
855
856 if (decoder_->Initialize (¶m)) {
857 GMPLOG (GL_ERROR, "Couldn't initialize decoder");
858 Error (GMPGenericErr);
859 return;
860 }
861
862 if (aCodecSpecific && aCodecSpecificSize >= sizeof(GMPVideoCodecH264)) {
863 std::vector<uint8_t> annexb;
864
865 // Convert the AVCC data, starting at the byte containing
866 // numOfSequenceParameterSets, to Annex B format.
867 const uint8_t* avcc = aCodecSpecific + offsetof(GMPVideoCodecH264, mAVCC.mNumSPS);
868
869 static const int kSPSMask = (1 << 5) - 1;
870 uint8_t spsCount = *avcc++ & kSPSMask;
871 for (int i = 0; i < spsCount; ++i) {
872 size_t size = readU16BE(avcc);
873 avcc += 2;
874 copyWithStartCode(annexb, avcc, size);
875 avcc += size;
876 }
877
878 uint8_t ppsCount = *avcc++;
879 for (int i = 0; i < ppsCount; ++i) {
880 size_t size = readU16BE(avcc);
881 avcc += 2;
882 copyWithStartCode(annexb, avcc, size);
883 avcc += size;
884 }
885
886 SBufferInfo decoded;
887 memset (&decoded, 0, sizeof (decoded));
888 unsigned char* data[3] = {nullptr, nullptr, nullptr};
889 DECODING_STATE dState = decoder_->DecodeFrame2 (&*annexb.begin(),
890 annexb.size(),
891 data,
892 &decoded);
893 if (dState) {
894 GMPLOG (GL_ERROR, "Decoding error dState=" << dState);
895 }
896 GMPLOG (GL_ERROR, "InitDecode iBufferStatus=" << decoded.iBufferStatus);
897 }
898 }
899
  // Convert an incoming GMP_BufferLength32 (length-prefixed) frame to Annex
  // B start codes in place, then post it to the worker thread for decoding.
  // Other buffer framings are not supported yet (assert).
  virtual void Decode (GMPVideoEncodedFrame* inputFrame,
                       bool missingFrames,
                       const uint8_t* aCodecSpecificInfo,
                       uint32_t aCodecSpecificInfoLength,
                       int64_t renderTimeMs = -1) {
    GMPLOG (GL_DEBUG, __FUNCTION__
            << "Decoding frame size=" << inputFrame->Size()
            << " timestamp=" << inputFrame->TimeStamp());
    stats_.FrameIn();
    //const GMPCodecSpecificInfo *codecSpecificInfo = (GMPCodecSpecificInfo) aCodecSpecificInfo;

    // Convert to H.264 start codes
    switch (inputFrame->BufferType()) {
      case GMP_BufferSingle:
      case GMP_BufferLength8:
      case GMP_BufferLength16:
      case GMP_BufferLength24:
        // We should look to support these, especially GMP_BufferSingle
        assert (false);
        break;

      case GMP_BufferLength32: {
        uint8_t* start_code = inputFrame->Buffer();
        // start code should be at least four bytes from the end or we risk
        // reading/writing outside the buffer.
        while (start_code < inputFrame->Buffer() + inputFrame->Size() - 4) {
          static const uint8_t code[] = { 0x00, 0x00, 0x00, 0x01 };
          uint8_t* lenp = start_code;
          // The 32-bit field's value is used as the offset from this field
          // to the next one, and the field itself is then overwritten with
          // the 4-byte Annex B start code.
          // NOTE(review): assumes the host's GMP_BufferLength32 framing
          // makes the field value equal that distance — confirm against the
          // host's packetization.
          start_code += * (reinterpret_cast<int32_t*> (lenp));
          memcpy (lenp, code, 4);
        }
      }
      break;

      default:
        assert (false);
        break;
    }
    // NOTE(review): dState is handed to an asynchronously posted task, so
    // the check below runs before (or racing with) Decode_w and appears to
    // only ever observe the initial dsErrorFree value; decode errors are
    // actually reported from Decode_m.
    DECODING_STATE dState = dsErrorFree;
    worker_thread_->Post (WrapTaskRefCounted (
                            this, &OpenH264VideoDecoder::Decode_w,
                            inputFrame,
                            missingFrames,
                            dState,
                            renderTimeMs));
    if (dState) {
      Error (GMPGenericErr);
    }
  }
949
Reset()950 virtual void Reset() {
951 if (gmp_api_version_ >= kGMPVersion34) {
952 worker_thread_->Post (WrapTaskRefCounted (
953 this, &OpenH264VideoDecoder::Reset_w));
954 } else if (callback_) {
955 callback_->ResetComplete ();
956 }
957 }
958
Drain()959 virtual void Drain() {
960 if (callback_) {
961 callback_->DrainComplete ();
962 }
963 }
964
  // Called by the host when it is done with this decoder.  Must be the last
  // call: sets the shutdown flag (so no further main-thread dispatch
  // happens), drops external references, tears down the worker/decoder, and
  // releases the self-reference taken in the constructor (which typically
  // destroys this object).
  virtual void DecodingComplete() {
    shutting_down = true;

    // Release the reference to the external objects, because it is no longer safe to call them
    host_ = nullptr;
    callback_ = nullptr;
    TearDownDecoder();

    Release();
  }
975
976 private:
  // Safety net: tears down the internal decoder in case DecodingComplete()
  // was never called (TearDownDecoder() is idempotent).
  virtual ~OpenH264VideoDecoder() {
    // Tear down the internal decoder in case of DecodingComplete() not being called
    TearDownDecoder();
  }
981
  // Stop the worker thread, then destroy the OpenH264 decoder.  The join
  // must come first so no in-flight decode task can touch decoder_ after it
  // is freed.  Idempotent: called from both DecodingComplete() and the
  // destructor.
  void TearDownDecoder() {
    // Stop the worker thread first
    if (worker_thread_) {
      worker_thread_->Join();
      worker_thread_ = nullptr;
    }

    // Destroy OpenH264 decoder
    if (decoder_) {
      WelsDestroyDecoder(decoder_);
      decoder_ = nullptr;
    }
  }
995
TrySyncRunOnMainThread(GMPTask * aTask)996 void TrySyncRunOnMainThread(GMPTask* aTask) {
997 if (!shutting_down && g_platform_api) {
998 g_platform_api->syncrunonmainthread (aTask);
999 }
1000 }
1001
Error(GMPErr error)1002 void Error (GMPErr error) {
1003 if (callback_) {
1004 callback_->Error (error);
1005 }
1006 }
1007
  // Worker-thread half of Decode(): run the OpenH264 decoder on the frame
  // and synchronously marshal the result (decoded planes or failure) back
  // to the main thread via Decode_m.  |decoded| and |data| live on this
  // stack frame, so the hand-off must be synchronous.
  void Decode_w (GMPVideoEncodedFrame* inputFrame,
                 bool missingFrames,
                 DECODING_STATE& dState,
                 int64_t renderTimeMs = -1) {
    GMPLOG (GL_DEBUG, "Frame decode on worker thread length = "
            << inputFrame->Size());

    SBufferInfo decoded;
    bool valid = false;
    memset (&decoded, 0, sizeof (decoded));
    // Newer hosts want the input timestamp threaded through the decoder.
    if (gmp_api_version_ >= kGMPVersion34) {
      decoded.uiInBsTimeStamp = inputFrame->TimeStamp();
    }
    unsigned char* data[3] = {nullptr, nullptr, nullptr};

    dState = decoder_->DecodeFrameNoDelay (inputFrame->Buffer(),
                                           inputFrame->Size(),
                                           data,
                                           &decoded);

    if (dState) {
      GMPLOG (GL_ERROR, "Decoding error dState=" << dState);
    } else {
      valid = true;
    }

    // Deliver (or report the failure) on the main thread, where GMP frame
    // objects must be created/destroyed.
    TrySyncRunOnMainThread (WrapTask (
                              this,
                              &OpenH264VideoDecoder::Decode_m,
                              inputFrame,
                              &decoded,
                              data,
                              renderTimeMs,
                              valid));
  }
1043
1044 // Return the decoded data back to the parent.
Decode_m(GMPVideoEncodedFrame * inputFrame,SBufferInfo * decoded,unsigned char * data[3],int64_t renderTimeMs,bool valid)1045 void Decode_m (GMPVideoEncodedFrame* inputFrame,
1046 SBufferInfo* decoded,
1047 unsigned char* data[3],
1048 int64_t renderTimeMs,
1049 bool valid) {
1050 // Attach a self-destructor so that this dies on return.
1051 SelfDestruct<GMPVideoEncodedFrame> ifd (inputFrame);
1052
1053 // If we don't actually have data, just abort.
1054 if (!valid) {
1055 GMPLOG (GL_ERROR, "No valid data decoded");
1056 Error (GMPDecodeErr);
1057 return;
1058 }
1059
1060 if (decoded->iBufferStatus != 1) {
1061 GMPLOG (GL_ERROR, "iBufferStatus=" << decoded->iBufferStatus);
1062 if (callback_) {
1063 callback_->InputDataExhausted();
1064 }
1065 return;
1066 }
1067
1068 int width = decoded->UsrData.sSystemBuffer.iWidth;
1069 int height = decoded->UsrData.sSystemBuffer.iHeight;
1070 int ystride = decoded->UsrData.sSystemBuffer.iStride[0];
1071 int uvstride = decoded->UsrData.sSystemBuffer.iStride[1];
1072
1073 GMPLOG (GL_DEBUG, "Video frame ready for display "
1074 << width
1075 << "x"
1076 << height
1077 << " timestamp="
1078 << inputFrame->TimeStamp());
1079
1080 GMPVideoFrame* ftmp = nullptr;
1081
1082 if (!host_) {
1083 return;
1084 }
1085
1086 // Translate the image.
1087 GMPErr err = host_->CreateFrame (kGMPI420VideoFrame, &ftmp);
1088 if (err != GMPNoErr) {
1089 GMPLOG (GL_ERROR, "Couldn't allocate empty I420 frame");
1090 return;
1091 }
1092
1093
1094 GMPVideoi420Frame* frame = static_cast<GMPVideoi420Frame*> (ftmp);
1095 err = frame->CreateFrame (
1096 ystride * height, static_cast<uint8_t*> (data[0]),
1097 uvstride * height / 2, static_cast<uint8_t*> (data[1]),
1098 uvstride * height / 2, static_cast<uint8_t*> (data[2]),
1099 width, height,
1100 ystride, uvstride, uvstride);
1101 if (err != GMPNoErr) {
1102 GMPLOG (GL_ERROR, "Couldn't make decoded frame");
1103 return;
1104 }
1105
1106 GMPLOG (GL_DEBUG, "Allocated size = "
1107 << frame->AllocatedSize (kGMPYPlane));
1108 frame->SetTimestamp (inputFrame->TimeStamp());
1109 if (gmp_api_version_ >= kGMPVersion34) {
1110 frame->SetUpdatedTimestamp (decoded->uiOutYuvTimeStamp);
1111 }
1112 frame->SetDuration (inputFrame->Duration());
1113 if (callback_) {
1114 callback_->Decoded (frame);
1115 }
1116
1117 stats_.FrameOut();
1118 }
1119
  // Worker-thread half of Reset(): tells OpenH264 the stream has ended,
  // drains (and intentionally discards) any frames still buffered inside
  // the decoder, then signals completion on the main thread via Reset_m.
  void Reset_w () {
    // Mark end-of-stream so the decoder will flush its reorder buffer.
    int eos = 1;
    long rv = decoder_->SetOption(DECODER_OPTION_END_OF_STREAM, &eos);
    if (rv != cmResultSuccess) {
      GMPLOG (GL_ERROR, "Decoder SetOption END_OF_STREAM failed " << rv);
    }

    DECODING_STATE dState;
    SBufferInfo decoded;
    unsigned char* data[3];
    // Keep flushing while the decoder reports a picture ready
    // (iBufferStatus == 1); stop on the first flush error.
    do {
      memset (&decoded, 0, sizeof (decoded));
      memset (data, 0, sizeof (data));
      dState = decoder_->FlushFrame(data, &decoded);
      if (dState) {
        GMPLOG (GL_ERROR, "Flush error dState=" << dState);
        break;
      }
    } while (decoded.iBufferStatus == 1);

    TrySyncRunOnMainThread (WrapTask (
                              this,
                              &OpenH264VideoDecoder::Reset_m));
  }
1144
Reset_m()1145 void Reset_m () {
1146 if (callback_) {
1147 callback_->ResetComplete ();
1148 }
1149 }
1150
  GMPVideoHost* host_;                 // Host API; nulled in DecodingComplete()
  GMPThread* worker_thread_;           // Runs Decode_w/Reset_w off the main thread
  GMPVideoDecoderCallback* callback_;  // Receives decoded frames/errors; nulled at shutdown
  ISVCDecoder* decoder_;               // OpenH264 decoder, destroyed in TearDownDecoder()
  FrameStats stats_;                   // Frame-output statistics
  uint32_t gmp_api_version_;           // Host GMP API version (gates kGMPVersion34 paths)
  bool shutting_down;                  // Set in DecodingComplete(); gates main-thread dispatch
};
1159
1160 extern "C" {
1161
1162 PUBLIC_FUNC GMPErr
GMPInit(GMPPlatformAPI * aPlatformAPI)1163 GMPInit (GMPPlatformAPI* aPlatformAPI) {
1164 g_platform_api = aPlatformAPI;
1165 return GMPNoErr;
1166 }
1167
1168 PUBLIC_FUNC GMPErr
GMPGetAPI(const char * aApiName,void * aHostAPI,void ** aPluginApi)1169 GMPGetAPI (const char* aApiName, void* aHostAPI, void** aPluginApi) {
1170 if (!strcmp (aApiName, "decode-video")) {
1171 *aPluginApi = new OpenH264VideoDecoder (static_cast<GMPVideoHost*> (aHostAPI));
1172 return GMPNoErr;
1173 } else if (!strcmp (aApiName, "encode-video")) {
1174 *aPluginApi = new OpenH264VideoEncoder (static_cast<GMPVideoHost*> (aHostAPI));
1175 return GMPNoErr;
1176 }
1177 return GMPGenericErr;
1178 }
1179
1180 PUBLIC_FUNC void
GMPShutdown(void)1181 GMPShutdown (void) {
1182 g_platform_api = nullptr;
1183 }
1184
1185 } // extern "C"
1186