/*!
 * \copy
 *     Copyright (c)  2009-2014, Cisco Systems
 *     All rights reserved.
 *
 *     Redistribution and use in source and binary forms, with or without
 *     modification, are permitted provided that the following conditions
 *     are met:
 *
 *        * Redistributions of source code must retain the above copyright
 *          notice, this list of conditions and the following disclaimer.
 *
 *        * Redistributions in binary form must reproduce the above copyright
 *          notice, this list of conditions and the following disclaimer in
 *          the documentation and/or other materials provided with the
 *          distribution.
 *
 *     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *     "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *     LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 *     FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 *     COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 *     INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 *     BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 *     LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 *     CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 *     LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 *     ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 *     POSSIBILITY OF SUCH DAMAGE.
 *
 *
 *************************************************************************************
 */

#include <stdint.h>
#include <time.h>
#include <cmath>
#include <cstdio>
#include <cstring>
#include <iostream>
#include <string>
#include <memory>
#include <assert.h>
#include <limits.h>

#include "gmp-platform.h"
#include "gmp-video-host.h"
#include "gmp-video-encode.h"
#include "gmp-video-decode.h"
#include "gmp-video-frame-i420.h"
#include "gmp-video-frame-encoded.h"

#include "codec_def.h"
#include "codec_app_def.h"
#include "codec_api.h"

#if defined(_MSC_VER)
#define PUBLIC_FUNC __declspec(dllexport)
#else
#define PUBLIC_FUNC
#endif
// Compatibility shim for older compilers that do not support nullptr.
#if defined(nullptr)
# define GMP_HAVE_NULLPTR

#elif defined(__clang__)
# ifndef __has_extension
# define __has_extension __has_feature
# endif

# if __has_extension(cxx_nullptr)
# define GMP_HAVE_NULLPTR
# endif

#elif defined(__GNUC__)
# if defined(__GXX_EXPERIMENTAL_CXX0X__) || __cplusplus >= 201103L
# if (__GNUC__ >= 4)
# if (__GNUC_MINOR__ >= 6)
# define GMP_HAVE_NULLPTR
# endif
# endif
# endif

#elif defined(_MSC_VER)
# define GMP_HAVE_NULLPTR
#endif

#if !defined (GMP_HAVE_NULLPTR)
# define nullptr __null
#endif

#include "task_utils.h"

static int g_log_level = 0;

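// GMPLOG writes a message to stderr whenever the requested level is at or
// below g_log_level; the numeric level is mapped to a label via kLogStrings.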
#define GMPLOG(l, x) do { \
    if (l <= g_log_level) { \
      const char* log_string = "unknown"; \
      if ((l >= 0) && (l <= 3)) { \
        log_string = kLogStrings[l]; \
      } \
      std::cerr << log_string << ": " << x << std::endl; \
    } \
  } while (0)

#define GL_CRIT 0
#define GL_ERROR 1
#define GL_INFO  2
#define GL_DEBUG 3

const char* kLogStrings[] = {
  "Critical",
  "Error",
  "Info",
  "Debug"
};

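// Upper bound on macroblocks per frame that this plugin will configure the
// encoder for; 36864 matches the H.264 level 5.1/5.2 MaxFS (e.g. 4096x2304).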
#define OPENH264_MAX_MB 36864

GMPPlatformAPI* g_platform_api = nullptr;

class OpenH264VideoEncoder;

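// Small RAII helper: calls Destroy() on the wrapped object when it goes out
// of scope, unless forget() has been called to release ownership first.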
template <typename T> class SelfDestruct {
 public:
  SelfDestruct (T* t) : t_ (t) {}
  ~SelfDestruct() {
    if (t_) {
      t_->Destroy();
    }
  }

  T* forget() {
    T* t = t_;
    t_ = nullptr;

    return t;
  }

 private:
  T* t_;
};

class FrameStats {
 public:
  FrameStats (const char* type) :
    frames_in_ (0),
    frames_out_ (0),
    start_time_ (time (0)),
    last_time_ (start_time_),
    type_ (type) {}

  void FrameIn() {
    ++frames_in_;
    time_t now = time (0);

    if (now == last_time_) {
      return;
    }

    if (! (frames_in_ % 10)) {
      GMPLOG (GL_INFO, type_ << ": " << now << " Frame count "
              << frames_in_
              << "(" << (frames_in_ / (now - start_time_)) << "/"
              << (30 / (now - last_time_)) << ")"
              << " -- " << frames_out_);
      last_time_ = now;
    }
  }

  void FrameOut() {
    ++frames_out_;
  }

 private:
  uint64_t frames_in_;
  uint64_t frames_out_;
  time_t start_time_;
  time_t last_time_;
  const std::string type_;
};

class OpenH264VideoEncoder : public GMPVideoEncoder, public RefCounted {
 public:
  OpenH264VideoEncoder (GMPVideoHost* hostAPI) :
    host_ (hostAPI),
    worker_thread_ (nullptr),
    encoder_ (nullptr),
    max_payload_size_ (0),
    callback_ (nullptr),
    stats_ ("Encoder"),
    shutting_down (false) {
      AddRef();
    }

  virtual void InitEncode (const GMPVideoCodec& codecSettings,
                           const uint8_t* aCodecSpecific,
                           uint32_t aCodecSpecificSize,
                           GMPVideoEncoderCallback* callback,
                           int32_t numberOfCores,
                           uint32_t maxPayloadSize) {
    callback_ = callback;

    GMPErr err = g_platform_api->createthread (&worker_thread_);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't create new thread");
      Error (GMPGenericErr);
      return;
    }

    int rv = WelsCreateSVCEncoder (&encoder_);
    if (rv) {
      Error (GMPGenericErr);
      return;
    }
    SEncParamExt param;
    memset (&param, 0, sizeof (param));
    encoder_->GetDefaultParams (&param);

    GMPLOG (GL_INFO, "Initializing encoder at "
            << codecSettings.mWidth
            << "x"
            << codecSettings.mHeight
            << "@"
            << static_cast<int> (codecSettings.mMaxFramerate));

    // Translate parameters.
    param.iUsageType = CAMERA_VIDEO_REAL_TIME;
    if (codecSettings.mMode == kGMPScreensharing)
      param.iUsageType = SCREEN_CONTENT_REAL_TIME;
    param.iPicWidth = codecSettings.mWidth;
    param.iPicHeight = codecSettings.mHeight;
    param.iRCMode = RC_BITRATE_MODE;
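    // GMP bitrates are expressed in kbps; OpenH264 expects bps, hence the
    // * 1000 conversions below.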
    param.iTargetBitrate = codecSettings.mStartBitrate * 1000;
    param.iMaxBitrate = codecSettings.mMaxBitrate * 1000;
    GMPLOG (GL_INFO, "Initializing Bit Rate at: Start: "
            << codecSettings.mStartBitrate
            << "; Min: "
            << codecSettings.mMinBitrate
            << "; Max: "
            << codecSettings.mMaxBitrate
            << "; Max payload size: "
            << maxPayloadSize);

    param.uiMaxNalSize = maxPayloadSize;

    // TODO(ekr@rtfm.com). Scary conversion from unsigned char to float below.
    param.fMaxFrameRate = static_cast<float> (codecSettings.mMaxFramerate);

    // Set up layers. Currently we have one layer.
    SSpatialLayerConfig* layer = &param.sSpatialLayers[0];

    // Make sure the output resolution doesn't exceed the OpenH264 capability.
    double width_mb = std::ceil(codecSettings.mWidth/16.0);
    double height_mb = std::ceil(codecSettings.mHeight/16.0);
    double input_mb = width_mb * height_mb;
    if (static_cast<uint32_t>(input_mb) > OPENH264_MAX_MB) {
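      // Scale both dimensions by sqrt(max/actual) so the aspect ratio is
      // preserved while the macroblock count drops to just under the cap.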
      double scale = std::sqrt(OPENH264_MAX_MB / input_mb);
      layer->iVideoWidth = static_cast<uint32_t>(width_mb * 16 * scale);
      layer->iVideoHeight = static_cast<uint32_t>(height_mb * 16 * scale);
      GMPLOG (GL_INFO, "InitEncode: requested resolution " << codecSettings.mWidth << " x " << codecSettings.mHeight
              << " exceeds the encoder limit, scaled down to " << layer->iVideoWidth << " x " << layer->iVideoHeight);
    } else {
      layer->iVideoWidth = codecSettings.mWidth;
      layer->iVideoHeight = codecSettings.mHeight;
    }
    if (layer->iVideoWidth < 16) {
      layer->iVideoWidth = 16;
    }
    if (layer->iVideoHeight < 16) {
      layer->iVideoHeight = 16;
    }

    layer->fFrameRate = param.fMaxFrameRate;
    layer->iSpatialBitrate = param.iTargetBitrate;
    layer->iMaxSpatialBitrate = param.iMaxBitrate;

    // For controlling the NAL size (normally for packetization-mode=0).
    if (maxPayloadSize != 0) {
      layer->sSliceArgument.uiSliceMode = SM_SIZELIMITED_SLICE;
      layer->sSliceArgument.uiSliceSizeConstraint = maxPayloadSize;
    }
    rv = encoder_->InitializeExt (&param);
    if (rv) {
      GMPLOG (GL_ERROR, "Couldn't initialize encoder");
      Error (GMPGenericErr);
      return;
    }
    max_payload_size_ = maxPayloadSize;
    GMPLOG (GL_INFO, "Initialized encoder");
  }

  virtual void Encode (GMPVideoi420Frame* inputImage,
                       const uint8_t* aCodecSpecificInfo,
                       uint32_t aCodecSpecificInfoLength,
                       const GMPVideoFrameType* aFrameTypes,
                       uint32_t aFrameTypesLength) {
    GMPLOG (GL_DEBUG,
            __FUNCTION__
            << " size="
            << inputImage->Width() << "x" << inputImage->Height());

    stats_.FrameIn();

    assert (aFrameTypesLength != 0);

    worker_thread_->Post (WrapTaskRefCounted (
                            this, &OpenH264VideoEncoder::Encode_w,
                            inputImage,
                            (aFrameTypes)[0]));
  }

  virtual void SetChannelParameters (uint32_t aPacketLoss, uint32_t aRTT) {
  }

  virtual void SetRates (uint32_t aNewBitRate, uint32_t aFrameRate) {
    GMPLOG (GL_INFO, "[SetRates] Begin with: "
            << aNewBitRate << " , " << aFrameRate);
    // Update the bitrate if needed.
    const int32_t newBitRate = aNewBitRate * 1000; // kbps -> bps
    SBitrateInfo existEncoderBitRate;
    existEncoderBitRate.iLayer = SPATIAL_LAYER_ALL;
    int rv = encoder_->GetOption (ENCODER_OPTION_BITRATE, &existEncoderBitRate);
    if (rv != cmResultSuccess) {
      GMPLOG (GL_ERROR, "[SetRates] Error in Getting Bit Rate at Layer:"
              << rv
              << " ; Layer = "
              << existEncoderBitRate.iLayer
              << " ; BR = "
              << existEncoderBitRate.iBitrate);
      Error (GMPGenericErr);
      return;
    }
    if (rv == cmResultSuccess && existEncoderBitRate.iBitrate != newBitRate) {
      SBitrateInfo newEncoderBitRate;
      newEncoderBitRate.iLayer = SPATIAL_LAYER_ALL;
      newEncoderBitRate.iBitrate = newBitRate;
      rv = encoder_->SetOption (ENCODER_OPTION_BITRATE, &newEncoderBitRate);
      if (rv == cmResultSuccess) {
        GMPLOG (GL_INFO, "[SetRates] Update Encoder Bandwidth (AllLayers): ReturnValue: "
                << rv
                << " BitRate(kbps): "
                << aNewBitRate);
      } else {
        GMPLOG (GL_ERROR, "[SetRates] Error in Setting Bit Rate at Layer:"
                << rv
                << " ; Layer = "
                << newEncoderBitRate.iLayer
                << " ; BR = "
                << newEncoderBitRate.iBitrate);
        Error (GMPGenericErr);
        return;
      }
    }
    // Update the framerate if needed.
    float existFrameRate = 0;
    rv = encoder_->GetOption (ENCODER_OPTION_FRAME_RATE, &existFrameRate);
    if (rv != cmResultSuccess) {
      GMPLOG (GL_ERROR, "[SetRates] Error in Getting Frame Rate:"
              << rv << " FrameRate: " << existFrameRate);
      Error (GMPGenericErr);
      return;
    }
    if (rv == cmResultSuccess &&
        (aFrameRate - existFrameRate > 0.001f ||
         existFrameRate - aFrameRate > 0.001f)) {
      float newFrameRate = static_cast<float> (aFrameRate);
      rv = encoder_->SetOption (ENCODER_OPTION_FRAME_RATE, &newFrameRate);
      if (rv == cmResultSuccess) {
        GMPLOG (GL_INFO, "[SetRates] Update Encoder Frame Rate: ReturnValue: "
                << rv << " FrameRate: " << aFrameRate);
      } else {
        GMPLOG (GL_ERROR, "[SetRates] Error in Setting Frame Rate: ReturnValue: "
                << rv << " FrameRate: " << aFrameRate);
        Error (GMPGenericErr);
        return;
      }
    }
  }

  virtual void SetPeriodicKeyFrames (bool aEnable) {
  }

  virtual void EncodingComplete() {
    shutting_down = true;

    // Release the references to the external objects; it is no longer safe to call them.
    host_     = nullptr;
    callback_ = nullptr;
    TearDownEncoder();

    Release();
  }

 private:
  virtual ~OpenH264VideoEncoder() {
    // Tear down the internal encoder in case EncodingComplete() was not called.
    TearDownEncoder();
  }

  void TearDownEncoder() {
    // Stop the worker thread first.
    if (worker_thread_) {
      worker_thread_->Join();
      worker_thread_ = nullptr;
    }

    // Destroy the OpenH264 encoder.
    if (encoder_) {
      WelsDestroySVCEncoder (encoder_);
      encoder_ = nullptr;
    }
  }

  void TrySyncRunOnMainThread (GMPTask* aTask) {
    if (!shutting_down && g_platform_api) {
      g_platform_api->syncrunonmainthread (aTask);
    }
  }

  void Error (GMPErr error) {
    if (callback_) {
      callback_->Error (error);
    }
  }

  void Encode_w (GMPVideoi420Frame* inputImage,
                 GMPVideoFrameType frame_type) {
    SFrameBSInfo encoded;

    if (frame_type == kGMPKeyFrame) {
      encoder_->ForceIntraFrame (true);
      if (!inputImage)
        return;
    }
    if (!inputImage) {
      GMPLOG (GL_ERROR, "no input image");
      return;
    }
    SSourcePicture src;

    src.iColorFormat = videoFormatI420;
    src.iStride[0] = inputImage->Stride (kGMPYPlane);
    src.pData[0] = reinterpret_cast<unsigned char*> (
                     const_cast<uint8_t*> (inputImage->Buffer (kGMPYPlane)));
    src.iStride[1] = inputImage->Stride (kGMPUPlane);
    src.pData[1] = reinterpret_cast<unsigned char*> (
                     const_cast<uint8_t*> (inputImage->Buffer (kGMPUPlane)));
    src.iStride[2] = inputImage->Stride (kGMPVPlane);
    src.pData[2] = reinterpret_cast<unsigned char*> (
                     const_cast<uint8_t*> (inputImage->Buffer (kGMPVPlane)));
    src.iStride[3] = 0;
    src.pData[3] = nullptr;
    src.iPicWidth = inputImage->Width();
    src.iPicHeight = inputImage->Height();
    src.uiTimeStamp = inputImage->Timestamp() / 1000; // The encoder needs milliseconds.
    const SSourcePicture* pics = &src;

    int result = encoder_->EncodeFrame (pics, &encoded);
    if (result != cmResultSuccess) {
      GMPLOG (GL_ERROR, "Couldn't encode frame. Error = " << result);
    }


    // Translate int to enum
    GMPVideoFrameType encoded_type;
    bool has_frame = false;

    switch (encoded.eFrameType) {
    case videoFrameTypeIDR:
      encoded_type = kGMPKeyFrame;
      has_frame = true;
      break;
    case videoFrameTypeI:
      encoded_type = kGMPKeyFrame;
      has_frame = true;
      break;
    case videoFrameTypeP:
      encoded_type = kGMPDeltaFrame;
      has_frame = true;
      break;
    case videoFrameTypeSkip:
      // We can skip the callback since no actual bitstream was generated.
      break;
    case videoFrameTypeIPMixed: // This type is currently not supported.
    case videoFrameTypeInvalid:
      GMPLOG (GL_ERROR, "Couldn't encode frame. Type = "
              << encoded.eFrameType);
      break;
    default:
      // The API is defined as returning a type.
      assert (false);
      break;
    }

    if (!has_frame) {
      // This frame must be destroyed on the main thread.
      TrySyncRunOnMainThread (WrapTask (
                                   this,
                                   &OpenH264VideoEncoder::DestroyInputFrame_m,
                                   inputImage));
      return;
    }

    // Synchronously send this back to the main thread for delivery.
    TrySyncRunOnMainThread (WrapTask (
                                   this,
                                   &OpenH264VideoEncoder::Encode_m,
                                   inputImage,
                                   &encoded,
                                   encoded_type));
  }

  void Encode_m (GMPVideoi420Frame* frame, SFrameBSInfo* encoded,
                 GMPVideoFrameType frame_type) {
    // Attach a self-destructor so that this dies on return.
    SelfDestruct<GMPVideoi420Frame> ifd (frame);

    if (!host_) {
      return;
    }

    // Now return the encoded data back to the parent.
    GMPVideoFrame* ftmp;
    GMPErr err = host_->CreateFrame (kGMPEncodedVideoFrame, &ftmp);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Error creating encoded frame");
      return;
    }

    GMPVideoEncodedFrame* f = static_cast<GMPVideoEncodedFrame*> (ftmp);
    // Buffer up the data.
    uint32_t length = 0;
    std::vector<uint32_t> lengths;

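    // OpenH264 emits Annex B output (each NAL prefixed by a 4-byte start
    // code). Rewrite each start code in place with the NAL's length so the
    // concatenated buffer matches the GMP_BufferLength32 framing.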
    for (int i = 0; i < encoded->iLayerNum; ++i) {
      lengths.push_back (0);
      uint8_t* tmp = encoded->sLayerInfo[i].pBsBuf;
      for (int j = 0; j < encoded->sLayerInfo[i].iNalCount; ++j) {
        lengths[i] += encoded->sLayerInfo[i].pNalLengthInByte[j];
        // Convert from 4-byte start codes to GMP_BufferLength32 (NAL lengths)
        assert (* (reinterpret_cast<uint32_t*> (tmp)) == 0x01000000);
        // BufferType32 doesn't include the length of the length itself!
        * (reinterpret_cast<uint32_t*> (tmp)) = encoded->sLayerInfo[i].pNalLengthInByte[j] - sizeof (uint32_t);
        length += encoded->sLayerInfo[i].pNalLengthInByte[j];
        tmp += encoded->sLayerInfo[i].pNalLengthInByte[j];
      }
    }

    err = f->CreateEmptyFrame (length);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Error allocating frame data");
      f->Destroy();
      return;
    }

    // Copy the data.
    // Here we concatenate into one big buffer
    uint8_t* tmp = f->Buffer();
    for (int i = 0; i < encoded->iLayerNum; ++i) {
      memcpy (tmp, encoded->sLayerInfo[i].pBsBuf, lengths[i]);
      tmp += lengths[i];
    }

    f->SetEncodedWidth (frame->Width());
    f->SetEncodedHeight (frame->Height());
    f->SetTimeStamp (frame->Timestamp());
    f->SetFrameType (frame_type);
    f->SetCompleteFrame (true);
    f->SetBufferType (GMP_BufferLength32);

    GMPLOG (GL_DEBUG, "Encoding complete. type= "
            << f->FrameType()
            << " length="
            << f->Size()
            << " timestamp="
            << f->TimeStamp());

    // Return the encoded frame.
    GMPCodecSpecificInfo info;
    memset (&info, 0, sizeof (info)); // shouldn't be needed, we init everything
    info.mCodecType = kGMPVideoCodecH264;
    info.mBufferType = GMP_BufferLength32;
    info.mCodecSpecific.mH264.mSimulcastIdx = 0;

    if (callback_) {
      callback_->Encoded (f, reinterpret_cast<uint8_t*> (&info), sizeof (info));
    }

    stats_.FrameOut();
  }

  // These frames must be destroyed on the main thread.
  void DestroyInputFrame_m (GMPVideoi420Frame* frame) {
    frame->Destroy();
  }


 private:
  GMPVideoHost* host_;
  GMPThread* worker_thread_;
  ISVCEncoder* encoder_;
  uint32_t max_payload_size_;
  GMPVideoEncoderCallback* callback_;
  FrameStats stats_;
  bool shutting_down;
};

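// Helpers for converting AVCC-style parameter sets (big-endian, length-
// prefixed) into Annex B (start-code-delimited) form for the decoder.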
uint16_t readU16BE(const uint8_t* in) {
  return in[0] << 8 | in[1];
}

void copyWithStartCode(std::vector<uint8_t>& out, const uint8_t* in, size_t size) {
  static const uint8_t code[] = { 0x00, 0x00, 0x00, 0x01 };
  out.insert(out.end(), code, code + sizeof(code));
  out.insert(out.end(), in, in + size);
}

class OpenH264VideoDecoder : public GMPVideoDecoder, public RefCounted {
 public:
  OpenH264VideoDecoder (GMPVideoHost* hostAPI) :
    host_ (hostAPI),
    worker_thread_ (nullptr),
    callback_ (nullptr),
    decoder_ (nullptr),
    stats_ ("Decoder"),
    shutting_down (false) {
      AddRef();
    }

  virtual void InitDecode (const GMPVideoCodec& codecSettings,
                           const uint8_t* aCodecSpecific,
                           uint32_t aCodecSpecificSize,
                           GMPVideoDecoderCallback* callback,
                           int32_t coreCount) {
    callback_ = callback;

    GMPLOG (GL_INFO, "InitDecode");

    GMPErr err = g_platform_api->createthread (&worker_thread_);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't create new thread");
      Error (GMPGenericErr);
      return;
    }

    if (WelsCreateDecoder (&decoder_)) {
      GMPLOG (GL_ERROR, "Couldn't create decoder");
      Error (GMPGenericErr);
      return;
    }

    if (!decoder_) {
      GMPLOG (GL_ERROR, "Couldn't create decoder");
      Error (GMPGenericErr);
      return;
    }

    SDecodingParam param;
    memset (&param, 0, sizeof (param));
    param.uiTargetDqLayer = UCHAR_MAX;  // Default value
    param.eEcActiveIdc = ERROR_CON_SLICE_MV_COPY_CROSS_IDR_FREEZE_RES_CHANGE; // Error concealment on.
    param.sVideoProperty.size = sizeof(param.sVideoProperty);
    param.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;

    if (decoder_->Initialize (&param)) {
      GMPLOG (GL_ERROR, "Couldn't initialize decoder");
      Error (GMPGenericErr);
      return;
    }

    if (aCodecSpecific && aCodecSpecificSize >= sizeof(GMPVideoCodecH264)) {
      std::vector<uint8_t> annexb;

      // Convert the AVCC data, starting at the byte containing
      // numOfSequenceParameterSets, to Annex B format.
      const uint8_t* avcc = aCodecSpecific + offsetof(GMPVideoCodecH264, mAVCC.mNumSPS);

      static const int kSPSMask = (1 << 5) - 1;
      uint8_t spsCount = *avcc++ & kSPSMask;
      for (int i = 0; i < spsCount; ++i) {
        size_t size = readU16BE(avcc);
        avcc += 2;
        copyWithStartCode(annexb, avcc, size);
        avcc += size;
      }

      uint8_t ppsCount = *avcc++;
      for (int i = 0; i < ppsCount; ++i) {
        size_t size = readU16BE(avcc);
        avcc += 2;
        copyWithStartCode(annexb, avcc, size);
        avcc += size;
      }

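      // Prime the decoder with the parameter sets so that frames arriving
      // later through Decode() can reference them immediately.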
      SBufferInfo decoded;
      memset (&decoded, 0, sizeof (decoded));
      unsigned char* data[3] = {nullptr, nullptr, nullptr};
      DECODING_STATE dState = decoder_->DecodeFrame2 (&*annexb.begin(),
                                                      annexb.size(),
                                                      data,
                                                      &decoded);
      if (dState) {
        GMPLOG (GL_ERROR, "Decoding error dState=" << dState);
      }
      GMPLOG (GL_ERROR, "InitDecode iBufferStatus=" << decoded.iBufferStatus);
    }
  }

  virtual void Decode (GMPVideoEncodedFrame* inputFrame,
                       bool missingFrames,
                       const uint8_t* aCodecSpecificInfo,
                       uint32_t aCodecSpecificInfoLength,
                       int64_t renderTimeMs = -1) {
    GMPLOG (GL_DEBUG, __FUNCTION__
            << " Decoding frame size=" << inputFrame->Size()
            << " timestamp=" << inputFrame->TimeStamp());
    stats_.FrameIn();
    //const GMPCodecSpecificInfo *codecSpecificInfo = (GMPCodecSpecificInfo) aCodecSpecificInfo;

    // Convert to H.264 start codes.
    switch (inputFrame->BufferType()) {
    case GMP_BufferSingle:
    case GMP_BufferLength8:
    case GMP_BufferLength16:
    case GMP_BufferLength24:
      // We should look to support these, especially GMP_BufferSingle.
      assert (false);
      break;

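    // The host hands us GMP_BufferLength32 data: each NAL is preceded by a
    // host-endian 32-bit length that does not include the length field
    // itself. Rewrite those prefixes into Annex B start codes in place.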
    case GMP_BufferLength32: {
      uint8_t* start_code = inputFrame->Buffer();
      // The start code should be at least four bytes from the end or we risk
      // reading/writing outside the buffer.
      while (start_code < inputFrame->Buffer() + inputFrame->Size() - 4) {
        static const uint8_t code[] = { 0x00, 0x00, 0x00, 0x01 };
        uint8_t* lenp = start_code;
        // Step over the length prefix and the NAL payload it describes.
        start_code += * (reinterpret_cast<int32_t*> (lenp)) + sizeof (uint32_t);
        memcpy (lenp, code, 4);
      }
    }
    break;

    default:
      assert (false);
      break;
    }
    DECODING_STATE dState = dsErrorFree;
    worker_thread_->Post (WrapTaskRefCounted (
                            this, &OpenH264VideoDecoder::Decode_w,
                            inputFrame,
                            missingFrames,
                            dState,
                            renderTimeMs));
    if (dState) {
      Error (GMPGenericErr);
    }
  }

  virtual void Reset() {
    if (callback_) {
      callback_->ResetComplete ();
    }
  }

  virtual void Drain() {
    if (callback_) {
      callback_->DrainComplete ();
    }
  }

  virtual void DecodingComplete() {
    shutting_down = true;

    // Release the references to the external objects; it is no longer safe to call them.
    host_     = nullptr;
    callback_ = nullptr;
    TearDownDecoder();

    Release();
  }

 private:
  virtual ~OpenH264VideoDecoder() {
    // Tear down the internal decoder in case DecodingComplete() was not called.
    TearDownDecoder();
  }

  void TearDownDecoder() {
    // Stop the worker thread first.
    if (worker_thread_) {
      worker_thread_->Join();
      worker_thread_ = nullptr;
    }

    // Destroy the OpenH264 decoder.
    if (decoder_) {
      WelsDestroyDecoder (decoder_);
      decoder_ = nullptr;
    }
  }

  void TrySyncRunOnMainThread (GMPTask* aTask) {
    if (!shutting_down && g_platform_api) {
      g_platform_api->syncrunonmainthread (aTask);
    }
  }

  void Error (GMPErr error) {
    if (callback_) {
      callback_->Error (error);
    }
  }

  void Decode_w (GMPVideoEncodedFrame* inputFrame,
                 bool missingFrames,
                 DECODING_STATE& dState,
                 int64_t renderTimeMs = -1) {
    GMPLOG (GL_DEBUG, "Frame decode on worker thread length = "
            << inputFrame->Size());

    SBufferInfo decoded;
    bool valid = false;
    memset (&decoded, 0, sizeof (decoded));
    unsigned char* data[3] = {nullptr, nullptr, nullptr};

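    // data[] receives pointers into the decoder's internal picture planes;
    // the picture geometry (width/height/strides) is reported via decoded.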
    dState = decoder_->DecodeFrameNoDelay (inputFrame->Buffer(),
                                           inputFrame->Size(),
                                           data,
                                           &decoded);

    if (dState) {
      GMPLOG (GL_ERROR, "Decoding error dState=" << dState);
    } else {
      valid = true;
    }

    TrySyncRunOnMainThread (WrapTask (
                                 this,
                                 &OpenH264VideoDecoder::Decode_m,
                                 inputFrame,
                                 &decoded,
                                 data,
                                 renderTimeMs,
                                 valid));
  }

  // Return the decoded data back to the parent.
  void Decode_m (GMPVideoEncodedFrame* inputFrame,
                 SBufferInfo* decoded,
                 unsigned char* data[3],
                 int64_t renderTimeMs,
                 bool valid) {
    // Attach a self-destructor so that this dies on return.
    SelfDestruct<GMPVideoEncodedFrame> ifd (inputFrame);

    // If we don't actually have data, just abort.
    if (!valid) {
      GMPLOG (GL_ERROR, "No valid data decoded");
      Error (GMPDecodeErr);
      return;
    }

    if (decoded->iBufferStatus != 1) {
      GMPLOG (GL_ERROR, "iBufferStatus=" << decoded->iBufferStatus);
      if (callback_) {
        callback_->InputDataExhausted();
      }
      return;
    }

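    // The decoder reports an I420 picture: iStride[0] applies to the Y plane
    // and iStride[1] to both chroma planes, each of which is half the height.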
    int width = decoded->UsrData.sSystemBuffer.iWidth;
    int height = decoded->UsrData.sSystemBuffer.iHeight;
    int ystride = decoded->UsrData.sSystemBuffer.iStride[0];
    int uvstride = decoded->UsrData.sSystemBuffer.iStride[1];

    GMPLOG (GL_DEBUG, "Video frame ready for display "
            << width
            << "x"
            << height
            << " timestamp="
            << inputFrame->TimeStamp());

    GMPVideoFrame* ftmp = nullptr;

    if (!host_) {
      return;
    }

    // Translate the image.
    GMPErr err = host_->CreateFrame (kGMPI420VideoFrame, &ftmp);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't allocate empty I420 frame");
      return;
    }


    GMPVideoi420Frame* frame = static_cast<GMPVideoi420Frame*> (ftmp);
    err = frame->CreateFrame (
            ystride * height, static_cast<uint8_t*> (data[0]),
            uvstride * height / 2, static_cast<uint8_t*> (data[1]),
            uvstride * height / 2, static_cast<uint8_t*> (data[2]),
            width, height,
            ystride, uvstride, uvstride);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't make decoded frame");
      return;
    }

    GMPLOG (GL_DEBUG, "Allocated size = "
            << frame->AllocatedSize (kGMPYPlane));
    frame->SetTimestamp (inputFrame->TimeStamp());
    frame->SetDuration (inputFrame->Duration());
    if (callback_) {
      callback_->Decoded (frame);
    }

    stats_.FrameOut();
  }

  GMPVideoHost* host_;
  GMPThread* worker_thread_;
  GMPVideoDecoderCallback* callback_;
  ISVCDecoder* decoder_;
  FrameStats stats_;
  bool shutting_down;
};

extern "C" {

  PUBLIC_FUNC GMPErr
  GMPInit (GMPPlatformAPI* aPlatformAPI) {
    g_platform_api = aPlatformAPI;
    return GMPNoErr;
  }

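  // The codec objects AddRef() themselves in their constructors and Release()
  // themselves from EncodingComplete()/DecodingComplete(), so ownership is
  // handed straight over to the host here.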
  PUBLIC_FUNC GMPErr
  GMPGetAPI (const char* aApiName, void* aHostAPI, void** aPluginApi) {
    if (!strcmp (aApiName, "decode-video")) {
      *aPluginApi = new OpenH264VideoDecoder (static_cast<GMPVideoHost*> (aHostAPI));
      return GMPNoErr;
    } else if (!strcmp (aApiName, "encode-video")) {
      *aPluginApi = new OpenH264VideoEncoder (static_cast<GMPVideoHost*> (aHostAPI));
      return GMPNoErr;
    }
    return GMPGenericErr;
  }

  PUBLIC_FUNC void
  GMPShutdown (void) {
    g_platform_api = nullptr;
  }

} // extern "C"