/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_coding/generic_encoder.h"

#include <vector>

#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_coding/encoded_frame.h"
#include "webrtc/modules/video_coding/media_optimization.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"

namespace webrtc {
namespace {
// Map codec-specific information from |info| into |rtp|. Codec types for
// which |info| carries no relevant information leave |rtp| unmodified.
void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader* rtp) {
  RTC_DCHECK(info);
  switch (info->codecType) {
    case kVideoCodecVP8: {
      rtp->codec = kRtpVideoVp8;
      rtp->codecHeader.VP8.InitRTPVideoHeaderVP8();
      rtp->codecHeader.VP8.pictureId = info->codecSpecific.VP8.pictureId;
      rtp->codecHeader.VP8.nonReference = info->codecSpecific.VP8.nonReference;
      rtp->codecHeader.VP8.temporalIdx = info->codecSpecific.VP8.temporalIdx;
      rtp->codecHeader.VP8.layerSync = info->codecSpecific.VP8.layerSync;
      rtp->codecHeader.VP8.tl0PicIdx = info->codecSpecific.VP8.tl0PicIdx;
      rtp->codecHeader.VP8.keyIdx = info->codecSpecific.VP8.keyIdx;
      rtp->simulcastIdx = info->codecSpecific.VP8.simulcastIdx;
      return;
    }
    case kVideoCodecVP9: {
      rtp->codec = kRtpVideoVp9;
      rtp->codecHeader.VP9.InitRTPVideoHeaderVP9();
      rtp->codecHeader.VP9.inter_pic_predicted =
          info->codecSpecific.VP9.inter_pic_predicted;
      rtp->codecHeader.VP9.flexible_mode =
          info->codecSpecific.VP9.flexible_mode;
      rtp->codecHeader.VP9.ss_data_available =
          info->codecSpecific.VP9.ss_data_available;
      rtp->codecHeader.VP9.picture_id = info->codecSpecific.VP9.picture_id;
      rtp->codecHeader.VP9.tl0_pic_idx = info->codecSpecific.VP9.tl0_pic_idx;
      rtp->codecHeader.VP9.temporal_idx = info->codecSpecific.VP9.temporal_idx;
      rtp->codecHeader.VP9.spatial_idx = info->codecSpecific.VP9.spatial_idx;
      rtp->codecHeader.VP9.temporal_up_switch =
          info->codecSpecific.VP9.temporal_up_switch;
      rtp->codecHeader.VP9.inter_layer_predicted =
          info->codecSpecific.VP9.inter_layer_predicted;
      rtp->codecHeader.VP9.gof_idx = info->codecSpecific.VP9.gof_idx;
      rtp->codecHeader.VP9.num_spatial_layers =
          info->codecSpecific.VP9.num_spatial_layers;

      if (info->codecSpecific.VP9.ss_data_available) {
        rtp->codecHeader.VP9.spatial_layer_resolution_present =
            info->codecSpecific.VP9.spatial_layer_resolution_present;
        if (info->codecSpecific.VP9.spatial_layer_resolution_present) {
          for (size_t i = 0; i < info->codecSpecific.VP9.num_spatial_layers;
               ++i) {
            rtp->codecHeader.VP9.width[i] = info->codecSpecific.VP9.width[i];
            rtp->codecHeader.VP9.height[i] = info->codecSpecific.VP9.height[i];
          }
        }
        rtp->codecHeader.VP9.gof.CopyGofInfoVP9(info->codecSpecific.VP9.gof);
      }

      rtp->codecHeader.VP9.num_ref_pics = info->codecSpecific.VP9.num_ref_pics;
      for (int i = 0; i < info->codecSpecific.VP9.num_ref_pics; ++i)
        rtp->codecHeader.VP9.pid_diff[i] = info->codecSpecific.VP9.p_diff[i];
      return;
    }
    case kVideoCodecH264:
      rtp->codec = kRtpVideoH264;
      return;
    case kVideoCodecGeneric:
      rtp->codec = kRtpVideoGeneric;
      rtp->simulcastIdx = info->codecSpecific.generic.simulcast_idx;
      return;
    default:
      return;
  }
}
}  // namespace

// #define DEBUG_ENCODER_BIT_STREAM
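// When DEBUG_ENCODER_BIT_STREAM is defined, VCMEncodedFrameCallback also
// writes the encoded bitstream to a local file ("encoderBitStream.bit") for
// offline inspection.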

VCMGenericEncoder::VCMGenericEncoder(
    VideoEncoder* encoder,
    VideoEncoderRateObserver* rate_observer,
    VCMEncodedFrameCallback* encoded_frame_callback,
    bool internalSource)
    : encoder_(encoder),
      rate_observer_(rate_observer),
      vcm_encoded_frame_callback_(encoded_frame_callback),
      internal_source_(internalSource),
      encoder_params_({0, 0, 0, 0}),
      rotation_(kVideoRotation_0),
      is_screenshare_(false) {}

VCMGenericEncoder::~VCMGenericEncoder() {}

int32_t VCMGenericEncoder::Release() {
  return encoder_->Release();
}

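// Initializes the wrapped encoder with |settings| and registers
// |vcm_encoded_frame_callback_| as its encode-complete callback. Returns -1
// if the underlying encoder fails to initialize.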
int32_t VCMGenericEncoder::InitEncode(const VideoCodec* settings,
                                      int32_t numberOfCores,
                                      size_t maxPayloadSize) {
  TRACE_EVENT0("webrtc", "VCMGenericEncoder::InitEncode");
  {
    rtc::CritScope lock(&params_lock_);
    encoder_params_.target_bitrate = settings->startBitrate * 1000;
    encoder_params_.input_frame_rate = settings->maxFramerate;
  }

  is_screenshare_ = settings->mode == VideoCodecMode::kScreensharing;
  if (encoder_->InitEncode(settings, numberOfCores, maxPayloadSize) != 0) {
    LOG(LS_ERROR) << "Failed to initialize the encoder associated with "
                     "payload name: "
                  << settings->plName;
    return -1;
  }
  encoder_->RegisterEncodeCompleteCallback(vcm_encoded_frame_callback_);
  return 0;
}

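// Encodes |inputFrame|, forwarding the frame rotation and the name of the
// encoder implementation to the encoded-frame callback. For screenshare
// content the encode is retried once if the encoder reports a target bitrate
// overshoot.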
int32_t VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
                                  const CodecSpecificInfo* codecSpecificInfo,
                                  const std::vector<FrameType>& frameTypes) {
  TRACE_EVENT1("webrtc", "VCMGenericEncoder::Encode", "timestamp",
               inputFrame.timestamp());

  for (FrameType frame_type : frameTypes)
    RTC_DCHECK(frame_type == kVideoFrameKey || frame_type == kVideoFrameDelta);

  rotation_ = inputFrame.rotation();

  // Keep track of the current frame rotation and apply it to the output of
  // the encoder. This might not be exact, as the encoder could have a
  // one-frame delay, but it should be close enough.
  // TODO(pbos): Map from timestamp, this is racy (even if rotation_ is locked
  // properly, which it isn't). More than one frame may be in the pipeline.
  vcm_encoded_frame_callback_->SetRotation(rotation_);

  int32_t result = encoder_->Encode(inputFrame, codecSpecificInfo, &frameTypes);

  if (vcm_encoded_frame_callback_) {
    vcm_encoded_frame_callback_->SignalLastEncoderImplementationUsed(
        encoder_->ImplementationName());
  }

  if (is_screenshare_ &&
      result == WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT) {
    // Target bitrate exceeded, encoder state has been reset - try again.
    return encoder_->Encode(inputFrame, codecSpecificInfo, &frameTypes);
  }

  return result;
}

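// Stores |params| and forwards them to the encoder, but only calls
// SetChannelParameters()/SetRates() when the corresponding values have
// actually changed since the last update.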
void VCMGenericEncoder::SetEncoderParameters(const EncoderParameters& params) {
  bool channel_parameters_have_changed;
  bool rates_have_changed;
  {
    rtc::CritScope lock(&params_lock_);
    channel_parameters_have_changed =
        params.loss_rate != encoder_params_.loss_rate ||
        params.rtt != encoder_params_.rtt;
    rates_have_changed =
        params.target_bitrate != encoder_params_.target_bitrate ||
        params.input_frame_rate != encoder_params_.input_frame_rate;
    encoder_params_ = params;
  }
  if (channel_parameters_have_changed)
    encoder_->SetChannelParameters(params.loss_rate, params.rtt);
  if (rates_have_changed) {
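    // Convert the target bitrate from bps to kbps, rounding to the nearest
    // kbps, before passing it to the encoder.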
    uint32_t target_bitrate_kbps = (params.target_bitrate + 500) / 1000;
    encoder_->SetRates(target_bitrate_kbps, params.input_frame_rate);
    if (rate_observer_ != nullptr) {
      rate_observer_->OnSetRates(params.target_bitrate,
                                 params.input_frame_rate);
    }
  }
}

EncoderParameters VCMGenericEncoder::GetEncoderParameters() const {
  rtc::CritScope lock(&params_lock_);
  return encoder_params_;
}

int32_t VCMGenericEncoder::SetPeriodicKeyFrames(bool enable) {
  return encoder_->SetPeriodicKeyFrames(enable);
}

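// Requests frames of the given types by calling Encode() with an empty
// VideoFrame; intended for encoders that produce frames from an internal
// source.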
int32_t VCMGenericEncoder::RequestFrame(
    const std::vector<FrameType>& frame_types) {
  VideoFrame image;
  return encoder_->Encode(image, NULL, &frame_types);
}

bool VCMGenericEncoder::InternalSource() const {
  return internal_source_;
}

void VCMGenericEncoder::OnDroppedFrame() {
  encoder_->OnDroppedFrame();
}

bool VCMGenericEncoder::SupportsNativeHandle() const {
  return encoder_->SupportsNativeHandle();
}

int VCMGenericEncoder::GetTargetFramerate() {
  return encoder_->GetTargetFramerate();
}

/***************************
 * Callback Implementation
 ***************************/
VCMEncodedFrameCallback::VCMEncodedFrameCallback(
    EncodedImageCallback* post_encode_callback)
    : send_callback_(),
      _mediaOpt(NULL),
      _payloadType(0),
      _internalSource(false),
      _rotation(kVideoRotation_0),
      post_encode_callback_(post_encode_callback)
#ifdef DEBUG_ENCODER_BIT_STREAM
      ,
      _bitStreamAfterEncoder(NULL)
#endif
{
#ifdef DEBUG_ENCODER_BIT_STREAM
  _bitStreamAfterEncoder = fopen("encoderBitStream.bit", "wb");
#endif
}

VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {
#ifdef DEBUG_ENCODER_BIT_STREAM
  fclose(_bitStreamAfterEncoder);
#endif
}

int32_t VCMEncodedFrameCallback::SetTransportCallback(
    VCMPacketizationCallback* transport) {
  send_callback_ = transport;
  return VCM_OK;
}

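// Called when the encoder has produced an encoded image. The image is
// forwarded to the post-encode callback, an RTP video header is built from
// |codecSpecificInfo|, and the result is handed to the registered send
// callback (if any) and to media optimization.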
int32_t VCMEncodedFrameCallback::Encoded(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codecSpecificInfo,
    const RTPFragmentationHeader* fragmentationHeader) {
  TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded",
                       "timestamp", encoded_image._timeStamp);
  post_encode_callback_->Encoded(encoded_image, NULL, NULL);

  if (send_callback_ == NULL) {
    return VCM_UNINITIALIZED;
  }

#ifdef DEBUG_ENCODER_BIT_STREAM
  if (_bitStreamAfterEncoder != NULL) {
    fwrite(encoded_image._buffer, 1, encoded_image._length,
           _bitStreamAfterEncoder);
  }
#endif

  RTPVideoHeader rtpVideoHeader;
  memset(&rtpVideoHeader, 0, sizeof(RTPVideoHeader));
  RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
  if (codecSpecificInfo) {
    CopyCodecSpecific(codecSpecificInfo, rtpVideoHeaderPtr);
  }
  rtpVideoHeader.rotation = _rotation;

  int32_t callbackReturn = send_callback_->SendData(
      _payloadType, encoded_image, *fragmentationHeader, rtpVideoHeaderPtr);
  if (callbackReturn < 0) {
    return callbackReturn;
  }

  if (_mediaOpt != NULL) {
    _mediaOpt->UpdateWithEncodedData(encoded_image);
    if (_internalSource)
      return _mediaOpt->DropFrame();  // Signal to encoder to drop next frame.
  }
  return VCM_OK;
}

void VCMEncodedFrameCallback::SetMediaOpt(
    media_optimization::MediaOptimization* mediaOpt) {
  _mediaOpt = mediaOpt;
}

void VCMEncodedFrameCallback::SignalLastEncoderImplementationUsed(
    const char* implementation_name) {
  if (send_callback_)
    send_callback_->OnEncoderImplementationName(implementation_name);
}

}  // namespace webrtc