/*
 *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h"

#include <algorithm>

// NOTE(ajm): Path provided by gyp.
#include "libyuv/scale.h"  // NOLINT

#include "webrtc/base/checks.h"
#include "webrtc/common.h"
#include "webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h"

namespace {

const unsigned int kDefaultMinQp = 2;
const unsigned int kDefaultMaxQp = 56;
// Max qp for lowest spatial resolution when doing simulcast.
const unsigned int kLowestResMaxQp = 45;

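// Sums targetBitrate over the first |streams| simulcast streams (the
// lowest-resolution streams come first in |codec.simulcastStream|).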
uint32_t SumStreamTargetBitrate(int streams, const webrtc::VideoCodec& codec) {
  uint32_t bitrate_sum = 0;
  for (int i = 0; i < streams; ++i) {
    bitrate_sum += codec.simulcastStream[i].targetBitrate;
  }
  return bitrate_sum;
}

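// Sums maxBitrate over the first |streams| simulcast streams.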
uint32_t SumStreamMaxBitrate(int streams, const webrtc::VideoCodec& codec) {
  uint32_t bitrate_sum = 0;
  for (int i = 0; i < streams; ++i) {
    bitrate_sum += codec.simulcastStream[i].maxBitrate;
  }
  return bitrate_sum;
}

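// Returns the number of simulcast streams to encode. Falls back to a single
// stream when no per-stream max bitrates are configured (their sum is zero).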
int NumberOfStreams(const webrtc::VideoCodec& codec) {
  int streams =
      codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
  uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec);
  if (simulcast_max_bitrate == 0) {
    streams = 1;
  }
  return streams;
}

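// A simulcast configuration is valid only if the highest stream matches the
// codec resolution and every stream preserves the codec's aspect ratio.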
bool ValidSimulcastResolutions(const webrtc::VideoCodec& codec,
                               int num_streams) {
  if (codec.width != codec.simulcastStream[num_streams - 1].width ||
      codec.height != codec.simulcastStream[num_streams - 1].height) {
    return false;
  }
  for (int i = 0; i < num_streams; ++i) {
    if (codec.width * codec.simulcastStream[i].height !=
        codec.height * codec.simulcastStream[i].width) {
      return false;
    }
  }
  return true;
}

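// Sanity checks on the codec settings before any encoders are created; returns
// WEBRTC_VIDEO_CODEC_OK or WEBRTC_VIDEO_CODEC_ERR_PARAMETER.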
int VerifyCodec(const webrtc::VideoCodec* inst) {
  if (inst == NULL) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->maxFramerate < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Allow zero to represent an unspecified maxBitrate.
  if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->width <= 1 || inst->height <= 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->codecSpecific.VP8.feedbackModeOn &&
      inst->numberOfSimulcastStreams > 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->codecSpecific.VP8.automaticResizeOn &&
      inst->numberOfSimulcastStreams > 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

// TL1 FrameDropper's max time to drop frames.
const float kTl1MaxTimeToDropFrames = 20.0f;

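// TemporalLayers::Factory that creates ScreenshareLayers instances, used by
// the single-stream screensharing mode in InitEncode() below.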
struct ScreenshareTemporalLayersFactory : webrtc::TemporalLayers::Factory {
  ScreenshareTemporalLayersFactory()
      : tl1_frame_dropper_(kTl1MaxTimeToDropFrames) {}

  virtual ~ScreenshareTemporalLayersFactory() {}

  virtual webrtc::TemporalLayers* Create(int num_temporal_layers,
                                         uint8_t initial_tl0_pic_idx) const {
    return new webrtc::ScreenshareLayers(num_temporal_layers, rand());
  }

  mutable webrtc::FrameDropper tl0_frame_dropper_;
  mutable webrtc::FrameDropper tl1_frame_dropper_;
};

// An EncodedImageCallback implementation that forwards on calls to a
// SimulcastEncoderAdapter, but with the stream index it's registered with as
// the first parameter to Encoded.
class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback {
 public:
  AdapterEncodedImageCallback(webrtc::SimulcastEncoderAdapter* adapter,
                              size_t stream_idx)
      : adapter_(adapter), stream_idx_(stream_idx) {}

  int32_t Encoded(
      const webrtc::EncodedImage& encodedImage,
      const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL,
      const webrtc::RTPFragmentationHeader* fragmentation = NULL) override {
    return adapter_->Encoded(stream_idx_, encodedImage, codecSpecificInfo,
                             fragmentation);
  }

 private:
  webrtc::SimulcastEncoderAdapter* const adapter_;
  const size_t stream_idx_;
};

}  // namespace

namespace webrtc {

SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory)
    : factory_(factory), encoded_complete_callback_(NULL) {
  memset(&codec_, 0, sizeof(webrtc::VideoCodec));
}

SimulcastEncoderAdapter::~SimulcastEncoderAdapter() {
  Release();
}

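// Destroys all per-stream encoders and their forwarding callbacks.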
int SimulcastEncoderAdapter::Release() {
  // TODO(pbos): Keep the last encoder instance but call ::Release() on it, then
  // re-use this instance in ::InitEncode(). This means that changing
  // resolutions doesn't require reallocation of the first encoder, but only
  // reinitialization, which makes sense. Then Destroy this instance instead in
  // ~SimulcastEncoderAdapter().
  while (!streaminfos_.empty()) {
    VideoEncoder* encoder = streaminfos_.back().encoder;
    EncodedImageCallback* callback = streaminfos_.back().callback;
    factory_->Destroy(encoder);
    delete callback;
    streaminfos_.pop_back();
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

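// Validates the configuration, then creates and initializes one encoder per
// simulcast stream (or a single encoder when simulcast is not used).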
int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst,
                                        int number_of_cores,
                                        size_t max_payload_size) {
  if (number_of_cores < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  int ret = VerifyCodec(inst);
  if (ret < 0) {
    return ret;
  }

  ret = Release();
  if (ret < 0) {
    return ret;
  }

  int number_of_streams = NumberOfStreams(*inst);
  bool doing_simulcast = (number_of_streams > 1);

  if (doing_simulcast && !ValidSimulcastResolutions(*inst, number_of_streams)) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  codec_ = *inst;

  // Special mode when screensharing on a single stream.
  if (number_of_streams == 1 && inst->mode == kScreensharing) {
    screensharing_extra_options_.reset(new Config());
    screensharing_extra_options_->Set<TemporalLayers::Factory>(
        new ScreenshareTemporalLayersFactory());
    codec_.extra_options = screensharing_extra_options_.get();
  }

  // Create |number_of_streams| encoder instances and initialize them.
  for (int i = 0; i < number_of_streams; ++i) {
    VideoCodec stream_codec;
    bool send_stream = true;
    if (!doing_simulcast) {
      stream_codec = codec_;
      stream_codec.numberOfSimulcastStreams = 1;
    } else {
      bool highest_resolution_stream = (i == (number_of_streams - 1));
      PopulateStreamCodec(&codec_, i, number_of_streams,
                          highest_resolution_stream, &stream_codec,
                          &send_stream);
    }

    // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl.
    if (stream_codec.qpMax < kDefaultMinQp) {
      stream_codec.qpMax = kDefaultMaxQp;
    }

    VideoEncoder* encoder = factory_->Create();
    ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size);
    if (ret < 0) {
      Release();
      return ret;
    }
    EncodedImageCallback* callback = new AdapterEncodedImageCallback(this, i);
    encoder->RegisterEncodeCompleteCallback(callback);
    streaminfos_.push_back(StreamInfo(encoder, callback, stream_codec.width,
                                      stream_codec.height, send_stream));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

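// Encodes |input_image| on every active stream, scaling it down to a stream's
// resolution when it does not match the input.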
int SimulcastEncoderAdapter::Encode(
    const VideoFrame& input_image,
    const CodecSpecificInfo* codec_specific_info,
    const std::vector<FrameType>* frame_types) {
  if (!Initialized()) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (encoded_complete_callback_ == NULL) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // All active streams should generate a key frame if
  // a key frame is requested by any stream.
  bool send_key_frame = false;
  if (frame_types) {
    for (size_t i = 0; i < frame_types->size(); ++i) {
      if (frame_types->at(i) == kVideoFrameKey) {
        send_key_frame = true;
        break;
      }
    }
  }
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    if (streaminfos_[stream_idx].key_frame_request &&
        streaminfos_[stream_idx].send_stream) {
      send_key_frame = true;
      break;
    }
  }

  int src_width = input_image.width();
  int src_height = input_image.height();
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    // Don't encode frames in resolutions that we don't intend to send.
    if (!streaminfos_[stream_idx].send_stream)
      continue;

    std::vector<FrameType> stream_frame_types;
    if (send_key_frame) {
      stream_frame_types.push_back(kVideoFrameKey);
      streaminfos_[stream_idx].key_frame_request = false;
    } else {
      stream_frame_types.push_back(kVideoFrameDelta);
    }

    int dst_width = streaminfos_[stream_idx].width;
    int dst_height = streaminfos_[stream_idx].height;
    // If scaling isn't required, because the input resolution
    // matches the destination or the input image is empty (e.g.
    // a keyframe request for encoders with internal camera
    // sources), pass the image on directly. Otherwise, we'll
    // scale it to match what the encoder expects (below).
    if ((dst_width == src_width && dst_height == src_height) ||
        input_image.IsZeroSize()) {
      streaminfos_[stream_idx].encoder->Encode(input_image, codec_specific_info,
                                               &stream_frame_types);
    } else {
      VideoFrame dst_frame;
      // Making sure that destination frame is of sufficient size.
      // Aligning stride values based on width.
      dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
                                 (dst_width + 1) / 2, (dst_width + 1) / 2);
      libyuv::I420Scale(
          input_image.buffer(kYPlane), input_image.stride(kYPlane),
          input_image.buffer(kUPlane), input_image.stride(kUPlane),
          input_image.buffer(kVPlane), input_image.stride(kVPlane), src_width,
          src_height, dst_frame.buffer(kYPlane), dst_frame.stride(kYPlane),
          dst_frame.buffer(kUPlane), dst_frame.stride(kUPlane),
          dst_frame.buffer(kVPlane), dst_frame.stride(kVPlane), dst_width,
          dst_height, libyuv::kFilterBilinear);
      dst_frame.set_timestamp(input_image.timestamp());
      dst_frame.set_render_time_ms(input_image.render_time_ms());
      streaminfos_[stream_idx].encoder->Encode(dst_frame, codec_specific_info,
                                               &stream_frame_types);
    }
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  encoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

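// Forwards packet loss and round-trip time estimates to every stream encoder.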
int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss,
                                                  int64_t rtt) {
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt);
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

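// Clamps the new total bitrate, splits it across the streams via
// GetStreamBitrate(), and updates which streams are currently being sent.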
int SimulcastEncoderAdapter::SetRates(uint32_t new_bitrate_kbit,
                                      uint32_t new_framerate) {
  if (!Initialized()) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (new_framerate < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (codec_.maxBitrate > 0 && new_bitrate_kbit > codec_.maxBitrate) {
    new_bitrate_kbit = codec_.maxBitrate;
  }
  if (new_bitrate_kbit < codec_.minBitrate) {
    new_bitrate_kbit = codec_.minBitrate;
  }
  if (codec_.numberOfSimulcastStreams > 0 &&
      new_bitrate_kbit < codec_.simulcastStream[0].minBitrate) {
    new_bitrate_kbit = codec_.simulcastStream[0].minBitrate;
  }
  codec_.maxFramerate = new_framerate;

  bool send_stream = true;
  uint32_t stream_bitrate = 0;
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    stream_bitrate = GetStreamBitrate(stream_idx, streaminfos_.size(),
                                      new_bitrate_kbit, &send_stream);
    // Need a key frame if we have not sent this stream before.
    if (send_stream && !streaminfos_[stream_idx].send_stream) {
      streaminfos_[stream_idx].key_frame_request = true;
    }
    streaminfos_[stream_idx].send_stream = send_stream;

    // TODO(holmer): This is a temporary hack for screensharing, where we
    // interpret the startBitrate as the encoder target bitrate. This is
    // to allow for a different max bitrate, so if the codec can't meet
    // the target we still allow it to overshoot up to the max before dropping
    // frames. This hack should be improved.
    if (codec_.targetBitrate > 0 &&
        (codec_.codecSpecific.VP8.numberOfTemporalLayers == 2 ||
         codec_.simulcastStream[0].numberOfTemporalLayers == 2)) {
      stream_bitrate = std::min(codec_.maxBitrate, stream_bitrate);
      // TODO(ronghuawu): Can't change max bitrate via the VideoEncoder
      // interface. And VP8EncoderImpl doesn't take negative framerate.
      // max_bitrate = std::min(codec_.maxBitrate, stream_bitrate);
      // new_framerate = -1;
    }

    streaminfos_[stream_idx].encoder->SetRates(stream_bitrate, new_framerate);
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

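// Called by the per-stream AdapterEncodedImageCallback. Stamps the simulcast
// index on the VP8 codec-specific info before forwarding the encoded image to
// the registered callback.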
int32_t SimulcastEncoderAdapter::Encoded(
    size_t stream_idx,
    const EncodedImage& encodedImage,
    const CodecSpecificInfo* codecSpecificInfo,
    const RTPFragmentationHeader* fragmentation) {
  CodecSpecificInfo stream_codec_specific = *codecSpecificInfo;
  CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8);
  vp8Info->simulcastIdx = stream_idx;

  return encoded_complete_callback_->Encoded(
      encodedImage, &stream_codec_specific, fragmentation);
}

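// Returns the bitrate (in kbps) allocated to |stream_idx| out of
// |new_bitrate_kbit|, and sets |*send_stream| to whether that stream should be
// sent at this rate.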
uint32_t SimulcastEncoderAdapter::GetStreamBitrate(
    int stream_idx,
    size_t total_number_of_streams,
    uint32_t new_bitrate_kbit,
    bool* send_stream) const {
  if (total_number_of_streams == 1) {
    *send_stream = true;
    return new_bitrate_kbit;
  }

  // The bitrate needed to start sending this stream is given by the
  // minimum bitrate allowed for encoding this stream, plus the sum target
  // rates of all lower streams.
  uint32_t sum_target_lower_streams =
      SumStreamTargetBitrate(stream_idx, codec_);
  uint32_t bitrate_to_send_this_layer =
      codec_.simulcastStream[stream_idx].minBitrate + sum_target_lower_streams;
  if (new_bitrate_kbit >= bitrate_to_send_this_layer) {
    // We have enough bandwidth to send this stream.
    *send_stream = true;
    // Bitrate for this stream is the new bitrate (|new_bitrate_kbit|) minus the
    // sum target rates of the lower streams, and capped to a maximum bitrate.
    // The maximum cap depends on whether we send the next higher stream.
    // If we will be sending the next higher stream, |max_rate| is given by
    // current stream's |targetBitrate|, otherwise it's capped by |maxBitrate|.
    if (stream_idx < codec_.numberOfSimulcastStreams - 1) {
      unsigned int max_rate = codec_.simulcastStream[stream_idx].maxBitrate;
      if (new_bitrate_kbit >=
          SumStreamTargetBitrate(stream_idx + 1, codec_) +
              codec_.simulcastStream[stream_idx + 1].minBitrate) {
        max_rate = codec_.simulcastStream[stream_idx].targetBitrate;
      }
      return std::min(new_bitrate_kbit - sum_target_lower_streams, max_rate);
    } else {
      // For the highest stream (highest resolution), the |targetBitrate| and
      // |maxBitrate| are not used. Any excess bitrate (above the targets of
      // all lower streams) is given to this (highest resolution) stream.
      return new_bitrate_kbit - sum_target_lower_streams;
    }
  } else {
    // Not enough bitrate for this stream.
    // Return our max bitrate of |stream_idx| - 1, but we don't send it. We need
    // to keep this resolution coding in order for the multi-encoder to work.
    *send_stream = false;
    return codec_.simulcastStream[stream_idx - 1].maxBitrate;
  }
}

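// Derives the per-stream codec settings for |stream_index| from the overall
// configuration in |inst|.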
void SimulcastEncoderAdapter::PopulateStreamCodec(
    const webrtc::VideoCodec* inst,
    int stream_index,
    size_t total_number_of_streams,
    bool highest_resolution_stream,
    webrtc::VideoCodec* stream_codec,
    bool* send_stream) {
  *stream_codec = *inst;

  // Stream specific settings.
  stream_codec->codecSpecific.VP8.numberOfTemporalLayers =
      inst->simulcastStream[stream_index].numberOfTemporalLayers;
  stream_codec->numberOfSimulcastStreams = 0;
  stream_codec->width = inst->simulcastStream[stream_index].width;
  stream_codec->height = inst->simulcastStream[stream_index].height;
  stream_codec->maxBitrate = inst->simulcastStream[stream_index].maxBitrate;
  stream_codec->minBitrate = inst->simulcastStream[stream_index].minBitrate;
  stream_codec->qpMax = inst->simulcastStream[stream_index].qpMax;
  // Settings that are based on stream/resolution.
  if (stream_index == 0) {
    // Settings for lowest spatial resolutions.
    stream_codec->qpMax = kLowestResMaxQp;
  }
  if (!highest_resolution_stream) {
    // For resolutions below CIF, set the codec |complexity| parameter to
    // kComplexityHigher, which maps to cpu_used = -4.
    int pixels_per_frame = stream_codec->width * stream_codec->height;
    if (pixels_per_frame < 352 * 288) {
      stream_codec->codecSpecific.VP8.complexity = webrtc::kComplexityHigher;
    }
    // Turn off denoising for all streams but the highest resolution.
    stream_codec->codecSpecific.VP8.denoisingOn = false;
  }
  // TODO(ronghuawu): what to do with targetBitrate.

  int stream_bitrate = GetStreamBitrate(stream_index, total_number_of_streams,
                                        inst->startBitrate, send_stream);
  stream_codec->startBitrate = stream_bitrate;
}

bool SimulcastEncoderAdapter::Initialized() const {
  return !streaminfos_.empty();
}

void SimulcastEncoderAdapter::OnDroppedFrame() {
  streaminfos_[0].encoder->OnDroppedFrame();
}

int SimulcastEncoderAdapter::GetTargetFramerate() {
  return streaminfos_[0].encoder->GetTargetFramerate();
}

bool SimulcastEncoderAdapter::SupportsNativeHandle() const {
  // We should not be calling this method before streaminfos_ are configured.
  RTC_DCHECK(!streaminfos_.empty());
  // TODO(pbos): Support textures when using more than one encoder.
  if (streaminfos_.size() != 1)
    return false;
  return streaminfos_[0].encoder->SupportsNativeHandle();
}

const char* SimulcastEncoderAdapter::ImplementationName() const {
  // We should not be calling this method before streaminfos_ are configured.
  RTC_DCHECK(!streaminfos_.empty());
  // TODO(pbos): Support multiple implementation names for different encoders.
  return streaminfos_[0].encoder->ImplementationName();
}

}  // namespace webrtc