/*
 *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"

#include <cstring>

#include "api/video/encoded_image.h"
#include "api/video_codecs/video_encoder.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/video_common.h"
#include "modules/include/module_common_types.h"
#include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h"
#include "rtc_base/keep_ref_until_done.h"
#include "rtc_base/logging.h"

namespace webrtc {

// Callback wrapper that helps distinguish returned results from |encoders_|
// instances.
class MultiplexEncoderAdapter::AdapterEncodedImageCallback
    : public webrtc::EncodedImageCallback {
 public:
  AdapterEncodedImageCallback(webrtc::MultiplexEncoderAdapter* adapter,
                              AlphaCodecStream stream_idx)
      : adapter_(adapter), stream_idx_(stream_idx) {}

  EncodedImageCallback::Result OnEncodedImage(
      const EncodedImage& encoded_image,
      const CodecSpecificInfo* codec_specific_info,
      const RTPFragmentationHeader* fragmentation) override {
    if (!adapter_)
      return Result(Result::OK);
    return adapter_->OnEncodedImage(stream_idx_, encoded_image,
                                    codec_specific_info, fragmentation);
  }

 private:
  MultiplexEncoderAdapter* adapter_;
  const AlphaCodecStream stream_idx_;
};

MultiplexEncoderAdapter::MultiplexEncoderAdapter(
    VideoEncoderFactory* factory,
    const SdpVideoFormat& associated_format,
    bool supports_augmented_data)
    : factory_(factory),
      associated_format_(associated_format),
      encoded_complete_callback_(nullptr),
      key_frame_interval_(0),
      supports_augmented_data_(supports_augmented_data) {}

MultiplexEncoderAdapter::~MultiplexEncoderAdapter() {
  Release();
}

void MultiplexEncoderAdapter::SetFecControllerOverride(
    FecControllerOverride* fec_controller_override) {
  // Ignored.
}

int MultiplexEncoderAdapter::InitEncode(
    const VideoCodec* inst,
    const VideoEncoder::Settings& settings) {
  const size_t buffer_size =
      CalcBufferSize(VideoType::kI420, inst->width, inst->height);
  multiplex_dummy_planes_.resize(buffer_size);
  // It is more expensive to encode 0x00, so use 0x80 instead.
  std::fill(multiplex_dummy_planes_.begin(), multiplex_dummy_planes_.end(),
            0x80);

  RTC_DCHECK_EQ(kVideoCodecMultiplex, inst->codecType);
  VideoCodec video_codec = *inst;
  video_codec.codecType = PayloadStringToCodecType(associated_format_.name);

  // Take over the key frame interval at adapter level, because we have to
  // sync the key frames for both sub-encoders.
  switch (video_codec.codecType) {
    case kVideoCodecVP8:
      key_frame_interval_ = video_codec.VP8()->keyFrameInterval;
      video_codec.VP8()->keyFrameInterval = 0;
      break;
    case kVideoCodecVP9:
      key_frame_interval_ = video_codec.VP9()->keyFrameInterval;
      video_codec.VP9()->keyFrameInterval = 0;
      break;
    case kVideoCodecH264:
      key_frame_interval_ = video_codec.H264()->keyFrameInterval;
      video_codec.H264()->keyFrameInterval = 0;
      break;
    default:
      break;
  }

  encoder_info_ = EncoderInfo();
  encoder_info_.implementation_name = "MultiplexEncoderAdapter (";
  encoder_info_.requested_resolution_alignment = 1;
  // This needs to be false so that we can do the split in Encode().
  encoder_info_.supports_native_handle = false;

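  // Create one sub-encoder per stream (YUV and alpha) from the associated
  // format, and merge each sub-encoder's EncoderInfo into |encoder_info_|.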
  for (size_t i = 0; i < kAlphaCodecStreams; ++i) {
    std::unique_ptr<VideoEncoder> encoder =
        factory_->CreateVideoEncoder(associated_format_);
    const int rv = encoder->InitEncode(&video_codec, settings);
    if (rv) {
      RTC_LOG(LS_ERROR) << "Failed to initialize multiplex codec index " << i;
      return rv;
    }
    adapter_callbacks_.emplace_back(new AdapterEncodedImageCallback(
        this, static_cast<AlphaCodecStream>(i)));
    encoder->RegisterEncodeCompleteCallback(adapter_callbacks_.back().get());

    const EncoderInfo& encoder_impl_info = encoder->GetEncoderInfo();
    encoder_info_.implementation_name += encoder_impl_info.implementation_name;
    if (i != kAlphaCodecStreams - 1) {
      encoder_info_.implementation_name += ", ";
    }
    // Report hardware acceleration if any of the sub-encoders uses it.
    // For example, if we are having issues with down-scaling due to
    // pipelining delay in HW encoders, we need higher encoder usage
    // thresholds in CPU adaptation.
    if (i == 0) {
      encoder_info_.is_hardware_accelerated =
          encoder_impl_info.is_hardware_accelerated;
    } else {
      encoder_info_.is_hardware_accelerated |=
          encoder_impl_info.is_hardware_accelerated;
    }

    encoder_info_.requested_resolution_alignment = cricket::LeastCommonMultiple(
        encoder_info_.requested_resolution_alignment,
        encoder_impl_info.requested_resolution_alignment);

    encoder_info_.has_internal_source = false;

    encoders_.emplace_back(std::move(encoder));
  }
  encoder_info_.implementation_name += ")";

  return WEBRTC_VIDEO_CODEC_OK;
}

int MultiplexEncoderAdapter::Encode(
    const VideoFrame& input_image,
    const std::vector<VideoFrameType>* frame_types) {
  if (!encoded_complete_callback_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

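  // Force a key frame every |key_frame_interval_| frames; both sub-encoders
  // get the same frame type so their key frames stay in sync.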
  std::vector<VideoFrameType> adjusted_frame_types;
  if (key_frame_interval_ > 0 && picture_index_ % key_frame_interval_ == 0) {
    adjusted_frame_types.push_back(VideoFrameType::kVideoFrameKey);
  } else {
    adjusted_frame_types.push_back(VideoFrameType::kVideoFrameDelta);
  }
  const bool has_alpha = input_image.video_frame_buffer()->type() ==
                         VideoFrameBuffer::Type::kI420A;
  std::unique_ptr<uint8_t[]> augmenting_data = nullptr;
  uint16_t augmenting_data_length = 0;
  AugmentedVideoFrameBuffer* augmented_video_frame_buffer = nullptr;
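  // When augmented data is supported, the incoming buffer is an
  // AugmentedVideoFrameBuffer; copy its payload out so it can be stashed
  // with the frame and packed alongside the encoded images later.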
  if (supports_augmented_data_) {
    augmented_video_frame_buffer = static_cast<AugmentedVideoFrameBuffer*>(
        input_image.video_frame_buffer().get());
    augmenting_data_length =
        augmented_video_frame_buffer->GetAugmentingDataSize();
    augmenting_data =
        std::unique_ptr<uint8_t[]>(new uint8_t[augmenting_data_length]);
    memcpy(augmenting_data.get(),
           augmented_video_frame_buffer->GetAugmentingData(),
           augmenting_data_length);
    augmenting_data_size_ = augmenting_data_length;
  }

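  // Stash bookkeeping for this picture, keyed by RTP timestamp, so that
  // OnEncodedImage() can match the encoded sub-frames back to it.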
  {
    MutexLock lock(&mutex_);
    stashed_images_.emplace(
        std::piecewise_construct,
        std::forward_as_tuple(input_image.timestamp()),
        std::forward_as_tuple(
            picture_index_, has_alpha ? kAlphaCodecStreams : 1,
            std::move(augmenting_data), augmenting_data_length));
  }

  ++picture_index_;

  // Encode YUV
  int rv = encoders_[kYUVStream]->Encode(input_image, &adjusted_frame_types);

  // If we do not receive an alpha frame, we send a single frame for this
  // |picture_index_|. The receiver will see |frame_count| as 1, which
  // signals this case.
  if (rv || !has_alpha)
    return rv;

  // Encode AXX
  const I420ABufferInterface* yuva_buffer =
      supports_augmented_data_
          ? augmented_video_frame_buffer->GetVideoFrameBuffer()->GetI420A()
          : input_image.video_frame_buffer()->GetI420A();
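  // Wrap the alpha plane as a regular I420 buffer: the A plane serves as Y,
  // and the shared 0x80-filled dummy planes stand in for U and V, so the
  // associated codec can encode alpha as an ordinary frame.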
  rtc::scoped_refptr<I420BufferInterface> alpha_buffer =
      WrapI420Buffer(input_image.width(), input_image.height(),
                     yuva_buffer->DataA(), yuva_buffer->StrideA(),
                     multiplex_dummy_planes_.data(), yuva_buffer->StrideU(),
                     multiplex_dummy_planes_.data(), yuva_buffer->StrideV(),
                     rtc::KeepRefUntilDone(input_image.video_frame_buffer()));
  VideoFrame alpha_image = VideoFrame::Builder()
                               .set_video_frame_buffer(alpha_buffer)
                               .set_timestamp_rtp(input_image.timestamp())
                               .set_timestamp_ms(input_image.render_time_ms())
                               .set_rotation(input_image.rotation())
                               .set_id(input_image.id())
                               .set_packet_infos(input_image.packet_infos())
                               .build();
  rv = encoders_[kAXXStream]->Encode(alpha_image, &adjusted_frame_types);
  return rv;
}

int MultiplexEncoderAdapter::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  encoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

void MultiplexEncoderAdapter::SetRates(
    const RateControlParameters& parameters) {
  VideoBitrateAllocation bitrate_allocation(parameters.bitrate);
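  // Reserve room for the augmenting data by deducting its size from the
  // base layer allocation before passing rates to the sub-encoders.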
  bitrate_allocation.SetBitrate(
      0, 0, parameters.bitrate.GetBitrate(0, 0) - augmenting_data_size_);
  for (auto& encoder : encoders_) {
    // TODO(emircan): |framerate| is used to calculate duration in encoder
    // instances. We report the total frame rate to keep real time for now.
    // Remove this after refactoring duration logic.
    encoder->SetRates(RateControlParameters(
        bitrate_allocation,
        static_cast<uint32_t>(encoders_.size() * parameters.framerate_fps),
        parameters.bandwidth_allocation -
            DataRate::BitsPerSec(augmenting_data_size_)));
  }
}

void MultiplexEncoderAdapter::OnPacketLossRateUpdate(float packet_loss_rate) {
  for (auto& encoder : encoders_) {
    encoder->OnPacketLossRateUpdate(packet_loss_rate);
  }
}

void MultiplexEncoderAdapter::OnRttUpdate(int64_t rtt_ms) {
  for (auto& encoder : encoders_) {
    encoder->OnRttUpdate(rtt_ms);
  }
}

void MultiplexEncoderAdapter::OnLossNotification(
    const LossNotification& loss_notification) {
  for (auto& encoder : encoders_) {
    encoder->OnLossNotification(loss_notification);
  }
}

int MultiplexEncoderAdapter::Release() {
  for (auto& encoder : encoders_) {
    const int rv = encoder->Release();
    if (rv)
      return rv;
  }
  encoders_.clear();
  adapter_callbacks_.clear();
  MutexLock lock(&mutex_);
  stashed_images_.clear();

  return WEBRTC_VIDEO_CODEC_OK;
}

VideoEncoder::EncoderInfo MultiplexEncoderAdapter::GetEncoderInfo() const {
  return encoder_info_;
}

EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage(
    AlphaCodecStream stream_idx,
    const EncodedImage& encodedImage,
    const CodecSpecificInfo* codecSpecificInfo,
    const RTPFragmentationHeader* fragmentation) {
  // Save the image
  MultiplexImageComponent image_component;
  image_component.component_index = stream_idx;
  image_component.codec_type =
      PayloadStringToCodecType(associated_format_.name);
  image_component.encoded_image = encodedImage;

  // If we don't already own the buffer, make a copy.
  image_component.encoded_image.Retain();

  MutexLock lock(&mutex_);
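  // Look up the stash entry for this picture by RTP timestamp; both
  // sub-encoders report their results with the same timestamp.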
  const auto& stashed_image_itr =
      stashed_images_.find(encodedImage.Timestamp());
  const auto& stashed_image_next_itr = std::next(stashed_image_itr, 1);
  RTC_DCHECK(stashed_image_itr != stashed_images_.end());
  MultiplexImage& stashed_image = stashed_image_itr->second;
  const uint8_t frame_count = stashed_image.component_count;

  stashed_image.image_components.push_back(image_component);

  if (stashed_image.image_components.size() == frame_count) {
    // Complete case
    for (auto iter = stashed_images_.begin();
         iter != stashed_images_.end() && iter != stashed_image_next_itr;
         iter++) {
      // No image at all, skip.
      if (iter->second.image_components.size() == 0)
        continue;

      // We have to send out those stashed frames, otherwise the delta frame
      // dependency chain is broken.
      combined_image_ =
          MultiplexEncodedImagePacker::PackAndRelease(iter->second);

      CodecSpecificInfo codec_info = *codecSpecificInfo;
      codec_info.codecType = kVideoCodecMultiplex;
      encoded_complete_callback_->OnEncodedImage(combined_image_, &codec_info,
                                                 fragmentation);
    }

    stashed_images_.erase(stashed_images_.begin(), stashed_image_next_itr);
  }
  return EncodedImageCallback::Result(EncodedImageCallback::Result::OK);
}

}  // namespace webrtc