1 /*
2 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
12
13 #include "api/video/encoded_image.h"
14 #include "api/video/i420_buffer.h"
15 #include "api/video/video_frame_buffer.h"
16 #include "common_video/include/video_frame_buffer.h"
17 #include "common_video/libyuv/include/webrtc_libyuv.h"
18 #include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h"
19 #include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h"
20 #include "rtc_base/keep_ref_until_done.h"
21 #include "rtc_base/logging.h"
22
namespace {
// Intentional no-op: bound (via rtc::Bind in MergeAlphaImages) together with
// the two source buffers so the resulting closure keeps both refcounted
// buffers alive for the lifetime of the wrapped I420A buffer.
void KeepBufferRefs(rtc::scoped_refptr<webrtc::VideoFrameBuffer>,
                    rtc::scoped_refptr<webrtc::VideoFrameBuffer>) {}
}  // anonymous namespace
27
28 namespace webrtc {
29
// Per-stream decode callback: forwards each decoded frame from one of the
// wrapped decoders back into the owning MultiplexDecoderAdapter, tagged with
// which stream (kYUVStream or kAXXStream) produced it.
class MultiplexDecoderAdapter::AdapterDecodedImageCallback
    : public webrtc::DecodedImageCallback {
 public:
  AdapterDecodedImageCallback(webrtc::MultiplexDecoderAdapter* adapter,
                              AlphaCodecStream stream_idx)
      : adapter_(adapter), stream_idx_(stream_idx) {}

  void Decoded(VideoFrame& decoded_image,
               absl::optional<int32_t> decode_time_ms,
               absl::optional<uint8_t> qp) override {
    if (!adapter_)
      return;
    adapter_->Decoded(stream_idx_, &decoded_image, decode_time_ms, qp);
  }
  // Only the three-argument overload above is expected to be invoked by the
  // wrapped decoders; the legacy int32_t-returning overloads are dead ends.
  int32_t Decoded(VideoFrame& decoded_image) override {
    RTC_NOTREACHED();
    return WEBRTC_VIDEO_CODEC_OK;
  }
  int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override {
    RTC_NOTREACHED();
    return WEBRTC_VIDEO_CODEC_OK;
  }

 private:
  MultiplexDecoderAdapter* adapter_;  // Not owned; must outlive this callback.
  const AlphaCodecStream stream_idx_;
};
57
// Holds one decoded stream component (YUV or AXX) keyed by RTP timestamp
// while waiting for the counterpart stream's frame to arrive.
struct MultiplexDecoderAdapter::DecodedImageData {
  // Placeholder used when the multiplexed image carries no alpha component:
  // a dummy 1x1 frame with RTP timestamp 0 marks the AXX slot as already
  // satisfied (MergeAlphaImages treats timestamp 0 as "no alpha").
  explicit DecodedImageData(AlphaCodecStream stream_idx)
      : stream_idx_(stream_idx),
        decoded_image_(
            VideoFrame::Builder()
                .set_video_frame_buffer(
                    I420Buffer::Create(1 /* width */, 1 /* height */))
                .set_timestamp_rtp(0)
                .set_timestamp_us(0)
                .set_rotation(kVideoRotation_0)
                .build()) {
    RTC_DCHECK_EQ(kAXXStream, stream_idx);
  }
  // Stores a genuinely decoded frame together with its decode stats.
  DecodedImageData(AlphaCodecStream stream_idx,
                   const VideoFrame& decoded_image,
                   const absl::optional<int32_t>& decode_time_ms,
                   const absl::optional<uint8_t>& qp)
      : stream_idx_(stream_idx),
        decoded_image_(decoded_image),
        decode_time_ms_(decode_time_ms),
        qp_(qp) {}
  const AlphaCodecStream stream_idx_;
  VideoFrame decoded_image_;
  const absl::optional<int32_t> decode_time_ms_;
  const absl::optional<uint8_t> qp_;

 private:
  RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(DecodedImageData);
};
87
// Owns the augmenting-data bytes unpacked from a multiplex image, keyed by
// RTP timestamp, until the corresponding frame is merged and delivered.
struct MultiplexDecoderAdapter::AugmentingData {
  AugmentingData(std::unique_ptr<uint8_t[]> augmenting_data, uint16_t data_size)
      : data_(std::move(augmenting_data)), size_(data_size) {}
  std::unique_ptr<uint8_t[]> data_;  // May be moved out in Decoded().
  const uint16_t size_;

 private:
  RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(AugmentingData);
};
97
// `factory` creates the underlying per-stream decoders in InitDecode();
// `associated_format` names the wrapped codec each stream is decoded with.
// `factory` is not owned and must outlive this adapter.
MultiplexDecoderAdapter::MultiplexDecoderAdapter(
    VideoDecoderFactory* factory,
    const SdpVideoFormat& associated_format,
    bool supports_augmenting_data)
    : factory_(factory),
      associated_format_(associated_format),
      supports_augmenting_data_(supports_augmenting_data) {}
105
MultiplexDecoderAdapter::~MultiplexDecoderAdapter() {
  // Tear down the wrapped decoders and their adapter callbacks.
  Release();
}
109
InitDecode(const VideoCodec * codec_settings,int32_t number_of_cores)110 int32_t MultiplexDecoderAdapter::InitDecode(const VideoCodec* codec_settings,
111 int32_t number_of_cores) {
112 RTC_DCHECK_EQ(kVideoCodecMultiplex, codec_settings->codecType);
113 VideoCodec settings = *codec_settings;
114 settings.codecType = PayloadStringToCodecType(associated_format_.name);
115 for (size_t i = 0; i < kAlphaCodecStreams; ++i) {
116 std::unique_ptr<VideoDecoder> decoder =
117 factory_->CreateVideoDecoder(associated_format_);
118 const int32_t rv = decoder->InitDecode(&settings, number_of_cores);
119 if (rv)
120 return rv;
121 adapter_callbacks_.emplace_back(
122 new MultiplexDecoderAdapter::AdapterDecodedImageCallback(
123 this, static_cast<AlphaCodecStream>(i)));
124 decoder->RegisterDecodeCompleteCallback(adapter_callbacks_.back().get());
125 decoders_.emplace_back(std::move(decoder));
126 }
127 return WEBRTC_VIDEO_CODEC_OK;
128 }
129
Decode(const EncodedImage & input_image,bool missing_frames,int64_t render_time_ms)130 int32_t MultiplexDecoderAdapter::Decode(const EncodedImage& input_image,
131 bool missing_frames,
132 int64_t render_time_ms) {
133 MultiplexImage image = MultiplexEncodedImagePacker::Unpack(input_image);
134
135 if (supports_augmenting_data_) {
136 RTC_DCHECK(decoded_augmenting_data_.find(input_image.Timestamp()) ==
137 decoded_augmenting_data_.end());
138 decoded_augmenting_data_.emplace(
139 std::piecewise_construct,
140 std::forward_as_tuple(input_image.Timestamp()),
141 std::forward_as_tuple(std::move(image.augmenting_data),
142 image.augmenting_data_size));
143 }
144
145 if (image.component_count == 1) {
146 RTC_DCHECK(decoded_data_.find(input_image.Timestamp()) ==
147 decoded_data_.end());
148 decoded_data_.emplace(std::piecewise_construct,
149 std::forward_as_tuple(input_image.Timestamp()),
150 std::forward_as_tuple(kAXXStream));
151 }
152 int32_t rv = 0;
153 for (size_t i = 0; i < image.image_components.size(); i++) {
154 rv = decoders_[image.image_components[i].component_index]->Decode(
155 image.image_components[i].encoded_image, missing_frames,
156 render_time_ms);
157 if (rv != WEBRTC_VIDEO_CODEC_OK)
158 return rv;
159 }
160 return rv;
161 }
162
// Registers the sink that receives the merged (YUV + alpha) frames produced
// in MergeAlphaImages(). Not owned; must outlive this adapter.
int32_t MultiplexDecoderAdapter::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  decoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
168
// Releases every wrapped decoder, then drops the decoders and their adapter
// callbacks. Returns the first failing decoder's error code, in which case
// the remaining decoders are left un-released and the vectors uncleared.
int32_t MultiplexDecoderAdapter::Release() {
  for (auto& decoder : decoders_) {
    const int32_t rv = decoder->Release();
    if (rv)
      return rv;
  }
  decoders_.clear();
  adapter_callbacks_.clear();
  return WEBRTC_VIDEO_CODEC_OK;
}
179
// Invoked (via AdapterDecodedImageCallback) when a wrapped decoder finishes a
// frame. Pairs frames across the two streams by RTP timestamp: if the
// counterpart frame (or the no-alpha placeholder) is already stored, merges
// and delivers the pair; otherwise stores this frame to await its
// counterpart.
void MultiplexDecoderAdapter::Decoded(AlphaCodecStream stream_idx,
                                      VideoFrame* decoded_image,
                                      absl::optional<int32_t> decode_time_ms,
                                      absl::optional<uint8_t> qp) {
  const auto& other_decoded_data_it =
      decoded_data_.find(decoded_image->timestamp());
  const auto& augmenting_data_it =
      decoded_augmenting_data_.find(decoded_image->timestamp());
  const bool has_augmenting_data =
      augmenting_data_it != decoded_augmenting_data_.end();
  if (other_decoded_data_it != decoded_data_.end()) {
    uint16_t augmenting_data_size =
        has_augmenting_data ? augmenting_data_it->second.size_ : 0;
    // Take ownership of the stashed augmenting bytes; the moved-from map
    // entry is erased below.
    std::unique_ptr<uint8_t[]> augmenting_data =
        has_augmenting_data ? std::move(augmenting_data_it->second.data_)
                            : nullptr;
    auto& other_image_data = other_decoded_data_it->second;
    // MergeAlphaImages always takes (YUV frame, alpha frame) in that order,
    // so the argument order depends on which stream arrived second.
    if (stream_idx == kYUVStream) {
      RTC_DCHECK_EQ(kAXXStream, other_image_data.stream_idx_);
      MergeAlphaImages(decoded_image, decode_time_ms, qp,
                       &other_image_data.decoded_image_,
                       other_image_data.decode_time_ms_, other_image_data.qp_,
                       std::move(augmenting_data), augmenting_data_size);
    } else {
      RTC_DCHECK_EQ(kYUVStream, other_image_data.stream_idx_);
      RTC_DCHECK_EQ(kAXXStream, stream_idx);
      MergeAlphaImages(&other_image_data.decoded_image_,
                       other_image_data.decode_time_ms_, other_image_data.qp_,
                       decoded_image, decode_time_ms, qp,
                       std::move(augmenting_data), augmenting_data_size);
    }
    // Drop stale pending entries with older timestamps (frames whose
    // counterpart never arrived). NOTE(review): both erase ranges are
    // half-open, so the just-merged entries themselves stay in the maps until
    // a later frame's cleanup sweeps them — confirm this is intended rather
    // than an off-by-one. Also assumes map key order tracks frame order;
    // RTP timestamp wraparound would break that — TODO confirm handled
    // upstream.
    decoded_data_.erase(decoded_data_.begin(), other_decoded_data_it);
    if (has_augmenting_data) {
      decoded_augmenting_data_.erase(decoded_augmenting_data_.begin(),
                                     augmenting_data_it);
    }
    return;
  }
  RTC_DCHECK(decoded_data_.find(decoded_image->timestamp()) ==
             decoded_data_.end());
  // First stream to finish for this timestamp: store and wait for the other.
  decoded_data_.emplace(
      std::piecewise_construct,
      std::forward_as_tuple(decoded_image->timestamp()),
      std::forward_as_tuple(stream_idx, *decoded_image, decode_time_ms, qp));
}
225
// Combines a decoded YUV frame and its decoded alpha-plane frame into one
// I420A frame (optionally wrapped with augmenting data) and delivers it to
// the registered DecodedImageCallback. The alpha frame's decode time and QP
// are accepted but not forwarded; only the YUV frame's stats reach the
// callback.
void MultiplexDecoderAdapter::MergeAlphaImages(
    VideoFrame* decoded_image,
    const absl::optional<int32_t>& decode_time_ms,
    const absl::optional<uint8_t>& qp,
    VideoFrame* alpha_decoded_image,
    const absl::optional<int32_t>& alpha_decode_time_ms,
    const absl::optional<uint8_t>& alpha_qp,
    std::unique_ptr<uint8_t[]> augmenting_data,
    uint16_t augmenting_data_length) {
  rtc::scoped_refptr<VideoFrameBuffer> merged_buffer;
  // RTP timestamp 0 identifies the placeholder frame built by
  // DecodedImageData when no alpha component was present; pass the YUV buffer
  // through unchanged. NOTE(review): a genuine alpha frame whose RTP
  // timestamp happens to be 0 would be misclassified here — confirm callers
  // guarantee this cannot occur.
  if (!alpha_decoded_image->timestamp()) {
    merged_buffer = decoded_image->video_frame_buffer();
  } else {
    rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
        decoded_image->video_frame_buffer()->ToI420();
    rtc::scoped_refptr<webrtc::I420BufferInterface> alpha_buffer =
        alpha_decoded_image->video_frame_buffer()->ToI420();
    RTC_DCHECK_EQ(yuv_buffer->width(), alpha_buffer->width());
    RTC_DCHECK_EQ(yuv_buffer->height(), alpha_buffer->height());
    // Wrap (not copy) the planes; the bound KeepBufferRefs closure keeps both
    // source buffers alive for the wrapper's lifetime.
    merged_buffer = WrapI420ABuffer(
        yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
        yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
        yuv_buffer->DataV(), yuv_buffer->StrideV(), alpha_buffer->DataY(),
        alpha_buffer->StrideY(),
        rtc::Bind(&KeepBufferRefs, yuv_buffer, alpha_buffer));
  }
  if (supports_augmenting_data_) {
    // Re-attach the augmenting bytes by wrapping the merged buffer.
    merged_buffer = rtc::scoped_refptr<webrtc::AugmentedVideoFrameBuffer>(
        new rtc::RefCountedObject<AugmentedVideoFrameBuffer>(
            merged_buffer, std::move(augmenting_data), augmenting_data_length));
  }

  VideoFrame merged_image = VideoFrame::Builder()
                                .set_video_frame_buffer(merged_buffer)
                                .set_timestamp_rtp(decoded_image->timestamp())
                                .set_timestamp_us(0)
                                .set_rotation(decoded_image->rotation())
                                .set_id(decoded_image->id())
                                .set_packet_infos(decoded_image->packet_infos())
                                .build();
  decoded_complete_callback_->Decoded(merged_image, decode_time_ms, qp);
}
268
269 } // namespace webrtc
270