1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "chrome/renderer/media/cast_rtp_stream.h"
6
7 #include "base/bind.h"
8 #include "base/debug/trace_event.h"
9 #include "base/logging.h"
10 #include "base/memory/weak_ptr.h"
11 #include "base/sys_info.h"
12 #include "chrome/renderer/media/cast_session.h"
13 #include "chrome/renderer/media/cast_udp_transport.h"
14 #include "content/public/renderer/media_stream_audio_sink.h"
15 #include "content/public/renderer/media_stream_video_sink.h"
16 #include "content/public/renderer/render_thread.h"
17 #include "content/public/renderer/video_encode_accelerator.h"
18 #include "media/audio/audio_parameters.h"
19 #include "media/base/audio_bus.h"
20 #include "media/base/audio_fifo.h"
21 #include "media/base/bind_to_current_loop.h"
22 #include "media/base/multi_channel_resampler.h"
23 #include "media/base/video_frame.h"
24 #include "media/cast/cast_config.h"
25 #include "media/cast/cast_defines.h"
26 #include "media/cast/cast_sender.h"
27 #include "media/cast/transport/cast_transport_config.h"
28 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
29 #include "ui/gfx/geometry/size.h"
30
31 using media::cast::AudioSenderConfig;
32 using media::cast::VideoSenderConfig;
33
34 namespace {
35
// Codec names as they appear in CastRtpPayloadParams::codec_name.
const char kCodecNameOpus[] = "OPUS";
const char kCodecNameVp8[] = "VP8";
const char kCodecNameH264[] = "H264";

// To convert from kilobits per second to bits per second.
const int kBitrateMultiplier = 1000;

// This constant defines the number of sets of audio data to buffer
// in the FIFO. If input audio and output data have different resampling
// rates then buffering is necessary to avoid audio glitches.
// See CastAudioSink::ResampleData() and CastAudioSink::OnSetFormat()
// for more details.
const int kBufferAudioData = 2;
49
DefaultOpusPayload()50 CastRtpPayloadParams DefaultOpusPayload() {
51 CastRtpPayloadParams payload;
52 payload.ssrc = 1;
53 payload.feedback_ssrc = 2;
54 payload.payload_type = 127;
55 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
56 payload.codec_name = kCodecNameOpus;
57 payload.clock_rate = 48000;
58 payload.channels = 2;
59 // The value is 0 which means VBR.
60 payload.min_bitrate = payload.max_bitrate =
61 media::cast::kDefaultAudioEncoderBitrate;
62 return payload;
63 }
64
DefaultVp8Payload()65 CastRtpPayloadParams DefaultVp8Payload() {
66 CastRtpPayloadParams payload;
67 payload.ssrc = 11;
68 payload.feedback_ssrc = 12;
69 payload.payload_type = 96;
70 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
71 payload.codec_name = kCodecNameVp8;
72 payload.clock_rate = 90000;
73 payload.width = 1280;
74 payload.height = 720;
75 payload.min_bitrate = 50;
76 payload.max_bitrate = 2000;
77 return payload;
78 }
79
DefaultH264Payload()80 CastRtpPayloadParams DefaultH264Payload() {
81 CastRtpPayloadParams payload;
82 // TODO(hshi): set different ssrc/rtpPayloadType values for H264 and VP8
83 // once b/13696137 is fixed.
84 payload.ssrc = 11;
85 payload.feedback_ssrc = 12;
86 payload.payload_type = 96;
87 payload.max_latency_ms = media::cast::kDefaultRtpMaxDelayMs;
88 payload.codec_name = kCodecNameH264;
89 payload.clock_rate = 90000;
90 payload.width = 1280;
91 payload.height = 720;
92 payload.min_bitrate = 50;
93 payload.max_bitrate = 2000;
94 return payload;
95 }
96
IsHardwareVP8EncodingSupported()97 bool IsHardwareVP8EncodingSupported() {
98 // Query for hardware VP8 encoder support.
99 std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
100 content::GetSupportedVideoEncodeAcceleratorProfiles();
101 for (size_t i = 0; i < vea_profiles.size(); ++i) {
102 if (vea_profiles[i].profile >= media::VP8PROFILE_MIN &&
103 vea_profiles[i].profile <= media::VP8PROFILE_MAX) {
104 return true;
105 }
106 }
107 return false;
108 }
109
IsHardwareH264EncodingSupported()110 bool IsHardwareH264EncodingSupported() {
111 // Query for hardware H.264 encoder support.
112 std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
113 content::GetSupportedVideoEncodeAcceleratorProfiles();
114 for (size_t i = 0; i < vea_profiles.size(); ++i) {
115 if (vea_profiles[i].profile >= media::H264PROFILE_MIN &&
116 vea_profiles[i].profile <= media::H264PROFILE_MAX) {
117 return true;
118 }
119 }
120 return false;
121 }
122
NumberOfEncodeThreads()123 int NumberOfEncodeThreads() {
124 // We want to give CPU cycles for capturing and not to saturate the system
125 // just for encoding. So on a lower end system with only 1 or 2 cores we
126 // use only one thread for encoding.
127 if (base::SysInfo::NumberOfProcessors() <= 2)
128 return 1;
129
130 // On higher end we want to use 2 threads for encoding to reduce latency.
131 // In theory a physical CPU core has maximum 2 hyperthreads. Having 3 or
132 // more logical processors means the system has at least 2 physical cores.
133 return 2;
134 }
135
SupportedAudioParams()136 std::vector<CastRtpParams> SupportedAudioParams() {
137 // TODO(hclam): Fill in more codecs here.
138 std::vector<CastRtpParams> supported_params;
139 supported_params.push_back(CastRtpParams(DefaultOpusPayload()));
140 return supported_params;
141 }
142
SupportedVideoParams()143 std::vector<CastRtpParams> SupportedVideoParams() {
144 std::vector<CastRtpParams> supported_params;
145 if (IsHardwareH264EncodingSupported())
146 supported_params.push_back(CastRtpParams(DefaultH264Payload()));
147 supported_params.push_back(CastRtpParams(DefaultVp8Payload()));
148 return supported_params;
149 }
150
ToAudioSenderConfig(const CastRtpParams & params,AudioSenderConfig * config)151 bool ToAudioSenderConfig(const CastRtpParams& params,
152 AudioSenderConfig* config) {
153 config->rtp_config.ssrc = params.payload.ssrc;
154 config->incoming_feedback_ssrc = params.payload.feedback_ssrc;
155 config->rtp_config.payload_type = params.payload.payload_type;
156 config->rtp_config.max_delay_ms = params.payload.max_latency_ms;
157 config->rtp_config.aes_key = params.payload.aes_key;
158 config->rtp_config.aes_iv_mask = params.payload.aes_iv_mask;
159 config->use_external_encoder = false;
160 config->frequency = params.payload.clock_rate;
161 config->channels = params.payload.channels;
162 config->bitrate = params.payload.max_bitrate * kBitrateMultiplier;
163 config->codec = media::cast::transport::kPcm16;
164 if (params.payload.codec_name == kCodecNameOpus)
165 config->codec = media::cast::transport::kOpus;
166 else
167 return false;
168 return true;
169 }
170
ToVideoSenderConfig(const CastRtpParams & params,VideoSenderConfig * config)171 bool ToVideoSenderConfig(const CastRtpParams& params,
172 VideoSenderConfig* config) {
173 config->rtp_config.ssrc = params.payload.ssrc;
174 config->incoming_feedback_ssrc = params.payload.feedback_ssrc;
175 config->rtp_config.payload_type = params.payload.payload_type;
176 config->rtp_config.max_delay_ms = params.payload.max_latency_ms;
177 config->rtp_config.aes_key = params.payload.aes_key;
178 config->rtp_config.aes_iv_mask = params.payload.aes_iv_mask;
179 config->use_external_encoder = false;
180 config->width = params.payload.width;
181 config->height = params.payload.height;
182 config->min_bitrate = config->start_bitrate =
183 params.payload.min_bitrate * kBitrateMultiplier;
184 config->max_bitrate = params.payload.max_bitrate * kBitrateMultiplier;
185 if (params.payload.codec_name == kCodecNameVp8) {
186 config->use_external_encoder = IsHardwareVP8EncodingSupported();
187 config->codec = media::cast::transport::kVp8;
188 } else if (params.payload.codec_name == kCodecNameH264) {
189 config->use_external_encoder = IsHardwareH264EncodingSupported();
190 config->codec = media::cast::transport::kH264;
191 } else {
192 return false;
193 }
194 if (!config->use_external_encoder) {
195 config->number_of_encode_threads = NumberOfEncodeThreads();
196 }
197 return true;
198 }
199
200 } // namespace
201
202 // This class receives MediaStreamTrack events and video frames from a
203 // MediaStreamTrack.
204 //
205 // Threading: Video frames are received on the IO thread and then
206 // forwarded to media::cast::VideoFrameInput through a static method.
207 // Member variables of this class are only accessed on the render thread.
208 class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
209 public content::MediaStreamVideoSink {
210 public:
211 // |track| provides data for this sink.
212 // |expected_coded_size| is the expected dimension of the video frame.
213 // |error_callback| is called if video formats don't match.
CastVideoSink(const blink::WebMediaStreamTrack & track,const gfx::Size & expected_coded_size,const CastRtpStream::ErrorCallback & error_callback)214 CastVideoSink(const blink::WebMediaStreamTrack& track,
215 const gfx::Size& expected_coded_size,
216 const CastRtpStream::ErrorCallback& error_callback)
217 : track_(track),
218 sink_added_(false),
219 expected_coded_size_(expected_coded_size),
220 error_callback_(error_callback) {}
221
~CastVideoSink()222 virtual ~CastVideoSink() {
223 if (sink_added_)
224 RemoveFromVideoTrack(this, track_);
225 }
226
227 // This static method is used to forward video frames to |frame_input|.
OnVideoFrame(const gfx::Size & expected_coded_size,const CastRtpStream::ErrorCallback & error_callback,const scoped_refptr<media::cast::VideoFrameInput> frame_input,const scoped_refptr<media::VideoFrame> & frame,const media::VideoCaptureFormat & format,const base::TimeTicks & estimated_capture_time)228 static void OnVideoFrame(
229 // These parameters are already bound when callback is created.
230 const gfx::Size& expected_coded_size,
231 const CastRtpStream::ErrorCallback& error_callback,
232 const scoped_refptr<media::cast::VideoFrameInput> frame_input,
233 // These parameters are passed for each frame.
234 const scoped_refptr<media::VideoFrame>& frame,
235 const media::VideoCaptureFormat& format,
236 const base::TimeTicks& estimated_capture_time) {
237 if (frame->coded_size() != expected_coded_size) {
238 error_callback.Run("Video frame resolution does not match config.");
239 return;
240 }
241
242 base::TimeTicks timestamp;
243 if (estimated_capture_time.is_null())
244 timestamp = base::TimeTicks::Now();
245 else
246 timestamp = estimated_capture_time;
247
248 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
249 TRACE_EVENT_INSTANT2(
250 "cast_perf_test", "MediaStreamVideoSink::OnVideoFrame",
251 TRACE_EVENT_SCOPE_THREAD,
252 "timestamp", timestamp.ToInternalValue(),
253 "time_delta", frame->timestamp().ToInternalValue());
254 frame_input->InsertRawVideoFrame(frame, timestamp);
255 }
256
257 // Attach this sink to a video track represented by |track_|.
258 // Data received from the track will be submitted to |frame_input|.
AddToTrack(const scoped_refptr<media::cast::VideoFrameInput> & frame_input)259 void AddToTrack(
260 const scoped_refptr<media::cast::VideoFrameInput>& frame_input) {
261 DCHECK(!sink_added_);
262 sink_added_ = true;
263 AddToVideoTrack(
264 this,
265 base::Bind(
266 &CastVideoSink::OnVideoFrame,
267 expected_coded_size_,
268 error_callback_,
269 frame_input),
270 track_);
271 }
272
273 private:
274 blink::WebMediaStreamTrack track_;
275 bool sink_added_;
276 gfx::Size expected_coded_size_;
277 CastRtpStream::ErrorCallback error_callback_;
278
279 DISALLOW_COPY_AND_ASSIGN(CastVideoSink);
280 };
281
282 // Receives audio data from a MediaStreamTrack. Data is submitted to
283 // media::cast::FrameInput.
284 //
285 // Threading: Audio frames are received on the real-time audio thread.
286 // Note that RemoveFromAudioTrack() is synchronous and we have
// guarantee that there will be no more audio data after calling it.
class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
                      public content::MediaStreamAudioSink {
 public:
  // |track| provides data for this sink.
  // |error_callback| is called if audio formats don't match.
  // |output_channels| and |output_sample_rate| describe the format the
  // Cast audio encoder expects; input arriving at a different sample rate
  // is resampled (see OnSetFormat() and ResampleData()).
  CastAudioSink(const blink::WebMediaStreamTrack& track,
                const CastRtpStream::ErrorCallback& error_callback,
                int output_channels,
                int output_sample_rate)
      : track_(track),
        sink_added_(false),
        error_callback_(error_callback),
        weak_factory_(this),
        output_channels_(output_channels),
        output_sample_rate_(output_sample_rate),
        input_preroll_(0) {}

  virtual ~CastAudioSink() {
    // RemoveFromAudioTrack() is synchronous, so no OnData() call can
    // arrive after it returns.
    if (sink_added_)
      RemoveFromAudioTrack(this, track_);
  }

  // Called on real-time audio thread.
  // content::MediaStreamAudioSink implementation.
  virtual void OnData(const int16* audio_data,
                      int sample_rate,
                      int number_of_channels,
                      int number_of_frames) OVERRIDE {
    scoped_ptr<media::AudioBus> input_bus;
    if (resampler_) {
      // Input and output sample rates differ (see OnSetFormat()): push the
      // data through the FIFO + resampler. A NULL result means the FIFO is
      // still prerolling and there is nothing to deliver yet.
      input_bus = ResampleData(
          audio_data, sample_rate, number_of_channels, number_of_frames);
      if (!input_bus)
        return;
    } else {
      // Rates match: just de-interleave the raw samples into an AudioBus.
      input_bus = media::AudioBus::Create(
          number_of_channels, number_of_frames);
      input_bus->FromInterleaved(
          audio_data, number_of_frames, number_of_channels);
    }

    // TODO(hclam): Pass in the accurate capture time to have good
    // audio / video sync.
    frame_input_->InsertAudio(input_bus.Pass(), base::TimeTicks::Now());
  }

  // Returns resampled audio data from the input, or NULL while the FIFO is
  // still prerolling. This is called when the input sample rate doesn't
  // match the output. The flow of data is as follows:
  //    |audio_data| ->
  //        AudioFifo |fifo_| ->
  //            MultiChannelResampler |resampler|.
  //
  // The resampler pulls data out of the FIFO and resamples the data in
  // frequency domain. It might call |fifo_| more than once, but no more
  // than |kBufferAudioData| times. We preroll audio data into the FIFO to
  // make sure there's enough data for resampling.
  scoped_ptr<media::AudioBus> ResampleData(
      const int16* audio_data,
      int sample_rate,
      int number_of_channels,
      int number_of_frames) {
    DCHECK_EQ(number_of_channels, output_channels_);
    fifo_input_bus_->FromInterleaved(
        audio_data, number_of_frames, number_of_channels);
    fifo_->Push(fifo_input_bus_.get());

    // Keep buffering until the FIFO holds kBufferAudioData buffers; this
    // guards against underrun when the resampler pulls multiple times.
    if (input_preroll_ < kBufferAudioData - 1) {
      ++input_preroll_;
      return scoped_ptr<media::AudioBus>();
    }

    // Output length is the input length rescaled by the rate ratio.
    scoped_ptr<media::AudioBus> output_bus(
        media::AudioBus::Create(
            output_channels_,
            output_sample_rate_ * fifo_input_bus_->frames() / sample_rate));

    // Resampler will then call ProvideData() below to fetch data from
    // |input_data_|.
    resampler_->Resample(output_bus->frames(), output_bus.get());
    return output_bus.Pass();
  }

  // Called on real-time audio thread. Builds the FIFO and resampler when
  // the input sample rate differs from the output rate.
  // NOTE(review): if the format later changes back to the output rate, the
  // previously-created resampler is kept — confirm the format cannot
  // change mid-stream.
  virtual void OnSetFormat(const media::AudioParameters& params) OVERRIDE {
    if (params.sample_rate() == output_sample_rate_)
      return;
    fifo_.reset(new media::AudioFifo(
        output_channels_,
        kBufferAudioData * params.frames_per_buffer()));
    fifo_input_bus_ = media::AudioBus::Create(
        params.channels(), params.frames_per_buffer());
    resampler_.reset(new media::MultiChannelResampler(
        output_channels_,
        static_cast<double>(params.sample_rate()) / output_sample_rate_,
        params.frames_per_buffer(),
        base::Bind(&CastAudioSink::ProvideData, base::Unretained(this))));
  }

  // Add this sink to the track. Data received from the track will be
  // submitted to |frame_input|.
  void AddToTrack(
      const scoped_refptr<media::cast::AudioFrameInput>& frame_input) {
    DCHECK(!sink_added_);
    sink_added_ = true;

    // This member is written here and then accessed on the IO thread.
    // We will not get data until AddToAudioTrack is called so it is
    // safe to access this member now.
    frame_input_ = frame_input;
    AddToAudioTrack(this, track_);
  }

  // Callback for |resampler_|: feeds it |output_bus->frames()| frames
  // consumed from the FIFO.
  void ProvideData(int frame_delay, media::AudioBus* output_bus) {
    fifo_->Consume(output_bus, 0, output_bus->frames());
  }

 private:
  blink::WebMediaStreamTrack track_;
  bool sink_added_;
  CastRtpStream::ErrorCallback error_callback_;
  base::WeakPtrFactory<CastAudioSink> weak_factory_;

  const int output_channels_;
  const int output_sample_rate_;

  // These members are accessed on the real-time audio thread only.
  scoped_refptr<media::cast::AudioFrameInput> frame_input_;
  scoped_ptr<media::MultiChannelResampler> resampler_;
  scoped_ptr<media::AudioFifo> fifo_;
  scoped_ptr<media::AudioBus> fifo_input_bus_;
  int input_preroll_;

  DISALLOW_COPY_AND_ASSIGN(CastAudioSink);
};
423
CastRtpParams::CastRtpParams(const CastRtpPayloadParams& payload_params)
    : payload(payload_params) {}

CastCodecSpecificParams::CastCodecSpecificParams() {}

CastCodecSpecificParams::~CastCodecSpecificParams() {}

// Zero-initializes every numeric field; string members default to empty.
CastRtpPayloadParams::CastRtpPayloadParams()
    : payload_type(0),
      max_latency_ms(0),
      ssrc(0),
      feedback_ssrc(0),
      clock_rate(0),
      max_bitrate(0),
      min_bitrate(0),
      channels(0),
      width(0),
      height(0) {}

CastRtpPayloadParams::~CastRtpPayloadParams() {}

CastRtpParams::CastRtpParams() {}

CastRtpParams::~CastRtpParams() {}
448
// |track| is the media source for this stream; |session| provides the
// Cast sender that frames are ultimately submitted to.
CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track,
                             const scoped_refptr<CastSession>& session)
    : track_(track), cast_session_(session), weak_factory_(this) {}

CastRtpStream::~CastRtpStream() {}
454
GetSupportedParams()455 std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() {
456 if (IsAudio())
457 return SupportedAudioParams();
458 else
459 return SupportedVideoParams();
460 }
461
GetParams()462 CastRtpParams CastRtpStream::GetParams() { return params_; }
463
Start(const CastRtpParams & params,const base::Closure & start_callback,const base::Closure & stop_callback,const ErrorCallback & error_callback)464 void CastRtpStream::Start(const CastRtpParams& params,
465 const base::Closure& start_callback,
466 const base::Closure& stop_callback,
467 const ErrorCallback& error_callback) {
468 VLOG(1) << "CastRtpStream::Start = " << (IsAudio() ? "audio" : "video");
469 stop_callback_ = stop_callback;
470 error_callback_ = error_callback;
471
472 if (IsAudio()) {
473 AudioSenderConfig config;
474 if (!ToAudioSenderConfig(params, &config)) {
475 DidEncounterError("Invalid parameters for audio.");
476 return;
477 }
478
479 // In case of error we have to go through DidEncounterError() to stop
480 // the streaming after reporting the error.
481 audio_sink_.reset(new CastAudioSink(
482 track_,
483 media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
484 weak_factory_.GetWeakPtr())),
485 params.payload.channels,
486 params.payload.clock_rate));
487 cast_session_->StartAudio(
488 config,
489 base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()),
490 base::Bind(&CastRtpStream::DidEncounterError,
491 weak_factory_.GetWeakPtr()));
492 start_callback.Run();
493 } else {
494 VideoSenderConfig config;
495 if (!ToVideoSenderConfig(params, &config)) {
496 DidEncounterError("Invalid parameters for video.");
497 return;
498 }
499 // See the code for audio above for explanation of callbacks.
500 video_sink_.reset(new CastVideoSink(
501 track_,
502 gfx::Size(config.width, config.height),
503 media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
504 weak_factory_.GetWeakPtr()))));
505 cast_session_->StartVideo(
506 config,
507 base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr()),
508 base::Bind(&CastRtpStream::DidEncounterError,
509 weak_factory_.GetWeakPtr()));
510 start_callback.Run();
511 }
512 }
513
// Tears down the sink (detaching from the track and halting the flow of
// media) and runs |stop_callback_| if one was supplied to Start().
void CastRtpStream::Stop() {
  VLOG(1) << "CastRtpStream::Stop = " << (IsAudio() ? "audio" : "video");
  audio_sink_.reset();
  video_sink_.reset();
  if (!stop_callback_.is_null())
    stop_callback_.Run();
}
521
// Enables or disables Cast event logging for this stream's media type.
void CastRtpStream::ToggleLogging(bool enable) {
  cast_session_->ToggleLogging(IsAudio(), enable);
}
525
// Fetches and resets the accumulated raw event logs for this stream,
// tagged with |extra_data|; the serialized logs are delivered to
// |callback|.
void CastRtpStream::GetRawEvents(
    const base::Callback<void(scoped_ptr<base::BinaryValue>)>& callback,
    const std::string& extra_data) {
  cast_session_->GetEventLogsAndReset(IsAudio(), extra_data, callback);
}
531
// Fetches and resets the accumulated stats for this stream, delivered to
// |callback| as a dictionary.
void CastRtpStream::GetStats(
    const base::Callback<void(scoped_ptr<base::DictionaryValue>)>& callback) {
  cast_session_->GetStatsAndReset(IsAudio(), callback);
}
536
// True when |track_|'s source is audio; otherwise this is a video stream.
bool CastRtpStream::IsAudio() const {
  return track_.source().type() == blink::WebMediaStreamSource::TypeAudio;
}
540
// Reports |message| through |error_callback_| and then stops the stream.
void CastRtpStream::DidEncounterError(const std::string& message) {
  // Save the WeakPtr first because the error callback might delete this
  // object.
  base::WeakPtr<CastRtpStream> ptr = weak_factory_.GetWeakPtr();
  error_callback_.Run(message);
  // Stop() is posted rather than called directly so it runs outside the
  // error callback; the WeakPtr makes the task a no-op if |this| has been
  // deleted by then.
  content::RenderThread::Get()->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&CastRtpStream::Stop, ptr));
}
549