// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Test application that simulates a cast sender - Data can be either generated
// or read from a file.

#include <queue>

#include "base/at_exit.h"
#include "base/base_paths.h"
#include "base/command_line.h"
#include "base/file_util.h"
#include "base/files/file_path.h"
#include "base/files/memory_mapped_file.h"
#include "base/files/scoped_file.h"
#include "base/json/json_writer.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/path_service.h"
#include "base/strings/string_number_conversions.h"
#include "base/threading/thread.h"
#include "base/time/default_tick_clock.h"
#include "base/values.h"
#include "media/audio/audio_parameters.h"
#include "media/base/audio_buffer.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_fifo.h"
#include "media/base/audio_timestamp_helper.h"
#include "media/base/media.h"
#include "media/base/multi_channel_resampler.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/cast_sender.h"
#include "media/cast/logging/encoding_event_subscriber.h"
#include "media/cast/logging/log_serializer.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/logging/proto/raw_events.pb.h"
#include "media/cast/logging/receiver_time_offset_estimator_impl.h"
#include "media/cast/logging/stats_event_subscriber.h"
#include "media/cast/test/utility/audio_utility.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/input_builder.h"
#include "media/cast/test/utility/video_utility.h"
#include "media/cast/transport/cast_transport_defines.h"
#include "media/cast/transport/cast_transport_sender.h"
#include "media/cast/transport/transport/udp_transport.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/ffmpeg_deleters.h"
#include "media/filters/audio_renderer_algorithm.h"
#include "media/filters/ffmpeg_demuxer.h"
#include "media/filters/ffmpeg_glue.h"
#include "media/filters/in_memory_url_protocol.h"
#include "ui/gfx/size.h"

namespace {
static const int kAudioChannels = 2;
static const int kAudioSamplingFrequency = 48000;
static const int kSoundFrequency = 1234;  // Frequency of sinusoid wave.
static const float kSoundVolume = 0.5f;
static const int kAudioFrameMs = 10;  // Each audio frame is exactly 10ms.
static const int kAudioPacketsPerSecond = 1000 / kAudioFrameMs;

// The max allowed size of serialized log.
const int kMaxSerializedLogBytes = 10 * 1000 * 1000;

// Flags for this program:
//
// --address=xx.xx.xx.xx
//   IP address of receiver.
//
// --port=xxxx
//   Port number of receiver.
//
// --source-file=xxx.webm
//   WebM file as source of video frames.
//
// --fps=xx
//   Override framerate of the video stream.
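//
// Example invocation (binary name and values are illustrative; a cast
// receiver must already be listening at the given address and port):
//   cast_sender --address=192.168.1.2 --port=2344 \
//       --source-file=video.webm --fps=30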

const char kSwitchAddress[] = "address";
const char kSwitchPort[] = "port";
const char kSwitchSourceFile[] = "source-file";
const char kSwitchFps[] = "fps";

}  // namespace

namespace media {
namespace cast {

AudioSenderConfig GetAudioSenderConfig() {
  AudioSenderConfig audio_config;

  audio_config.rtcp_c_name = "audio_sender@a.b.c.d";

  audio_config.use_external_encoder = false;
  audio_config.frequency = kAudioSamplingFrequency;
  audio_config.channels = kAudioChannels;
  audio_config.bitrate = 64000;
  audio_config.codec = transport::kOpus;
  audio_config.rtp_config.ssrc = 1;
  audio_config.incoming_feedback_ssrc = 2;
  audio_config.rtp_config.payload_type = 127;
  audio_config.rtp_config.max_delay_ms = 300;
  return audio_config;
}

VideoSenderConfig GetVideoSenderConfig() {
  VideoSenderConfig video_config;

  video_config.rtcp_c_name = "video_sender@a.b.c.d";
  video_config.use_external_encoder = false;

  // Resolution.
  video_config.width = 1280;
  video_config.height = 720;
  video_config.max_frame_rate = 30;

  // Bitrates.
  video_config.max_bitrate = 2500000;
  video_config.min_bitrate = 100000;
  video_config.start_bitrate = video_config.min_bitrate;

  // Codec.
  video_config.codec = transport::kVp8;
  video_config.max_number_of_video_buffers_used = 1;
  video_config.number_of_encode_threads = 2;

  // Quality options.
  video_config.min_qp = 4;
  video_config.max_qp = 40;

  // SSRCs and payload type. Don't change them.
  video_config.rtp_config.ssrc = 11;
  video_config.incoming_feedback_ssrc = 12;
  video_config.rtp_config.payload_type = 96;
  video_config.rtp_config.max_delay_ms = 300;
  return video_config;
}

void AVFreeFrame(AVFrame* frame) { av_frame_free(&frame); }

class SendProcess {
 public:
  SendProcess(scoped_refptr<base::SingleThreadTaskRunner> thread_proxy,
              base::TickClock* clock,
              const VideoSenderConfig& video_config)
      : test_app_thread_proxy_(thread_proxy),
        video_config_(video_config),
        synthetic_count_(0),
        clock_(clock),
        audio_frame_count_(0),
        video_frame_count_(0),
        weak_factory_(this),
        av_format_context_(NULL),
        audio_stream_index_(-1),
        playback_rate_(1.0),
        video_stream_index_(-1),
        video_frame_rate_numerator_(video_config.max_frame_rate),
        video_frame_rate_denominator_(1),
        video_first_pts_(0),
        video_first_pts_set_(false) {
    audio_bus_factory_.reset(new TestAudioBusFactory(kAudioChannels,
                                                     kAudioSamplingFrequency,
                                                     kSoundFrequency,
                                                     kSoundVolume));
    const CommandLine* cmd = CommandLine::ForCurrentProcess();
    int override_fps = 0;
    if (base::StringToInt(cmd->GetSwitchValueASCII(kSwitchFps),
                          &override_fps)) {
      video_config_.max_frame_rate = override_fps;
      video_frame_rate_numerator_ = override_fps;
    }

    // Load source file and prepare FFmpeg demuxer.
    base::FilePath source_path = cmd->GetSwitchValuePath(kSwitchSourceFile);
    if (source_path.empty())
      return;

    LOG(INFO) << "Source: " << source_path.value();
    if (!file_data_.Initialize(source_path)) {
      LOG(ERROR) << "Cannot load file.";
      return;
    }
    protocol_.reset(
        new InMemoryUrlProtocol(file_data_.data(), file_data_.length(), false));
    glue_.reset(new FFmpegGlue(protocol_.get()));

    if (!glue_->OpenContext()) {
      LOG(ERROR) << "Cannot open file.";
      return;
    }

    // AVFormatContext is owned by the glue.
    av_format_context_ = glue_->format_context();
    if (avformat_find_stream_info(av_format_context_, NULL) < 0) {
      LOG(ERROR) << "Cannot find stream information.";
      return;
    }

    // Prepare FFmpeg decoders.
    for (unsigned int i = 0; i < av_format_context_->nb_streams; ++i) {
      AVStream* av_stream = av_format_context_->streams[i];
      AVCodecContext* av_codec_context = av_stream->codec;
      AVCodec* av_codec = avcodec_find_decoder(av_codec_context->codec_id);

      if (!av_codec) {
        LOG(ERROR) << "Cannot find decoder for the codec: "
                   << av_codec_context->codec_id;
        continue;
      }

      // Number of threads for decoding.
      av_codec_context->thread_count = 2;
      av_codec_context->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK;
      av_codec_context->request_sample_fmt = AV_SAMPLE_FMT_S16;

      if (avcodec_open2(av_codec_context, av_codec, NULL) < 0) {
        LOG(ERROR) << "Cannot open AVCodecContext for the codec: "
                   << av_codec_context->codec_id;
        return;
      }

      if (av_codec->type == AVMEDIA_TYPE_AUDIO) {
        if (av_codec_context->sample_fmt == AV_SAMPLE_FMT_S16P) {
          LOG(ERROR) << "Audio format not supported.";
          continue;
        }
        ChannelLayout layout = ChannelLayoutToChromeChannelLayout(
            av_codec_context->channel_layout,
            av_codec_context->channels);
        if (layout == CHANNEL_LAYOUT_UNSUPPORTED) {
          LOG(ERROR) << "Unsupported audio channels layout.";
          continue;
        }
        if (audio_stream_index_ != -1) {
          LOG(WARNING) << "Found multiple audio streams.";
        }
        audio_stream_index_ = static_cast<int>(i);
        audio_params_.Reset(
            AudioParameters::AUDIO_PCM_LINEAR,
            layout,
            av_codec_context->channels,
            av_codec_context->channels,
            av_codec_context->sample_rate,
            8 * av_get_bytes_per_sample(av_codec_context->sample_fmt),
            av_codec_context->sample_rate / kAudioPacketsPerSecond);
        LOG(INFO) << "Source file has audio.";
      } else if (av_codec->type == AVMEDIA_TYPE_VIDEO) {
        VideoFrame::Format format =
            PixelFormatToVideoFormat(av_codec_context->pix_fmt);
        if (format != VideoFrame::YV12) {
          LOG(ERROR) << "Cannot handle non YV12 video format: " << format;
          continue;
        }
        if (video_stream_index_ != -1) {
          LOG(WARNING) << "Found multiple video streams.";
        }
        video_stream_index_ = static_cast<int>(i);
        if (!override_fps) {
          video_frame_rate_numerator_ = av_stream->r_frame_rate.num;
          video_frame_rate_denominator_ = av_stream->r_frame_rate.den;
          // Max frame rate is rounded up.
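          // For example, a 30000/1001 (~29.97) fps source yields a
          // max_frame_rate of 30.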
          video_config_.max_frame_rate =
              video_frame_rate_denominator_ +
              video_frame_rate_numerator_ - 1;
          video_config_.max_frame_rate /= video_frame_rate_denominator_;
        } else {
          // If video is played at a manual speed, audio needs to match.
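          // For example, overriding a 30 fps source with --fps=60 gives a
          // playback_rate_ of 2.0, so the audio is time-scaled to play twice
          // as fast.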
          playback_rate_ = 1.0 * override_fps *
              av_stream->r_frame_rate.den / av_stream->r_frame_rate.num;
        }
        LOG(INFO) << "Source file has video.";
      } else {
        LOG(ERROR) << "Unknown stream type; ignore.";
      }
    }

    Rewind();
  }

  ~SendProcess() {
  }

  void Start(scoped_refptr<AudioFrameInput> audio_frame_input,
             scoped_refptr<VideoFrameInput> video_frame_input) {
    audio_frame_input_ = audio_frame_input;
    video_frame_input_ = video_frame_input;

    LOG(INFO) << "Max Frame rate: " << video_config_.max_frame_rate;
    LOG(INFO) << "Real Frame rate: "
              << video_frame_rate_numerator_ << "/"
              << video_frame_rate_denominator_ << " fps.";
    LOG(INFO) << "Audio playback rate: " << playback_rate_;

    if (!is_transcoding_audio() && !is_transcoding_video()) {
      // Send fake patterns.
      test_app_thread_proxy_->PostTask(
          FROM_HERE,
          base::Bind(
              &SendProcess::SendNextFakeFrame,
              base::Unretained(this)));
      return;
    }

    // Send transcoding streams.
    audio_algo_.Initialize(playback_rate_, audio_params_);
    audio_algo_.FlushBuffers();
    audio_fifo_input_bus_ =
        AudioBus::Create(
            audio_params_.channels(), audio_params_.frames_per_buffer());
    // Audio FIFO can carry all data from AudioRendererAlgorithm.
    audio_fifo_.reset(
        new AudioFifo(audio_params_.channels(),
                      audio_algo_.QueueCapacity()));
    audio_resampler_.reset(new media::MultiChannelResampler(
        audio_params_.channels(),
        static_cast<double>(audio_params_.sample_rate()) /
            kAudioSamplingFrequency,
        audio_params_.frames_per_buffer(),
        base::Bind(&SendProcess::ProvideData, base::Unretained(this))));
    test_app_thread_proxy_->PostTask(
        FROM_HERE,
        base::Bind(
            &SendProcess::SendNextFrame,
            base::Unretained(this)));
  }

  void SendNextFakeFrame() {
    gfx::Size size(video_config_.width, video_config_.height);
    scoped_refptr<VideoFrame> video_frame =
        VideoFrame::CreateBlackFrame(size);
    PopulateVideoFrame(video_frame, synthetic_count_);
    ++synthetic_count_;

    base::TimeTicks now = clock_->NowTicks();
    if (start_time_.is_null())
      start_time_ = now;

    base::TimeDelta video_time = VideoFrameTime(video_frame_count_);
    video_frame->set_timestamp(video_time);
    video_frame_input_->InsertRawVideoFrame(video_frame,
                                            start_time_ + video_time);

    // Send just enough audio data to match next video frame's time.
    base::TimeDelta audio_time = AudioFrameTime(audio_frame_count_);
    while (audio_time < video_time) {
      if (is_transcoding_audio()) {
        Decode(true);
        CHECK(!audio_bus_queue_.empty()) << "No audio decoded.";
        scoped_ptr<AudioBus> bus(audio_bus_queue_.front());
        audio_bus_queue_.pop();
        audio_frame_input_->InsertAudio(
            bus.Pass(), start_time_ + audio_time);
      } else {
        audio_frame_input_->InsertAudio(
            audio_bus_factory_->NextAudioBus(
                base::TimeDelta::FromMilliseconds(kAudioFrameMs)),
            start_time_ + audio_time);
      }
      audio_time = AudioFrameTime(++audio_frame_count_);
    }

    // This is the time since the stream started.
    const base::TimeDelta elapsed_time = now - start_time_;

    // Handle the case when frame generation cannot keep up.
    // Move the time ahead to match the next frame.
    while (video_time < elapsed_time) {
      LOG(WARNING) << "Skipping one frame.";
      video_time = VideoFrameTime(++video_frame_count_);
    }

    test_app_thread_proxy_->PostDelayedTask(
        FROM_HERE,
        base::Bind(&SendProcess::SendNextFakeFrame,
                   weak_factory_.GetWeakPtr()),
        video_time - elapsed_time);
  }

  // Return true if a frame was sent.
  bool SendNextTranscodedVideo(base::TimeDelta elapsed_time) {
    if (!is_transcoding_video())
      return false;

    Decode(false);
    if (video_frame_queue_.empty())
      return false;

    scoped_refptr<VideoFrame> decoded_frame =
        video_frame_queue_.front();
    if (elapsed_time < decoded_frame->timestamp())
      return false;

    gfx::Size size(video_config_.width, video_config_.height);
    scoped_refptr<VideoFrame> video_frame =
        VideoFrame::CreateBlackFrame(size);
    video_frame_queue_.pop();
    media::CopyPlane(VideoFrame::kYPlane,
                     decoded_frame->data(VideoFrame::kYPlane),
                     decoded_frame->stride(VideoFrame::kYPlane),
                     decoded_frame->rows(VideoFrame::kYPlane),
                     video_frame);
    media::CopyPlane(VideoFrame::kUPlane,
                     decoded_frame->data(VideoFrame::kUPlane),
                     decoded_frame->stride(VideoFrame::kUPlane),
                     decoded_frame->rows(VideoFrame::kUPlane),
                     video_frame);
    media::CopyPlane(VideoFrame::kVPlane,
                     decoded_frame->data(VideoFrame::kVPlane),
                     decoded_frame->stride(VideoFrame::kVPlane),
                     decoded_frame->rows(VideoFrame::kVPlane),
                     video_frame);

    base::TimeDelta video_time;
    // Use the timestamp from the file if we're transcoding.
    video_time = ScaleTimestamp(decoded_frame->timestamp());
    video_frame_input_->InsertRawVideoFrame(
        video_frame, start_time_ + video_time);

    // Make sure queue is not empty.
    Decode(false);
    return true;
  }

  // Return true if a frame was sent.
  bool SendNextTranscodedAudio(base::TimeDelta elapsed_time) {
    if (!is_transcoding_audio())
      return false;

    Decode(true);
    if (audio_bus_queue_.empty())
      return false;

    base::TimeDelta audio_time = audio_sent_ts_->GetTimestamp();
    if (elapsed_time < audio_time)
      return false;
    scoped_ptr<AudioBus> bus(audio_bus_queue_.front());
    audio_bus_queue_.pop();
    audio_sent_ts_->AddFrames(bus->frames());
    audio_frame_input_->InsertAudio(
        bus.Pass(), start_time_ + audio_time);

    // Make sure queue is not empty.
    Decode(true);
    return true;
  }

  void SendNextFrame() {
    if (start_time_.is_null())
      start_time_ = clock_->NowTicks();

    // Send as much as possible. Audio is sent according to
    // system time.
    while (SendNextTranscodedAudio(clock_->NowTicks() - start_time_));

    // Video is sync'ed to audio.
    while (SendNextTranscodedVideo(audio_sent_ts_->GetTimestamp()));

    if (audio_bus_queue_.empty() && video_frame_queue_.empty()) {
      // Both queues being empty can only mean that we have reached
      // the end of the stream.
      LOG(INFO) << "Rewind.";
      Rewind();
      start_time_ = base::TimeTicks();
      audio_sent_ts_.reset();
      video_first_pts_set_ = false;
    }

    // Schedule the next send.
    test_app_thread_proxy_->PostDelayedTask(
        FROM_HERE,
        base::Bind(
            &SendProcess::SendNextFrame,
            base::Unretained(this)),
        base::TimeDelta::FromMilliseconds(kAudioFrameMs));
  }

  const VideoSenderConfig& get_video_config() const { return video_config_; }

 private:
  bool is_transcoding_audio() { return audio_stream_index_ >= 0; }
  bool is_transcoding_video() { return video_stream_index_ >= 0; }

  // Helper methods to compute timestamps for the frame number specified.
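  // For example, with a 30000/1001 (~29.97) fps stream, VideoFrameTime(30)
  // is 30 * 1001 / 30000 seconds, i.e. roughly 1.001 seconds.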
  base::TimeDelta VideoFrameTime(int frame_number) {
    return frame_number * base::TimeDelta::FromSeconds(1) *
        video_frame_rate_denominator_ / video_frame_rate_numerator_;
  }

  base::TimeDelta ScaleTimestamp(base::TimeDelta timestamp) {
    return base::TimeDelta::FromMicroseconds(
        timestamp.InMicroseconds() / playback_rate_);
  }

  base::TimeDelta AudioFrameTime(int frame_number) {
    return frame_number * base::TimeDelta::FromMilliseconds(kAudioFrameMs);
  }

  // Go to the beginning of the stream.
  void Rewind() {
    CHECK(av_seek_frame(av_format_context_, -1, 0, AVSEEK_FLAG_BACKWARD) >= 0)
        << "Failed to rewind to the beginning.";
  }

  // Call FFmpeg to fetch one packet.
  ScopedAVPacket DemuxOnePacket(bool* audio) {
    ScopedAVPacket packet(new AVPacket());
    if (av_read_frame(av_format_context_, packet.get()) < 0) {
      LOG(ERROR) << "Failed to read one AVPacket.";
      packet.reset();
      return packet.Pass();
    }

    int stream_index = static_cast<int>(packet->stream_index);
    if (stream_index == audio_stream_index_) {
      *audio = true;
    } else if (stream_index == video_stream_index_) {
      *audio = false;
    } else {
      // Ignore unknown packet.
      LOG(INFO) << "Unknown packet.";
      packet.reset();
    }
    return packet.Pass();
  }

  void DecodeAudio(ScopedAVPacket packet) {
    // Audio.
    AVFrame* avframe = av_frame_alloc();

    // Make a shallow copy of packet so we can slide packet.data as frames are
    // decoded from the packet; otherwise av_free_packet() will corrupt memory.
    AVPacket packet_temp = *packet.get();

    do {
      int frame_decoded = 0;
      int result = avcodec_decode_audio4(
          av_audio_context(), avframe, &frame_decoded, &packet_temp);
      CHECK(result >= 0) << "Failed to decode audio.";
      packet_temp.size -= result;
      packet_temp.data += result;
      if (!frame_decoded)
        continue;

      int frames_read = avframe->nb_samples;
      if (frames_read < 0)
        break;

      if (!audio_sent_ts_) {
        // Initialize the base time to the first packet in the file.
        // This is set to the frequency we send to the receiver.
        // Not the frequency of the source file. This is because we
        // increment the frame count by samples we sent.
        audio_sent_ts_.reset(
            new AudioTimestampHelper(kAudioSamplingFrequency));
        // For some files this is an invalid value.
        base::TimeDelta base_ts;
        audio_sent_ts_->SetBaseTimestamp(base_ts);
      }

      scoped_refptr<AudioBuffer> buffer =
          AudioBuffer::CopyFrom(
              AVSampleFormatToSampleFormat(
                  av_audio_context()->sample_fmt),
              ChannelLayoutToChromeChannelLayout(
                  av_audio_context()->channel_layout,
                  av_audio_context()->channels),
              av_audio_context()->channels,
              av_audio_context()->sample_rate,
              frames_read,
              &avframe->data[0],
              // Note: Not all files have correct values for pkt_pts.
              base::TimeDelta::FromMilliseconds(avframe->pkt_pts));
      audio_algo_.EnqueueBuffer(buffer);
      av_frame_unref(avframe);
    } while (packet_temp.size > 0);
    av_frame_free(&avframe);

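    // Approximate number of source-rate frames consumed per 10 ms output
    // block at the current playback rate (e.g., 480 frames for a 48 kHz
    // source at playback rate 1.0).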
    const int frames_needed_to_scale =
        playback_rate_ * av_audio_context()->sample_rate /
        kAudioPacketsPerSecond;
    while (frames_needed_to_scale <= audio_algo_.frames_buffered()) {
      if (!audio_algo_.FillBuffer(audio_fifo_input_bus_.get(),
                                  audio_fifo_input_bus_->frames())) {
        // Nothing can be scaled. Decode some more.
        return;
      }

      // Prevent overflow of audio data in the FIFO.
      if (audio_fifo_input_bus_->frames() + audio_fifo_->frames()
          <= audio_fifo_->max_frames()) {
        audio_fifo_->Push(audio_fifo_input_bus_.get());
      } else {
        LOG(WARNING) << "Audio FIFO full; dropping samples.";
      }

      // Make sure there's enough data to resample audio.
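      // (At least two 10 ms buffers' worth at the source sample rate.)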
      if (audio_fifo_->frames() <
          2 * audio_params_.sample_rate() / kAudioPacketsPerSecond) {
        continue;
      }

      scoped_ptr<media::AudioBus> resampled_bus(
          media::AudioBus::Create(
              audio_params_.channels(),
              kAudioSamplingFrequency / kAudioPacketsPerSecond));
      audio_resampler_->Resample(resampled_bus->frames(),
                                 resampled_bus.get());
      audio_bus_queue_.push(resampled_bus.release());
    }
  }

  void DecodeVideo(ScopedAVPacket packet) {
    // Video.
    int got_picture;
    AVFrame* avframe = av_frame_alloc();
    // Tell the decoder to reorder for us.
    avframe->reordered_opaque =
        av_video_context()->reordered_opaque = packet->pts;
    CHECK(avcodec_decode_video2(
        av_video_context(), avframe, &got_picture, packet.get()) >= 0)
        << "Video decode error.";
    if (!got_picture) {
      av_frame_free(&avframe);
      return;
    }
    gfx::Size size(av_video_context()->width, av_video_context()->height);
    if (!video_first_pts_set_ ||
        avframe->reordered_opaque < video_first_pts_) {
      video_first_pts_set_ = true;
      video_first_pts_ = avframe->reordered_opaque;
    }
    int64 pts = avframe->reordered_opaque - video_first_pts_;
    video_frame_queue_.push(
        VideoFrame::WrapExternalYuvData(
            media::VideoFrame::YV12,
            size,
            gfx::Rect(size),
            size,
            avframe->linesize[0],
            avframe->linesize[1],
            avframe->linesize[2],
            avframe->data[0],
            avframe->data[1],
            avframe->data[2],
            base::TimeDelta::FromMilliseconds(pts),
            base::Bind(&AVFreeFrame, avframe)));
  }

  void Decode(bool decode_audio) {
    // Read the stream until at least one frame of the requested type
    // (audio or video) can be decoded.
    while (true) {
      if (decode_audio && !audio_bus_queue_.empty())
        return;
      if (!decode_audio && !video_frame_queue_.empty())
        return;

      bool audio_packet = false;
      ScopedAVPacket packet = DemuxOnePacket(&audio_packet);
      if (!packet) {
        LOG(INFO) << "End of stream.";
        return;
      }

      if (audio_packet)
        DecodeAudio(packet.Pass());
      else
        DecodeVideo(packet.Pass());
    }
  }

  void ProvideData(int frame_delay, media::AudioBus* output_bus) {
    if (audio_fifo_->frames() >= output_bus->frames()) {
      audio_fifo_->Consume(output_bus, 0, output_bus->frames());
    } else {
      LOG(WARNING) << "Not enough audio data for resampling.";
      output_bus->Zero();
    }
  }

  AVStream* av_audio_stream() {
    return av_format_context_->streams[audio_stream_index_];
  }
  AVStream* av_video_stream() {
    return av_format_context_->streams[video_stream_index_];
  }
  AVCodecContext* av_audio_context() { return av_audio_stream()->codec; }
  AVCodecContext* av_video_context() { return av_video_stream()->codec; }

  scoped_refptr<base::SingleThreadTaskRunner> test_app_thread_proxy_;
  VideoSenderConfig video_config_;
  scoped_refptr<AudioFrameInput> audio_frame_input_;
  scoped_refptr<VideoFrameInput> video_frame_input_;
  uint8 synthetic_count_;
  base::TickClock* const clock_;  // Not owned by this class.

  // Time when the stream starts.
  base::TimeTicks start_time_;

  // The following three members are used only for fake frames.
  int audio_frame_count_;  // Each audio frame is exactly 10ms.
  int video_frame_count_;
  scoped_ptr<TestAudioBusFactory> audio_bus_factory_;

  // NOTE: Weak pointers must be invalidated before all other member variables.
  base::WeakPtrFactory<SendProcess> weak_factory_;

  base::MemoryMappedFile file_data_;
  scoped_ptr<InMemoryUrlProtocol> protocol_;
  scoped_ptr<FFmpegGlue> glue_;
  AVFormatContext* av_format_context_;

  int audio_stream_index_;
  AudioParameters audio_params_;
  double playback_rate_;

  int video_stream_index_;
  int video_frame_rate_numerator_;
  int video_frame_rate_denominator_;

  // These are used for audio resampling.
  scoped_ptr<media::MultiChannelResampler> audio_resampler_;
  scoped_ptr<media::AudioFifo> audio_fifo_;
  scoped_ptr<media::AudioBus> audio_fifo_input_bus_;
  media::AudioRendererAlgorithm audio_algo_;

  // Track the timestamp of audio sent to the receiver.
  scoped_ptr<media::AudioTimestampHelper> audio_sent_ts_;

  std::queue<scoped_refptr<VideoFrame> > video_frame_queue_;
  int64 video_first_pts_;
  bool video_first_pts_set_;

  std::queue<AudioBus*> audio_bus_queue_;

  DISALLOW_COPY_AND_ASSIGN(SendProcess);
};

}  // namespace cast
}  // namespace media

namespace {
void UpdateCastTransportStatus(
    media::cast::transport::CastTransportStatus status) {
  VLOG(1) << "Transport status: " << status;
}

void LogRawEvents(
    const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
    const std::vector<media::cast::PacketEvent>& packet_events) {
  VLOG(1) << "Got packet events from transport, size: " << packet_events.size();
  for (std::vector<media::cast::PacketEvent>::const_iterator it =
           packet_events.begin();
       it != packet_events.end();
       ++it) {
    cast_environment->Logging()->InsertPacketEvent(it->timestamp,
                                                   it->type,
                                                   it->media_type,
                                                   it->rtp_timestamp,
                                                   it->frame_id,
                                                   it->packet_id,
                                                   it->max_packet_id,
                                                   it->size);
  }
}

void InitializationResult(media::cast::CastInitializationStatus result) {
  bool end_result = result == media::cast::STATUS_AUDIO_INITIALIZED ||
                    result == media::cast::STATUS_VIDEO_INITIALIZED;
  CHECK(end_result) << "Cast sender uninitialized";
}

net::IPEndPoint CreateUDPAddress(std::string ip_str, int port) {
  net::IPAddressNumber ip_number;
  CHECK(net::ParseIPLiteralToNumber(ip_str, &ip_number));
  return net::IPEndPoint(ip_number, port);
}

void DumpLoggingData(const media::cast::proto::LogMetadata& log_metadata,
                     const media::cast::FrameEventList& frame_events,
                     const media::cast::PacketEventList& packet_events,
                     base::ScopedFILE log_file) {
  VLOG(0) << "Frame map size: " << frame_events.size();
  VLOG(0) << "Packet map size: " << packet_events.size();

  scoped_ptr<char[]> event_log(new char[kMaxSerializedLogBytes]);
  int event_log_bytes;
  if (!media::cast::SerializeEvents(log_metadata,
                                    frame_events,
                                    packet_events,
                                    true,
                                    kMaxSerializedLogBytes,
                                    event_log.get(),
                                    &event_log_bytes)) {
    VLOG(0) << "Failed to serialize events.";
    return;
  }

  VLOG(0) << "Events serialized length: " << event_log_bytes;

  int ret = fwrite(event_log.get(), 1, event_log_bytes, log_file.get());
  if (ret != event_log_bytes)
    VLOG(0) << "Failed to write logs to file.";
}

void WriteLogsToFileAndDestroySubscribers(
    const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
    scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber,
    scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber,
    base::ScopedFILE video_log_file,
    base::ScopedFILE audio_log_file) {
  cast_environment->Logging()->RemoveRawEventSubscriber(
      video_event_subscriber.get());
  cast_environment->Logging()->RemoveRawEventSubscriber(
      audio_event_subscriber.get());

  VLOG(0) << "Dumping logging data for video stream.";
  media::cast::proto::LogMetadata log_metadata;
  media::cast::FrameEventList frame_events;
  media::cast::PacketEventList packet_events;
  video_event_subscriber->GetEventsAndReset(
      &log_metadata, &frame_events, &packet_events);

  DumpLoggingData(log_metadata,
                  frame_events,
                  packet_events,
                  video_log_file.Pass());

  VLOG(0) << "Dumping logging data for audio stream.";
  audio_event_subscriber->GetEventsAndReset(
      &log_metadata, &frame_events, &packet_events);

  DumpLoggingData(log_metadata,
                  frame_events,
                  packet_events,
                  audio_log_file.Pass());
}

void WriteStatsAndDestroySubscribers(
    const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
    scoped_ptr<media::cast::StatsEventSubscriber> video_event_subscriber,
    scoped_ptr<media::cast::StatsEventSubscriber> audio_event_subscriber,
    scoped_ptr<media::cast::ReceiverTimeOffsetEstimatorImpl> estimator) {
  cast_environment->Logging()->RemoveRawEventSubscriber(
      video_event_subscriber.get());
  cast_environment->Logging()->RemoveRawEventSubscriber(
      audio_event_subscriber.get());
  cast_environment->Logging()->RemoveRawEventSubscriber(estimator.get());

  scoped_ptr<base::DictionaryValue> stats = video_event_subscriber->GetStats();
  std::string json;
  base::JSONWriter::WriteWithOptions(
      stats.get(), base::JSONWriter::OPTIONS_PRETTY_PRINT, &json);
  VLOG(0) << "Video stats: " << json;

  stats = audio_event_subscriber->GetStats();
  json.clear();
  base::JSONWriter::WriteWithOptions(
      stats.get(), base::JSONWriter::OPTIONS_PRETTY_PRINT, &json);
  VLOG(0) << "Audio stats: " << json;
}

}  // namespace

int main(int argc, char** argv) {
  base::AtExitManager at_exit;
  CommandLine::Init(argc, argv);
  InitLogging(logging::LoggingSettings());

  // Load the media module for FFmpeg decoding.
  base::FilePath path;
  PathService::Get(base::DIR_MODULE, &path);
  if (!media::InitializeMediaLibrary(path)) {
    LOG(ERROR) << "Could not initialize media library.";
    return 1;
  }

  base::Thread test_thread("Cast sender test app thread");
  base::Thread audio_thread("Cast audio encoder thread");
  base::Thread video_thread("Cast video encoder thread");
  test_thread.Start();
  audio_thread.Start();
  video_thread.Start();

  base::MessageLoopForIO io_message_loop;

  // Default parameters.
  CommandLine* cmd = CommandLine::ForCurrentProcess();
  std::string remote_ip_address = cmd->GetSwitchValueASCII(kSwitchAddress);
  if (remote_ip_address.empty())
    remote_ip_address = "127.0.0.1";
  int remote_port = 0;
  if (!base::StringToInt(cmd->GetSwitchValueASCII(kSwitchPort),
                         &remote_port)) {
    remote_port = 2344;
  }
  LOG(INFO) << "Sending to " << remote_ip_address << ":" << remote_port
            << ".";

  media::cast::AudioSenderConfig audio_config =
      media::cast::GetAudioSenderConfig();
  media::cast::VideoSenderConfig video_config =
      media::cast::GetVideoSenderConfig();

  // Running transport on the main thread.
  // Setting up transport config.
  net::IPEndPoint remote_endpoint =
      CreateUDPAddress(remote_ip_address, remote_port);

  // Enable raw event and stats logging.
  scoped_refptr<media::cast::CastEnvironment> cast_environment(
      new media::cast::CastEnvironment(
          make_scoped_ptr<base::TickClock>(new base::DefaultTickClock()),
          io_message_loop.message_loop_proxy(),
          audio_thread.message_loop_proxy(),
          video_thread.message_loop_proxy()));

  // SendProcess initialization.
  scoped_ptr<media::cast::SendProcess> send_process(
      new media::cast::SendProcess(test_thread.message_loop_proxy(),
                                   cast_environment->Clock(),
                                   video_config));

  // CastTransportSender initialization.
  scoped_ptr<media::cast::transport::CastTransportSender> transport_sender =
      media::cast::transport::CastTransportSender::Create(
          NULL,  // net log.
          cast_environment->Clock(),
          remote_endpoint,
          base::Bind(&UpdateCastTransportStatus),
          base::Bind(&LogRawEvents, cast_environment),
          base::TimeDelta::FromSeconds(1),
          io_message_loop.message_loop_proxy());

  // CastSender initialization.
  scoped_ptr<media::cast::CastSender> cast_sender =
      media::cast::CastSender::Create(cast_environment, transport_sender.get());
  cast_sender->InitializeVideo(
      send_process->get_video_config(),
      base::Bind(&InitializationResult),
      media::cast::CreateDefaultVideoEncodeAcceleratorCallback(),
      media::cast::CreateDefaultVideoEncodeMemoryCallback());
  cast_sender->InitializeAudio(audio_config, base::Bind(&InitializationResult));
  transport_sender->SetPacketReceiver(cast_sender->packet_receiver());

  // Set up event subscribers.
  scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber;
  scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber;
  std::string video_log_file_name("/tmp/video_events.log.gz");
  std::string audio_log_file_name("/tmp/audio_events.log.gz");
  LOG(INFO) << "Logging audio events to: " << audio_log_file_name;
  LOG(INFO) << "Logging video events to: " << video_log_file_name;
  video_event_subscriber.reset(new media::cast::EncodingEventSubscriber(
      media::cast::VIDEO_EVENT, 10000));
  audio_event_subscriber.reset(new media::cast::EncodingEventSubscriber(
      media::cast::AUDIO_EVENT, 10000));
  cast_environment->Logging()->AddRawEventSubscriber(
      video_event_subscriber.get());
  cast_environment->Logging()->AddRawEventSubscriber(
      audio_event_subscriber.get());

  // Subscribers for stats.
  scoped_ptr<media::cast::ReceiverTimeOffsetEstimatorImpl> offset_estimator(
      new media::cast::ReceiverTimeOffsetEstimatorImpl);
  cast_environment->Logging()->AddRawEventSubscriber(offset_estimator.get());
  scoped_ptr<media::cast::StatsEventSubscriber> video_stats_subscriber(
      new media::cast::StatsEventSubscriber(media::cast::VIDEO_EVENT,
                                            cast_environment->Clock(),
                                            offset_estimator.get()));
  scoped_ptr<media::cast::StatsEventSubscriber> audio_stats_subscriber(
      new media::cast::StatsEventSubscriber(media::cast::AUDIO_EVENT,
                                            cast_environment->Clock(),
                                            offset_estimator.get()));
  cast_environment->Logging()->AddRawEventSubscriber(
      video_stats_subscriber.get());
  cast_environment->Logging()->AddRawEventSubscriber(
      audio_stats_subscriber.get());

  base::ScopedFILE video_log_file(fopen(video_log_file_name.c_str(), "w"));
  if (!video_log_file) {
    VLOG(1) << "Failed to open video log file for writing.";
    exit(-1);
  }

  base::ScopedFILE audio_log_file(fopen(audio_log_file_name.c_str(), "w"));
  if (!audio_log_file) {
    VLOG(1) << "Failed to open audio log file for writing.";
    exit(-1);
  }

  const int logging_duration_seconds = 10;
  io_message_loop.message_loop_proxy()->PostDelayedTask(
      FROM_HERE,
      base::Bind(&WriteLogsToFileAndDestroySubscribers,
                 cast_environment,
                 base::Passed(&video_event_subscriber),
                 base::Passed(&audio_event_subscriber),
                 base::Passed(&video_log_file),
                 base::Passed(&audio_log_file)),
      base::TimeDelta::FromSeconds(logging_duration_seconds));

  io_message_loop.message_loop_proxy()->PostDelayedTask(
      FROM_HERE,
      base::Bind(&WriteStatsAndDestroySubscribers,
                 cast_environment,
                 base::Passed(&video_stats_subscriber),
                 base::Passed(&audio_stats_subscriber),
                 base::Passed(&offset_estimator)),
      base::TimeDelta::FromSeconds(logging_duration_seconds));

  send_process->Start(cast_sender->audio_frame_input(),
                      cast_sender->video_frame_input());

  io_message_loop.Run();
  return 0;
}