// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <algorithm>
#include <cmath>

#include "base/command_line.h"
#include "base/float_util.h"
#include "base/run_loop.h"
#include "base/strings/stringprintf.h"
#include "base/synchronization/lock.h"
#include "base/time/time.h"
#include "chrome/browser/extensions/extension_apitest.h"
#include "chrome/common/chrome_switches.h"
#include "content/public/common/content_switches.h"
#include "extensions/common/switches.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/test/utility/audio_utility.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/in_process_receiver.h"
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "net/base/net_errors.h"
#include "net/base/net_util.h"
#include "net/base/rand_callback.h"
#include "net/udp/udp_socket.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace extensions {

class CastStreamingApiTest : public ExtensionApiTest {
 public:
  virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
    ExtensionApiTest::SetUpCommandLine(command_line);
    command_line->AppendSwitchASCII(
        extensions::switches::kWhitelistedExtensionID,
        "ddchlicdkolnonkihahngkmmmjnjlkkf");
  }
};

// Test running the test extension for Cast Mirroring API.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Basics) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "basics.html")) << message_;
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Stats) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "stats.html")) << message_;
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, BadLogging) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "bad_logging.html"))
      << message_;
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, DestinationNotSet) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "destination_not_set.html"))
      << message_;
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, StopNoStart) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "stop_no_start.html"))
      << message_;
}

namespace {

// An in-process Cast receiver that examines the audio/video frames being
// received for expected colors and tones.  Used in
// CastStreamingApiTest.EndToEnd, below.
class TestPatternReceiver : public media::cast::InProcessReceiver {
 public:
  explicit TestPatternReceiver(
      const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
      const net::IPEndPoint& local_end_point)
      : InProcessReceiver(cast_environment,
                          local_end_point,
                          net::IPEndPoint(),
                          media::cast::GetDefaultAudioReceiverConfig(),
                          media::cast::GetDefaultVideoReceiverConfig()),
        target_tone_frequency_(0),
        current_tone_frequency_(0.0f) {
    memset(&target_color_, 0, sizeof(target_color_));
    memset(&current_color_, 0, sizeof(current_color_));
  }

  virtual ~TestPatternReceiver() {}

  // Blocks the caller until this receiver has seen both |yuv_color| and
  // |tone_frequency| consistently for the given |duration|.
  void WaitForColorAndTone(const uint8 yuv_color[3],
                           int tone_frequency,
                           base::TimeDelta duration) {
    LOG(INFO) << "Waiting for test pattern: color=yuv("
              << static_cast<int>(yuv_color[0]) << ", "
              << static_cast<int>(yuv_color[1]) << ", "
              << static_cast<int>(yuv_color[2])
              << "), tone_frequency=" << tone_frequency << " Hz";

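    // Post the matching request to the receiver's MAIN thread, then block this
    // thread until NotifyIfMatched() runs the quit closure (which is bounced
    // back to this loop via media::BindToCurrentLoop).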
    base::RunLoop run_loop;
    cast_env()->PostTask(
        media::cast::CastEnvironment::MAIN,
        FROM_HERE,
        base::Bind(&TestPatternReceiver::NotifyOnceMatched,
                   base::Unretained(this),
                   yuv_color,
                   tone_frequency,
                   duration,
                   media::BindToCurrentLoop(run_loop.QuitClosure())));
    run_loop.Run();
  }

 private:
  // Resets tracking data and sets the match duration and callback.
  void NotifyOnceMatched(const uint8 yuv_color[3],
                         int tone_frequency,
                         base::TimeDelta match_duration,
                         const base::Closure& matched_callback) {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    match_duration_ = match_duration;
    matched_callback_ = matched_callback;
    target_color_[0] = yuv_color[0];
    target_color_[1] = yuv_color[1];
    target_color_[2] = yuv_color[2];
    target_tone_frequency_ = tone_frequency;
    first_time_near_target_color_ = base::TimeTicks();
    first_time_near_target_tone_ = base::TimeTicks();
  }

  // Runs |matched_callback_| once both color and tone have been matched for the
  // required |match_duration_|.
  void NotifyIfMatched() {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    // TODO(miu): Check audio tone too, once audio is fixed in the library.
    // http://crbug.com/349295
    if (first_time_near_target_color_.is_null() ||
        /*first_time_near_target_tone_.is_null()*/ false)
      return;
    const base::TimeTicks now = cast_env()->Clock()->NowTicks();
    if ((now - first_time_near_target_color_) >= match_duration_ &&
        /*(now - first_time_near_target_tone_) >= match_duration_*/ true) {
      matched_callback_.Run();
    }
  }

  // Invoked by InProcessReceiver for each received audio frame.
  virtual void OnAudioFrame(scoped_ptr<media::AudioBus> audio_frame,
                            const base::TimeTicks& playout_time,
                            bool is_continuous) OVERRIDE {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    if (audio_frame->frames() <= 0) {
      NOTREACHED() << "OnAudioFrame called with no samples?!?";
      return;
    }

    // Assume the audio signal is a single sine wave (it can have some
    // low-amplitude noise).  Count zero crossings, and extrapolate the
    // frequency of the sine wave in |audio_frame|.
    int crossings = 0;
    for (int ch = 0; ch < audio_frame->channels(); ++ch) {
      crossings += media::cast::CountZeroCrossings(audio_frame->channel(ch),
                                                   audio_frame->frames());
    }
    crossings /= audio_frame->channels();  // Take the average.
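    // A sine wave crosses zero twice per cycle, so the tone frequency in Hz is
    // (average crossings) / (frame duration in seconds) / 2, computed below.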
    const float seconds_per_frame =
        audio_frame->frames() / static_cast<float>(audio_config().frequency);
    const float frequency_in_frame = crossings / seconds_per_frame / 2.0f;

    const float kAveragingWeight = 0.1f;
    UpdateExponentialMovingAverage(
        kAveragingWeight, frequency_in_frame, &current_tone_frequency_);
    VLOG(1) << "Current audio tone frequency: " << current_tone_frequency_;

    const float kTargetWindowHz = 20;
    // Update the time at which the current tone started falling within
    // kTargetWindowHz of the target tone.
    if (fabsf(current_tone_frequency_ - target_tone_frequency_) <
        kTargetWindowHz) {
      if (first_time_near_target_tone_.is_null())
        first_time_near_target_tone_ = cast_env()->Clock()->NowTicks();
      NotifyIfMatched();
    } else {
      first_time_near_target_tone_ = base::TimeTicks();
    }
  }

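  // Invoked by InProcessReceiver for each received video frame.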
  virtual void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
                            const base::TimeTicks& render_time,
                            bool is_continuous) OVERRIDE {
    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    CHECK(video_frame->format() == media::VideoFrame::YV12 ||
          video_frame->format() == media::VideoFrame::I420 ||
          video_frame->format() == media::VideoFrame::YV12A);

    // Note: We take the median value of each plane because the test image will
    // contain mostly a solid color plus some "cruft" which is the "Testing..."
    // text in the upper-left corner of the video frame.  In other words, we
    // want to read "the most common color."
    const int kPlanes[] = {media::VideoFrame::kYPlane,
                           media::VideoFrame::kUPlane,
                           media::VideoFrame::kVPlane};
    for (size_t i = 0; i < arraysize(kPlanes); ++i) {
      current_color_[i] =
          ComputeMedianIntensityInPlane(video_frame->row_bytes(kPlanes[i]),
                                        video_frame->rows(kPlanes[i]),
                                        video_frame->stride(kPlanes[i]),
                                        video_frame->data(kPlanes[i]));
    }

    VLOG(1) << "Current video color: yuv(" << current_color_[0] << ", "
            << current_color_[1] << ", " << current_color_[2] << ')';

    const float kTargetWindow = 10.0f;
    // Update the time at which all color channels started falling within
    // kTargetWindow of the target.
    if (fabsf(current_color_[0] - target_color_[0]) < kTargetWindow &&
        fabsf(current_color_[1] - target_color_[1]) < kTargetWindow &&
        fabsf(current_color_[2] - target_color_[2]) < kTargetWindow) {
      if (first_time_near_target_color_.is_null())
        first_time_near_target_color_ = cast_env()->Clock()->NowTicks();
      NotifyIfMatched();
    } else {
      first_time_near_target_color_ = base::TimeTicks();
    }
  }

  static void UpdateExponentialMovingAverage(float weight,
                                             float sample_value,
                                             float* average) {
    *average = weight * sample_value + (1.0f - weight) * (*average);
    CHECK(base::IsFinite(*average));
  }

  static uint8 ComputeMedianIntensityInPlane(int width,
                                             int height,
                                             int stride,
                                             uint8* data) {
    const int num_pixels = width * height;
    if (num_pixels <= 0)
      return 0;
    // If necessary, re-pack the pixels such that the stride is equal to the
    // width.
    if (width < stride) {
      for (int y = 1; y < height; ++y) {
        uint8* const src = data + y * stride;
        uint8* const dest = data + y * width;
        memmove(dest, src, width);
      }
    }
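    // std::nth_element partially sorts |data| in place (O(n) on average) so
    // that the median value ends up at |middle_idx|.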
    const size_t middle_idx = num_pixels / 2;
    std::nth_element(data, data + middle_idx, data + num_pixels);
    return data[middle_idx];
  }

  base::TimeDelta match_duration_;
  base::Closure matched_callback_;

  float target_color_[3];  // Y, U, V
  float target_tone_frequency_;

  float current_color_[3];  // Y, U, V
  base::TimeTicks first_time_near_target_color_;
  float current_tone_frequency_;
  base::TimeTicks first_time_near_target_tone_;

  DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver);
};

}  // namespace

class CastStreamingApiTestWithPixelOutput : public CastStreamingApiTest {
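  // Pixel output is enabled so that the captured tab content contains real
  // rendered frames; the end-to-end test below checks actual colors.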
  virtual void SetUp() OVERRIDE {
    EnablePixelOutput();
    CastStreamingApiTest::SetUp();
  }

  virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
    command_line->AppendSwitchASCII(::switches::kWindowSize, "128,128");
    CastStreamingApiTest::SetUpCommandLine(command_line);
  }
};

// Tests the Cast streaming API and its basic functionality end-to-end.  An
// extension subtest is run to generate test content, capture that content, and
// use the API to send it out.  At the same time, this test launches an
// in-process Cast receiver, listening on a localhost UDP socket, to receive the
// content and check whether it matches expectations.
//
// Note: This test is disabled until outstanding bugs are fixed and the
// media/cast library has achieved sufficient stability.
// http://crbug.com/349599
IN_PROC_BROWSER_TEST_F(CastStreamingApiTestWithPixelOutput, DISABLED_EndToEnd) {
  // Determine an unused UDP port for the in-process receiver to listen on.
  // Method: Bind a UDP socket on port 0, and then check which port the
  // operating system assigned to it.
  net::IPAddressNumber localhost;
  localhost.push_back(127);
  localhost.push_back(0);
  localhost.push_back(0);
  localhost.push_back(1);
  scoped_ptr<net::UDPSocket> receive_socket(
      new net::UDPSocket(net::DatagramSocket::DEFAULT_BIND,
                         net::RandIntCallback(),
                         NULL,
                         net::NetLog::Source()));
  receive_socket->AllowAddressReuse();
  ASSERT_EQ(net::OK, receive_socket->Bind(net::IPEndPoint(localhost, 0)));
  net::IPEndPoint receiver_end_point;
  ASSERT_EQ(net::OK, receive_socket->GetLocalAddress(&receiver_end_point));
  receive_socket.reset();
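  // Note: Another process could, in principle, claim the port in the window
  // between closing this socket and the receiver binding below; on localhost
  // in a test environment this race is considered acceptable.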

  // Start the in-process receiver that examines audio/video for the expected
  // test patterns.
  const scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment(
      new media::cast::StandaloneCastEnvironment());
  TestPatternReceiver* const receiver =
      new TestPatternReceiver(cast_environment, receiver_end_point);
  receiver->Start();

  // Launch the page that: 1) renders the source content; 2) uses the
  // chrome.tabCapture and chrome.cast.streaming APIs to capture its content and
  // stream it using Cast; and 3) calls chrome.test.succeed() once it is
  // operational.
  const std::string page_url = base::StringPrintf(
      "end_to_end_sender.html?port=%d", receiver_end_point.port());
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_;

  // Examine the Cast receiver for expected audio/video test patterns.  The
  // colors and tones specified here must match those in end_to_end_sender.js.
  const uint8 kRedInYUV[3] = {82, 90, 240};    // rgb(255, 0, 0)
  const uint8 kGreenInYUV[3] = {145, 54, 34};  // rgb(0, 255, 0)
  const uint8 kBlueInYUV[3] = {41, 240, 110};  // rgb(0, 0, 255)
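  // (These YUV triplets are the BT.601 video-range equivalents of the RGB
  // colors noted in the trailing comments.)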
  const base::TimeDelta kOneHalfSecond = base::TimeDelta::FromMilliseconds(500);
  receiver->WaitForColorAndTone(kRedInYUV, 200 /* Hz */, kOneHalfSecond);
  receiver->WaitForColorAndTone(kGreenInYUV, 500 /* Hz */, kOneHalfSecond);
  receiver->WaitForColorAndTone(kBlueInYUV, 1800 /* Hz */, kOneHalfSecond);

  receiver->Stop();
  cast_environment->Shutdown();
}

IN_PROC_BROWSER_TEST_F(CastStreamingApiTestWithPixelOutput, RtpStreamError) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "rtp_stream_error.html"));
}

}  // namespace extensions