1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <map>
6 #include <vector>
7
8 #include "base/basictypes.h"
9 #include "base/command_line.h"
10 #if defined(OS_MACOSX)
11 #include "base/mac/mac_util.h"
12 #endif
13 #include "base/strings/stringprintf.h"
14 #include "base/test/trace_event_analyzer.h"
15 #include "base/time/default_tick_clock.h"
16 #include "base/win/windows_version.h"
17 #include "chrome/browser/extensions/extension_apitest.h"
18 #include "chrome/browser/extensions/extension_service.h"
19 #include "chrome/browser/extensions/extension_test_message_listener.h"
20 #include "chrome/browser/extensions/tab_helper.h"
21 #include "chrome/browser/profiles/profile.h"
22 #include "chrome/browser/ui/fullscreen/fullscreen_controller.h"
23 #include "chrome/common/chrome_switches.h"
24 #include "chrome/common/chrome_version_info.h"
25 #include "chrome/test/base/test_launcher_utils.h"
26 #include "chrome/test/base/test_switches.h"
27 #include "chrome/test/base/tracing.h"
28 #include "content/public/browser/render_process_host.h"
29 #include "content/public/browser/render_view_host.h"
30 #include "content/public/common/content_switches.h"
31 #include "extensions/common/feature_switch.h"
32 #include "extensions/common/features/base_feature_provider.h"
33 #include "extensions/common/features/complex_feature.h"
34 #include "extensions/common/features/feature.h"
35 #include "extensions/common/features/simple_feature.h"
36 #include "extensions/common/switches.h"
37 #include "media/base/audio_bus.h"
38 #include "media/base/video_frame.h"
39 #include "media/cast/cast_config.h"
40 #include "media/cast/cast_environment.h"
41 #include "media/cast/test/utility/audio_utility.h"
42 #include "media/cast/test/utility/barcode.h"
43 #include "media/cast/test/utility/default_config.h"
44 #include "media/cast/test/utility/in_process_receiver.h"
45 #include "media/cast/test/utility/standalone_cast_environment.h"
46 #include "media/cast/test/utility/udp_proxy.h"
47 #include "net/base/ip_endpoint.h"
48 #include "net/base/net_errors.h"
49 #include "net/base/net_util.h"
50 #include "net/base/rand_callback.h"
51 #include "net/udp/udp_socket.h"
52 #include "testing/gtest/include/gtest/gtest.h"
53 #include "testing/perf/perf_test.h"
54 #include "ui/compositor/compositor_switches.h"
55 #include "ui/gl/gl_switches.h"
56
57 namespace {
58
// ID of the cast-streaming test extension; it is whitelisted via
// extensions::switches::kWhitelistedExtensionID in SetUpCommandLine() so it
// may use the Cast streaming APIs.
const char kExtensionId[] = "ddchlicdkolnonkihahngkmmmjnjlkkf";

// Skip a few events from the beginning.
// (The first events of a run are startup-noisy; all analyses ignore them.)
static const size_t kSkipEvents = 3;
63
// Bitmask of test variations; each test instantiation (see
// INSTANTIATE_TEST_CASE_P at the bottom of this file) ORs together the flags
// that apply to it.
enum TestFlags {
  kUseGpu = 1 << 0,        // Only execute test if --enable-gpu was given
                           // on the command line. This is required for
                           // tests that run on GPU.
  kDisableVsync = 1 << 1,  // Do not limit framerate to vertical refresh
                           // when on GPU, nor to 60hz when not on GPU.
  kSmallWindow = 1 << 2,   // 1 = 800x600, 0 = 2000x1500
                           // (see SetUpCommandLine()).
  k24fps = 1 << 3,         // use 24 fps video
  k30fps = 1 << 4,         // use 30 fps video
  k60fps = 1 << 5,         // use 60 fps video
  kProxyWifi = 1 << 6,     // Run UDP through UDPProxy wifi profile
  kProxyEvil = 1 << 7,     // Run UDP through UDPProxy evil profile
  kSlowClock = 1 << 8,     // Receiver clock is 10 seconds slow
  kFastClock = 1 << 9,     // Receiver clock is 10 seconds fast
};
79
80 class SkewedTickClock : public base::DefaultTickClock {
81 public:
SkewedTickClock(const base::TimeDelta & delta)82 explicit SkewedTickClock(const base::TimeDelta& delta) : delta_(delta) {
83 }
NowTicks()84 virtual base::TimeTicks NowTicks() OVERRIDE {
85 return DefaultTickClock::NowTicks() + delta_;
86 }
87 private:
88 base::TimeDelta delta_;
89 };
90
// A StandaloneCastEnvironment whose clock is offset from real time by
// |delta|. Used by RunTest() to exercise receiver clock-skew handling
// (kSlowClock / kFastClock).
class SkewedCastEnvironment : public media::cast::StandaloneCastEnvironment {
 public:
  explicit SkewedCastEnvironment(const base::TimeDelta& delta) :
      StandaloneCastEnvironment() {
    // Replace the environment's clock with one shifted by |delta|.
    clock_.reset(new SkewedTickClock(delta));
  }

 protected:
  // Ref-counted (held via scoped_refptr by callers); destructor is protected.
  virtual ~SkewedCastEnvironment() {}
};
101
// We log one of these for each call to OnAudioFrame/OnVideoFrame.
struct TimeData {
  // |frame_no_| is the frame number decoded from the media payload;
  // |render_time_| is when the receiver should present it.
  TimeData(uint16 frame_no_, base::TimeTicks render_time_) :
      frame_no(frame_no_),
      render_time(render_time_) {
  }
  // The unit here is video frames, for audio data there can be duplicates.
  // This was decoded from the actual audio/video data.
  uint16 frame_no;
  // This is when we should play this data, according to the sender.
  base::TimeTicks render_time;
};
114
115 // TODO(hubbe): Move to media/cast to use for offline log analysis.
116 class MeanAndError {
117 public:
MeanAndError()118 MeanAndError() {}
MeanAndError(const std::vector<double> & values)119 explicit MeanAndError(const std::vector<double>& values) {
120 double sum = 0.0;
121 double sqr_sum = 0.0;
122 num_values = values.size();
123 if (num_values) {
124 for (size_t i = 0; i < num_values; i++) {
125 sum += values[i];
126 sqr_sum += values[i] * values[i];
127 }
128 mean = sum / num_values;
129 std_dev = sqrt(std::max(0.0, num_values * sqr_sum - sum * sum)) /
130 num_values;
131 }
132 }
AsString() const133 std::string AsString() const {
134 return base::StringPrintf("%f,%f", mean, std_dev);
135 }
136
Print(const std::string & measurement,const std::string & modifier,const std::string & trace,const std::string & unit)137 void Print(const std::string& measurement,
138 const std::string& modifier,
139 const std::string& trace,
140 const std::string& unit) {
141 if (num_values >= 20) {
142 perf_test::PrintResultMeanAndError(measurement,
143 modifier,
144 trace,
145 AsString(),
146 unit,
147 true);
148 } else {
149 LOG(ERROR) << "Not enough events for "
150 << measurement << modifier << " " << trace;
151 }
152 }
153
154 size_t num_values;
155 double mean;
156 double std_dev;
157 };
158
159 // This function checks how smooth the data in |data| is.
160 // It computes the average error of deltas and the average delta.
161 // If data[x] == x * A + B, then this function returns zero.
162 // The unit is milliseconds.
AnalyzeJitter(const std::vector<TimeData> & data)163 static MeanAndError AnalyzeJitter(const std::vector<TimeData>& data) {
164 CHECK_GT(data.size(), 1UL);
165 VLOG(0) << "Jitter analyzis on " << data.size() << " values.";
166 std::vector<double> deltas;
167 double sum = 0.0;
168 for (size_t i = 1; i < data.size(); i++) {
169 double delta = (data[i].render_time -
170 data[i - 1].render_time).InMillisecondsF();
171 deltas.push_back(delta);
172 sum += delta;
173 }
174 double mean = sum / deltas.size();
175 for (size_t i = 0; i < deltas.size(); i++) {
176 deltas[i] = fabs(mean - deltas[i]);
177 }
178
179 return MeanAndError(deltas);
180 }
181
// An in-process Cast receiver that examines the audio/video frames being
// received and logs some data about each received audio/video frame.
class TestPatternReceiver : public media::cast::InProcessReceiver {
 public:
  // Listens on |local_end_point| using the default audio/video receiver
  // configs; all callbacks run on |cast_environment|'s MAIN thread.
  explicit TestPatternReceiver(
      const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
      const net::IPEndPoint& local_end_point)
      : InProcessReceiver(cast_environment,
                          local_end_point,
                          net::IPEndPoint(),
                          media::cast::GetDefaultAudioReceiverConfig(),
                          media::cast::GetDefaultVideoReceiverConfig()) {
  }

  // Maps a frame number (decoded from the media payload) to a render time.
  typedef std::map<uint16, base::TimeTicks> TimeMap;

  // Build a map from frame ID (as encoded in the audio and video data)
  // to the rtp timestamp for that frame. Note that there will be multiple
  // audio frames which all have the same frame ID. When that happens we
  // want the minimum rtp timestamp, because that audio frame is supposed
  // to play at the same time that the corresponding image is presented.
  // The first kSkipEvents entries of |events| are ignored.
  void MapFrameTimes(const std::vector<TimeData>& events, TimeMap* map) {
    for (size_t i = kSkipEvents; i < events.size(); i++) {
      base::TimeTicks& frame_tick = (*map)[events[i].frame_no];
      if (frame_tick.is_null()) {
        frame_tick = events[i].render_time;
      } else {
        frame_tick = std::min(events[i].render_time, frame_tick);
      }
    }
  }

  // Computes and prints the A/V sync offset plus audio and video jitter
  // metrics from the events recorded by the callbacks below.
  void Analyze(const std::string& name, const std::string& modifier) {
    // First, find the minimum rtp timestamp for each audio and video frame.
    // Note that the data encoded in the audio stream contains video frame
    // numbers. So in a 30-fps video stream, there will be 1/30s of "1", then
    // 1/30s of "2", etc.
    TimeMap audio_frame_times, video_frame_times;
    MapFrameTimes(audio_events_, &audio_frame_times);
    MapFrameTimes(video_events_, &video_frame_times);
    // For every frame number seen in both streams, record the audio-vs-video
    // render time difference.
    std::vector<double> deltas;
    for (TimeMap::const_iterator i = audio_frame_times.begin();
         i != audio_frame_times.end();
         ++i) {
      TimeMap::const_iterator j = video_frame_times.find(i->first);
      if (j != video_frame_times.end()) {
        deltas.push_back((i->second - j->second).InMillisecondsF());
      }
    }

    // Close to zero is better. (can be negative)
    MeanAndError(deltas).Print(name, modifier, "av_sync", "ms");
    // lower is better.
    AnalyzeJitter(audio_events_).Print(name, modifier, "audio_jitter", "ms");
    // lower is better.
    AnalyzeJitter(video_events_).Print(name, modifier, "video_jitter", "ms");
  }

 private:
  // Invoked by InProcessReceiver for each received audio frame. Decodes the
  // video-frame number embedded in the audio samples (see
  // media/cast/test/utility/audio_utility.h) and records it.
  virtual void OnAudioFrame(scoped_ptr<media::AudioBus> audio_frame,
                            const base::TimeTicks& playout_time,
                            bool is_continuous) OVERRIDE {
    CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    if (audio_frame->frames() <= 0) {
      NOTREACHED() << "OnAudioFrame called with no samples?!?";
      return;
    }

    // Note: This is the number of the video frame that this audio belongs to.
    uint16 frame_no;
    if (media::cast::DecodeTimestamp(audio_frame->channel(0),
                                     audio_frame->frames(),
                                     &frame_no)) {
      audio_events_.push_back(TimeData(frame_no, playout_time));
    } else {
      VLOG(0) << "Failed to decode audio timestamp!";
    }
  }

  // Invoked by InProcessReceiver for each received video frame. Decodes the
  // frame number from the barcode pattern in the image and records it; also
  // emits a trace event that AnalyzeLatency() links into its frame timeline.
  virtual void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
                            const base::TimeTicks& render_time,
                            bool is_continuous) OVERRIDE {
    CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));

    TRACE_EVENT_INSTANT1(
        "mirroring", "TestPatternReceiver::OnVideoFrame",
        TRACE_EVENT_SCOPE_THREAD,
        "render_time", render_time.ToInternalValue());

    uint16 frame_no;
    if (media::cast::test::DecodeBarcode(video_frame, &frame_no)) {
      video_events_.push_back(TimeData(frame_no, render_time));
    } else {
      VLOG(0) << "Failed to decode barcode!";
    }
  }

  // One entry per successfully-decoded audio/video frame callback.
  std::vector<TimeData> audio_events_;
  std::vector<TimeData> video_events_;

  DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver);
};
286
// Parameterized performance test for Cast (tab mirroring) v2. The int test
// parameter is a bitwise-OR of TestFlags values selecting GPU usage, frame
// rate, window size, network profile and receiver clock skew. The test
// mirrors a page to an in-process receiver while tracing, then reports
// latency, jitter, A/V-sync and frame-pacing metrics.
class CastV2PerformanceTest
    : public ExtensionApiTest,
      public testing::WithParamInterface<int> {
 public:
  CastV2PerformanceTest() {}

  // True if every bit of |flag| is set in the test parameter.
  bool HasFlag(TestFlags flag) const {
    return (GetParam() & flag) == flag;
  }

  // GPU variants only run when the harness was started with --enable-gpu.
  bool IsGpuAvailable() const {
    return CommandLine::ForCurrentProcess()->HasSwitch("enable-gpu");
  }

  // Builds a suffix such as "_gpu_30fps_wifi" describing the active flags;
  // used to keep each variation's perf-graph line distinct.
  std::string GetSuffixForTestFlags() {
    std::string suffix;
    if (HasFlag(kUseGpu))
      suffix += "_gpu";
    if (HasFlag(kDisableVsync))
      suffix += "_novsync";
    if (HasFlag(kSmallWindow))
      suffix += "_small";
    if (HasFlag(k24fps))
      suffix += "_24fps";
    if (HasFlag(k30fps))
      suffix += "_30fps";
    if (HasFlag(k60fps))
      suffix += "_60fps";
    if (HasFlag(kProxyWifi))
      suffix += "_wifi";
    if (HasFlag(kProxyEvil))
      suffix += "_evil";
    if (HasFlag(kSlowClock))
      suffix += "_slow";
    if (HasFlag(kFastClock))
      suffix += "_fast";
    return suffix;
  }

  // Returns the frame rate selected by the k*fps flags. Exactly one of those
  // flags must be set (enforced by the ASSERT in RunTest()).
  int getfps() {
    if (HasFlag(k24fps))
      return 24;
    if (HasFlag(k30fps))
      return 30;
    if (HasFlag(k60fps))
      return 60;
    NOTREACHED();
    return 0;
  }

  net::IPEndPoint GetFreeLocalPort() {
    // Determine a unused UDP port for the in-process receiver to listen on.
    // Method: Bind a UDP socket on port 0, and then check which port the
    // operating system assigned to it.
    net::IPAddressNumber localhost;
    localhost.push_back(127);
    localhost.push_back(0);
    localhost.push_back(0);
    localhost.push_back(1);
    scoped_ptr<net::UDPSocket> receive_socket(
        new net::UDPSocket(net::DatagramSocket::DEFAULT_BIND,
                           net::RandIntCallback(),
                           NULL,
                           net::NetLog::Source()));
    receive_socket->AllowAddressReuse();
    CHECK_EQ(net::OK, receive_socket->Bind(net::IPEndPoint(localhost, 0)));
    net::IPEndPoint endpoint;
    CHECK_EQ(net::OK, receive_socket->GetLocalAddress(&endpoint));
    // NOTE(review): the socket closes when |receive_socket| goes out of
    // scope, so the port could in theory be taken by another process before
    // the receiver binds it.
    return endpoint;
  }

  virtual void SetUp() OVERRIDE {
    // Enable pixel output before the browser launches.
    EnablePixelOutput();
    ExtensionApiTest::SetUp();
  }

  virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
    // Some of the tests may launch http requests through JSON or AJAX
    // which causes a security error (cross domain request) when the page
    // is loaded from the local file system ( file:// ). The following switch
    // fixes that error.
    command_line->AppendSwitch(switches::kAllowFileAccessFromFiles);

    if (HasFlag(kSmallWindow)) {
      command_line->AppendSwitchASCII(switches::kWindowSize, "800,600");
    } else {
      command_line->AppendSwitchASCII(switches::kWindowSize, "2000,1500");
    }

    if (!HasFlag(kUseGpu))
      command_line->AppendSwitch(switches::kDisableGpu);

    if (HasFlag(kDisableVsync))
      command_line->AppendSwitch(switches::kDisableGpuVsync);

    // Let the test extension use the whitelisted (cast streaming) APIs.
    command_line->AppendSwitchASCII(
        extensions::switches::kWhitelistedExtensionID,
        kExtensionId);
    ExtensionApiTest::SetUpCommandLine(command_line);
  }

  // Collects into |events| every trace event named |event_name| whose phase
  // is one of the "begin"-like or instant/complete phases below.
  void GetTraceEvents(trace_analyzer::TraceAnalyzer* analyzer,
                      const std::string& event_name,
                      trace_analyzer::TraceEventVector* events) {
    trace_analyzer::Query query =
        trace_analyzer::Query::EventNameIs(event_name) &&
        (trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_ASYNC_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_FLOW_BEGIN) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_INSTANT) ||
         trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_COMPLETE));
    analyzer->FindEvents(query, events);
  }

  // The key contains the name of the argument and the argument.
  typedef std::pair<std::string, double> EventMapKey;
  typedef std::map<EventMapKey, const trace_analyzer::TraceEvent*> EventMap;

  // Make events findable by their arguments, for instance, if an
  // event has a "timestamp": 238724 argument, the map will contain
  // pair<"timestamp", 238724> -> &event. All arguments are indexed.
  void IndexEvents(trace_analyzer::TraceAnalyzer* analyzer,
                   const std::string& event_name,
                   EventMap* event_map) {
    trace_analyzer::TraceEventVector events;
    GetTraceEvents(analyzer, event_name, &events);
    for (size_t i = 0; i < events.size(); i++) {
      std::map<std::string, double>::const_iterator j;
      for (j = events[i]->arg_numbers.begin();
           j != events[i]->arg_numbers.end();
           ++j) {
        (*event_map)[*j] = events[i];
      }
    }
  }

  // Look up an event in |event_map|. The return event will have the same
  // value for the argument |key_name| as |prev_event|. Note that if
  // the |key_name| is "time_delta", then we allow some fuzzy logic since
  // the time deltas are truncated to milliseconds in the code.
  const trace_analyzer::TraceEvent* FindNextEvent(
      const EventMap& event_map,
      std::vector<const trace_analyzer::TraceEvent*> prev_events,
      std::string key_name) {
    EventMapKey key;
    // Search |prev_events| from most recent to oldest for one that carries
    // a |key_name| argument; that value becomes the lookup key.
    for (size_t i = prev_events.size(); i;) {
      --i;
      std::map<std::string, double>::const_iterator j =
          prev_events[i]->arg_numbers.find(key_name);
      if (j != prev_events[i]->arg_numbers.end()) {
        key = *j;
        break;
      }
    }
    // Exact match first; for "time_delta" also accept the nearest neighbor
    // (on either side) within 1000 units, to absorb truncation error.
    EventMap::const_iterator i = event_map.lower_bound(key);
    if (i == event_map.end())
      return NULL;
    if (i->first.second == key.second)
      return i->second;
    if (key_name != "time_delta")
      return NULL;
    if (fabs(i->first.second - key.second) < 1000)
      return i->second;
    if (i == event_map.begin())
      return NULL;
    i--;
    if (fabs(i->first.second - key.second) < 1000)
      return i->second;
    return NULL;
  }

  // Given a vector of vector of data, extract the difference between
  // two columns (|col_a| and |col_b|) and output the result as a
  // performance metric. Timestamps are in microseconds; output is in ms.
  void OutputMeasurement(const std::string& test_name,
                         const std::vector<std::vector<double> > data,
                         const std::string& measurement_name,
                         int col_a,
                         int col_b) {
    std::vector<double> tmp;
    for (size_t i = 0; i < data.size(); i++) {
      tmp.push_back((data[i][col_b] - data[i][col_a]) / 1000.0);
    }
    return MeanAndError(tmp).Print(test_name,
                                   GetSuffixForTestFlags(),
                                   measurement_name,
                                   "ms");
  }

  // Analyzing latency is hard, because there is no unifying identifier for
  // frames throughout the code. At first, we have a capture timestamp, which
  // gets converted to a time delta, then back to a timestamp. Once it enters
  // the cast library it gets converted to an rtp_timestamp, and when it leaves
  // the cast library, all we have is the render_time.
  //
  // To be able to follow the frame throughout all this, we insert TRACE
  // calls that tracks each conversion as it happens. Then we extract all
  // these events and link them together.
  void AnalyzeLatency(const std::string& test_name,
                      trace_analyzer::TraceAnalyzer* analyzer) {
    // Index each pipeline stage's events by all of their arguments.
    EventMap onbuffer, sink, inserted, encoded, transmitted, decoded, done;
    IndexEvents(analyzer, "OnBufferReceived", &onbuffer);
    IndexEvents(analyzer, "MediaStreamVideoSink::OnVideoFrame", &sink);
    IndexEvents(analyzer, "InsertRawVideoFrame", &inserted);
    IndexEvents(analyzer, "VideoFrameEncoded", &encoded);
    IndexEvents(analyzer, "PullEncodedVideoFrame", &transmitted);
    IndexEvents(analyzer, "FrameDecoded", &decoded);
    IndexEvents(analyzer, "TestPatternReceiver::OnVideoFrame", &done);
    // Pipeline order, paired with the argument name used to link each stage
    // to the previous ones.
    std::vector<std::pair<EventMap*, std::string> > event_maps;
    event_maps.push_back(std::make_pair(&onbuffer, "timestamp"));
    event_maps.push_back(std::make_pair(&sink, "time_delta"));
    event_maps.push_back(std::make_pair(&inserted, "timestamp"));
    event_maps.push_back(std::make_pair(&encoded, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&transmitted, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&decoded, "rtp_timestamp"));
    event_maps.push_back(std::make_pair(&done, "render_time"));

    trace_analyzer::TraceEventVector capture_events;
    GetTraceEvents(analyzer, "Capture" , &capture_events);
    // Each fully-linked frame contributes one row of stage timestamps.
    std::vector<std::vector<double> > traced_frames;
    for (size_t i = kSkipEvents; i < capture_events.size(); i++) {
      std::vector<double> times;
      const trace_analyzer::TraceEvent *event = capture_events[i];
      times.push_back(event->timestamp);  // begin capture
      event = event->other_event;
      if (!event) {
        continue;
      }
      times.push_back(event->timestamp);  // end capture (with timestamp)
      std::vector<const trace_analyzer::TraceEvent*> prev_events;
      prev_events.push_back(event);
      // Walk the pipeline; abandon this frame if any link is missing.
      for (size_t j = 0; j < event_maps.size(); j++) {
        event = FindNextEvent(*event_maps[j].first,
                              prev_events,
                              event_maps[j].second);
        if (!event) {
          break;
        }
        prev_events.push_back(event);
        times.push_back(event->timestamp);
      }
      if (event) {
        // Successfully traced frame from beginning to end
        traced_frames.push_back(times);
      }
    }

    // Column indices within each |traced_frames| row:
    // 0 = capture begin
    // 1 = capture end
    // 2 = onbuffer
    // 3 = sink
    // 4 = inserted
    // 5 = encoded
    // 6 = transmitted
    // 7 = decoded
    // 8 = done

    // Lower is better for all of these.
    OutputMeasurement(test_name, traced_frames, "total_latency", 0, 8);
    OutputMeasurement(test_name, traced_frames, "capture_duration", 0, 1);
    OutputMeasurement(test_name, traced_frames, "send_to_renderer", 1, 3);
    OutputMeasurement(test_name, traced_frames, "encode", 3, 5);
    OutputMeasurement(test_name, traced_frames, "transmit", 5, 6);
    OutputMeasurement(test_name, traced_frames, "decode", 6, 7);
    OutputMeasurement(test_name, traced_frames, "cast_latency", 3, 8);
  }

  // Returns the distribution of time gaps (ms) between consecutive trace
  // events named |event_name|, skipping the first kSkipEvents events.
  MeanAndError AnalyzeTraceDistance(trace_analyzer::TraceAnalyzer* analyzer,
                                    const std::string& event_name) {
    trace_analyzer::TraceEventVector events;
    GetTraceEvents(analyzer, event_name, &events);

    std::vector<double> deltas;
    for (size_t i = kSkipEvents + 1; i < events.size(); ++i) {
      double delta_micros = events[i]->timestamp - events[i - 1]->timestamp;
      deltas.push_back(delta_micros / 1000.0);
    }
    return MeanAndError(deltas);
  }

  // End-to-end driver: starts an in-process receiver (optionally behind a
  // lossy UDP proxy and/or with a skewed clock), runs the cast_streaming
  // extension subtest while tracing, then analyzes the collected trace.
  void RunTest(const std::string& test_name) {
    if (HasFlag(kUseGpu) && !IsGpuAvailable()) {
      LOG(WARNING) <<
          "Test skipped: requires gpu. Pass --enable-gpu on the command "
          "line if use of GPU is desired.";
      return;
    }

    // Exactly one frame-rate flag must be set.
    ASSERT_EQ(1,
              (HasFlag(k24fps) ? 1 : 0) +
              (HasFlag(k30fps) ? 1 : 0) +
              (HasFlag(k60fps) ? 1 : 0));

    net::IPEndPoint receiver_end_point = GetFreeLocalPort();

    // Start the in-process receiver that examines audio/video for the expected
    // test patterns.
    base::TimeDelta delta = base::TimeDelta::FromSeconds(0);
    if (HasFlag(kFastClock)) {
      delta = base::TimeDelta::FromSeconds(10);
    }
    if (HasFlag(kSlowClock)) {
      delta = base::TimeDelta::FromSeconds(-10);
    }
    scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment(
        new SkewedCastEnvironment(delta));
    // NOTE(review): |receiver| is never explicitly deleted in this function —
    // confirm InProcessReceiver's ownership/lifetime semantics.
    TestPatternReceiver* const receiver =
        new TestPatternReceiver(cast_environment, receiver_end_point);
    receiver->Start();

    // Optionally interpose a UDP proxy simulating wifi or adversarial
    // network conditions between sender and receiver.
    scoped_ptr<media::cast::test::UDPProxy> udp_proxy;
    if (HasFlag(kProxyWifi) || HasFlag(kProxyEvil)) {
      net::IPEndPoint proxy_end_point = GetFreeLocalPort();
      if (HasFlag(kProxyWifi)) {
        udp_proxy = media::cast::test::UDPProxy::Create(
            proxy_end_point,
            receiver_end_point,
            media::cast::test::WifiNetwork().Pass(),
            media::cast::test::WifiNetwork().Pass(),
            NULL);
      } else if (HasFlag(kProxyEvil)) {
        udp_proxy = media::cast::test::UDPProxy::Create(
            proxy_end_point,
            receiver_end_point,
            media::cast::test::EvilNetwork().Pass(),
            media::cast::test::EvilNetwork().Pass(),
            NULL);
      }
      // The sender must now target the proxy instead of the receiver.
      receiver_end_point = proxy_end_point;
    }

    std::string json_events;
    ASSERT_TRUE(tracing::BeginTracing("test_fps,mirroring,cast_perf_test"));
    const std::string page_url = base::StringPrintf(
        "performance%d.html?port=%d",
        getfps(),
        receiver_end_point.port());
    ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_;
    ASSERT_TRUE(tracing::EndTracing(&json_events));
    receiver->Stop();

    // Stop all threads, removes the need for synchronization when analyzing
    // the data.
    cast_environment->Shutdown();
    scoped_ptr<trace_analyzer::TraceAnalyzer> analyzer;
    analyzer.reset(trace_analyzer::TraceAnalyzer::Create(json_events));
    analyzer->AssociateAsyncBeginEndEvents();

    MeanAndError frame_data = AnalyzeTraceDistance(
        analyzer.get(),
        TRACE_DISABLED_BY_DEFAULT("OnSwapCompositorFrame"));
    EXPECT_GT(frame_data.num_values, 0UL);
    // Lower is better.
    frame_data.Print(test_name,
                     GetSuffixForTestFlags(),
                     "time_between_frames",
                     "ms");

    // This prints out the average time between capture events.
    // As the capture frame rate is capped at 30fps, this score
    // cannot get any better than (lower) 33.33 ms.
    MeanAndError capture_data = AnalyzeTraceDistance(analyzer.get(), "Capture");
    // Lower is better.
    capture_data.Print(test_name,
                       GetSuffixForTestFlags(),
                       "time_between_captures",
                       "ms");

    // A/V sync and jitter metrics come from the receiver's own records.
    receiver->Analyze(test_name, GetSuffixForTestFlags());

    AnalyzeLatency(test_name, analyzer.get());
  }
};
660
661 } // namespace
662
// Runs one performance measurement per flag combination registered by
// INSTANTIATE_TEST_CASE_P below.
IN_PROC_BROWSER_TEST_P(CastV2PerformanceTest, Performance) {
  RunTest("CastV2Performance");
}
666
// Note: First argument is optional and intentionally left blank.
// (it's a prefix for the generated test cases)
// Each value is an OR of TestFlags; all variations require the GPU and
// exactly one frame-rate flag.
INSTANTIATE_TEST_CASE_P(
    ,
    CastV2PerformanceTest,
    testing::Values(
        kUseGpu | k24fps,
        kUseGpu | k30fps,
        kUseGpu | k60fps,
        kUseGpu | k24fps | kDisableVsync,
        kUseGpu | k30fps | kProxyWifi,
        kUseGpu | k30fps | kProxyEvil,
        kUseGpu | k30fps | kSlowClock,
        kUseGpu | k30fps | kFastClock));
681