#!/usr/bin/env python
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''
Usage: callstats.py [-h] <command> ...

Optional arguments:
  -h, --help  show this help message and exit

Commands:
  run         run chrome with --runtime-call-stats and generate logs
  replay      run the replay server for debugging
  stats       process logs and print statistics
  json        process logs from several versions and generate JSON
  help        help information
For each command, you can try ./callstats.py help <command>.
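
Examples (illustrative):
  ./callstats.py run -n 3 http://example.com
  ./callstats.py stats --aggregate example.com#1.txt example.com#2.txt
  ./callstats.py json --aggregate logs-v1/ logs-v2/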
'''

import argparse
import json
import os
import re
import shutil
import subprocess
import sys
import tempfile
import operator

import numpy
import scipy
import scipy.stats
from math import sqrt


MAX_NOF_RETRIES = 5


# Run benchmarks.

def print_command(cmd_args):
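  '''Echo cmd_args as a copy-pasteable shell command, quoting arguments
  whose values contain spaces or start with a dash.'''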
  def fix_for_printing(arg):
    m = re.match(r'^--([^=]+)=(.*)$', arg)
    if m and (' ' in m.group(2) or m.group(2).startswith('-')):
      arg = "--{}='{}'".format(m.group(1), m.group(2))
    elif ' ' in arg:
      arg = "'{}'".format(arg)
    return arg
  print " ".join(map(fix_for_printing, cmd_args))


def start_replay_server(args, sites, discard_output=True):
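  '''Start the web-page-replay server on ports 4080/4443 (shifted by
  args.port_offset), injecting a generated script into every replayed page.
  Returns a dict holding the server process and the injection file path.'''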
  with tempfile.NamedTemporaryFile(prefix='callstats-inject-', suffix='.js',
                                   mode='wt', delete=False) as f:
    injection = f.name
    generate_injection(f, sites, args.refresh)
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  cmd_args = [
      args.replay_bin,
      "--port=%s" % http_port,
      "--ssl_port=%s" % https_port,
      "--no-dns_forwarding",
      "--use_closest_match",
      "--no-diff_unknown_requests",
      "--inject_scripts=deterministic.js,{}".format(injection),
      args.replay_wpr,
  ]
  print "=" * 80
  print_command(cmd_args)
  if discard_output:
    with open(os.devnull, 'w') as null:
      server = subprocess.Popen(cmd_args, stdout=null, stderr=null)
  else:
    server = subprocess.Popen(cmd_args)
  print "RUNNING REPLAY SERVER: %s with PID=%s" % (args.replay_bin, server.pid)
  print "=" * 80
  return {'process': server, 'injection': injection}


def stop_replay_server(server):
  print("SHUTTING DOWN REPLAY SERVER %s" % server['process'].pid)
  server['process'].terminate()
  os.remove(server['injection'])


def generate_injection(f, sites, refreshes=0):
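  '''Write a JavaScript snippet to f that, injected into a replayed page,
  waits for the site-specific timeout, dumps the runtime call stats via the
  %GetAndResetRuntimeCallStats native, and reloads the page as long as the
  refresh counter kept in sessionStorage is positive.'''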
  print >> f, """\
(function() {
  var s = window.sessionStorage.getItem("refreshCounter");
  var refreshTotal = """, refreshes, """;
  var refreshCounter = s ? parseInt(s) : refreshTotal;
  var refreshId = refreshTotal - refreshCounter;
  if (refreshCounter > 0) {
    window.sessionStorage.setItem("refreshCounter", refreshCounter-1);
  }
  function match(url, item) {
    if ('regexp' in item) { return url.match(item.regexp) !== null };
    var url_wanted = item.url;
    /* Allow automatic redirections from http to https. */
    if (url_wanted.startsWith("http://") && url.startsWith("https://")) {
      url_wanted = "https://" + url_wanted.substr(7);
    }
    return url.startsWith(url_wanted);
  };
  function onLoad(url) {
    for (var item of sites) {
      if (!match(url, item)) continue;
      var timeout = 'timeline' in item ? 2000 * item.timeline
                  : 'timeout'  in item ? 1000 * (item.timeout - 3)
                  : 10000;
      console.log("Setting time out of " + timeout + " for: " + url);
      window.setTimeout(function() {
        console.log("Time is out for: " + url);
        var msg = "STATS: (" + refreshId + ") " + url;
        %GetAndResetRuntimeCallStats(1, msg);
        if (refreshCounter > 0) {
          console.log(
              "Refresh counter is " + refreshCounter + ", refreshing: " + url);
          window.location.reload();
        }
      }, timeout);
      return;
    }
    console.log("Ignoring: " + url);
  };
  var sites =
    """, json.dumps(sites), """;
  onLoad(window.location.href);
})();"""

def get_chrome_flags(js_flags, user_data_dir, arg_delimiter=""):
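  '''Return the chrome flags common to all runs; arg_delimiter optionally
  wraps the --js-flags and --user-data-dir values for shell quoting.'''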
  return [
      "--no-default-browser-check",
      "--no-sandbox",
      "--disable-translate",
      "--enable-benchmarking",
      "--enable-stats-table",
      "--js-flags={}{}{}".format(arg_delimiter, js_flags, arg_delimiter),
      "--no-first-run",
      "--user-data-dir={}{}{}".format(arg_delimiter, user_data_dir,
                                      arg_delimiter),
    ]

def get_chrome_replay_flags(args, arg_delimiter=""):
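  '''Return the chrome flags that route ports 80/443 to the local replay
  server and relax certificate and security checks for replayed content.'''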
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  return [
      "--host-resolver-rules=%sMAP *:80 localhost:%s, "  \
                              "MAP *:443 localhost:%s, " \
                              "EXCLUDE localhost%s" % (
                               arg_delimiter, http_port, https_port,
                               arg_delimiter),
      "--ignore-certificate-errors",
      "--disable-seccomp-sandbox",
      "--disable-web-security",
      "--reduce-security-for-testing",
      "--allow-insecure-localhost",
    ]

def run_site(site, domain, args, timeout=None):
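  '''Benchmark a single site: run chrome under the `timeout` command and
  write the stats log to <domain>.txt (or <domain>#<count>.txt when
  repeating), retrying on crashes and on empty results with an
  exponentially growing timeout.'''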
  print "=" * 80
  print "RUNNING DOMAIN %s" % domain
  print "=" * 80
  result_template = "{domain}#{count}.txt" if args.repeat else "{domain}.txt"
  count = 0
  if timeout is None: timeout = args.timeout
  if args.replay_wpr:
    timeout *= 1 + args.refresh
    timeout += 1
  retries_since_good_run = 0
  while count == 0 or (args.repeat is not None and count < args.repeat):
    count += 1
    result = result_template.format(domain=domain, count=count)
    retries = 0
    while args.retries is None or retries < args.retries:
      retries += 1
      try:
        if args.user_data_dir:
          user_data_dir = args.user_data_dir
        else:
          user_data_dir = tempfile.mkdtemp(prefix="chr_")
        js_flags = "--runtime-call-stats --noconcurrent-recompilation"
        if args.replay_wpr: js_flags += " --allow-natives-syntax"
        if args.js_flags: js_flags += " " + args.js_flags
        chrome_flags = get_chrome_flags(js_flags, user_data_dir)
        if args.replay_wpr:
          chrome_flags += get_chrome_replay_flags(args)
        else:
          chrome_flags += [ "--single-process", ]
        if args.chrome_flags:
          chrome_flags += args.chrome_flags.split()
        cmd_args = [
            "timeout", str(timeout),
            args.with_chrome
        ] + chrome_flags + [ site ]
        print "- " * 40
        print_command(cmd_args)
        print "- " * 40
        with open(result, "wt") as f:
          with open(args.log_stderr or os.devnull, 'at') as err:
            status = subprocess.call(cmd_args, stdout=f, stderr=err)
        # 124 means timeout killed chrome, 0 means the user was bored first!
        # If neither of the two happened, then chrome apparently crashed, so
        # it must be called again.
        if status != 124 and status != 0:
          print("CHROME CRASHED, REPEATING RUN")
          continue
        # If the stats file is empty, chrome must be called again.
        if os.path.isfile(result) and os.path.getsize(result) > 0:
          if args.print_url:
            with open(result, "at") as f:
              print >> f
              print >> f, "URL: {}".format(site)
          retries_since_good_run = 0
          break
        if retries_since_good_run > MAX_NOF_RETRIES:
          # Abort after too many retries; there is no point in increasing
          # the timeout any further.
          print("TOO MANY EMPTY RESULTS, ABORTING RUN")
          break
        timeout += 2 ** retries_since_good_run
        retries_since_good_run += 1
        print("EMPTY RESULT, REPEATING RUN ({})".format(
            retries_since_good_run))
      finally:
        if not args.user_data_dir:
          shutil.rmtree(user_data_dir)


def read_sites_file(args):
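  '''Read benchmark sites from args.sites_file: either a JSON list of site
  items (deriving a timeout from the timeline when absent) or a plain-text
  file with one URL per line; blank lines and #-comments are ignored.'''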
  try:
    sites = []
    try:
      with open(args.sites_file, "rt") as f:
        for item in json.load(f):
          if 'timeout' not in item:
            # This is more-or-less arbitrary.
            item['timeout'] = int(1.5 * item['timeline'] + 7)
          if item['timeout'] > args.timeout: item['timeout'] = args.timeout
          sites.append(item)
    except ValueError:
      with open(args.sites_file, "rt") as f:
        for line in f:
          line = line.strip()
          if not line or line.startswith('#'): continue
          sites.append({'url': line, 'timeout': args.timeout})
    return sites
  except IOError as e:
    args.error("Cannot read from {}. {}.".format(args.sites_file, e.strerror))
    sys.exit(1)


def read_sites(args):
  # Determine the websites to benchmark.
  if args.sites_file:
    return read_sites_file(args)
  return [{'url': site, 'timeout': args.timeout} for site in args.sites]

def do_run(args):
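  '''The `run` command: optionally start the replay server, derive a unique
  domain name per site (suffixed %1, %2, ... for duplicates), and benchmark
  every site in turn.'''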
  sites = read_sites(args)
  replay_server = start_replay_server(args, sites) if args.replay_wpr else None
  # Disambiguate domains, if needed.
  L = []
  domains = {}
  for item in sites:
    site = item['url']
    domain = None
    if args.domain:
      domain = args.domain
    elif 'domain' in item:
      domain = item['domain']
    else:
      m = re.match(r'^(https?://)?([^/]+)(/.*)?$', site)
      if not m:
        args.error("Invalid URL {}.".format(site))
        continue
      domain = m.group(2)
    entry = [site, domain, None, item['timeout']]
    if domain not in domains:
      domains[domain] = entry
    else:
      if not isinstance(domains[domain], int):
        domains[domain][2] = 1
        domains[domain] = 1
      domains[domain] += 1
      entry[2] = domains[domain]
    L.append(entry)
  try:
    # Run them.
    for site, domain, count, timeout in L:
      if count is not None: domain = "{}%{}".format(domain, count)
      print(site, domain, timeout)
      run_site(site, domain, args, timeout)
  finally:
    if replay_server:
      stop_replay_server(replay_server)


def do_run_replay_server(args):
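  '''The `replay` command: print the available URLs plus a ready-made chrome
  invocation for manual debugging, then run the replay server until it
  exits.'''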
  sites = read_sites(args)
  print("- " * 40)
  print("Available URLs:")
  for site in sites:
    print("    " + site['url'])
  print("- " * 40)
  print("Launch chromium with the following commands for debugging:")
  flags = get_chrome_flags("--runtime-call-stats --allow-natives-syntax",
                           "/var/tmp/`date +%s`", '"')
  flags += get_chrome_replay_flags(args, "'")
  print("    $CHROMIUM_DIR/out/Release/chrome " + (" ".join(flags)) + " <URL>")
  print("- " * 40)
  replay_server = start_replay_server(args, sites, discard_output=False)
  try:
    replay_server['process'].wait()
  finally:
    stop_replay_server(replay_server)


# Calculate statistics.

def statistics(data):
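  '''Return summary statistics for a list of samples: average, median,
  min/max, sample standard deviation, and a 95% confidence interval for the
  mean based on Student's t distribution.'''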
  N = len(data)
  average = numpy.average(data)
  median = numpy.median(data)
  low = numpy.min(data)
  high = numpy.max(data)
  if N > 1:
    # Compute the sample standard deviation by setting the delta degrees of
    # freedom (ddof) to 1; the divisor used is then N - ddof.
    stddev = numpy.std(data, ddof=1)
    # Get the endpoints of the range that contains 95% of the distribution.
    t_bounds = scipy.stats.t.interval(0.95, N - 1)
    #assert abs(t_bounds[0] + t_bounds[1]) < 1e-6
    # Add the mean to get the confidence interval around the average.
    ci = {
        'abs': t_bounds[1] * stddev / sqrt(N),
        'low': average + t_bounds[0] * stddev / sqrt(N),
        'high': average + t_bounds[1] * stddev / sqrt(N)
    }
  else:
    stddev = 0
    ci = { 'abs': 0, 'low': average, 'high': average }
  if abs(stddev) > 0.0001 and abs(average) > 0.0001:
    ci['perc'] = t_bounds[1] * stddev / sqrt(N) / average * 100
  else:
    ci['perc'] = 0
  return { 'samples': N, 'average': average, 'median': median,
           'stddev': stddev, 'min': low, 'max': high, 'ci': ci }


def read_stats(path, domain, args):
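  '''Parse one runtime-call-stats log file and append per-entry time and
  count sums to `domain`; with --aggregate, also bucket entries into the
  Group-* categories (first matching regexp wins) and derived totals.'''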
  groups = []
  if args.aggregate:
    groups = [
        ('Group-IC', re.compile(".*IC_.*")),
        ('Group-Optimize',
         re.compile("StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*")),
        ('Group-CompileBackground', re.compile("(.*CompileBackground.*)")),
        ('Group-Compile', re.compile("(^Compile.*)|(.*_Compile.*)")),
        ('Group-ParseBackground', re.compile(".*ParseBackground.*")),
        ('Group-Parse', re.compile(".*Parse.*")),
        ('Group-Callback', re.compile(".*Callback.*")),
        ('Group-API', re.compile(".*API.*")),
        ('Group-GC-Custom', re.compile("GC_Custom_.*")),
        ('Group-GC-Background', re.compile(".*GC.*BACKGROUND.*")),
        ('Group-GC', re.compile("GC_.*|AllocateInTargetSpace")),
        ('Group-JavaScript', re.compile("JS_Execution")),
        ('Group-Runtime', re.compile(".*"))]
  with open(path, "rt") as f:
    # Process the whole file and sum repeating entries.
    entries = { 'Sum': {'time': 0, 'count': 0} }
    for group_name, regexp in groups:
      entries[group_name] = { 'time': 0, 'count': 0 }
    for line in f:
      line = line.strip()
      # Discard headers and footers.
      if not line: continue
      if line.startswith("Runtime Function"): continue
      if line.startswith("===="): continue
      if line.startswith("----"): continue
      if line.startswith("URL:"): continue
      if line.startswith("STATS:"): continue
      # We have a regular line.
      fields = line.split()
      key = fields[0]
      time = float(fields[1].replace("ms", ""))
      count = int(fields[3])
      if key not in entries: entries[key] = { 'time': 0, 'count': 0 }
      entries[key]['time'] += time
      entries[key]['count'] += count
      # Add the entry to the sum, unless it's the "Total" line.
      if key != "Total":
        entries['Sum']['time'] += time
        entries['Sum']['count'] += count
        for group_name, regexp in groups:
          if not regexp.match(key): continue
          entries[group_name]['time'] += time
          entries[group_name]['count'] += count
          break
    # Calculate the V8-Total (all groups except Callback).
    group_data = { 'time': 0, 'count': 0 }
    for group_name, regexp in groups:
      if group_name == 'Group-Callback': continue
      group_data['time'] += entries[group_name]['time']
      group_data['count'] += entries[group_name]['count']
    entries['Group-Total-V8'] = group_data
    # Calculate the Parse-Total group.
    group_data = { 'time': 0, 'count': 0 }
    for group_name, regexp in groups:
      if not group_name.startswith('Group-Parse'): continue
      group_data['time'] += entries[group_name]['time']
      group_data['count'] += entries[group_name]['count']
    entries['Group-Parse-Total'] = group_data
    # Calculate the Compile-Total group.
    group_data = { 'time': 0, 'count': 0 }
    for group_name, regexp in groups:
      if not group_name.startswith('Group-Compile'): continue
      group_data['time'] += entries[group_name]['time']
      group_data['count'] += entries[group_name]['count']
    entries['Group-Compile-Total'] = group_data
    # Append the sums as single entries to domain.
    for key in entries:
      if key not in domain: domain[key] = { 'time_list': [], 'count_list': [] }
      domain[key]['time_list'].append(entries[key]['time'])
      domain[key]['count_list'].append(entries[key]['count'])


def print_stats(S, args):
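  '''Pretty-print the entries of S sorted by time average, optionally
  limited via --limit and followed by partial, sum and total rows.'''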
  # Sort by ascending/descending time average, then by ascending/descending
  # count average, then by ascending name.
  def sort_asc_func(item):
    return (item[1]['time_stat']['average'],
            item[1]['count_stat']['average'],
            item[0])
  def sort_desc_func(item):
    return (-item[1]['time_stat']['average'],
            -item[1]['count_stat']['average'],
            item[0])
  # The sorting order is given in the command-line arguments.
  sort_func = sort_asc_func if args.sort == "asc" else sort_desc_func
  # Possibly limit how many elements to print.
  L = [item for item in sorted(S.items(), key=sort_func)
       if item[0] not in ["Total", "Sum"]]
  N = len(L)
  if args.limit == 0:
    low, high = 0, N
  elif args.sort == "desc":
    low, high = 0, args.limit
  else:
    low, high = N - args.limit, N
  # How to print entries.
  def print_entry(key, value):
    def stats(s, units=""):
      conf = "{:0.1f}({:0.2f}%)".format(s['ci']['abs'], s['ci']['perc'])
      return "{:8.1f}{} +/- {:15s}".format(s['average'], units, conf)
    print "{:>50s}  {}  {}".format(
      key,
      stats(value['time_stat'], units="ms"),
      stats(value['count_stat'])
    )
  # Print the entries and calculate partial sums, if necessary.
  for i in range(low, high):
    print_entry(*L[i])
    if args.totals and args.limit != 0 and not args.aggregate:
      if i == low:
        partial = { 'time_list': [0] * len(L[i][1]['time_list']),
                    'count_list': [0] * len(L[i][1]['count_list']) }
      assert len(partial['time_list']) == len(L[i][1]['time_list'])
      assert len(partial['count_list']) == len(L[i][1]['count_list'])
      for j, v in enumerate(L[i][1]['time_list']):
        partial['time_list'][j] += v
      for j, v in enumerate(L[i][1]['count_list']):
        partial['count_list'][j] += v
  # Print totals, if necessary.
  if args.totals:
    print '-' * 80
    if args.limit != 0 and not args.aggregate:
      partial['time_stat'] = statistics(partial['time_list'])
      partial['count_stat'] = statistics(partial['count_list'])
      print_entry("Partial", partial)
    print_entry("Sum", S["Sum"])
    print_entry("Total", S["Total"])


def do_stats(args):
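  '''The `stats` command: group the log files by domain (the file name up
  to '#'), compute statistics for every entry, and print them.'''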
  domains = {}
  for path in args.logfiles:
    filename = os.path.basename(path)
    m = re.match(r'^([^#]+)(#.*)?$', filename)
    domain = m.group(1)
    if domain not in domains: domains[domain] = {}
    read_stats(path, domains[domain], args)
  if args.aggregate:
    create_total_page_stats(domains, args)
  for i, domain in enumerate(sorted(domains)):
    if len(domains) > 1:
      if i > 0: print
      print "{}:".format(domain)
      print '=' * 80
    domain_stats = domains[domain]
    for key in domain_stats:
      domain_stats[key]['time_stat'] = \
          statistics(domain_stats[key]['time_list'])
      domain_stats[key]['count_stat'] = \
          statistics(domain_stats[key]['count_list'])
    print_stats(domain_stats, args)


# Create a Total page with all entries summed up.
def create_total_page_stats(domains, args):
  total = {}
  def sum_up(parent, key, other):
    sums = parent[key]
    for i, item in enumerate(other[key]):
      if i >= len(sums):
        sums.extend([0] * (i - len(sums) + 1))
      if item is not None:
        sums[i] += item
  # Exclude adwords and speedometer pages from the aggregated total, since
  # adwords dominates execution time and speedometer is measured elsewhere.
  excluded_domains = ['adwords.google.com', 'speedometer-angular',
                      'speedometer-jquery', 'speedometer-backbone',
                      'speedometer-ember', 'speedometer-vanilla']
  # Sum up all the entries/metrics from all non-excluded domains.
  for domain, entries in domains.items():
    if domain in excluded_domains:
      continue
    for key, domain_stats in entries.items():
      if key not in total:
        total[key] = {}
        total[key]['time_list'] = list(domain_stats['time_list'])
        total[key]['count_list'] = list(domain_stats['count_list'])
      else:
        sum_up(total[key], 'time_list', domain_stats)
        sum_up(total[key], 'count_list', domain_stats)
  # Add a new "Total" page containing the summed-up metrics.
  domains['Total'] = total


# Generate JSON file.

def do_json(args):
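  '''The `json` command: walk the given log directories (one subdirectory
  per version), compute statistics, and print them as compact JSON mapping
  versions to domains to [name, time avg, time ci, time ci%, count avg,
  count ci, count ci%] rows.'''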
  versions = {}
  for path in args.logdirs:
    if os.path.isdir(path):
      for root, dirs, files in os.walk(path):
        version = os.path.basename(root)
        if version not in versions: versions[version] = {}
        for filename in files:
          if filename.endswith(".txt"):
            m = re.match(r'^([^#]+)(#.*)?\.txt$', filename)
            domain = m.group(1)
            if domain not in versions[version]: versions[version][domain] = {}
            read_stats(os.path.join(root, filename),
                       versions[version][domain], args)
  for version, domains in versions.items():
    if args.aggregate:
      create_total_page_stats(domains, args)
    for domain, entries in domains.items():
      stats = []
      for name, value in entries.items():
        # We don't want the calculated sum in the JSON file.
        if name == "Sum": continue
        entry = [name]
        for x in ['time_list', 'count_list']:
          s = statistics(entries[name][x])
          entry.append(round(s['average'], 1))
          entry.append(round(s['ci']['abs'], 1))
          entry.append(round(s['ci']['perc'], 2))
        stats.append(entry)
      domains[domain] = stats
  print json.dumps(versions, separators=(',', ':'))


# Help.

def do_help(parser, subparsers, args):
  if args.help_cmd:
    if args.help_cmd in subparsers:
      subparsers[args.help_cmd].print_help()
    else:
      args.error("Unknown command '{}'".format(args.help_cmd))
  else:
    parser.print_help()


# Main program, parse command line and execute.

def coexist(*l):
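  '''Return True if all of the arguments are truthy or none of them are,
  i.e. the corresponding options must be given together or not at all.'''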
  given = sum(1 for x in l if x)
  return given == 0 or given == len(l)

def main():
  parser = argparse.ArgumentParser()
  subparser_adder = parser.add_subparsers(title="commands", dest="command",
                                          metavar="<command>")
  subparsers = {}
  # Command: run.
  subparsers["run"] = subparser_adder.add_parser(
      "run", help="Replay websites and collect runtime stats data.")
  subparsers["run"].set_defaults(
      func=do_run, error=subparsers["run"].error)
  subparsers["run"].add_argument(
      "--chrome-flags", type=str, default="",
      help="specify additional chrome flags")
  subparsers["run"].add_argument(
      "--js-flags", type=str, default="",
      help="specify additional V8 flags")
  subparsers["run"].add_argument(
      "-u", "--user-data-dir", type=str, metavar="<path>",
      help="specify user data dir (default is temporary)")
  subparsers["run"].add_argument(
      "-c", "--with-chrome", type=str, metavar="<path>",
      default="/usr/bin/google-chrome",
      help="specify chrome executable to use")
  subparsers["run"].add_argument(
      "-r", "--retries", type=int, metavar="<num>",
      help="specify retries if website is down (default: forever)")
  subparsers["run"].add_argument(
      "--no-url", dest="print_url", action="store_false", default=True,
      help="do not include url in statistics file")
  subparsers["run"].add_argument(
      "--domain", type=str, default="",
      help="specify the output file domain name")
  subparsers["run"].add_argument(
      "-n", "--repeat", type=int, metavar="<num>",
      help="specify iterations for each website (default: once)")

  def add_replay_args(subparser):
    subparser.add_argument(
        "-k", "--refresh", type=int, metavar="<num>", default=0,
        help="specify refreshes for each iteration (default: 0)")
    subparser.add_argument(
        "--replay-wpr", type=str, metavar="<path>",
        help="use the specified web page replay (.wpr) archive")
    subparser.add_argument(
        "--replay-bin", type=str, metavar="<path>",
        help="specify the replay.py script typically located in " \
             "$CHROMIUM/src/third_party/webpagereplay/replay.py")
    subparser.add_argument(
        "-f", "--sites-file", type=str, metavar="<path>",
        help="specify file containing benchmark websites")
    subparser.add_argument(
        "-t", "--timeout", type=int, metavar="<seconds>", default=60,
        help="specify seconds before chrome is killed")
    subparser.add_argument(
        "-p", "--port-offset", type=int, metavar="<offset>", default=0,
        help="specify the offset for the replay server's default ports")
    subparser.add_argument(
        "-l", "--log-stderr", type=str, metavar="<path>",
        help="specify where chrome's stderr should go (default: /dev/null)")
    subparser.add_argument(
        "sites", type=str, metavar="<URL>", nargs="*",
        help="specify benchmark website")
  add_replay_args(subparsers["run"])

  # Command: replay.
  subparsers["replay"] = subparser_adder.add_parser(
      "replay", help="Run the replay server for debugging purposes.")
  subparsers["replay"].set_defaults(
      func=do_run_replay_server, error=subparsers["replay"].error)
  add_replay_args(subparsers["replay"])

  # Command: stats.
  subparsers["stats"] = subparser_adder.add_parser(
      "stats", help="Analyze the result files created by the 'run' command.")
  subparsers["stats"].set_defaults(
      func=do_stats, error=subparsers["stats"].error)
  subparsers["stats"].add_argument(
      "-l", "--limit", type=int, metavar="<num>", default=0,
      help="limit how many items to print (default: none)")
  subparsers["stats"].add_argument(
      "-s", "--sort", choices=["asc", "desc"], default="asc",
      help="specify sorting order (default: ascending)")
  subparsers["stats"].add_argument(
      "-n", "--no-total", dest="totals", action="store_false", default=True,
      help="do not print totals")
  subparsers["stats"].add_argument(
      "logfiles", type=str, metavar="<logfile>", nargs="*",
      help="specify log files to parse")
  subparsers["stats"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the top " \
      "level. Additionally creates a Total page with all entries.")

  # Command: json.
  subparsers["json"] = subparser_adder.add_parser(
      "json", help="Collect result files created by the 'run' command into " \
          "a single JSON file.")
  subparsers["json"].set_defaults(
      func=do_json, error=subparsers["json"].error)
  subparsers["json"].add_argument(
      "logdirs", type=str, metavar="<logdir>", nargs="*",
      help="specify directories with log files to parse")
  subparsers["json"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the top " \
      "level. Additionally creates a Total page with all entries.")

  # Command: help.
  subparsers["help"] = subparser_adder.add_parser(
      "help", help="help information")
  subparsers["help"].set_defaults(
      func=lambda args: do_help(parser, subparsers, args),
      error=subparsers["help"].error)
  subparsers["help"].add_argument(
      "help_cmd", type=str, metavar="<command>", nargs="?",
      help="command for which to display help")

  # Execute the command.
  args = parser.parse_args()
  setattr(args, 'script_path', os.path.dirname(sys.argv[0]))
  if args.command == "run" and coexist(args.sites_file, args.sites):
    args.error("use either option --sites-file or site URLs")
    sys.exit(1)
  elif args.command == "run" and not coexist(args.replay_wpr, args.replay_bin):
    args.error("options --replay-wpr and --replay-bin must be used together")
    sys.exit(1)
  else:
    args.func(args)

if __name__ == "__main__":
  sys.exit(main())