# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import datetime
import logging
import os

from telemetry.page import page_measurement
from metrics import network


13class ChromeProxyMetricException(page_measurement.MeasurementFailure):
14  pass
15
16
17CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
18CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'
19
20PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
21PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
22PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
23PROXY_SETTING_DIRECT = 'direct://'
24
25# The default Chrome Proxy bypass time is a range from one to five mintues.
26# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
27DEFAULT_BYPASS_MIN_SECONDS = 60
28DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
29
30def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
31  tab.Navigate(url)
32  with open(os.path.join(os.path.dirname(__file__), 'chrome_proxy.js')) as f:
33    js = f.read()
34    tab.ExecuteJavaScript(js)
35  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
36  info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
37  return info
38
39
40def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
41  return (retry_time >= low and
42          (retry_time < high + datetime.timedelta(seconds=grace_seconds)))
43
44
45class ChromeProxyResponse(network.HTTPResponse):
46  """ Represents an HTTP response from a timeleine event."""
47  def __init__(self, event):
48    super(ChromeProxyResponse, self).__init__(event)
49
50  def ShouldHaveChromeProxyViaHeader(self):
51    resp = self.response
52    # Ignore https and data url
53    if resp.url.startswith('https') or resp.url.startswith('data:'):
54      return False
55    # Ignore 304 Not Modified and cache hit.
56    if resp.status == 304 or resp.served_from_cache:
57      return False
58    # Ignore invalid responses that don't have any header. Log a warning.
59    if not resp.headers:
60      logging.warning('response for %s does not any have header '
61                      '(refer=%s, status=%s)',
62                      resp.url, resp.GetHeader('Referer'), resp.status)
63      return False
64    return True
65
66  def HasChromeProxyViaHeader(self):
67    via_header = self.response.GetHeader('Via')
68    if not via_header:
69      return False
70    vias = [v.strip(' ') for v in via_header.split(',')]
71    # The Via header is valid if it is the old format or the new format
72    # with 4-character version prefix, for example,
73    # "1.1 Chrome-Compression-Proxy".
74    return (CHROME_PROXY_VIA_HEADER_DEPRECATED in vias or
75            any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias))
76
77  def IsValidByViaHeader(self):
78    return (not self.ShouldHaveChromeProxyViaHeader() or
79            self.HasChromeProxyViaHeader())
80
81  def IsSafebrowsingResponse(self):
82    if (self.response.status == 307 and
83        self.response.GetHeader('X-Malware-Url') == '1' and
84        self.IsValidByViaHeader() and
85        self.response.GetHeader('Location') == self.response.url):
86      return True
87    return False
88
89
90class ChromeProxyMetric(network.NetworkMetric):
91  """A Chrome proxy timeline metric."""
92
93  def __init__(self):
94    super(ChromeProxyMetric, self).__init__()
95    self.compute_data_saving = True
96    self.effective_proxies = {
97        "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
98        "fallback": PROXY_SETTING_HTTP,
99        "direct": PROXY_SETTING_DIRECT,
100        }
101
102  def SetEvents(self, events):
103    """Used for unittest."""
104    self._events = events
105
106  def ResponseFromEvent(self, event):
107    return ChromeProxyResponse(event)
108
109  def AddResults(self, tab, results):
110    raise NotImplementedError
111
112  def AddResultsForDataSaving(self, tab, results):
113    resources_via_proxy = 0
114    resources_from_cache = 0
115    resources_direct = 0
116
117    super(ChromeProxyMetric, self).AddResults(tab, results)
118    for resp in self.IterResponses(tab):
119      if resp.response.served_from_cache:
120        resources_from_cache += 1
121      if resp.HasChromeProxyViaHeader():
122        resources_via_proxy += 1
123      else:
124        resources_direct += 1
125
126    results.Add('resources_via_proxy', 'count', resources_via_proxy)
127    results.Add('resources_from_cache', 'count', resources_from_cache)
128    results.Add('resources_direct', 'count', resources_direct)
129
130  def AddResultsForHeaderValidation(self, tab, results):
131    via_count = 0
132    bypass_count = 0
133    for resp in self.IterResponses(tab):
134      if resp.IsValidByViaHeader():
135        via_count += 1
136      elif tab and self.IsProxyBypassed(tab):
137        logging.warning('Proxy bypassed for %s', resp.response.url)
138        bypass_count += 1
139      else:
140        r = resp.response
141        raise ChromeProxyMetricException, (
142            '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
143                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
144    results.Add('checked_via_header', 'count', via_count)
145    results.Add('request_bypassed', 'count', bypass_count)
146
147  def IsProxyBypassed(self, tab):
148    """ Returns True if all configured proxies are bypassed."""
149    info = GetProxyInfoFromNetworkInternals(tab)
150    if not info['enabled']:
151      raise ChromeProxyMetricException, (
152          'Chrome proxy should be enabled. proxy info: %s' % info)
153
154    bad_proxies = [str(p['proxy']) for p in info['badProxies']].sort()
155    proxies = [self.effective_proxies['proxy'],
156               self.effective_proxies['fallback']].sort()
157    return bad_proxies == proxies
158
159  @staticmethod
160  def VerifyBadProxies(
161      badProxies, expected_proxies,
162      retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS,
163      retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS):
164    """Verify the bad proxy list and their retry times are expected. """
165    if not badProxies or (len(badProxies) != len(expected_proxies)):
166      return False
167
168    # Check all expected proxies.
169    proxies = [p['proxy'] for p in badProxies]
170    expected_proxies.sort()
171    proxies.sort()
172    if not expected_proxies == proxies:
173      raise ChromeProxyMetricException, (
174          'Bad proxies: got %s want %s' % (
175              str(badProxies), str(expected_proxies)))
176
177    # Check retry time
178    for p in badProxies:
179      retry_time_low = (datetime.datetime.now() +
180                        datetime.timedelta(seconds=retry_seconds_low))
181      retry_time_high = (datetime.datetime.now() +
182                        datetime.timedelta(seconds=retry_seconds_high))
183      got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000)
184      if not ProxyRetryTimeInRange(
185          got_retry_time, retry_time_low, retry_time_high):
186        raise ChromeProxyMetricException, (
187            'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
188                p['proxy'], str(got_retry_time), str(retry_time_low),
189                str(retry_time_high)))
190    return True
191
192  def AddResultsForBypass(self, tab, results):
193    bypass_count = 0
194    for resp in self.IterResponses(tab):
195      if resp.HasChromeProxyViaHeader():
196        r = resp.response
197        raise ChromeProxyMetricException, (
198            '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
199                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
200      bypass_count += 1
201
202    if tab:
203      info = GetProxyInfoFromNetworkInternals(tab)
204      if not info['enabled']:
205        raise ChromeProxyMetricException, (
206            'Chrome proxy should be enabled. proxy info: %s' % info)
207      self.VerifyBadProxies(
208          info['badProxies'],
209          [self.effective_proxies['proxy'],
210           self.effective_proxies['fallback']])
211
212    results.Add('bypass', 'count', bypass_count)
213
214  def AddResultsForSafebrowsing(self, tab, results):
215    count = 0
216    safebrowsing_count = 0
217    for resp in self.IterResponses(tab):
218      count += 1
219      if resp.IsSafebrowsingResponse():
220        safebrowsing_count += 1
221      else:
222        r = resp.response
223        raise ChromeProxyMetricException, (
224            '%s: Not a valid safe browsing response.\n'
225            'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
226                r.url, r.status, r.status_text, r.headers))
227    if count == safebrowsing_count:
228      results.Add('safebrowsing', 'boolean', True)
229    else:
230      raise ChromeProxyMetricException, (
231          'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
232              count, safebrowsing_count))
233
234  def AddResultsForHTTPFallback(
235      self, tab, results, expected_proxies=None, expected_bad_proxies=None):
236    info = GetProxyInfoFromNetworkInternals(tab)
237    if not 'enabled' in info or not info['enabled']:
238      raise ChromeProxyMetricException, (
239          'Chrome proxy should be enabled. proxy info: %s' % info)
240
241    if not expected_proxies:
242      expected_proxies = [self.effective_proxies['fallback'],
243                          self.effective_proxies['direct']]
244    if not expected_bad_proxies:
245      expected_bad_proxies = []
246
247    proxies = info['proxies']
248    if proxies != expected_proxies:
249      raise ChromeProxyMetricException, (
250          'Wrong effective proxies (%s). Expect: "%s"' % (
251          str(proxies), str(expected_proxies)))
252
253    bad_proxies = []
254    if 'badProxies' in info and info['badProxies']:
255      bad_proxies = [p['proxy'] for p in info['badProxies']
256                     if 'proxy' in p and p['proxy']]
257    if bad_proxies != expected_bad_proxies:
258      raise ChromeProxyMetricException, (
259          'Wrong bad proxies (%s). Expect: "%s"' % (
260          str(bad_proxies), str(expected_bad_proxies)))
261    results.Add('http_fallback', 'boolean', True)
262