1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
9 from integration_tests import network_metrics
10 from telemetry.page import page_test
11 from telemetry.value import scalar
# Failure type raised by the proxy metrics below when a measured expectation
# does not hold; derives from telemetry's page_test.MeasurementFailure so the
# harness reports it as a measurement failure. (Class body not visible here.)
14 class ChromeProxyMetricException(page_test.MeasurementFailure):
# Current Via-header token appended by the data-reduction proxy; matched
# against version-stripped Via entries (see HasChromeProxyViaHeader).
18 CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
# Legacy full Via value, still accepted as valid.
19 CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'
# Proxy endpoints and the direct (no-proxy) setting used to build the
# expected effective-proxy lists.
21 PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
22 PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
23 PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
24 PROXY_SETTING_DIRECT = 'direct://'
26 # The default Chrome Proxy bypass time is a range from one to five minutes.
27 # See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
28 DEFAULT_BYPASS_MIN_SECONDS = 60
29 DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
# Loads chrome://net-internals#proxy in |tab|, injects the helper script
# chrome_proxy_metrics.js, and queries window.__getChromeProxyInfo() for the
# browser's current proxy state (enabled flag, effective proxies, bad
# proxies).
# NOTE(review): several lines are elided in this view -- navigating to |url|,
# reading the script file into |js|, and the final 'return info'. Confirm
# against the full file.
31 def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
33 with open(os.path.join(os.path.dirname(__file__),
34 'chrome_proxy_metrics.js')) as f:
# Run the injected helper, wait for the page load to settle (300s timeout),
# then evaluate the info-collection entry point it defines.
36 tab.ExecuteJavaScript(js)
37 tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
38 info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
  """Checks that a bad proxy's retry timestamp lies in the expected window.

  Args:
    retry_time: datetime at which the proxy will be retried.
    low: earliest acceptable retry time (inclusive).
    high: latest acceptable retry time, before the grace period.
    grace_seconds: slack added beyond |high| to absorb timing jitter.

  Returns:
    True iff low <= retry_time < high + grace_seconds.
  """
  upper_bound = high + datetime.timedelta(seconds=grace_seconds)
  return low <= retry_time < upper_bound
47 class ChromeProxyResponse(network_metrics.HTTPResponse):
48 """ Represents an HTTP response from a timeline event."""
# Thin wrapper: all state comes from the base HTTPResponse; this class only
# layers Chrome-Proxy-specific predicates (Via-header checks, safe-browsing
# detection) on top of it.
49 def __init__(self, event):
50 super(ChromeProxyResponse, self).__init__(event)
52 def ShouldHaveChromeProxyViaHeader(self):
# Whether this response is expected to carry the proxy Via header.
# https/data URLs, 304s, cache hits, and header-less responses are exempt.
# NOTE(review): the binding 'resp = self.response', the early 'return False'
# under each check, the 'if not resp.headers:' guard, and the final return
# are elided in this view -- confirm against the full file.
54 # Ignore https and data url
55 if resp.url.startswith('https') or resp.url.startswith('data:'):
57 # Ignore 304 Not Modified and cache hit.
58 if resp.status == 304 or resp.served_from_cache:
60 # Ignore invalid responses that don't have any header. Log a warning.
62 logging.warning('response for %s does not any have header '
63 '(refer=%s, status=%s)',
64 resp.url, resp.GetHeader('Referer'), resp.status)
68 def HasChromeProxyViaHeader(self):
# True if the response's Via header contains the data-reduction proxy entry,
# in either the deprecated full form or the new "<4-char version> <token>"
# form (the v[4:] slice strips e.g. "1.1 ").
# NOTE(review): the guard for a missing Via header (return False when
# via_header is None) is elided in this view -- confirm.
69 via_header = self.response.GetHeader('Via')
72 vias = [v.strip(' ') for v in via_header.split(',')]
73 # The Via header is valid if it is the old format or the new format
74 # with 4-character version prefix, for example,
75 # "1.1 Chrome-Compression-Proxy".
76 return (CHROME_PROXY_VIA_HEADER_DEPRECATED in vias or
77 any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias))
79 def IsValidByViaHeader(self):
80 return (not self.ShouldHaveChromeProxyViaHeader() or
81 self.HasChromeProxyViaHeader())
83 def IsSafebrowsingResponse(self):
# Detects the proxy's safe-browsing interstitial: a 307 redirect back to the
# same URL, flagged with the 'X-Malware-Url: 1' header and carrying a valid
# Via header.
# NOTE(review): the 'return True' inside the branch and the trailing
# 'return False' are elided in this view -- confirm.
84 if (self.response.status == 307 and
85 self.response.GetHeader('X-Malware-Url') == '1' and
86 self.IsValidByViaHeader() and
87 self.response.GetHeader('Location') == self.response.url):
92 class ChromeProxyMetric(network_metrics.NetworkMetric):
93 """A Chrome proxy timeline metric."""
# NOTE(review): the 'def __init__(self):' line and the closing brace of the
# dict literal below are elided in this view -- confirm against the full
# file.
96 super(ChromeProxyMetric, self).__init__()
# Enables data-saving accounting -- presumably consumed by the
# NetworkMetric base class; verify against network_metrics.
97 self.compute_data_saving = True
# Maps proxy roles to their configured settings; used to build the
# expected proxy lists in the bypass/fallback checks below.
98 self.effective_proxies = {
99 "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
100 "fallback": PROXY_SETTING_HTTP,
101 "direct": PROXY_SETTING_DIRECT,
104 def SetEvents(self, events):
105 """Used for unittest."""
106 self._events = events
108 def ResponseFromEvent(self, event):
109 return ChromeProxyResponse(event)
111 def AddResults(self, tab, results):
# Abstract hook: concrete metric subclasses override this to record their
# values into |results| for |tab|.
112 raise NotImplementedError
114 def AddResultsForDataSaving(self, tab, results):
# Classifies each response as served via the proxy, from the cache, or
# direct, and records the three counts as scalar values.
# NOTE(review): the 'resources_direct = 0' initialization and the 'else:'
# preceding the 'resources_direct += 1' increment are elided in this view.
115 resources_via_proxy = 0
116 resources_from_cache = 0
# Let the base NetworkMetric record its own (data-saving) results first.
119 super(ChromeProxyMetric, self).AddResults(tab, results)
120 for resp in self.IterResponses(tab):
121 if resp.response.served_from_cache:
122 resources_from_cache += 1
123 if resp.HasChromeProxyViaHeader():
124 resources_via_proxy += 1
126 resources_direct += 1
128 results.AddValue(scalar.ScalarValue(
129 results.current_page, 'resources_via_proxy', 'count',
130 resources_via_proxy))
131 results.AddValue(scalar.ScalarValue(
132 results.current_page, 'resources_from_cache', 'count',
133 resources_from_cache))
134 results.AddValue(scalar.ScalarValue(
135 results.current_page, 'resources_direct', 'count', resources_direct))
137 def AddResultsForHeaderValidation(self, tab, results):
# Counts responses whose Via header is valid and responses excused because
# the proxy was bypassed, then records 'checked_via_header' and
# 'request_bypassed' scalar counts. Any other response missing a required
# Via header fails the measurement.
# NOTE(review): the via_count/bypass_count initializations, their
# increments, and the 'else:' branch binding |r| are elided in this view.
140 for resp in self.IterResponses(tab):
141 if resp.IsValidByViaHeader():
143 elif tab and self.IsProxyBypassed(tab):
# A bypassed proxy legitimately yields responses without the Via header.
144 logging.warning('Proxy bypassed for %s', resp.response.url)
148 raise ChromeProxyMetricException, (
149 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
150 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
151 results.AddValue(scalar.ScalarValue(
152 results.current_page, 'checked_via_header', 'count', via_count))
153 results.AddValue(scalar.ScalarValue(
154 results.current_page, 'request_bypassed', 'count', bypass_count))
156 def AddResultsForClientVersion(self, tab, results):
# Requires every response to be a 200 with a valid Via header; on success
# records a single 'version_test' count of 1.
# NOTE(review): the binding of |r| and the continuation lines supplying the
# format arguments to both exception messages are elided in this view.
157 for resp in self.IterResponses(tab):
159 if resp.response.status != 200:
160 raise ChromeProxyMetricException, ('%s: Response is not 200: %d' %
162 if not resp.IsValidByViaHeader():
163 raise ChromeProxyMetricException, ('%s: Response missing via header' %
165 results.AddValue(scalar.ScalarValue(
166 results.current_page, 'version_test', 'count', 1))
169 def IsProxyBypassed(self, tab):
170 """ Returns True if all configured proxies are bypassed."""
171 info = GetProxyInfoFromNetworkInternals(tab)
172 if not info['enabled']:
173 raise ChromeProxyMetricException, (
174 'Chrome proxy should be enabled. proxy info: %s' % info)
176 bad_proxies = [str(p['proxy']) for p in info['badProxies']].sort()
177 proxies = [self.effective_proxies['proxy'],
178 self.effective_proxies['fallback']].sort()
179 return bad_proxies == proxies
182 def VerifyBadProxies(
183 badProxies, expected_proxies,
184 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS,
185 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS):
186 """Verify the bad proxy list and their retry times are expected. """
# Raises ChromeProxyMetricException when the bad-proxy list differs from
# |expected_proxies| or a retry timestamp falls outside
# [now + retry_seconds_low, now + retry_seconds_high + grace).
# NOTE(review): callers invoke this as self.VerifyBadProxies(...), yet no
# 'self' parameter is visible here -- presumably elided; confirm.
187 if not badProxies or (len(badProxies) != len(expected_proxies)):
190 # Check all expected proxies.
191 proxies = [p['proxy'] for p in badProxies]
192 expected_proxies.sort()
# NOTE(review): the matching sort of |proxies|, the raise under the length
# check above, and the 'for p in badProxies:' loop header enclosing the
# retry-time checks below are elided in this view.
194 if not expected_proxies == proxies:
195 raise ChromeProxyMetricException, (
196 'Bad proxies: got %s want %s' % (
197 str(badProxies), str(expected_proxies)))
201 retry_time_low = (datetime.datetime.now() +
202 datetime.timedelta(seconds=retry_seconds_low))
203 retry_time_high = (datetime.datetime.now() +
204 datetime.timedelta(seconds=retry_seconds_high))
# p['retry'] is a JavaScript timestamp in milliseconds; divide by 1000 to
# get seconds for datetime.fromtimestamp.
205 got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000)
206 if not ProxyRetryTimeInRange(
207 got_retry_time, retry_time_low, retry_time_high):
208 raise ChromeProxyMetricException, (
209 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
210 p['proxy'], str(got_retry_time), str(retry_time_low),
211 str(retry_time_high)))
214 def AddResultsForBypass(self, tab, results):
# Expects every response to bypass the proxy (i.e. carry no proxy Via
# header), verifies via net-internals that both configured proxies are on
# the bad-proxy list, then records a 'bypass' count.
# NOTE(review): the bypass_count initialization and increment, the binding
# of |r|, and one argument line of the VerifyBadProxies call (presumably
# info['badProxies']) are elided in this view.
216 for resp in self.IterResponses(tab):
217 if resp.HasChromeProxyViaHeader():
219 raise ChromeProxyMetricException, (
220 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
221 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
225 info = GetProxyInfoFromNetworkInternals(tab)
226 if not info['enabled']:
227 raise ChromeProxyMetricException, (
228 'Chrome proxy should be enabled. proxy info: %s' % info)
229 self.VerifyBadProxies(
231 [self.effective_proxies['proxy'],
232 self.effective_proxies['fallback']])
234 results.AddValue(scalar.ScalarValue(
235 results.current_page, 'bypass', 'count', bypass_count))
237 def AddResultsForSafebrowsing(self, tab, results):
# Expects every response to be the proxy's safe-browsing 307 redirect;
# records a boolean 'safebrowsing' value when all responses qualify, and
# fails the measurement otherwise.
# NOTE(review): the 'count' initialization and increment and the 'else:'
# branch binding |r| are elided in this view. The 'Reponse' typo below is
# inside a runtime string and deliberately left untouched.
239 safebrowsing_count = 0
240 for resp in self.IterResponses(tab):
242 if resp.IsSafebrowsingResponse():
243 safebrowsing_count += 1
246 raise ChromeProxyMetricException, (
247 '%s: Not a valid safe browsing response.\n'
248 'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
249 r.url, r.status, r.status_text, r.headers))
250 if count == safebrowsing_count:
251 results.AddValue(scalar.ScalarValue(
252 results.current_page, 'safebrowsing', 'boolean', True))
254 raise ChromeProxyMetricException, (
255 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
256 count, safebrowsing_count))
258 def AddResultsForHTTPFallback(
259 self, tab, results, expected_proxies=None, expected_bad_proxies=None):
260 info = GetProxyInfoFromNetworkInternals(tab)
261 if not 'enabled' in info or not info['enabled']:
262 raise ChromeProxyMetricException, (
263 'Chrome proxy should be enabled. proxy info: %s' % info)
265 if not expected_proxies:
266 expected_proxies = [self.effective_proxies['fallback'],
267 self.effective_proxies['direct']]
268 if not expected_bad_proxies:
269 expected_bad_proxies = []
271 proxies = info['proxies']
272 if proxies != expected_proxies:
273 raise ChromeProxyMetricException, (
274 'Wrong effective proxies (%s). Expect: "%s"' % (
275 str(proxies), str(expected_proxies)))
278 if 'badProxies' in info and info['badProxies']:
279 bad_proxies = [p['proxy'] for p in info['badProxies']
280 if 'proxy' in p and p['proxy']]
281 if bad_proxies != expected_bad_proxies:
282 raise ChromeProxyMetricException, (
283 'Wrong bad proxies (%s). Expect: "%s"' % (
284 str(bad_proxies), str(expected_bad_proxies)))
285 results.AddValue(scalar.ScalarValue(
286 results.current_page, 'http_fallback', 'boolean', True))