1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
import datetime
import logging
import os

from integration_tests import network_metrics
from telemetry.page import page_test
from telemetry.value import scalar
class ChromeProxyMetricException(page_test.MeasurementFailure):
  """Raised when a Chrome proxy measurement observes unexpected state."""
  pass
# Via header values by which the Chrome compression proxy identifies itself
# in responses it has served (new 4-char-version form, and deprecated form).
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'

# Proxy endpoints used to build the expected effective/bad proxy lists.
PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
PROXY_DEV_SETTING_HTTPS_WITH_SCHEME = 'http://proxy-dev.googlezip.net:80'
PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
PROXY_SETTING_DIRECT = 'direct://'

# The default Chrome Proxy bypass time is a range from one to five minutes.
# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
DEFAULT_BYPASS_MIN_SECONDS = 60
DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
  """Injects the helper JS into |tab| and returns the proxy info dict.

  Args:
    tab: a telemetry Tab to query.
    url: the net-internals proxy page (kept for callers that override it).

  Returns:
    The dict produced by window.__getChromeProxyInfo() (keys seen in this
    file: 'enabled', 'proxies', 'badProxies').
  """
  tab.Navigate(url)
  with open(os.path.join(os.path.dirname(__file__),
                         'chrome_proxy_metrics.js')) as f:
    js = f.read()
  tab.ExecuteJavaScript(js)
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
  info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
  return info
def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
  """Returns True when retry_time is within [low, high + grace_seconds)."""
  upper_bound = high + datetime.timedelta(seconds=grace_seconds)
  return low <= retry_time < upper_bound
class ChromeProxyResponse(network_metrics.HTTPResponse):
  """Represents an HTTP response from a timeline event."""

  def __init__(self, event):
    super(ChromeProxyResponse, self).__init__(event)

  def ShouldHaveChromeProxyViaHeader(self):
    """Returns True if this response is expected to carry the proxy Via header."""
    resp = self.response
    # Ignore https and data urls: they do not go through the proxy.
    if resp.url.startswith('https') or resp.url.startswith('data:'):
      return False
    # Ignore 304 Not Modified and cache hits.
    if resp.status == 304 or resp.served_from_cache:
      return False
    # Ignore invalid responses that don't have any header. Log a warning.
    if not resp.headers:
      logging.warning('response for %s does not have any header '
                      '(refer=%s, status=%s)',
                      resp.url, resp.GetHeader('Referer'), resp.status)
      return False
    return True

  def HasChromeProxyViaHeader(self):
    """Returns True if the Via header names the Chrome compression proxy."""
    via_header = self.response.GetHeader('Via')
    if not via_header:
      return False
    vias = [v.strip(' ') for v in via_header.split(',')]
    # The Via header is valid if it is the old format or the new format
    # with 4-character version prefix, for example,
    # "1.1 Chrome-Compression-Proxy".
    return (CHROME_PROXY_VIA_HEADER_DEPRECATED in vias or
            any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias))

  def IsValidByViaHeader(self):
    """True when the response either needs no Via header or carries ours."""
    return (not self.ShouldHaveChromeProxyViaHeader() or
            self.HasChromeProxyViaHeader())

  def IsSafebrowsingResponse(self):
    """True for the proxy's 307 malware-warning redirect back to the url."""
    if (self.response.status == 307 and
        self.response.GetHeader('X-Malware-Url') == '1' and
        self.IsValidByViaHeader() and
        self.response.GetHeader('Location') == self.response.url):
      return True
    return False
class ChromeProxyMetric(network_metrics.NetworkMetric):
  """A Chrome proxy timeline metric."""

  def __init__(self):
    super(ChromeProxyMetric, self).__init__()
    # Data-saving stats are computed by default (read by the base class).
    self.compute_data_saving = True
    # Proxy urls keyed by role, used to build expected proxy lists.
    self.effective_proxies = {
        "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
        "proxy-dev": PROXY_DEV_SETTING_HTTPS_WITH_SCHEME,
        "fallback": PROXY_SETTING_HTTP,
        "direct": PROXY_SETTING_DIRECT,
    }
106 def SetEvents(self, events):
107 """Used for unittest."""
108 self._events = events
110 def ResponseFromEvent(self, event):
111 return ChromeProxyResponse(event)
113 def AddResults(self, tab, results):
114 raise NotImplementedError
116 def AddResultsForDataSaving(self, tab, results):
117 resources_via_proxy = 0
118 resources_from_cache = 0
121 super(ChromeProxyMetric, self).AddResults(tab, results)
122 for resp in self.IterResponses(tab):
123 if resp.response.served_from_cache:
124 resources_from_cache += 1
125 if resp.HasChromeProxyViaHeader():
126 resources_via_proxy += 1
128 resources_direct += 1
130 results.AddValue(scalar.ScalarValue(
131 results.current_page, 'resources_via_proxy', 'count',
132 resources_via_proxy))
133 results.AddValue(scalar.ScalarValue(
134 results.current_page, 'resources_from_cache', 'count',
135 resources_from_cache))
136 results.AddValue(scalar.ScalarValue(
137 results.current_page, 'resources_direct', 'count', resources_direct))
139 def AddResultsForHeaderValidation(self, tab, results):
142 for resp in self.IterResponses(tab):
143 if resp.IsValidByViaHeader():
146 bypassed, _ = self.IsProxyBypassed(tab)
148 logging.warning('Proxy bypassed for %s', resp.response.url)
152 raise ChromeProxyMetricException, (
153 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
154 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
155 results.AddValue(scalar.ScalarValue(
156 results.current_page, 'checked_via_header', 'count', via_count))
157 results.AddValue(scalar.ScalarValue(
158 results.current_page, 'request_bypassed', 'count', bypass_count))
160 def AddResultsForClientVersion(self, tab, results):
161 for resp in self.IterResponses(tab):
163 if resp.response.status != 200:
164 raise ChromeProxyMetricException, ('%s: Response is not 200: %d' %
166 if not resp.IsValidByViaHeader():
167 raise ChromeProxyMetricException, ('%s: Response missing via header' %
169 results.AddValue(scalar.ScalarValue(
170 results.current_page, 'version_test', 'count', 1))
173 def IsProxyBypassed(self, tab):
174 """ Returns True if all configured proxies are bypassed."""
178 info = GetProxyInfoFromNetworkInternals(tab)
179 if not info['enabled']:
180 raise ChromeProxyMetricException, (
181 'Chrome proxy should be enabled. proxy info: %s' % info)
183 bad_proxies = [str(p['proxy']) for p in info['badProxies']]
185 proxies = [self.effective_proxies['proxy'],
186 self.effective_proxies['fallback']]
188 proxies_dev = [self.effective_proxies['proxy-dev'],
189 self.effective_proxies['fallback']]
191 if bad_proxies == proxies:
193 elif bad_proxies == proxies_dev:
194 return True, proxies_dev
198 def VerifyBadProxies(
199 badProxies, expected_proxies,
200 retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS,
201 retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS):
202 """Verify the bad proxy list and their retry times are expected. """
203 if not badProxies or (len(badProxies) != len(expected_proxies)):
206 # Check all expected proxies.
207 proxies = [p['proxy'] for p in badProxies]
208 expected_proxies.sort()
210 if not expected_proxies == proxies:
211 raise ChromeProxyMetricException, (
212 'Bad proxies: got %s want %s' % (
213 str(badProxies), str(expected_proxies)))
217 retry_time_low = (datetime.datetime.now() +
218 datetime.timedelta(seconds=retry_seconds_low))
219 retry_time_high = (datetime.datetime.now() +
220 datetime.timedelta(seconds=retry_seconds_high))
221 got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000)
222 if not ProxyRetryTimeInRange(
223 got_retry_time, retry_time_low, retry_time_high):
224 raise ChromeProxyMetricException, (
225 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
226 p['proxy'], str(got_retry_time), str(retry_time_low),
227 str(retry_time_high)))
230 def AddResultsForBypass(self, tab, results):
232 for resp in self.IterResponses(tab):
233 if resp.HasChromeProxyViaHeader():
235 raise ChromeProxyMetricException, (
236 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
237 r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
241 info = GetProxyInfoFromNetworkInternals(tab)
242 if not info['enabled']:
243 raise ChromeProxyMetricException, (
244 'Chrome proxy should be enabled. proxy info: %s' % info)
245 _, expected_bad_proxies = self.IsProxyBypassed(tab)
246 self.VerifyBadProxies(info['badProxies'], expected_bad_proxies)
248 results.AddValue(scalar.ScalarValue(
249 results.current_page, 'bypass', 'count', bypass_count))
251 def AddResultsForBlockOnce(self, tab, results):
252 eligible_response_count = 0
254 for resp in self.IterResponses(tab):
255 if resp.ShouldHaveChromeProxyViaHeader():
256 eligible_response_count += 1
257 if not resp.HasChromeProxyViaHeader():
261 info = GetProxyInfoFromNetworkInternals(tab)
262 if not info['enabled']:
263 raise ChromeProxyMetricException, (
264 'Chrome proxy should be enabled. proxy info: %s' % info)
265 self.VerifyBadProxies(info['badProxies'], [])
267 if eligible_response_count <= 1:
268 raise ChromeProxyMetricException, (
269 'There should be more than one DRP eligible response '
270 '(eligible_response_count=%d, bypass_count=%d)\n' % (
271 eligible_response_count, bypass_count))
272 elif bypass_count != 1:
273 raise ChromeProxyMetricException, (
274 'Exactly one response should be bypassed. '
275 '(eligible_response_count=%d, bypass_count=%d)\n' % (
276 eligible_response_count, bypass_count))
278 results.AddValue(scalar.ScalarValue(
279 results.current_page, 'eligible_responses', 'count',
280 eligible_response_count))
281 results.AddValue(scalar.ScalarValue(
282 results.current_page, 'bypass', 'count', bypass_count))
284 def AddResultsForSafebrowsing(self, tab, results):
286 safebrowsing_count = 0
287 for resp in self.IterResponses(tab):
289 if resp.IsSafebrowsingResponse():
290 safebrowsing_count += 1
293 raise ChromeProxyMetricException, (
294 '%s: Not a valid safe browsing response.\n'
295 'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
296 r.url, r.status, r.status_text, r.headers))
297 if count == safebrowsing_count:
298 results.AddValue(scalar.ScalarValue(
299 results.current_page, 'safebrowsing', 'boolean', True))
301 raise ChromeProxyMetricException, (
302 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
303 count, safebrowsing_count))
305 def AddResultsForHTTPFallback(
306 self, tab, results, expected_proxies=None, expected_bad_proxies=None):
307 info = GetProxyInfoFromNetworkInternals(tab)
308 if not 'enabled' in info or not info['enabled']:
309 raise ChromeProxyMetricException, (
310 'Chrome proxy should be enabled. proxy info: %s' % info)
312 if not expected_proxies:
313 expected_proxies = [self.effective_proxies['fallback'],
314 self.effective_proxies['direct']]
315 if not expected_bad_proxies:
316 expected_bad_proxies = []
318 proxies = info['proxies']
319 if proxies != expected_proxies:
320 raise ChromeProxyMetricException, (
321 'Wrong effective proxies (%s). Expect: "%s"' % (
322 str(proxies), str(expected_proxies)))
325 if 'badProxies' in info and info['badProxies']:
326 bad_proxies = [p['proxy'] for p in info['badProxies']
327 if 'proxy' in p and p['proxy']]
328 if bad_proxies != expected_bad_proxies:
329 raise ChromeProxyMetricException, (
330 'Wrong bad proxies (%s). Expect: "%s"' % (
331 str(bad_proxies), str(expected_bad_proxies)))
332 results.AddValue(scalar.ScalarValue(
333 results.current_page, 'http_fallback', 'boolean', True))