1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
8 from telemetry.page import page_measurement
9 from metrics import network
class ChromeProxyMetricException(page_measurement.MeasurementFailure):
  """Raised when a Chrome-Proxy metric check fails during a measurement."""
# Via header value appended by the Chrome data compression proxy
# (current format; appears after a 4-character version prefix such as "1.1 ").
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
# Full Via header value used by the older, deprecated format.
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'

# Proxy endpoints used when checking the browser's effective proxy settings.
PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
PROXY_SETTING_DIRECT = 'direct://'

# The default Chrome Proxy bypass time is a range from one to five minutes.
# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
DEFAULT_BYPASS_MIN_SECONDS = 60
DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
  """Reads the Chrome proxy state by scripting chrome://net-internals.

  Injects chrome_proxy.js (which defines window.__getChromeProxyInfo) into
  |tab|, waits for the page to finish loading, then evaluates the helper.

  Args:
    tab: a telemetry tab capable of navigation and JavaScript execution.
    url: the net-internals page to inspect.

  Returns:
    The value produced by window.__getChromeProxyInfo() (a proxy-info dict).
  """
  # NOTE(review): |url| was previously accepted but never used; navigating to
  # it before injecting the script looks like the intent -- confirm.
  tab.Navigate(url)
  with open(os.path.join(os.path.dirname(__file__), 'chrome_proxy.js')) as f:
    # Fix: |js| was referenced below but never read from the file.
    js = f.read()
  tab.ExecuteJavaScript(js)
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
  info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
  # Fix: the computed |info| was never returned to the caller.
  return info
def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
  """Checks that |retry_time| lies in [low, high + grace_seconds).

  The upper bound is stretched by a grace period to absorb the delay between
  when the retry time was recorded and when it is checked.
  """
  if retry_time < low:
    return False
  upper_bound = high + datetime.timedelta(seconds=grace_seconds)
  return retry_time < upper_bound
class ChromeProxyResponse(network.HTTPResponse):
  """Represents an HTTP response from a timeline event."""
  def __init__(self, event):
    # All response parsing is delegated to the network.HTTPResponse base.
    super(ChromeProxyResponse, self).__init__(event)
  def ShouldHaveChromeProxyViaHeader(self):
    """Whether this response is expected to carry the proxy Via header.

    NOTE(review): statements appear to be missing from this method as seen
    here -- |resp| is never bound (presumably resp = self.response) and the
    two `if` branches have no bodies and no return statements. Recover the
    original before relying on this code.
    """
    # Ignore https and data url
    if resp.url.startswith('https') or resp.url.startswith('data:'):
    # Ignore 304 Not Modified.
    if resp.status == 304:
59 def HasChromeProxyViaHeader(self):
60 via_header = self.response.GetHeader('Via')
63 vias = [v.strip(' ') for v in via_header.split(',')]
64 # The Via header is valid if it is the old format or the new format
65 # with 4-character version prefix, for example,
66 # "1.1 Chrome-Compression-Proxy".
67 return (CHROME_PROXY_VIA_HEADER_DEPRECATED in vias or
68 any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias))
70 def IsValidByViaHeader(self):
71 return (not self.ShouldHaveChromeProxyViaHeader() or
72 self.HasChromeProxyViaHeader())
  def IsSafebrowsingResponse(self):
    """Detects the proxy's safebrowsing block response (307 + malware marker).

    NOTE(review): the branch body and return statements are missing from this
    method as seen here; the condition below is evaluated but nothing is
    returned. Recover the original before relying on this code.
    """
    if (self.response.status == 307 and
        self.response.GetHeader('X-Malware-Url') == '1' and
        self.IsValidByViaHeader() and
        self.response.GetHeader('Location') == self.response.url):
class ChromeProxyMetric(network.NetworkMetric):
  """A Chrome proxy timeline metric."""

  # NOTE(review): the `def __init__(self):` line appears to be missing above
  # the constructor body below, and the dict literal is missing its closing
  # brace. Recover the original before relying on this code.
  super(ChromeProxyMetric, self).__init__()
  # Ask the base NetworkMetric to also compute data-saving numbers.
  self.compute_data_saving = True
  # Proxy strings expected to show up in chrome://net-internals.
  self.effective_proxies = {
      "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
      "fallback": PROXY_SETTING_HTTP,
      "direct": PROXY_SETTING_DIRECT,
  def SetEvents(self, events):
    """Used for unittest."""
    # NOTE(review): as seen here this setter ignores |events| entirely -- the
    # statement storing it (presumably an assignment to an instance
    # attribute) appears to be missing. Confirm against the original file.
99 def ResponseFromEvent(self, event):
100 return ChromeProxyResponse(event)
  def AddResults(self, tab, results):
    """Records this metric's results; scenario subclasses must override
    (see the AddResultsFor* variants below)."""
    raise NotImplementedError
  def AddResultsForDataSaving(self, tab, results):
    """Counts responses served via the proxy, from cache, and directly.

    NOTE(review): statements appear to be missing from this method as seen
    here -- |resources_direct| is never initialized, and the final increment
    below seems to have lost its `else:` guard (as written it would count a
    proxied response as direct as well). Recover the original.
    """
    resources_via_proxy = 0
    resources_from_cache = 0

    # Let the base NetworkMetric record its standard results first.
    super(ChromeProxyMetric, self).AddResults(tab, results)
    for resp in self.IterResponses(tab):
      if resp.response.served_from_cache:
        resources_from_cache += 1
      if resp.HasChromeProxyViaHeader():
        resources_via_proxy += 1
        resources_direct += 1

    results.Add('resources_via_proxy', 'count', resources_via_proxy)
    results.Add('resources_from_cache', 'count', resources_from_cache)
    results.Add('resources_direct', 'count', resources_direct)
  def AddResultsForHeaderValidation(self, tab, results):
    """Validates each response's Via header and reports how many were checked.

    NOTE(review): statements appear to be missing -- |via_count| is never
    initialized or incremented and |r| is never bound; as shown, the raise
    sits under the *valid* branch, which inverts the apparent intent.
    Recover the original before relying on this code.
    """
    for resp in self.IterResponses(tab):
      if resp.IsValidByViaHeader():
        raise ChromeProxyMetricException, (
            '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
    results.Add('checked_via_header', 'count', via_count)
  def VerifyBadProxies(
      badProxies, expected_proxies,
      retry_seconds_low = DEFAULT_BYPASS_MIN_SECONDS,
      retry_seconds_high = DEFAULT_BYPASS_MAX_SECONDS):
    """Verify the bad proxy list and their retry times are expected. """
    # NOTE(review): |self| is absent from the signature, the first `if` below
    # has no body (presumably a raise for a length mismatch), and the
    # `for p in badProxies:` loop that binds |p| for the retry-time checks
    # appears to be missing. Recover the original before relying on this.
    if not badProxies or (len(badProxies) != len(expected_proxies)):

    # Check all expected proxies.
    proxies = [p['proxy'] for p in badProxies]
    expected_proxies.sort()
    if not expected_proxies == proxies:
      raise ChromeProxyMetricException, (
          'Bad proxies: got %s want %s' % (
              str(badProxies), str(expected_proxies)))

    # p['retry'] is milliseconds since the epoch (divided by 1000 below);
    # each retry time must fall in the default bypass window from now.
    retry_time_low = (datetime.datetime.now() +
                      datetime.timedelta(seconds=retry_seconds_low))
    retry_time_high = (datetime.datetime.now() +
                       datetime.timedelta(seconds=retry_seconds_high))
    got_retry_time = datetime.datetime.fromtimestamp(int(p['retry'])/1000)
    if not ProxyRetryTimeInRange(
        got_retry_time, retry_time_low, retry_time_high):
      raise ChromeProxyMetricException, (
          'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
              p['proxy'], str(got_retry_time), str(retry_time_low),
              str(retry_time_high)))
  def AddResultsForBypass(self, tab, results):
    """Checks responses bypassed the proxy and the proxies were marked bad.

    NOTE(review): statements appear to be missing -- |bypass_count| is never
    initialized or incremented, |r| is never bound, and the VerifyBadProxies
    call below seems to be missing its badProxies argument (only the
    expected-proxies list is visible). Recover the original.
    """
    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        raise ChromeProxyMetricException, (
            '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))

    # Verify the proxy was enabled and both proxies were marked bad.
    info = GetProxyInfoFromNetworkInternals(tab)
    if not info['enabled']:
      raise ChromeProxyMetricException, (
          'Chrome proxy should be enabled. proxy info: %s' % info)
    self.VerifyBadProxies(
        [self.effective_proxies['proxy'],
         self.effective_proxies['fallback']])
    results.Add('bypass', 'count', bypass_count)
  def AddResultsForSafebrowsing(self, tab, results):
    """Checks all responses are safebrowsing blocks and records the result.

    NOTE(review): statements appear to be missing -- |count| is never
    initialized or incremented, |r| is never bound, and both raise statements
    below appear to have lost their enclosing `else:` branches (as written,
    the first raise follows the success increment and the second follows the
    success results.Add). Recover the original before relying on this code.
    """
    safebrowsing_count = 0
    for resp in self.IterResponses(tab):
      if resp.IsSafebrowsingResponse():
        safebrowsing_count += 1
        raise ChromeProxyMetricException, (
            '%s: Not a valid safe browsing response.\n'
            'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
    if count == safebrowsing_count:
      results.Add('safebrowsing', 'boolean', True)
      raise ChromeProxyMetricException, (
          'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
              count, safebrowsing_count))
210 def AddResultsForHTTPFallback(
211 self, tab, results, expected_proxies=None, expected_bad_proxies=None):
212 info = GetProxyInfoFromNetworkInternals(tab)
213 if not 'enabled' in info or not info['enabled']:
214 raise ChromeProxyMetricException, (
215 'Chrome proxy should be enabled. proxy info: %s' % info)
217 if not expected_proxies:
218 expected_proxies = [self.effective_proxies['fallback'],
219 self.effective_proxies['direct']]
220 if not expected_bad_proxies:
221 expected_bad_proxies = []
223 proxies = info['proxies']
224 if proxies != expected_proxies:
225 raise ChromeProxyMetricException, (
226 'Wrong effective proxies (%s). Expect: "%s"' % (
227 str(proxies), str(expected_proxies)))
230 if 'badProxies' in info and info['badProxies']:
231 bad_proxies = [p['proxy'] for p in info['badProxies']
232 if 'proxy' in p and p['proxy']]
233 if bad_proxies != expected_bad_proxies:
234 raise ChromeProxyMetricException, (
235 'Wrong bad proxies (%s). Expect: "%s"' % (
236 str(bad_proxies), str(expected_bad_proxies)))
237 results.Add('http_fallback', 'boolean', True)