1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 # These are fake fetchers that are used for testing and the preview server.
6 # They return canned responses for URLs. appengine_wrappers.py uses the fake
7 # fetchers if the App Engine imports fail.
14 import appengine_wrappers
15 from extensions_paths import SERVER2
16 from path_util import IsDirectory
17 from test_util import ReadFile, ChromiumPath
21 # TODO(kalman): Investigate why logging in this class implies that the server
22 # isn't properly caching some fetched files; often it fetches the same file
23 # 10+ times. This may be a test anomaly.
def _ReadTestData(*path, **read_args):
  '''Read a canned-response fixture from server2's test_data directory.

  |path| components are joined beneath test_data; |read_args| is forwarded
  unchanged to ReadFile (e.g. mode='rb' for binary fixtures).
  '''
  full_path = (SERVER2, 'test_data') + path
  return ReadFile(*full_path, **read_args)
30 class _FakeFetcher(object):
31 def _ListDir(self, path):
32 return os.listdir(path)
34 def _IsDir(self, path):
35 return os.path.isdir(path)
37 def _Stat(self, path):
38 return int(os.stat(path).st_mtime)
class _FakeOmahaProxy(_FakeFetcher):
  # Serves the canned omahaproxy branch/version JSON fixture.
  # NOTE(review): a method header (presumably 'def fetch(self, url):')
  # appears to be elided from this view of the file.
    return _ReadTestData('branch_utility', 'first.json')
class _FakeOmahaHistory(_FakeFetcher):
  # Serves the canned omaha history JSON fixture.
  # NOTE(review): a method header (presumably 'def fetch(self, url):')
  # appears to be elided from this view of the file.
    return _ReadTestData('branch_utility', 'second.json')
51 _SVN_URL_TO_PATH_PATTERN = re.compile(
52 r'^.*chrome/.*(trunk|branches/.*)/src/?([^?]*).*?')
53 def _ExtractPathFromSvnUrl(url):
54 return _SVN_URL_TO_PATH_PATTERN.match(url).group(2)
class _FakeSubversionServer(_FakeFetcher):
  # Fakes an SVN server by rendering a minimal HTML directory listing for
  # the path extracted from the request URL; directories get a '/' suffix.
  # NOTE(review): several lines appear to be elided from this view: the
  # fetch() method header, a directory check around the listing, an 'else:'
  # between the two appends, and the non-directory branch of fetch().
    path = _ExtractPathFromSvnUrl(url)
    html = ['<html>Revision 000000']
    for f in self._ListDir(ChromiumPath(path)):
      if self._IsDir(ChromiumPath(path, f)):
        html.append('<a>' + f + '/</a>')
      html.append('<a>' + f + '</a>')
    html.append('</html>')
    return '\n'.join(html)
# Escaped literal URL prefixes for gitiles: the src root itself, and the
# branches subtree beneath it.
_GITILES_BASE_RE = re.escape(
    '%s/%s' % (url_constants.GITILES_BASE, url_constants.GITILES_SRC_ROOT))
_GITILES_BRANCH_BASE_RE = re.escape(
    '%s/%s/%s' % (url_constants.GITILES_BASE,
                  url_constants.GITILES_SRC_ROOT,
                  url_constants.GITILES_BRANCHES_PATH))
# NOTE: _GITILES_BRANCH_BASE_RE must be first, because _GITILES_BASE_RE is
# a more general pattern.
_GITILES_URL_RE = r'(%s|%s)/' % (_GITILES_BRANCH_BASE_RE, _GITILES_BASE_RE)
_GITILES_URL_TO_COMMIT_PATTERN = re.compile(r'%s[^/]+$' % _GITILES_URL_RE)
_GITILES_URL_TO_PATH_PATTERN = re.compile(r'%s.+?/(.*)' % _GITILES_URL_RE)


def _ExtractPathFromGitilesUrl(url):
  '''Return the repo-relative path (capture group 2) of a gitiles |url|.'''
  match = _GITILES_URL_TO_PATH_PATTERN.match(url)
  return match.group(2)
class _FakeGitilesServer(_FakeFetcher):
  # Fakes gitiles: commit-ref URLs get a canned JSON commit stanza,
  # directory URLs get a JSON listing of entries keyed on local mtimes, and
  # file URLs get base64-encoded file contents (as real gitiles serves).
  # NOTE(review): several lines appear to be elided from this view: the
  # fetch() method header, the |jsn| dict initialization, a 'continue' for
  # dot-entries, the entry's 'name' field, and closing braces/brackets.
    if _GITILES_URL_TO_COMMIT_PATTERN.match(url) is not None:
      # Commit query: always answer with a fixed fake 40-char hash.
      return json.dumps({'commit': '1' * 40})
    path = _ExtractPathFromGitilesUrl(url)
    chromium_path = ChromiumPath(path)
    if self._IsDir(chromium_path):
      dir_stat = self._Stat(chromium_path)
      for f in self._ListDir(chromium_path):
        if f.startswith('.'):
        f_path = os.path.join(chromium_path, f)
        jsn['entries'].append({
          'id': self._Stat(f_path),
          'type': 'tree' if self._IsDir(f_path) else 'blob'
      return json.dumps(jsn)
    return base64.b64encode(ReadFile(path))
class _FakeViewvcServer(_FakeFetcher):
  # Fakes a ViewVC server: directory URLs render an HTML table whose
  # "revision" numbers are local file mtimes; file URLs return raw contents.
  def fetch(self, url):
    path = ChromiumPath(_ExtractPathFromSvnUrl(url))
    if self._IsDir(path):
      html = ['<table><tbody><tr>...</tr>']
      # The version of the directory.
      dir_stat = self._Stat(path)
      html.append('<td>Directory revision:</td>')
      html.append('<td><a>%s</a><a></a></td>' % dir_stat)
      # The version of each file.
      for f in self._ListDir(path):
        if f.startswith('.'):
          # NOTE(review): the body of this 'if' (presumably 'continue' to
          # skip dot-files) appears to be elided from this view.
        html.append(' <td><a>%s%s</a></td>' % (
            f, '/' if self._IsDir(os.path.join(path, f)) else ''))
        html.append(' <td><a><strong>%s</strong></a></td>' %
            self._Stat(os.path.join(path, f)))
        html.append('<td></td><td></td><td></td>')
      html.append('</tbody></table>')
      return '\n'.join(html)
    return ReadFile(path)
class _FakeGithubStat(_FakeFetcher):
  '''Fakes the GitHub commits API with a fixed canned SHA stanza.'''

  def fetch(self, url):
    # |url| is ignored; every request yields the same stat payload.
    return '{ "sha": 0 }'
class _FakeGithubZip(_FakeFetcher):
  '''Fakes the GitHub zipball endpoint with a canned samples archive.'''

  def fetch(self, url):
    # mode='rb': the fixture is a binary zip file, not text.
    return _ReadTestData('github_file_system', 'apps_samples.zip', mode='rb')
class _FakeRietveldAPI(_FakeFetcher):
  # Fakes the Rietveld issue API by serving canned JSON fixtures keyed on
  # the 'api/...' suffix of the request URL.
  # NOTE(review): the '__init__(self):' method header appears to be elided
  # from this view of the file.
    self._base_pattern = re.compile(r'.*/(api/.*)')

  def fetch(self, url):
    return _ReadTestData(
        'rietveld_patcher', self._base_pattern.match(url).group(1), 'json')
class _FakeRietveldTarball(_FakeFetcher):
  # Fakes the Rietveld tarball endpoint with canned .tar.bz2 fixtures keyed
  # on the 'tarball/<issue>/<patchset>' suffix of the request URL.
  # NOTE(review): the '__init__(self):' method header and the tail of
  # fetch() (likely a mode='rb' argument and closing paren) appear to be
  # elided from this view of the file.
    self._base_pattern = re.compile(r'.*/(tarball/\d+/\d+)')

  def fetch(self, url):
    return _ReadTestData(
        'rietveld_patcher', self._base_pattern.match(url).group(1) + '.tar.bz2',
# Installs the fake fetchers above as the URL-pattern handlers used by the
# test/preview server in place of real network fetches.
# NOTE(review): the docstring's closing quotes and the call's closing '})'
# appear to be elided from this view of the file; no comments can be placed
# after the unterminated docstring without altering the string literal.
def ConfigureFakeFetchers():
  '''Configure the fake fetcher paths relative to the docs directory.
  appengine_wrappers.ConfigureFakeUrlFetch({
    url_constants.OMAHA_HISTORY: _FakeOmahaHistory(),
    url_constants.OMAHA_PROXY_URL: _FakeOmahaProxy(),
    '%s/.*' % url_constants.SVN_URL: _FakeSubversionServer(),
    '%s/.*' % url_constants.VIEWVC_URL: _FakeViewvcServer(),
    '%s/.*/commits/.*' % url_constants.GITHUB_REPOS: _FakeGithubStat(),
    '%s/.*/zipball' % url_constants.GITHUB_REPOS: _FakeGithubZip(),
    '%s/api/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldAPI(),
    '%s/tarball/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldTarball(),
    '%s/.*' % _GITILES_BASE_RE: _FakeGitilesServer(),
    '%s/.*' % _GITILES_BRANCH_BASE_RE: _FakeGitilesServer()