# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# These are fake fetchers that are used for testing and the preview server.
# They return canned responses for URLs. appengine_wrappers.py uses the fake
# fetchers if the App Engine imports fail.
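#
# A minimal usage sketch (illustrative; the real wiring lives in
# appengine_wrappers and the test harness):
#
#   import fake_fetchers
#   fake_fetchers.ConfigureFakeFetchers()
#   # URL fetches made through appengine_wrappers now resolve against the
#   # local checkout and canned test data instead of the network.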

import base64
import json
import os
import re

import appengine_wrappers
from extensions_paths import SERVER2
from path_util import IsDirectory
from test_util import ReadFile, ChromiumPath
import url_constants


# TODO(kalman): Investigate why logging in this class implies that the server
# isn't properly caching some fetched files; often it fetches the same file
# 10+ times. This may be a test anomaly.


def _ReadTestData(*path, **read_args):
  return ReadFile(SERVER2, 'test_data', *path, **read_args)


class _FakeFetcher(object):
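  '''Base class for the fake fetchers. Subclasses simulate a remote server by
  reading from the local Chromium checkout; these helpers wrap the os calls
  they share.
  '''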
  def _ListDir(self, path):
    return os.listdir(path)

  def _IsDir(self, path):
    return os.path.isdir(path)

  def _Stat(self, path):
    return int(os.stat(path).st_mtime)


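# The Omaha fakes return canned Chrome version and branch data from
# test_data/branch_utility.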
class _FakeOmahaProxy(_FakeFetcher):
  def fetch(self, url):
    return _ReadTestData('branch_utility', 'first.json')


class _FakeOmahaHistory(_FakeFetcher):
  def fetch(self, url):
    return _ReadTestData('branch_utility', 'second.json')


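# Extracts the path under src/ from a trunk or branch SVN URL. For example
# (illustrative URL, not taken from real traffic):
#   .../chrome/trunk/src/chrome/common/extensions/api/tabs.json
# yields 'chrome/common/extensions/api/tabs.json'.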
_SVN_URL_TO_PATH_PATTERN = re.compile(
    r'^.*chrome/.*(trunk|branches/.*)/src/?([^?]*).*?')
def _ExtractPathFromSvnUrl(url):
  return _SVN_URL_TO_PATH_PATTERN.match(url).group(2)


class _FakeSubversionServer(_FakeFetcher):
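  '''Simulates an SVN server: directories get a minimal HTML listing with a
  canned revision number, files get their contents from the local checkout.
  '''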
  def fetch(self, url):
    path = _ExtractPathFromSvnUrl(url)
    if IsDirectory(path):
      html = ['<html>Revision 000000']
      try:
        for f in self._ListDir(ChromiumPath(path)):
          if f.startswith('.'):
            continue
          if self._IsDir(ChromiumPath(path, f)):
            html.append('<a>' + f + '/</a>')
          else:
            html.append('<a>' + f + '</a>')
        html.append('</html>')
        return '\n'.join(html)
      except OSError:
        return None
    try:
      return ReadFile(path)
    except IOError:
      return None


_GITILES_BASE_RE = re.escape('%s/%s' %
    (url_constants.GITILES_BASE, url_constants.GITILES_SRC_ROOT))
_GITILES_BRANCH_BASE_RE = re.escape('%s/%s/%s' %
    (url_constants.GITILES_BASE,
     url_constants.GITILES_SRC_ROOT,
     url_constants.GITILES_BRANCHES_PATH))
# NOTE: _GITILES_BRANCH_BASE_RE must be first, because _GITILES_BASE_RE is
# a more general pattern.
_GITILES_URL_RE = r'(%s|%s)/' % (_GITILES_BRANCH_BASE_RE, _GITILES_BASE_RE)
_GITILES_URL_TO_COMMIT_PATTERN = re.compile(r'%s[^/]+$' % _GITILES_URL_RE)
_GITILES_URL_TO_PATH_PATTERN = re.compile(r'%s.+?/(.*)' % _GITILES_URL_RE)
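# group(2) of _GITILES_URL_TO_PATH_PATTERN is the file path after the
# branch/commit segment. For example (illustrative URL), a fetch of
#   <GITILES_BASE>/<GITILES_SRC_ROOT>/master/docs/server2/app.yaml
# extracts the path 'docs/server2/app.yaml'.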
def _ExtractPathFromGitilesUrl(url):
  return _GITILES_URL_TO_PATH_PATTERN.match(url).group(2)


class _FakeGitilesServer(_FakeFetcher):
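  '''Simulates Gitiles: commit URLs get a fake JSON commit, directories get a
  JSON listing of tree/blob entries whose ids are faked from local mtimes,
  and files get their contents base64-encoded, mirroring Gitiles' text
  responses.
  '''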
  def fetch(self, url):
    if _GITILES_URL_TO_COMMIT_PATTERN.match(url) is not None:
      return json.dumps({'commit': '1' * 40})
    path = _ExtractPathFromGitilesUrl(url)
    chromium_path = ChromiumPath(path)
    if self._IsDir(chromium_path):
      jsn = {}
      dir_stat = self._Stat(chromium_path)
      jsn['id'] = dir_stat
      jsn['entries'] = []
      for f in self._ListDir(chromium_path):
        if f.startswith('.'):
          continue
        f_path = os.path.join(chromium_path, f)
        jsn['entries'].append({
          'id': self._Stat(f_path),
          'name': f,
          'type': 'tree' if self._IsDir(f_path) else 'blob'
        })
      return json.dumps(jsn)
    try:
      return base64.b64encode(ReadFile(path))
    except IOError:
      return None


class _FakeViewvcServer(_FakeFetcher):
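  '''Simulates ViewVC: directories get a minimal HTML table whose "revision"
  numbers are faked from local mtimes, files get their contents.
  '''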
  def fetch(self, url):
    path = ChromiumPath(_ExtractPathFromSvnUrl(url))
    if self._IsDir(path):
      html = ['<table><tbody><tr>...</tr>']
      # The version of the directory.
      dir_stat = self._Stat(path)
      html.append('<tr>')
      html.append('<td>Directory revision:</td>')
      html.append('<td><a>%s</a><a></a></td>' % dir_stat)
      html.append('</tr>')
      # The version of each file.
      for f in self._ListDir(path):
        if f.startswith('.'):
          continue
        html.append('<tr>')
        html.append('  <td><a>%s%s</a></td>' % (
            f, '/' if self._IsDir(os.path.join(path, f)) else ''))
        html.append('  <td><a><strong>%s</strong></a></td>' %
                    self._Stat(os.path.join(path, f)))
        html.append('<td></td><td></td><td></td>')
        html.append('</tr>')
      html.append('</tbody></table>')
      return '\n'.join(html)
    try:
      return ReadFile(path)
    except IOError:
      return None


class _FakeGithubStat(_FakeFetcher):
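  '''Simulates the GitHub commits API with a stub response whose sha never
  changes.
  '''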
  def fetch(self, url):
    return '{ "sha": 0 }'


class _FakeGithubZip(_FakeFetcher):
  def fetch(self, url):
    return _ReadTestData('github_file_system', 'apps_samples.zip', mode='rb')


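# The Rietveld fakes map api/... and tarball/... URLs onto canned files under
# test_data/rietveld_patcher.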
class _FakeRietveldAPI(_FakeFetcher):
  def __init__(self):
    self._base_pattern = re.compile(r'.*/(api/.*)')

  def fetch(self, url):
    return _ReadTestData(
        'rietveld_patcher', self._base_pattern.match(url).group(1), 'json')


class _FakeRietveldTarball(_FakeFetcher):
  def __init__(self):
    self._base_pattern = re.compile(r'.*/(tarball/\d+/\d+)')

  def fetch(self, url):
    return _ReadTestData(
        'rietveld_patcher', self._base_pattern.match(url).group(1) + '.tar.bz2',
        mode='rb')


def ConfigureFakeFetchers():
  '''Registers a fake fetcher for each URL pattern the docs server fetches, so
  that fetches resolve against the local checkout and canned test data rather
  than the network.
  '''
  appengine_wrappers.ConfigureFakeUrlFetch({
    url_constants.OMAHA_HISTORY: _FakeOmahaHistory(),
    url_constants.OMAHA_PROXY_URL: _FakeOmahaProxy(),
    '%s/.*' % url_constants.SVN_URL: _FakeSubversionServer(),
    '%s/.*' % url_constants.VIEWVC_URL: _FakeViewvcServer(),
    '%s/.*/commits/.*' % url_constants.GITHUB_REPOS: _FakeGithubStat(),
    '%s/.*/zipball' % url_constants.GITHUB_REPOS: _FakeGithubZip(),
    '%s/api/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldAPI(),
    '%s/tarball/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldTarball(),
    '%s/.*' % _GITILES_BASE_RE: _FakeGitilesServer(),
    '%s/.*' % _GITILES_BRANCH_BASE_RE: _FakeGitilesServer()
  })