2 # Copyright 2013 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 # Run build_server so that files needed by tests are copied to the local
7 # third_party directory.
19 from branch_utility import BranchUtility
20 from chroot_file_system import ChrootFileSystem
21 from extensions_paths import CONTENT_PROVIDERS, EXTENSIONS, PUBLIC_TEMPLATES
22 from fake_fetchers import ConfigureFakeFetchers
23 from special_paths import SITE_VERIFICATION_FILE
24 from handler import Handler
25 from link_error_detector import LinkErrorDetector, StringifyBrokenLinks
26 from local_file_system import LocalFileSystem
27 from local_renderer import LocalRenderer
28 from path_util import AssertIsValid
29 from servlet import Request
30 from third_party.json_schema_compiler import json_parse
31 from test_util import (
32 ChromiumPath, DisableLogging, EnableLogging, ReadFile, Server2Path)
# Arguments set up if __main__ specifies them.
# When non-None this is a list of file paths from the command line: the full
# cron/public-files test is skipped (testCronAndPublicFiles returns early) and
# only these files are rendered (testExplicitFiles).
_EXPLICIT_TEST_FILES = None
41 def _ToPosixPath(os_path):
42 return os_path.replace(os.sep, '/')
45 def _FilterHidden(paths):
46 '''Returns a list of the non-hidden paths from |paths|.
48 # Hidden files start with a '.' but paths like './foo' and '../foo' are not
50 return [path for path in paths if (not path.startswith('.')) or
51 path.startswith('./') or
52 path.startswith('../')]
def _GetPublicFiles():
  '''Gets all public file paths mapped to their contents.

  Returns a dict of request-path -> file contents for every non-hidden file
  beneath the directories that the Chromium content providers serve from.
  '''
  def walk(path, prefix=''):
    # Collects files under |path| keyed by the request path they would be
    # served from (|prefix| joined with the path relative to |path|).
    path = ChromiumPath(path)
    public_files = {}
    for root, dirs, files in os.walk(path, topdown=True):
      relative_root = root[len(path):].lstrip(os.path.sep)
      # Prune hidden directories in place so os.walk never descends into them
      # (only valid with topdown=True).
      dirs[:] = _FilterHidden(dirs)
      for filename in _FilterHidden(files):
        with open(os.path.join(root, filename), 'r') as f:
          request_path = posixpath.join(prefix, relative_root, filename)
          public_files[request_path] = f.read()
    return public_files

  # Public file locations are defined in content_providers.json, sort of. Epic
  # hack to pull them out; list all the files from the directories that
  # Chromium content providers ask for.
  public_files = {}
  content_providers = json_parse.Parse(ReadFile(CONTENT_PROVIDERS))
  # .values() rather than .itervalues() so this runs on Python 2 and 3.
  for content_provider in content_providers.values():
    if 'chromium' in content_provider:
      public_files.update(walk(content_provider['chromium']['dir'],
                               prefix=content_provider['serveFrom']))
  return public_files
class IntegrationTest(unittest.TestCase):
  def setUp(self):
    # Install the fake fetchers before each test so no real network access
    # happens (see fake_fetchers.ConfigureFakeFetchers). The excerpt showed
    # this call without an enclosing method header; unittest's setUp is the
    # evident home for it.
    ConfigureFakeFetchers()
86 @EnableLogging('info')
87 def testCronAndPublicFiles(self):
88 '''Runs cron then requests every public file. Cron needs to be run first
89 because the public file requests are offline.
91 if _EXPLICIT_TEST_FILES is not None:
94 print('Running cron...')
95 start_time = time.time()
97 response = Handler(Request.ForTest('/_cron')).Get()
99 self.assertEqual(200, response.status)
100 self.assertEqual('Success', response.content.ToString())
102 self.fail('No response for _cron')
104 print('Took %s seconds' % (time.time() - start_time))
106 # TODO(kalman): Re-enable this, but it takes about an hour at the moment,
107 # presumably because every page now has a lot of links on it from the
110 #print("Checking for broken links...")
111 #start_time = time.time()
112 #link_error_detector = LinkErrorDetector(
113 # # TODO(kalman): Use of ChrootFileSystem here indicates a hack. Fix.
114 # ChrootFileSystem(LocalFileSystem.Create(), EXTENSIONS),
115 # lambda path: Handler(Request.ForTest(path)).Get(),
116 # 'templates/public',
117 # ('extensions/index.html', 'apps/about_apps.html'))
119 #broken_links = link_error_detector.GetBrokenLinks()
121 # print('Found %d broken links.' % (
122 # len(broken_links)))
124 # print(StringifyBrokenLinks(broken_links))
126 #broken_links_set = set(broken_links)
128 #known_broken_links_path = os.path.join(
129 # Server2Path('known_broken_links.json'))
131 # with open(known_broken_links_path, 'r') as f:
132 # # The JSON file converts tuples and sets into lists, and for this
133 # # set union/difference logic they need to be converted back.
134 # known_broken_links = set(tuple(item) for item in json.load(f))
136 # known_broken_links = set()
138 #newly_broken_links = broken_links_set - known_broken_links
139 #fixed_links = known_broken_links - broken_links_set
141 #print('Took %s seconds.' % (time.time() - start_time))
143 #print('Searching for orphaned pages...')
144 #start_time = time.time()
145 #orphaned_pages = link_error_detector.GetOrphanedPages()
147 # # TODO(jshumway): Test should fail when orphaned pages are detected.
148 # print('Found %d orphaned pages:' % len(orphaned_pages))
149 # for page in orphaned_pages:
151 #print('Took %s seconds.' % (time.time() - start_time))
153 public_files = _GetPublicFiles()
155 print('Rendering %s public files...' % len(public_files.keys()))
156 start_time = time.time()
158 for path, content in public_files.iteritems():
160 if path.endswith('redirects.json'):
163 # The non-example html and md files are served without their file
165 path_without_ext, ext = posixpath.splitext(path)
166 if (ext in ('.html', '.md') and
167 '/examples/' not in path and
168 path != SITE_VERIFICATION_FILE):
169 path = path_without_ext
171 def check_result(response):
172 self.assertEqual(200, response.status,
173 'Got %s when rendering %s' % (response.status, path))
175 # This is reaaaaally rough since usually these will be tiny templates
176 # that render large files. At least it'll catch zero-length responses.
177 self.assertTrue(len(response.content) >= len(content),
178 'Rendered content length was %s vs template content length %s '
179 'when rendering %s' % (len(response.content), len(content), path))
181 check_result(Handler(Request.ForTest(path)).Get())
183 if path.startswith(('apps/', 'extensions/')):
184 # Make sure that adding the .html will temporarily redirect to
185 # the path without the .html for APIs and articles.
186 if '/examples/' not in path:
187 redirect_response = Handler(Request.ForTest(path + '.html')).Get()
189 ('/' + path, False), redirect_response.GetRedirect(),
190 '%s.html did not (temporarily) redirect to %s (status %s)' %
191 (path, path, redirect_response.status))
193 # Make sure including a channel will permanently redirect to the same
194 # path without a channel.
195 for channel in BranchUtility.GetAllChannelNames():
196 redirect_response = Handler(
197 Request.ForTest(posixpath.join(channel, path))).Get()
200 redirect_response.GetRedirect(),
201 '%s/%s did not (permanently) redirect to %s (status %s)' %
202 (channel, path, path, redirect_response.status))
204 # Samples are internationalized, test some locales.
205 if path.endswith('/samples'):
206 for lang in ('en-US', 'es', 'ar'):
207 check_result(Handler(Request.ForTest(
209 headers={'Accept-Language': '%s;q=0.8' % lang})).Get())
211 print('Took %s seconds' % (time.time() - start_time))
214 # print('Rebasing broken links with %s newly broken and %s fixed links.' %
215 # (len(newly_broken_links), len(fixed_links)))
216 # with open(known_broken_links_path, 'w') as f:
217 # json.dump(broken_links, f,
218 # indent=2, separators=(',', ': '), sort_keys=True)
220 # if fixed_links or newly_broken_links:
221 # print('**********************************************\n'
222 # 'CHANGE DETECTED IN BROKEN LINKS WITHOUT REBASE\n'
223 # '**********************************************')
224 # print('Found %s broken links, and some have changed. '
225 # 'If this is acceptable or expected then run %s with the --rebase '
226 # 'option.' % (len(broken_links), os.path.split(__file__)[-1]))
228 # print('%s existing broken links' % len(broken_links))
230 # print('%s broken links have been fixed:' % len(fixed_links))
231 # print(StringifyBrokenLinks(fixed_links))
232 # if newly_broken_links:
233 # print('There are %s new broken links:' % len(newly_broken_links))
234 # print(StringifyBrokenLinks(newly_broken_links))
235 # self.fail('See logging for details.')
237 # TODO(kalman): Move this test elsewhere, it's not an integration test.
238 # Perhaps like "presubmit_tests" or something.
239 def testExplicitFiles(self):
240 '''Tests just the files in _EXPLICIT_TEST_FILES.
242 if _EXPLICIT_TEST_FILES is None:
244 for filename in _EXPLICIT_TEST_FILES:
245 print('Rendering %s...' % filename)
246 start_time = time.time()
248 response = LocalRenderer.Render(_ToPosixPath(filename))
249 self.assertEqual(200, response.status)
250 self.assertTrue(response.content != '')
252 print('Took %s seconds' % (time.time() - start_time))
254 # TODO(jshumway): Check page for broken links (currently prohibited by the
255 # time it takes to render the pages).
257 @DisableLogging('warning')
258 def testFileNotFound(self):
259 response = LocalRenderer.Render('/extensions/notfound')
260 self.assertEqual(404, response.status)
262 def testSiteVerificationFile(self):
263 response = LocalRenderer.Render('/' + SITE_VERIFICATION_FILE)
264 self.assertEqual(200, response.status)
# Entry point: parse the test runner's own flags, stash them in module globals
# for the test methods to read, then strip sys.argv so unittest does not see
# them. (The handoff to unittest is past the end of this excerpt.)
if __name__ == '__main__':
  parser = optparse.OptionParser()
  parser.add_option('-a', '--all', action='store_true', default=False,
                    help='Render all pages, not just the one specified')
  parser.add_option('-r', '--rebase', action='store_true', default=False,
                    help='Rewrites the known_broken_links.json file with '
                         'the current set of broken links')
  parser.add_option('-v', '--verbose', action='store_true', default=False,
                    help='Show verbose output like currently broken links')
  (opts, args) = parser.parse_args()
  # NOTE(review): upstream appears to assign the explicit file list only when
  # --all is NOT given; that guard is not visible in this excerpt — confirm.
  _EXPLICIT_TEST_FILES = args
  _REBASE = opts.rebase
  _VERBOSE = opts.verbose
  # Kill sys.argv because we have our own flags.
  sys.argv = [sys.argv[0]]