2 # Copyright 2013 The Swarming Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 that
4 # can be found in the LICENSE file.
6 # pylint: disable=W0212,W0223,W0231,W0613
21 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
22 ROOT_DIR = os.path.dirname(TEST_DIR)
23 sys.path.insert(0, ROOT_DIR)
24 sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
26 from depot_tools import auto_stub
29 from utils import threading_utils
35 class TestCase(auto_stub.TestCase):
36 """Mocks out url_open() calls and sys.stdout/stderr."""
38 super(TestCase, self).setUp()
39 self.mock(isolateserver.net, 'url_open', self._url_open)
40 self.mock(isolateserver.net, 'sleep_before_retry', lambda *_: None)
41 self._lock = threading.Lock()
43 self.mock(sys, 'stdout', StringIO.StringIO())
44 self.mock(sys, 'stderr', StringIO.StringIO())
48 self.assertEqual([], self._requests)
49 self.checkOutput('', '')
51 super(TestCase, self).tearDown()
53 def checkOutput(self, expected_out, expected_err):
55 self.assertEqual(expected_err, sys.stderr.getvalue())
56 self.assertEqual(expected_out, sys.stdout.getvalue())
58 # Prevent double-fail.
59 self.mock(sys, 'stdout', StringIO.StringIO())
60 self.mock(sys, 'stderr', StringIO.StringIO())
62 def _url_open(self, url, **kwargs):
63 logging.warn('url_open(%s, %s)', url[:500], str(kwargs)[:500])
65 if not self._requests:
67 # Ignore 'stream' argument, it's not important for these tests.
68 kwargs.pop('stream', None)
69 for i, n in enumerate(self._requests):
71 _, expected_kwargs, result, headers = self._requests.pop(i)
72 self.assertEqual(expected_kwargs, kwargs)
73 if result is not None:
74 return isolateserver.net.HttpResponse.get_fake_response(
77 self.fail('Unknown request %s' % url)
80 class TestZipCompression(TestCase):
81 """Test zip_compress and zip_decompress generators."""
def test_compress_and_decompress(self):
  """Round-trip check: decompress(compress(data)) must reproduce data."""
  chunks = [str(value) for value in xrange(1000)]
  round_tripped = ''.join(
      isolateserver.zip_decompress(isolateserver.zip_compress(chunks)))
  self.assertEqual(''.join(chunks), round_tripped)
90 def test_zip_bomb(self):
91 """Verify zip_decompress always returns small chunks."""
92 original = '\x00' * 100000
93 bomb = ''.join(isolateserver.zip_compress(original))
96 for chunk in isolateserver.zip_decompress([bomb], chunk_size):
97 self.assertLessEqual(len(chunk), chunk_size)
98 decompressed.append(chunk)
99 self.assertEqual(original, ''.join(decompressed))
def test_bad_zip_file(self):
  """Feeding non-zlib garbage to zip_decompress must raise IOError."""
  garbage = ['Im not a zip file']
  with self.assertRaises(IOError):
    ''.join(isolateserver.zip_decompress(garbage))
107 class FakeItem(isolateserver.Item):
108 def __init__(self, data, is_isolated=False):
109 super(FakeItem, self).__init__(
110 ALGO(data).hexdigest(), len(data), is_isolated)
113 def content(self, _chunk_size):
118 return zlib.compress(self.data, self.compression_level)
121 class StorageTest(TestCase):
122 """Tests for Storage methods."""
125 def mock_push(side_effect=None):
126 """Returns StorageApi subclass with mocked 'push' method."""
127 class MockedStorageApi(isolateserver.StorageApi):
130 def push(self, item, content):
131 self.pushed.append((item, ''.join(content)))
134 return MockedStorageApi()
def assertEqualIgnoringOrder(self, a, b):
  """Asserts that containers |a| and |b| contain the same items, any order.

  Compares sorted copies so duplicate elements are taken into account. The
  previous len()+set() check wrongly accepted containers that differ only in
  duplicate multiplicity, e.g. ['x', 'x', 'y'] vs ['x', 'y', 'y']: both have
  length 3 and equal sets. Items compared in this file are hash digests
  (strings), so they are always sortable.
  """
  # Kept for a clearer failure message when only the lengths differ.
  self.assertEqual(len(a), len(b))
  self.assertEqual(sorted(a), sorted(b))
141 def test_batch_items_for_check(self):
143 isolateserver.Item('foo', 12),
144 isolateserver.Item('blow', 0),
145 isolateserver.Item('bizz', 1222),
146 isolateserver.Item('buzz', 1223),
149 [items[3], items[2], items[0], items[1]],
151 batches = list(isolateserver.Storage.batch_items_for_check(items))
152 self.assertEqual(batches, expected)
154 def test_get_missing_items(self):
156 isolateserver.Item('foo', 12),
157 isolateserver.Item('blow', 0),
158 isolateserver.Item('bizz', 1222),
159 isolateserver.Item('buzz', 1223),
162 [items[2], items[3]],
165 class MockedStorageApi(isolateserver.StorageApi):
166 def contains(self, _items):
168 storage = isolateserver.Storage(MockedStorageApi(), use_zip=False)
170 # 'get_missing_items' is a generator, materialize its result in a list.
171 result = list(storage.get_missing_items(items))
172 self.assertEqual(missing, result)
174 def test_async_push(self):
175 for use_zip in (False, True):
176 item = FakeItem('1234567')
177 storage_api = self.mock_push()
178 storage = isolateserver.Storage(storage_api, use_zip)
179 channel = threading_utils.TaskChannel()
180 storage.async_push(channel, 0, item)
181 # Wait for push to finish.
182 pushed_item = channel.pull()
183 self.assertEqual(item, pushed_item)
184 # StorageApi.push was called with correct arguments.
186 [(item, item.zipped if use_zip else item.data)], storage_api.pushed)
188 def test_async_push_generator_errors(self):
189 class FakeException(Exception):
192 def faulty_generator(_chunk_size):
194 raise FakeException('fake exception')
196 for use_zip in (False, True):
198 self.mock(item, 'content', faulty_generator)
199 storage_api = self.mock_push()
200 storage = isolateserver.Storage(storage_api, use_zip)
201 channel = threading_utils.TaskChannel()
202 storage.async_push(channel, 0, item)
203 with self.assertRaises(FakeException):
205 # StorageApi's push should never complete when data can not be read.
206 self.assertEqual(0, len(storage_api.pushed))
208 def test_async_push_upload_errors(self):
211 def _generator(_chunk_size):
214 def push_side_effect():
215 raise IOError('Nope')
217 # TODO(vadimsh): Retrying push when fetching data from a generator is
218 # broken now (it reuses same generator instance when retrying).
221 lambda _chunk_size: [chunk],
224 for use_zip in (False, True):
225 for source in content_sources:
226 item = FakeItem(chunk)
227 self.mock(item, 'content', source)
228 storage_api = self.mock_push(push_side_effect)
229 storage = isolateserver.Storage(storage_api, use_zip)
230 channel = threading_utils.TaskChannel()
231 storage.async_push(channel, 0, item)
232 with self.assertRaises(IOError):
234 # First initial attempt + all retries.
235 attempts = 1 + isolateserver.WorkerPool.RETRIES
236 # Single push attempt parameters.
237 expected_push = (item, item.zipped if use_zip else item.data)
238 # Ensure all pushes are attempted.
240 [expected_push] * attempts, storage_api.pushed)
242 def test_upload_tree(self):
262 files_data = dict((k, 'x' * files[k]['s']) for k in files)
263 all_hashes = set(f['h'] for f in files.itervalues())
264 missing_hashes = set(['hash_a', 'hash_b'])
266 # Files read by mocked_file_read.
273 def mocked_file_read(filepath, chunk_size=0, offset=0):
274 self.assertEqual(root, os.path.dirname(filepath))
275 filename = os.path.basename(filepath)
276 self.assertIn(filename, files_data)
277 read_calls.append(filename)
278 return files_data[filename]
279 self.mock(isolateserver, 'file_read', mocked_file_read)
281 class MockedStorageApi(isolateserver.StorageApi):
282 def contains(self, items):
283 contains_calls.append(items)
284 return [i for i in items
285 if os.path.basename(i.digest) in missing_hashes]
287 def push(self, item, content):
288 push_calls.append((item, ''.join(content)))
290 storage_api = MockedStorageApi()
291 storage = isolateserver.Storage(storage_api, use_zip=False)
292 storage.upload_tree(root, files)
294 # Was reading only missing files.
295 self.assertEqualIgnoringOrder(
297 [files[path]['h'] for path in read_calls])
298 # 'contains' checked for existence of all files.
299 self.assertEqualIgnoringOrder(
301 [i.digest for i in sum(contains_calls, [])])
302 # Pushed only missing files.
303 self.assertEqualIgnoringOrder(
305 [call[0].digest for call in push_calls])
306 # Pushing with correct data, size and push urls.
307 for pushed_item, pushed_content in push_calls:
309 name for name, metadata in files.iteritems()
310 if metadata['h'] == pushed_item.digest
312 # If there are multiple files that map to same hash, upload_tree chooses
314 filename = filenames[0]
315 self.assertEqual(os.path.join(root, filename), pushed_item.path)
316 self.assertEqual(files_data[filename], pushed_content)
319 class IsolateServerStorageApiTest(TestCase):
321 def mock_handshake_request(server, token='fake token', error=None):
322 handshake_request = {
323 'client_app_version': isolateserver.__version__,
325 'protocol_version': isolateserver.ISOLATE_PROTOCOL_VERSION,
328 handshake_response = {
329 'access_token': token,
331 'protocol_version': isolateserver.ISOLATE_PROTOCOL_VERSION,
332 'server_app_version': 'mocked server T1000',
335 server + '/content-gs/handshake',
337 'content_type': 'application/json',
339 'data': json.dumps(handshake_request, separators=(',', ':')),
341 json.dumps(handshake_response),
346 def mock_fetch_request(server, namespace, item, data,
347 request_headers=None, response_headers=None):
349 server + '/content-gs/retrieve/%s/%s' % (namespace, item),
353 'headers': request_headers,
360 def mock_contains_request(server, namespace, token, request, response):
361 url = server + '/content-gs/pre-upload/%s?token=%s' % (
362 namespace, urllib.quote(token))
366 'data': json.dumps(request, separators=(',', ':')),
367 'content_type': 'application/json',
370 json.dumps(response),
374 def test_server_capabilities_success(self):
375 server = 'http://example.com'
376 namespace = 'default'
377 access_token = 'fake token'
379 self.mock_handshake_request(server, access_token),
381 storage = isolateserver.IsolateServer(server, namespace)
382 caps = storage._server_capabilities
383 self.assertEqual(access_token, caps['access_token'])
385 def test_server_capabilities_network_failure(self):
386 self.mock(isolateserver.net, 'url_open', lambda *_args, **_kwargs: None)
387 with self.assertRaises(isolateserver.MappingError):
388 storage = isolateserver.IsolateServer('http://example.com', 'default')
389 _ = storage._server_capabilities
391 def test_server_capabilities_format_failure(self):
392 server = 'http://example.com'
393 namespace = 'default'
394 handshake_req = self.mock_handshake_request(server)
396 (handshake_req[0], handshake_req[1], 'Im a bad response', None),
398 storage = isolateserver.IsolateServer(server, namespace)
399 with self.assertRaises(isolateserver.MappingError):
400 _ = storage._server_capabilities
402 def test_server_capabilities_respects_error(self):
403 server = 'http://example.com'
404 namespace = 'default'
405 error = 'Im sorry, Dave. Im afraid I cant do that.'
407 self.mock_handshake_request(server, error=error)
409 storage = isolateserver.IsolateServer(server, namespace)
410 with self.assertRaises(isolateserver.MappingError) as context:
411 _ = storage._server_capabilities
412 # Server error message should be reported to user.
413 self.assertIn(error, str(context.exception))
415 def test_fetch_success(self):
416 server = 'http://example.com'
417 namespace = 'default'
418 data = ''.join(str(x) for x in xrange(1000))
419 item = ALGO(data).hexdigest()
421 self.mock_fetch_request(server, namespace, item, data),
423 storage = isolateserver.IsolateServer(server, namespace)
424 fetched = ''.join(storage.fetch(item))
425 self.assertEqual(data, fetched)
427 def test_fetch_failure(self):
428 server = 'http://example.com'
429 namespace = 'default'
430 item = ALGO('something').hexdigest()
432 self.mock_fetch_request(server, namespace, item, None),
434 storage = isolateserver.IsolateServer(server, namespace)
435 with self.assertRaises(IOError):
436 _ = ''.join(storage.fetch(item))
438 def test_fetch_offset_success(self):
439 server = 'http://example.com'
440 namespace = 'default'
441 data = ''.join(str(x) for x in xrange(1000))
442 item = ALGO(data).hexdigest()
446 good_content_range_headers = [
447 'bytes %d-%d/%d' % (offset, size - 1, size),
448 'bytes %d-%d/*' % (offset, size - 1),
451 for content_range_header in good_content_range_headers:
453 self.mock_fetch_request(
454 server, namespace, item, data[offset:],
455 request_headers={'Range': 'bytes=%d-' % offset},
456 response_headers={'Content-Range': content_range_header}),
458 storage = isolateserver.IsolateServer(server, namespace)
459 fetched = ''.join(storage.fetch(item, offset))
460 self.assertEqual(data[offset:], fetched)
462 def test_fetch_offset_bad_header(self):
463 server = 'http://example.com'
464 namespace = 'default'
465 data = ''.join(str(x) for x in xrange(1000))
466 item = ALGO(data).hexdigest()
470 bad_content_range_headers = [
475 'not bytes %d-%d/%d' % (offset, size - 1, size),
476 'bytes %d-%d' % (offset, size - 1),
478 'bytes %d-%d/%d' % (offset - 1, size - 1, size),
480 'bytes %d-%d/%d' % (offset, offset + 10, size),
483 for content_range_header in bad_content_range_headers:
485 self.mock_fetch_request(
486 server, namespace, item, data[offset:],
487 request_headers={'Range': 'bytes=%d-' % offset},
488 response_headers={'Content-Range': content_range_header}),
490 storage = isolateserver.IsolateServer(server, namespace)
491 with self.assertRaises(IOError):
492 _ = ''.join(storage.fetch(item, offset))
495 def test_push_success(self):
496 server = 'http://example.com'
497 namespace = 'default'
499 data = ''.join(str(x) for x in xrange(1000))
500 item = FakeItem(data)
501 push_urls = (server + '/push_here', server + '/call_this')
502 contains_request = [{'h': item.digest, 's': item.size, 'i': 0}]
503 contains_response = [push_urls]
505 self.mock_handshake_request(server, token),
506 self.mock_contains_request(
507 server, namespace, token, contains_request, contains_response),
512 'content_type': 'application/octet-stream',
522 'content_type': 'application/json',
529 storage = isolateserver.IsolateServer(server, namespace)
530 missing = storage.contains([item])
531 self.assertEqual([item], missing)
532 storage.push(item, [data])
533 self.assertTrue(item.push_state.uploaded)
534 self.assertTrue(item.push_state.finalized)
536 def test_push_failure_upload(self):
537 server = 'http://example.com'
538 namespace = 'default'
540 data = ''.join(str(x) for x in xrange(1000))
541 item = FakeItem(data)
542 push_urls = (server + '/push_here', server + '/call_this')
543 contains_request = [{'h': item.digest, 's': item.size, 'i': 0}]
544 contains_response = [push_urls]
546 self.mock_handshake_request(server, token),
547 self.mock_contains_request(
548 server, namespace, token, contains_request, contains_response),
553 'content_type': 'application/octet-stream',
560 storage = isolateserver.IsolateServer(server, namespace)
561 missing = storage.contains([item])
562 self.assertEqual([item], missing)
563 with self.assertRaises(IOError):
564 storage.push(item, [data])
565 self.assertFalse(item.push_state.uploaded)
566 self.assertFalse(item.push_state.finalized)
568 def test_push_failure_finalize(self):
569 server = 'http://example.com'
570 namespace = 'default'
572 data = ''.join(str(x) for x in xrange(1000))
573 item = FakeItem(data)
574 push_urls = (server + '/push_here', server + '/call_this')
575 contains_request = [{'h': item.digest, 's': item.size, 'i': 0}]
576 contains_response = [push_urls]
578 self.mock_handshake_request(server, token),
579 self.mock_contains_request(
580 server, namespace, token, contains_request, contains_response),
585 'content_type': 'application/octet-stream',
595 'content_type': 'application/json',
602 storage = isolateserver.IsolateServer(server, namespace)
603 missing = storage.contains([item])
604 self.assertEqual([item], missing)
605 with self.assertRaises(IOError):
606 storage.push(item, [data])
607 self.assertTrue(item.push_state.uploaded)
608 self.assertFalse(item.push_state.finalized)
610 def test_contains_success(self):
611 server = 'http://example.com'
612 namespace = 'default'
615 FakeItem('1', is_isolated=True),
620 {'h': files[0].digest, 's': files[0].size, 'i': 1},
621 {'h': files[1].digest, 's': files[1].size, 'i': 0},
622 {'h': files[2].digest, 's': files[2].size, 'i': 0},
626 ['http://example/upload_here_1', None],
627 ['http://example/upload_here_2', 'http://example/call_this'],
634 self.mock_handshake_request(server, token),
635 self.mock_contains_request(server, namespace, token, request, response),
637 storage = isolateserver.IsolateServer(server, namespace)
638 result = storage.contains(files)
639 self.assertEqual(missing, result)
641 [x for x in response if x],
642 [[i.push_state.upload_url, i.push_state.finalize_url] for i in missing])
644 def test_contains_network_failure(self):
645 server = 'http://example.com'
646 namespace = 'default'
648 req = self.mock_contains_request(server, namespace, token, [], [])
650 self.mock_handshake_request(server, token),
651 (req[0], req[1], None, None),
653 storage = isolateserver.IsolateServer(server, namespace)
654 with self.assertRaises(isolateserver.MappingError):
657 def test_contains_format_failure(self):
658 server = 'http://example.com'
659 namespace = 'default'
662 self.mock_handshake_request(server, token),
663 self.mock_contains_request(server, namespace, token, [], [1, 2, 3])
665 storage = isolateserver.IsolateServer(server, namespace)
666 with self.assertRaises(isolateserver.MappingError):
670 class IsolateServerDownloadTest(TestCase):
676 shutil.rmtree(self.tempdir)
678 super(IsolateServerDownloadTest, self).tearDown()
680 def test_download_two_files(self):
681 # Test downloading two files.
683 def out(key, generator):
684 actual[key] = ''.join(generator)
685 self.mock(isolateserver, 'file_write', out)
686 server = 'http://example.com'
689 server + '/content-gs/retrieve/default-gzip/sha-1',
690 {'read_timeout': 60, 'retry_404': True, 'headers': None},
691 zlib.compress('Coucou'),
695 server + '/content-gs/retrieve/default-gzip/sha-2',
696 {'read_timeout': 60, 'retry_404': True, 'headers': None},
697 zlib.compress('Bye Bye'),
703 '--isolate-server', server,
704 '--target', ROOT_DIR,
705 '--file', 'sha-1', 'path/to/a',
706 '--file', 'sha-2', 'path/to/b',
708 self.assertEqual(0, isolateserver.main(cmd))
710 os.path.join(ROOT_DIR, 'path/to/a'): 'Coucou',
711 os.path.join(ROOT_DIR, 'path/to/b'): 'Bye Bye',
713 self.assertEqual(expected, actual)
715 def test_download_isolated(self):
716 # Test downloading an isolated tree.
717 self.tempdir = tempfile.mkdtemp(prefix='isolateserver')
719 def file_write_mock(key, generator):
720 actual[key] = ''.join(generator)
721 self.mock(isolateserver, 'file_write', file_write_mock)
722 self.mock(os, 'makedirs', lambda _: None)
723 server = 'http://example.com'
726 os.path.join('a', 'foo'): 'Content',
730 'command': ['Absurb', 'command'],
733 (k, {'h': ALGO(v).hexdigest(), 's': len(v)})
734 for k, v in files.iteritems()),
736 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':'))
737 isolated_hash = ALGO(isolated_data).hexdigest()
738 requests = [(v['h'], files[k]) for k, v in isolated['files'].iteritems()]
739 requests.append((isolated_hash, isolated_data))
742 server + '/content-gs/retrieve/default-gzip/' + h,
744 'read_timeout': isolateserver.DOWNLOAD_READ_TIMEOUT,
750 ) for h, v in requests
754 '--isolate-server', server,
755 '--target', self.tempdir,
756 '--isolated', isolated_hash,
758 self.assertEqual(0, isolateserver.main(cmd))
760 (os.path.join(self.tempdir, k), v) for k, v in files.iteritems())
761 self.assertEqual(expected, actual)
763 'To run this test please run from the directory %s:\n Absurb command\n'
764 % os.path.join(self.tempdir, 'a'))
765 self.checkOutput(expected_stdout, '')
768 class TestIsolated(auto_stub.TestCase):
def test_load_isolated_empty(self):
  """An empty JSON object must parse into an empty mapping."""
  parsed = isolateserver.load_isolated('{}', None, ALGO)
  self.assertEqual({}, parsed)
773 def test_load_isolated_good(self):
775 u'command': [u'foo', u'bar'],
782 u'h': u'0123456789abcdef0123456789abcdef01234567',
786 u'includes': [u'0123456789abcdef0123456789abcdef01234567'],
789 u'relative_cwd': u'somewhere_else'
791 m = isolateserver.load_isolated(json.dumps(data), None, ALGO)
792 self.assertEqual(data, m)
794 def test_load_isolated_bad(self):
799 u'h': u'0123456789abcdef0123456789abcdef01234567'
804 isolateserver.load_isolated(json.dumps(data), None, ALGO)
806 except isolateserver.ConfigError:
809 def test_load_isolated_os_only(self):
813 m = isolateserver.load_isolated(json.dumps(data), 'HP/UX', ALGO)
814 self.assertEqual(data, m)
816 def test_load_isolated_os_bad(self):
821 isolateserver.load_isolated(json.dumps(data), 'AS/400', ALGO)
823 except isolateserver.ConfigError:
826 def test_load_isolated_path(self):
827 # Automatically convert the path case.
828 wrong_path_sep = u'\\' if os.path.sep == '/' else u'/'
829 def gen_data(path_sep):
831 u'command': [u'foo', u'bar'],
833 path_sep.join(('a', 'b')): {
834 u'l': path_sep.join(('..', 'somewhere')),
838 u'relative_cwd': path_sep.join(('somewhere', 'else')),
841 data = gen_data(wrong_path_sep)
842 actual = isolateserver.load_isolated(json.dumps(data), None, ALGO)
843 expected = gen_data(os.path.sep)
844 self.assertEqual(expected, actual)
846 def test_save_isolated_good_long_size(self):
848 self.mock(isolateserver.tools, 'write_json', lambda *x: calls.append(x))
854 u'h': u'0123456789abcdef0123456789abcdef01234567',
859 m = isolateserver.save_isolated('foo', data)
860 self.assertEqual([], m)
861 self.assertEqual([('foo', data, True)], calls)
864 class SymlinkTest(unittest.TestCase):
866 super(SymlinkTest, self).setUp()
867 self.old_cwd = os.getcwd()
868 self.cwd = tempfile.mkdtemp(prefix='isolate_')
869 # Everything should work even from another directory.
874 os.chdir(self.old_cwd)
875 shutil.rmtree(self.cwd)
877 super(SymlinkTest, self).tearDown()
879 if sys.platform == 'darwin':
880 def test_expand_symlinks_path_case(self):
881 # Ensures that the resulting path case is fixed on case insensitive file
883 os.symlink('dest', os.path.join(self.cwd, 'link'))
884 os.mkdir(os.path.join(self.cwd, 'Dest'))
885 open(os.path.join(self.cwd, 'Dest', 'file.txt'), 'w').close()
887 result = isolateserver.expand_symlinks(unicode(self.cwd), 'link')
888 self.assertEqual((u'Dest', [u'link']), result)
889 result = isolateserver.expand_symlinks(unicode(self.cwd), 'link/File.txt')
890 self.assertEqual((u'Dest/file.txt', [u'link']), result)
892 def test_expand_directories_and_symlinks_path_case(self):
893 # Ensures that the resulting path case is fixed on case insensitive file
894 # system. A superset of test_expand_symlinks_path_case.
895 # Create *all* the paths with the wrong path case.
896 basedir = os.path.join(self.cwd, 'baseDir')
897 os.mkdir(basedir.lower())
898 subdir = os.path.join(basedir, 'subDir')
899 os.mkdir(subdir.lower())
900 open(os.path.join(subdir, 'Foo.txt'), 'w').close()
901 os.symlink('subDir', os.path.join(basedir, 'linkdir'))
902 actual = isolateserver.expand_directories_and_symlinks(
903 unicode(self.cwd), [u'baseDir/'], lambda _: None, True, False)
906 u'basedir/subdir/Foo.txt',
907 u'basedir/subdir/Foo.txt',
909 self.assertEqual(expected, actual)
911 def test_process_input_path_case_simple(self):
912 # Ensure the symlink dest is saved in the right path case.
913 subdir = os.path.join(self.cwd, 'subdir')
915 linkdir = os.path.join(self.cwd, 'linkdir')
916 os.symlink('subDir', linkdir)
917 actual = isolateserver.process_input(
918 unicode(linkdir.upper()), {}, True, 'mac', ALGO)
919 expected = {'l': u'subdir', 'm': 360, 't': int(os.stat(linkdir).st_mtime)}
920 self.assertEqual(expected, actual)
922 def test_process_input_path_case_complex(self):
923 # Ensure the symlink dest is saved in the right path case. This includes 2
924 # layers of symlinks.
925 basedir = os.path.join(self.cwd, 'basebir')
928 linkeddir2 = os.path.join(self.cwd, 'linkeddir2')
931 linkeddir1 = os.path.join(basedir, 'linkeddir1')
932 os.symlink('../linkedDir2', linkeddir1)
934 subsymlinkdir = os.path.join(basedir, 'symlinkdir')
935 os.symlink('linkedDir1', subsymlinkdir)
937 actual = isolateserver.process_input(
938 unicode(subsymlinkdir.upper()), {}, True, 'mac', ALGO)
940 'l': u'linkeddir1', 'm': 360, 't': int(os.stat(subsymlinkdir).st_mtime),
942 self.assertEqual(expected, actual)
944 actual = isolateserver.process_input(
945 unicode(linkeddir1.upper()), {}, True, 'mac', ALGO)
947 'l': u'../linkeddir2', 'm': 360, 't': int(os.stat(linkeddir1).st_mtime),
949 self.assertEqual(expected, actual)
951 if sys.platform != 'win32':
952 def test_symlink_input_absolute_path(self):
953 # A symlink is outside of the checkout, it should be treated as a normal
956 # .../src/out -> .../tmp/foo
959 src = os.path.join(self.cwd, u'src')
960 src_out = os.path.join(src, 'out')
961 tmp = os.path.join(self.cwd, 'tmp')
962 tmp_foo = os.path.join(tmp, 'foo')
966 # The problem was that it's an absolute path, so it must be considered a
968 os.symlink(tmp, src_out)
969 open(os.path.join(tmp_foo, 'bar.txt'), 'w').close()
970 actual = isolateserver.expand_symlinks(src, u'out/foo/bar.txt')
971 self.assertEqual((u'out/foo/bar.txt', []), actual)
974 def get_storage(_isolate_server, _namespace):
975 class StorageFake(object):
976 def __enter__(self, *_):
979 def __exit__(self, *_):
983 def upload_items(items):
984 # Always returns the second item as not present.
989 class TestArchive(TestCase):
def get_isolateserver_prog():
  """Returns 'isolateserver.py' or 'isolateserver.pyc'.

  Resolves through sys.modules so the answer reflects the file the module
  was actually loaded from (source vs. compiled bytecode).
  """
  module = sys.modules[isolateserver.__name__]
  return os.path.basename(module.__file__)
995 def test_archive_no_server(self):
996 with self.assertRaises(SystemExit):
997 isolateserver.main(['archive', '.'])
998 prog = self.get_isolateserver_prog()
1001 'Usage: %(prog)s archive [options] <file1..fileN> or - to read '
1003 '%(prog)s: error: --isolate-server is required.\n' % {'prog': prog})
1005 def test_archive_duplicates(self):
1006 with self.assertRaises(SystemExit):
1009 'archive', '--isolate-server', 'https://localhost:1',
1013 prog = self.get_isolateserver_prog()
1016 'Usage: %(prog)s archive [options] <file1..fileN> or - to read '
1018 '%(prog)s: error: Duplicate entries found.\n' % {'prog': prog})
1020 def test_archive_files(self):
1021 old_cwd = os.getcwd()
1023 os.chdir(os.path.join(TEST_DIR, 'isolateserver'))
1024 self.mock(isolateserver, 'get_storage', get_storage)
1025 f = ['empty_file.txt', 'small_file.txt']
1027 ['archive', '--isolate-server', 'https://localhost:1'] + f)
1029 'da39a3ee5e6b4b0d3255bfef95601890afd80709 empty_file.txt\n'
1030 '0491bd1da8087ad10fcdd7c9634e308804b72158 small_file.txt\n',
1035 def help_test_archive(self, cmd_line_prefix):
1036 old_cwd = os.getcwd()
1039 self.mock(isolateserver, 'get_storage', get_storage)
1040 p = os.path.join(TEST_DIR, 'isolateserver')
1041 isolateserver.main(cmd_line_prefix + [p])
1042 # TODO(maruel): The problem here is that the test depends on the file mode
1043 # of the files in this directory.
1044 # Fix is to copy the files in a temporary directory with known file modes.
1046 # If you modify isolateserver.ISOLATED_FILE_VERSION, you'll have to update
1047 # the hash below. Sorry about that.
1049 '189dbab83102b8ebcff92c1332a25fe26c1a5d7d %s\n' % p,
1054 def test_archive_directory(self):
1055 self.help_test_archive(['archive', '--isolate-server',
1056 'https://localhost:1'])
1058 def test_archive_directory_envvar(self):
1059 with test_utils.EnvVars({'ISOLATE_SERVER': 'https://localhost:1'}):
1060 self.help_test_archive(['archive'])
1063 class OptionsTest(unittest.TestCase):
1064 def test_isolate_server(self):
1066 (['-I', 'http://foo.com/'], 'http://foo.com'),
1067 (['-I', 'https://foo.com/'], 'https://foo.com'),
1068 (['-I', 'https://foo.com'], 'https://foo.com'),
1069 (['-I', 'https://foo.com/a'], 'https://foo.com/a'),
1070 (['-I', 'https://foo.com/a/'], 'https://foo.com/a'),
1071 (['-I', 'https://foo.com:8080/a/'], 'https://foo.com:8080/a'),
1072 (['-I', 'foo.com'], 'https://foo.com'),
1073 (['-I', 'foo.com:8080'], 'https://foo.com:8080'),
1074 (['-I', 'foo.com/'], 'https://foo.com'),
1075 (['-I', 'foo.com/a/'], 'https://foo.com/a'),
1077 for value, expected in data:
1078 parser = isolateserver.OptionParserIsolateServer()
1079 isolateserver.add_isolate_server_options(parser, False)
1080 options, _ = parser.parse_args(value)
1081 isolateserver.process_isolate_server_options(parser, options)
1082 self.assertEqual(expected, options.isolate_server)
1084 def test_indir(self):
1086 (['-I', 'http://foo.com/'], ('http://foo.com', None)),
1087 (['--indir', ROOT_DIR], ('', ROOT_DIR)),
1089 for value, (expected_isolate_server, expected_indir) in data:
1090 parser = isolateserver.OptionParserIsolateServer()
1091 isolateserver.add_isolate_server_options(parser, True)
1092 options, _ = parser.parse_args(value)
1093 isolateserver.process_isolate_server_options(parser, options)
1094 self.assertEqual(expected_isolate_server, options.isolate_server)
1095 self.assertEqual(expected_indir, options.indir)
def clear_env_vars():
  """Removes isolate-related variables from the environment, if present.

  Keeps tests hermetic: a developer's shell settings must not leak into the
  test run. Missing variables are silently ignored.
  """
  for var_name in ('ISOLATE_DEBUG', 'ISOLATE_SERVER'):
    if var_name in os.environ:
      del os.environ[var_name]
1103 if __name__ == '__main__':
1104 if '-v' in sys.argv:
1105 unittest.TestCase.maxDiff = None
1106 logging.basicConfig(
1107 level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR))