2 # Copyright 2013 The Swarming Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 that
4 # can be found in the LICENSE file.
6 # pylint: disable=W0212,W0223,W0231,W0613
21 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
22 ROOT_DIR = os.path.dirname(TEST_DIR)
23 sys.path.insert(0, ROOT_DIR)
24 sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
26 from depot_tools import auto_stub
28 from utils import threading_utils
# TestCase: shared base for the tests below.  The visible lines show it mocks
# isolateserver.net.url_open / sleep_before_retry and replaces sys.stdout/stderr
# with StringIO buffers, then replays canned responses from self._requests.
# NOTE(review): the embedded original line numbers are non-contiguous here
# (35 -> 37, 43 -> 47, ...), so this excerpt is missing lines — e.g. the
# setUp()/tearDown() 'def' lines and the self._requests initialisation are
# not visible.  Do not treat this block as complete or runnable as shown.
34 class TestCase(auto_stub.TestCase):
35   """Mocks out url_open() calls and sys.stdout/stderr."""
37     super(TestCase, self).setUp()
38     self.mock(isolateserver.net, 'url_open', self._url_open)
39     self.mock(isolateserver.net, 'sleep_before_retry', lambda *_: None)
40     self._lock = threading.Lock()
42     self.mock(sys, 'stdout', StringIO.StringIO())
43     self.mock(sys, 'stderr', StringIO.StringIO())
47       self.assertEqual([], self._requests)
48       self.checkOutput('', '')
50       super(TestCase, self).tearDown()
# checkOutput: asserts captured stdout/stderr match, then re-mocks the streams
# so tearDown's second checkOutput('', '') cannot fail a second time.
52   def checkOutput(self, expected_out, expected_err):
54       self.assertEqual(expected_err, sys.stderr.getvalue())
55       self.assertEqual(expected_out, sys.stdout.getvalue())
57       # Prevent double-fail.
58       self.mock(sys, 'stdout', StringIO.StringIO())
59       self.mock(sys, 'stderr', StringIO.StringIO())
# _url_open: replacement for net.url_open that matches the request against the
# queued expectations in self._requests and returns a fake response.
# NOTE(review): the matching condition between lines 68 and 70 is missing from
# this excerpt; 'n' and 'headers' usage cannot be confirmed from here.
61   def _url_open(self, url, **kwargs):
62     logging.warn('url_open(%s, %s)', url[:500], str(kwargs)[:500])
64       if not self._requests:
66         # Ignore 'stream' argument, it's not important for these tests.
67         kwargs.pop('stream', None)
68         for i, n in enumerate(self._requests):
70             _, expected_kwargs, result, headers = self._requests.pop(i)
71             self.assertEqual(expected_kwargs, kwargs)
72             if result is not None:
73               return isolateserver.net.HttpResponse.get_fake_response(
76     self.fail('Unknown request %s' % url)
# TestZipCompression: exercises isolateserver.zip_compress / zip_decompress.
# NOTE(review): lines 93-94 of the original file are missing from this excerpt;
# test_zip_bomb references 'chunk_size' and 'decompressed' whose initialisation
# is in that gap.  Block is incomplete as shown.
79 class TestZipCompression(TestCase):
80   """Test zip_compress and zip_decompress generators."""
# Round-trip property: decompress(compress(data)) reproduces data exactly.
82   def test_compress_and_decompress(self):
83     """Test data === decompress(compress(data))."""
84     original = [str(x) for x in xrange(0, 1000)]
85     processed = isolateserver.zip_decompress(
86         isolateserver.zip_compress(original))
87     self.assertEqual(''.join(original), ''.join(processed))
# Feeds a highly-compressible 100 KB payload and checks every decompressed
# chunk stays within chunk_size (guards against zip-bomb style blowup).
89   def test_zip_bomb(self):
90     """Verify zip_decompress always returns small chunks."""
91     original = '\x00' * 100000
92     bomb = ''.join(isolateserver.zip_compress(original))
95     for chunk in isolateserver.zip_decompress([bomb], chunk_size):
96       self.assertLessEqual(len(chunk), chunk_size)
97       decompressed.append(chunk)
98     self.assertEqual(original, ''.join(decompressed))
# Corrupt input must surface as IOError, not silently yield garbage.
100   def test_bad_zip_file(self):
101     """Verify decompressing broken file raises IOError."""
102     with self.assertRaises(IOError):
103       ''.join(isolateserver.zip_decompress(['Im not a zip file']))
# FakeItem: in-memory isolateserver.Item whose digest/size are derived from the
# raw data via ALGO (hash constructor defined elsewhere in this file).
# NOTE(review): original lines 110-116 are missing from this excerpt — the
# storage of self.data, the body of content(), and whatever property wraps the
# zlib.compress call (presumably 'zipped') are not visible.  Incomplete.
106 class FakeItem(isolateserver.Item):
107   def __init__(self, data, is_isolated=False):
108     super(FakeItem, self).__init__(
109         ALGO(data).hexdigest(), len(data), is_isolated)
112   def content(self, _chunk_size):
# zlib-compressed form of the payload at self.compression_level — TODO confirm
# this line belongs to a 'zipped' property; the enclosing def is in the gap.
117     return zlib.compress(self.data, self.compression_level)
# StorageTest: unit tests for isolateserver.Storage (batching, missing-item
# detection, async_push success/error paths, upload_tree bookkeeping).
# NOTE(review): embedded line numbers jump throughout (126 -> 129, 158 -> 161,
# 241 -> 261, ...), so decorators (likely @staticmethod on mock_push), list
# literals ('items', 'expected', 'missing', 'files'), and several method bodies
# are missing from this excerpt.  Treat as incomplete; comments below describe
# only what the visible lines demonstrably do.
120 class StorageTest(TestCase):
121   """Tests for Storage methods."""
# Factory returning a StorageApi whose push() records (item, joined content)
# into self.pushed instead of doing network I/O.  The 'side_effect' parameter's
# use is in a gap (lines 127-128, 131-132) — presumably invoked inside push().
124 def mock_push(side_effect=None):
125 """Returns StorageApi subclass with mocked 'push' method."""
126 class MockedStorageApi(isolateserver.StorageApi):
129 def push(self, item, content):
130 self.pushed.append((item, ''.join(content)))
133 return MockedStorageApi()
# Order-insensitive container equality: same length and same element set.
135 def assertEqualIgnoringOrder(self, a, b):
136 """Asserts that containers |a| and |b| contain same items."""
137 self.assertEqual(len(a), len(b))
138 self.assertEqual(set(a), set(b))
# Verifies Storage.batch_items_for_check orders/batches items; the 'items' and
# 'expected' list headers are in gaps (lines 141, 146-149).
140 def test_batch_items_for_check(self):
142 isolateserver.Item('foo', 12),
143 isolateserver.Item('blow', 0),
144 isolateserver.Item('bizz', 1222),
145 isolateserver.Item('buzz', 1223),
148 [items[3], items[2], items[0], items[1]],
150 batches = list(isolateserver.Storage.batch_items_for_check(items))
151 self.assertEqual(batches, expected)
# get_missing_items should yield exactly what the API's contains() reports.
153 def test_get_missing_items(self):
155 isolateserver.Item('foo', 12),
156 isolateserver.Item('blow', 0),
157 isolateserver.Item('bizz', 1222),
158 isolateserver.Item('buzz', 1223),
161 [items[2], items[3]],
164 class MockedStorageApi(isolateserver.StorageApi):
165 def contains(self, _items):
167 storage = isolateserver.Storage(MockedStorageApi(), use_zip=False)
169 # 'get_missing_items' is a generator, materialize its result in a list.
170 result = list(storage.get_missing_items(items))
171 self.assertEqual(missing, result)
# Happy path: async_push completes, channel yields the item, and push() saw
# the zipped or raw payload depending on use_zip.
173 def test_async_push(self):
174 for use_zip in (False, True):
175 item = FakeItem('1234567')
176 storage_api = self.mock_push()
177 storage = isolateserver.Storage(storage_api, use_zip)
178 channel = threading_utils.TaskChannel()
179 storage.async_push(channel, 0, item)
180 # Wait for push to finish.
181 pushed_item = channel.pull()
182 self.assertEqual(item, pushed_item)
183 # StorageApi.push was called with correct arguments.
185 [(item, item.zipped if use_zip else item.data)], storage_api.pushed)
# A generator that raises mid-stream must propagate through channel.pull()
# and must not leave a partial push recorded.
187 def test_async_push_generator_errors(self):
188 class FakeException(Exception):
191 def faulty_generator(_chunk_size):
193 raise FakeException('fake exception')
195 for use_zip in (False, True):
197 self.mock(item, 'content', faulty_generator)
198 storage_api = self.mock_push()
199 storage = isolateserver.Storage(storage_api, use_zip)
200 channel = threading_utils.TaskChannel()
201 storage.async_push(channel, 0, item)
202 with self.assertRaises(FakeException):
204 # StorageApi's push should never complete when data can not be read.
205 self.assertEqual(0, len(storage_api.pushed))
# Upload failure path: push raises IOError; verifies 1 + RETRIES attempts were
# made, each with identical parameters.
207 def test_async_push_upload_errors(self):
210 def _generator(_chunk_size):
213 def push_side_effect():
214 raise IOError('Nope')
216 # TODO(vadimsh): Retrying push when fetching data from a generator is
217 # broken now (it reuses same generator instance when retrying).
220 lambda _chunk_size: [chunk],
223 for use_zip in (False, True):
224 for source in content_sources:
225 item = FakeItem(chunk)
226 self.mock(item, 'content', source)
227 storage_api = self.mock_push(push_side_effect)
228 storage = isolateserver.Storage(storage_api, use_zip)
229 channel = threading_utils.TaskChannel()
230 storage.async_push(channel, 0, item)
231 with self.assertRaises(IOError):
233 # First initial attempt + all retries.
234 attempts = 1 + isolateserver.WorkerPool.RETRIES
235 # Single push attempt parameters.
236 expected_push = (item, item.zipped if use_zip else item.data)
237 # Ensure all pushes are attempted.
239 [expected_push] * attempts, storage_api.pushed)
# End-to-end upload_tree: mocks file_read and StorageApi, then checks that only
# missing files were read and pushed, and every file was contains()-checked.
# NOTE(review): the 'files' dict definition (lines 242-260) is entirely in a
# gap; per-file metadata keys 'h'/'s' are assumed from the visible usage.
241 def test_upload_tree(self):
261 files_data = dict((k, 'x' * files[k]['s']) for k in files)
262 all_hashes = set(f['h'] for f in files.itervalues())
263 missing_hashes = set(['hash_a', 'hash_b'])
265 # Files read by mocked_file_read.
272 def mocked_file_read(filepath, chunk_size=0, offset=0):
273 self.assertEqual(root, os.path.dirname(filepath))
274 filename = os.path.basename(filepath)
275 self.assertIn(filename, files_data)
276 read_calls.append(filename)
277 return files_data[filename]
278 self.mock(isolateserver, 'file_read', mocked_file_read)
280 class MockedStorageApi(isolateserver.StorageApi):
281 def contains(self, items):
282 contains_calls.append(items)
283 return [i for i in items
284 if os.path.basename(i.digest) in missing_hashes]
286 def push(self, item, content):
287 push_calls.append((item, ''.join(content)))
289 storage_api = MockedStorageApi()
290 storage = isolateserver.Storage(storage_api, use_zip=False)
291 storage.upload_tree(root, files)
293 # Was reading only missing files.
294 self.assertEqualIgnoringOrder(
296 [files[path]['h'] for path in read_calls])
297 # 'contains' checked for existence of all files.
298 self.assertEqualIgnoringOrder(
300 [i.digest for i in sum(contains_calls, [])])
301 # Pushed only missing files.
302 self.assertEqualIgnoringOrder(
304 [call[0].digest for call in push_calls])
305 # Pushing with correct data, size and push urls.
306 for pushed_item, pushed_content in push_calls:
308 name for name, metadata in files.iteritems()
309 if metadata['h'] == pushed_item.digest
311 # If there are multiple files that map to same hash, upload_tree chooses
313 filename = filenames[0]
314 self.assertEqual(os.path.join(root, filename), pushed_item.path)
315 self.assertEqual(files_data[filename], pushed_content)
# IsolateServerStorageApiTest: tests the IsolateServer storage backend against
# mocked HTTP traffic (handshake, retrieve, pre-upload endpoints).
# NOTE(review): this excerpt has many gaps (322 -> 324, 338 -> 340, 506 -> 511,
# ...).  The mock_* helpers' return statements, the '_requests' list wrappers,
# and several expected-request tuples are missing; dict/tuple nesting cannot be
# reconstructed from here.  Treat the whole block as incomplete.
318 class IsolateServerStorageApiTest(TestCase):
# Builds the expected handshake request/response pair for the /handshake
# endpoint; 'error' presumably lands in the response (gap at 329) — verify.
320 def mock_handshake_request(server, token='fake token', error=None):
321 handshake_request = {
322 'client_app_version': isolateserver.__version__,
324 'protocol_version': isolateserver.ISOLATE_PROTOCOL_VERSION,
327 handshake_response = {
328 'access_token': token,
330 'protocol_version': isolateserver.ISOLATE_PROTOCOL_VERSION,
331 'server_app_version': 'mocked server T1000',
334 server + '/content-gs/handshake',
336 'content_type': 'application/json',
338 'data': json.dumps(handshake_request, separators=(',', ':')),
340 json.dumps(handshake_response),
# Expected request tuple for GET /content-gs/retrieve/<namespace>/<item>.
345 def mock_fetch_request(server, namespace, item, data,
346 request_headers=None, response_headers=None):
348 server + '/content-gs/retrieve/%s/%s' % (namespace, item),
352 'headers': request_headers,
# Expected request tuple for POST /content-gs/pre-upload with the token
# URL-encoded into the query string.
359 def mock_contains_request(server, namespace, token, request, response):
360 url = server + '/content-gs/pre-upload/%s?token=%s' % (
361 namespace, urllib.quote(token))
365 'data': json.dumps(request, separators=(',', ':')),
366 'content_type': 'application/json',
369 json.dumps(response),
# Successful handshake exposes the access token via _server_capabilities.
373 def test_server_capabilities_success(self):
374 server = 'http://example.com'
375 namespace = 'default'
376 access_token = 'fake token'
378 self.mock_handshake_request(server, access_token),
380 storage = isolateserver.IsolateServer(server, namespace)
381 caps = storage._server_capabilities
382 self.assertEqual(access_token, caps['access_token'])
# url_open returning None (network down) must raise MappingError.
384 def test_server_capabilities_network_failure(self):
385 self.mock(isolateserver.net, 'url_open', lambda *_args, **_kwargs: None)
386 with self.assertRaises(isolateserver.MappingError):
387 storage = isolateserver.IsolateServer('http://example.com', 'default')
388 _ = storage._server_capabilities
# Non-JSON handshake body must raise MappingError.
390 def test_server_capabilities_format_failure(self):
391 server = 'http://example.com'
392 namespace = 'default'
393 handshake_req = self.mock_handshake_request(server)
395 (handshake_req[0], handshake_req[1], 'Im a bad response', None),
397 storage = isolateserver.IsolateServer(server, namespace)
398 with self.assertRaises(isolateserver.MappingError):
399 _ = storage._server_capabilities
# Server-reported handshake error must appear in the raised exception text.
401 def test_server_capabilities_respects_error(self):
402 server = 'http://example.com'
403 namespace = 'default'
404 error = 'Im sorry, Dave. Im afraid I cant do that.'
406 self.mock_handshake_request(server, error=error)
408 storage = isolateserver.IsolateServer(server, namespace)
409 with self.assertRaises(isolateserver.MappingError) as context:
410 _ = storage._server_capabilities
411 # Server error message should be reported to user.
412 self.assertIn(error, str(context.exception))
# fetch() streams the item body; joining the chunks yields the original data.
414 def test_fetch_success(self):
415 server = 'http://example.com'
416 namespace = 'default'
417 data = ''.join(str(x) for x in xrange(1000))
418 item = ALGO(data).hexdigest()
420 self.mock_fetch_request(server, namespace, item, data),
422 storage = isolateserver.IsolateServer(server, namespace)
423 fetched = ''.join(storage.fetch(item))
424 self.assertEqual(data, fetched)
# A None response body from the server surfaces as IOError.
426 def test_fetch_failure(self):
427 server = 'http://example.com'
428 namespace = 'default'
429 item = ALGO('something').hexdigest()
431 self.mock_fetch_request(server, namespace, item, None),
433 storage = isolateserver.IsolateServer(server, namespace)
434 with self.assertRaises(IOError):
435 _ = ''.join(storage.fetch(item))
# Ranged fetch: each valid Content-Range variant must be accepted.
# NOTE(review): 'offset' and 'size' initialisations are in gaps (442-444).
437 def test_fetch_offset_success(self):
438 server = 'http://example.com'
439 namespace = 'default'
440 data = ''.join(str(x) for x in xrange(1000))
441 item = ALGO(data).hexdigest()
445 good_content_range_headers = [
446 'bytes %d-%d/%d' % (offset, size - 1, size),
447 'bytes %d-%d/*' % (offset, size - 1),
450 for content_range_header in good_content_range_headers:
452 self.mock_fetch_request(
453 server, namespace, item, data[offset:],
454 request_headers={'Range': 'bytes=%d-' % offset},
455 response_headers={'Content-Range': content_range_header}),
457 storage = isolateserver.IsolateServer(server, namespace)
458 fetched = ''.join(storage.fetch(item, offset))
459 self.assertEqual(data[offset:], fetched)
# Ranged fetch: each malformed/mismatched Content-Range must raise IOError.
461 def test_fetch_offset_bad_header(self):
462 server = 'http://example.com'
463 namespace = 'default'
464 data = ''.join(str(x) for x in xrange(1000))
465 item = ALGO(data).hexdigest()
469 bad_content_range_headers = [
474 'not bytes %d-%d/%d' % (offset, size - 1, size),
475 'bytes %d-%d' % (offset, size - 1),
477 'bytes %d-%d/%d' % (offset - 1, size - 1, size),
479 'bytes %d-%d/%d' % (offset, offset + 10, size),
482 for content_range_header in bad_content_range_headers:
484 self.mock_fetch_request(
485 server, namespace, item, data[offset:],
486 request_headers={'Range': 'bytes=%d-' % offset},
487 response_headers={'Content-Range': content_range_header}),
489 storage = isolateserver.IsolateServer(server, namespace)
490 with self.assertRaises(IOError):
491 _ = ''.join(storage.fetch(item, offset))
# Full push flow: contains() reports the item missing, push() uploads to the
# push URL and hits the finalize URL; both push_state flags end up True.
494 def test_push_success(self):
495 server = 'http://example.com'
496 namespace = 'default'
498 data = ''.join(str(x) for x in xrange(1000))
499 item = FakeItem(data)
500 push_urls = (server + '/push_here', server + '/call_this')
501 contains_request = [{'h': item.digest, 's': item.size, 'i': 0}]
502 contains_response = [push_urls]
504 self.mock_handshake_request(server, token),
505 self.mock_contains_request(
506 server, namespace, token, contains_request, contains_response),
511 'content_type': 'application/octet-stream',
521 'content_type': 'application/json',
528 storage = isolateserver.IsolateServer(server, namespace)
529 missing = storage.contains([item])
530 self.assertEqual([item], missing)
531 storage.push(item, [data])
532 self.assertTrue(item.push_state.uploaded)
533 self.assertTrue(item.push_state.finalized)
# Upload step fails -> IOError, neither uploaded nor finalized is set.
535 def test_push_failure_upload(self):
536 server = 'http://example.com'
537 namespace = 'default'
539 data = ''.join(str(x) for x in xrange(1000))
540 item = FakeItem(data)
541 push_urls = (server + '/push_here', server + '/call_this')
542 contains_request = [{'h': item.digest, 's': item.size, 'i': 0}]
543 contains_response = [push_urls]
545 self.mock_handshake_request(server, token),
546 self.mock_contains_request(
547 server, namespace, token, contains_request, contains_response),
552 'content_type': 'application/octet-stream',
559 storage = isolateserver.IsolateServer(server, namespace)
560 missing = storage.contains([item])
561 self.assertEqual([item], missing)
562 with self.assertRaises(IOError):
563 storage.push(item, [data])
564 self.assertFalse(item.push_state.uploaded)
565 self.assertFalse(item.push_state.finalized)
# Finalize step fails -> IOError, uploaded stays True but finalized is False.
567 def test_push_failure_finalize(self):
568 server = 'http://example.com'
569 namespace = 'default'
571 data = ''.join(str(x) for x in xrange(1000))
572 item = FakeItem(data)
573 push_urls = (server + '/push_here', server + '/call_this')
574 contains_request = [{'h': item.digest, 's': item.size, 'i': 0}]
575 contains_response = [push_urls]
577 self.mock_handshake_request(server, token),
578 self.mock_contains_request(
579 server, namespace, token, contains_request, contains_response),
584 'content_type': 'application/octet-stream',
594 'content_type': 'application/json',
601 storage = isolateserver.IsolateServer(server, namespace)
602 missing = storage.contains([item])
603 self.assertEqual([item], missing)
604 with self.assertRaises(IOError):
605 storage.push(item, [data])
606 self.assertTrue(item.push_state.uploaded)
607 self.assertFalse(item.push_state.finalized)
# contains() maps the server's per-item URL pairs onto push_state of the
# missing items; 'files', 'request', 'response', 'missing' headers are in gaps.
609 def test_contains_success(self):
610 server = 'http://example.com'
611 namespace = 'default'
614 FakeItem('1', is_isolated=True),
619 {'h': files[0].digest, 's': files[0].size, 'i': 1},
620 {'h': files[1].digest, 's': files[1].size, 'i': 0},
621 {'h': files[2].digest, 's': files[2].size, 'i': 0},
625 ['http://example/upload_here_1', None],
626 ['http://example/upload_here_2', 'http://example/call_this'],
633 self.mock_handshake_request(server, token),
634 self.mock_contains_request(server, namespace, token, request, response),
636 storage = isolateserver.IsolateServer(server, namespace)
637 result = storage.contains(files)
638 self.assertEqual(missing, result)
640 [x for x in response if x],
641 [[i.push_state.upload_url, i.push_state.finalize_url] for i in missing])
# Network failure (None response) during contains -> MappingError.
643 def test_contains_network_failure(self):
644 server = 'http://example.com'
645 namespace = 'default'
647 req = self.mock_contains_request(server, namespace, token, [], [])
649 self.mock_handshake_request(server, token),
650 (req[0], req[1], None, None),
652 storage = isolateserver.IsolateServer(server, namespace)
653 with self.assertRaises(isolateserver.MappingError):
# Malformed contains response body ([1, 2, 3]) -> MappingError.
656 def test_contains_format_failure(self):
657 server = 'http://example.com'
658 namespace = 'default'
661 self.mock_handshake_request(server, token),
662 self.mock_contains_request(server, namespace, token, [], [1, 2, 3])
664 storage = isolateserver.IsolateServer(server, namespace)
665 with self.assertRaises(isolateserver.MappingError):
# IsolateServerDownloadTest: drives isolateserver.main(['download', ...]) end
# to end with mocked file_write and canned HTTP retrieve responses.
# NOTE(review): gaps throughout (669 -> 675, 685 -> 688, 696 -> 702, ...) hide
# the tearDown/setUp def lines, the self._requests assignments, the 'cmd' and
# 'expected' list/dict headers, and the self.tempdir guard.  Incomplete.
669 class IsolateServerDownloadTest(TestCase):
675 shutil.rmtree(self.tempdir)
677 super(IsolateServerDownloadTest, self).tearDown()
# Two independent files fetched by hash and written to --target paths.
679 def test_download_two_files(self):
680 # Test downloading two files.
682 def out(key, generator):
683 actual[key] = ''.join(generator)
684 self.mock(isolateserver, 'file_write', out)
685 server = 'http://example.com'
688 server + '/content-gs/retrieve/default-gzip/sha-1',
689 {'read_timeout': 60, 'retry_404': True, 'headers': None},
690 zlib.compress('Coucou'),
694 server + '/content-gs/retrieve/default-gzip/sha-2',
695 {'read_timeout': 60, 'retry_404': True, 'headers': None},
696 zlib.compress('Bye Bye'),
702 '--isolate-server', server,
703 '--target', ROOT_DIR,
704 '--file', 'sha-1', 'path/to/a',
705 '--file', 'sha-2', 'path/to/b',
707 self.assertEqual(0, isolateserver.main(cmd))
709 os.path.join(ROOT_DIR, 'path/to/a'): 'Coucou',
710 os.path.join(ROOT_DIR, 'path/to/b'): 'Bye Bye',
712 self.assertEqual(expected, actual)
# Whole isolated tree: builds an .isolated JSON in memory, queues a retrieve
# request per file plus one for the .isolated itself, then checks the written
# files and the 'To run this test...' stdout hint.
714 def test_download_isolated(self):
715 # Test downloading an isolated tree.
716 self.tempdir = tempfile.mkdtemp(prefix='isolateserver')
718 def file_write_mock(key, generator):
719 actual[key] = ''.join(generator)
720 self.mock(isolateserver, 'file_write', file_write_mock)
721 self.mock(os, 'makedirs', lambda _: None)
722 server = 'http://example.com'
725 os.path.join('a', 'foo'): 'Content',
729 'command': ['Absurb', 'command'],
732 (k, {'h': ALGO(v).hexdigest(), 's': len(v)})
733 for k, v in files.iteritems()),
735 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':'))
736 isolated_hash = ALGO(isolated_data).hexdigest()
737 requests = [(v['h'], files[k]) for k, v in isolated['files'].iteritems()]
738 requests.append((isolated_hash, isolated_data))
741 server + '/content-gs/retrieve/default-gzip/' + h,
743 'read_timeout': isolateserver.DOWNLOAD_READ_TIMEOUT,
749 ) for h, v in requests
753 '--isolate-server', server,
754 '--target', self.tempdir,
755 '--isolated', isolated_hash,
757 self.assertEqual(0, isolateserver.main(cmd))
759 (os.path.join(self.tempdir, k), v) for k, v in files.iteritems())
760 self.assertEqual(expected, actual)
762 'To run this test please run from the directory %s:\n Absurb command\n'
763 % os.path.join(self.tempdir, 'a'))
764 self.checkOutput(expected_stdout, '')
# TestIsolated: tests for load_isolated / save_isolated (parsing, validation,
# OS filtering, path-separator normalization).
# NOTE(review): most 'data = {' headers and closing braces sit in gaps
# (771 -> 774, 792 -> 798, ...); the literal structures cannot be fully seen.
767 class TestIsolated(auto_stub.TestCase):
# Empty JSON object parses to an empty mapping.
768 def test_load_isolated_empty(self):
769 m = isolateserver.load_isolated('{}', None, ALGO)
770 self.assertEqual({}, m)
# A fully-populated valid .isolated round-trips unchanged.
772 def test_load_isolated_good(self):
774 u'command': [u'foo', u'bar'],
781 u'h': u'0123456789abcdef0123456789abcdef01234567',
785 u'includes': [u'0123456789abcdef0123456789abcdef01234567'],
788 u'relative_cwd': u'somewhere_else'
790 m = isolateserver.load_isolated(json.dumps(data), None, ALGO)
791 self.assertEqual(data, m)
# Invalid content must raise ConfigError (the try/fail scaffolding around
# lines 802-807 is partially in gaps).
793 def test_load_isolated_bad(self):
798 u'h': u'0123456789abcdef0123456789abcdef01234567'
803 isolateserver.load_isolated(json.dumps(data), None, ALGO)
805 except isolateserver.ConfigError:
# Matching 'os' value ('HP/UX') is accepted...
808 def test_load_isolated_os_only(self):
812 m = isolateserver.load_isolated(json.dumps(data), 'HP/UX', ALGO)
813 self.assertEqual(data, m)
# ...while a mismatching one ('AS/400') raises ConfigError.
815 def test_load_isolated_os_bad(self):
820 isolateserver.load_isolated(json.dumps(data), 'AS/400', ALGO)
822 except isolateserver.ConfigError:
# Paths written with the wrong separator are normalized to os.path.sep.
825 def test_load_isolated_path(self):
826 # Automatically convert the path case.
827 wrong_path_sep = u'\\' if os.path.sep == '/' else u'/'
828 def gen_data(path_sep):
830 u'command': [u'foo', u'bar'],
832 path_sep.join(('a', 'b')): {
833 u'l': path_sep.join(('..', 'somewhere')),
837 u'relative_cwd': path_sep.join(('somewhere', 'else')),
840 data = gen_data(wrong_path_sep)
841 actual = isolateserver.load_isolated(json.dumps(data), None, ALGO)
842 expected = gen_data(os.path.sep)
843 self.assertEqual(expected, actual)
# save_isolated with a large ('long') size delegates to tools.write_json once
# with (path, data, True) and reports no children.
845 def test_save_isolated_good_long_size(self):
847 self.mock(isolateserver.tools, 'write_json', lambda *x: calls.append(x))
853 u'h': u'0123456789abcdef0123456789abcdef01234567',
858 m = isolateserver.save_isolated('foo', data)
859 self.assertEqual([], m)
860 self.assertEqual([('foo', data, True)], calls)
# SymlinkTest: filesystem-level tests of expand_symlinks /
# expand_directories_and_symlinks / process_input around symlink handling and
# path-case fixing.  Runs in a fresh temp dir and restores cwd on teardown.
# NOTE(review): the setUp/tearDown 'def' lines and several statements are in
# gaps (863 -> 865, 868 -> 873, 912 -> 914, ...).  The darwin-only tests are
# guarded by the visible 'if sys.platform == "darwin":' at original line 878.
863 class SymlinkTest(unittest.TestCase):
865     super(SymlinkTest, self).setUp()
866     self.old_cwd = os.getcwd()
867     self.cwd = tempfile.mkdtemp(prefix='isolate_')
868     # Everything should work even from another directory.
873     os.chdir(self.old_cwd)
874     shutil.rmtree(self.cwd)
876     super(SymlinkTest, self).tearDown()
878   if sys.platform == 'darwin':
# Case-insensitive FS: expand_symlinks must return the on-disk case ('Dest')
# even when queried with a differently-cased path.
879     def test_expand_symlinks_path_case(self):
880       # Ensures that the resulting path case is fixed on case insensitive file
882       os.symlink('dest', os.path.join(self.cwd, 'link'))
883       os.mkdir(os.path.join(self.cwd, 'Dest'))
884       open(os.path.join(self.cwd, 'Dest', 'file.txt'), 'w').close()
886       result = isolateserver.expand_symlinks(unicode(self.cwd), 'link')
887       self.assertEqual((u'Dest', [u'link']), result)
888       result = isolateserver.expand_symlinks(unicode(self.cwd), 'link/File.txt')
889       self.assertEqual((u'Dest/file.txt', [u'link']), result)
891     def test_expand_directories_and_symlinks_path_case(self):
892       # Ensures that the resulting path case is fixed on case insensitive file
893       # system. A superset of test_expand_symlinks_path_case.
894       # Create *all* the paths with the wrong path case.
895       basedir = os.path.join(self.cwd, 'baseDir')
896       os.mkdir(basedir.lower())
897       subdir = os.path.join(basedir, 'subDir')
898       os.mkdir(subdir.lower())
899       open(os.path.join(subdir, 'Foo.txt'), 'w').close()
900       os.symlink('subDir', os.path.join(basedir, 'linkdir'))
901       actual = isolateserver.expand_directories_and_symlinks(
902           unicode(self.cwd), [u'baseDir/'], lambda _: None, True, False)
905           u'basedir/subdir/Foo.txt',
906           u'basedir/subdir/Foo.txt',
908       self.assertEqual(expected, actual)
# process_input on an upper-cased symlink path: the 'l' entry must carry the
# on-disk case of the link destination.
910     def test_process_input_path_case_simple(self):
911       # Ensure the symlink dest is saved in the right path case.
912       subdir = os.path.join(self.cwd, 'subdir')
914       linkdir = os.path.join(self.cwd, 'linkdir')
915       os.symlink('subDir', linkdir)
916       actual = isolateserver.process_input(
917           unicode(linkdir.upper()), {}, True, 'mac', ALGO)
918       expected = {'l': u'subdir', 'm': 360, 't': int(os.stat(linkdir).st_mtime)}
919       self.assertEqual(expected, actual)
921     def test_process_input_path_case_complex(self):
922       # Ensure the symlink dest is saved in the right path case. This includes 2
923       # layers of symlinks.
924       basedir = os.path.join(self.cwd, 'basebir')
927       linkeddir2 = os.path.join(self.cwd, 'linkeddir2')
930       linkeddir1 = os.path.join(basedir, 'linkeddir1')
931       os.symlink('../linkedDir2', linkeddir1)
933       subsymlinkdir = os.path.join(basedir, 'symlinkdir')
934       os.symlink('linkedDir1', subsymlinkdir)
936       actual = isolateserver.process_input(
937           unicode(subsymlinkdir.upper()), {}, True, 'mac', ALGO)
939           'l': u'linkeddir1', 'm': 360, 't': int(os.stat(subsymlinkdir).st_mtime),
941       self.assertEqual(expected, actual)
943       actual = isolateserver.process_input(
944           unicode(linkeddir1.upper()), {}, True, 'mac', ALGO)
946           'l': u'../linkeddir2', 'm': 360, 't': int(os.stat(linkeddir1).st_mtime),
948       self.assertEqual(expected, actual)
# POSIX-only: a symlink pointing outside the checkout is followed as if it
# were a normal directory rather than preserved as a link entry.
950   if sys.platform != 'win32':
951     def test_symlink_input_absolute_path(self):
952       # A symlink is outside of the checkout, it should be treated as a normal
955       # .../src/out -> .../tmp/foo
958       src = os.path.join(self.cwd, u'src')
959       src_out = os.path.join(src, 'out')
960       tmp = os.path.join(self.cwd, 'tmp')
961       tmp_foo = os.path.join(tmp, 'foo')
965       # The problem was that it's an absolute path, so it must be considered a
967       os.symlink(tmp, src_out)
968       open(os.path.join(tmp_foo, 'bar.txt'), 'w').close()
969       actual = isolateserver.expand_symlinks(src, u'out/foo/bar.txt')
970       self.assertEqual((u'out/foo/bar.txt', []), actual)
# get_storage: module-level stand-in used by TestArchive below to replace
# isolateserver.get_storage; returns a fake context-manager Storage.
# NOTE(review): the bodies of __enter__/__exit__/upload_items and the factory's
# return statement are all in gaps (976-977, 979-981, 984-987) — only the
# signatures and the comment at line 983 are visible.  Incomplete.
973 def get_storage(_isolate_server, _namespace):
974 class StorageFake(object):
975 def __enter__(self, *_):
978 def __exit__(self, *_):
982 def upload_items(items):
983 # Always returns the second item as not present.
# TestArchive: CLI-level tests of 'isolateserver.py archive' argument handling
# and output, with get_storage mocked out (module-level fake above).
# NOTE(review): gaps hide the checkOutput(...) wrappers around the expected
# usage strings, the try/finally restoring old_cwd, and (for the duplicates
# test) the duplicated file arguments.  Incomplete as shown.
988 class TestArchive(TestCase):
# Resolves the actual program basename so usage-string assertions survive
# running from a .pyc.  Presumably decorated @staticmethod (decorator line 989
# is in a gap) — TODO confirm.
990 def get_isolateserver_prog():
991 """Returns 'isolateserver.py' or 'isolateserver.pyc'."""
992 return os.path.basename(sys.modules[isolateserver.__name__].__file__)
# Missing --isolate-server must exit with the usage + error message.
994 def test_archive_no_server(self):
995 with self.assertRaises(SystemExit):
996 isolateserver.main(['archive', '.'])
997 prog = self.get_isolateserver_prog()
1000 'Usage: %(prog)s archive [options] <file1..fileN> or - to read '
1002 '%(prog)s: error: --isolate-server is required.\n' % {'prog': prog})
# Duplicate file entries on the command line must exit with an error.
1004 def test_archive_duplicates(self):
1005 with self.assertRaises(SystemExit):
1008 'archive', '--isolate-server', 'https://localhost:1',
1012 prog = self.get_isolateserver_prog()
1015 'Usage: %(prog)s archive [options] <file1..fileN> or - to read '
1017 '%(prog)s: error: Duplicate entries found.\n' % {'prog': prog})
# Archiving known fixture files prints '<sha1> <filename>' lines.
1019 def test_archive_files(self):
1020 old_cwd = os.getcwd()
1022 os.chdir(os.path.join(TEST_DIR, 'isolateserver'))
1023 self.mock(isolateserver, 'get_storage', get_storage)
1024 f = ['empty_file.txt', 'small_file.txt']
1026 ['archive', '--isolate-server', 'https://localhost:1'] + f)
1028 'da39a3ee5e6b4b0d3255bfef95601890afd80709 empty_file.txt\n'
1029 '0491bd1da8087ad10fcdd7c9634e308804b72158 small_file.txt\n',
# Archiving a directory prints the directory's isolated hash; the expected
# hash is pinned to ISOLATED_FILE_VERSION and the fixture files' modes.
1034 def test_archive_directory(self):
1035 old_cwd = os.getcwd()
1038 self.mock(isolateserver, 'get_storage', get_storage)
1039 p = os.path.join(TEST_DIR, 'isolateserver')
1041 ['archive', '--isolate-server', 'https://localhost:1', p])
1042 # TODO(maruel): The problem here is that the test depends on the file mode
1043 # of the files in this directory.
1044 # Fix is to copy the files in a temporary directory with known file modes.
1046 # If you modify isolateserver.ISOLATED_FILE_VERSION, you'll have to update
1047 # the hash below. Sorry about that.
1049 '189dbab83102b8ebcff92c1332a25fe26c1a5d7d %s\n' % p,
# Script entry point: '-v' enables full assertion diffs and DEBUG logging.
# NOTE(review): the excerpt ends at original line 1059 — the line that actually
# starts the test runner (presumably unittest.main()) is not visible here.
1055 if __name__ == '__main__':
1056 if '-v' in sys.argv:
1057 unittest.TestCase.maxDiff = None
1058 logging.basicConfig(
1059 level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR))