ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, ROOT_DIR)
-import isolateserver
+import isolated_format
import run_isolated
+from utils import file_path
-VERBOSE = False
+import test_utils
ALGO = hashlib.sha1
self.data_dir = os.path.join(ROOT_DIR, 'tests', 'run_isolated')
def tearDown(self):
- run_isolated.rmtree(self.tempdir)
+ file_path.rmtree(self.tempdir)
super(RunIsolatedTest, self).tearDown()
def _result_tree(self):
def _run(self, args):
cmd = [sys.executable, self.run_isolated_zip]
cmd.extend(args)
- if VERBOSE:
- cmd.extend(['-v'] * 2)
- pipe = None
- else:
- pipe = subprocess.PIPE
+ pipe = subprocess.PIPE
logging.debug(' '.join(cmd))
proc = subprocess.Popen(
cmd,
Returns its sha-1 hash.
"""
filepath = os.path.join(self.data_dir, filename)
- h = isolateserver.hash_file(filepath, ALGO)
+ h = isolated_format.hash_file(filepath, ALGO)
shutil.copyfile(filepath, os.path.join(self.table, h))
return h
return [
'--isolated', isolated,
'--cache', self.cache,
- '--isolate-server', self.table,
+ '--indir', self.table,
'--namespace', 'default',
]
return [
'--hash', hash_value,
'--cache', self.cache,
- '--isolate-server', self.table,
+ '--indir', self.table,
'--namespace', 'default',
]
self._store('file1.txt'),
self._store('file1_copy.txt'),
self._store('repeated_files.py'),
- isolateserver.hash_file(isolated, ALGO),
+ isolated_format.hash_file(isolated, ALGO),
]
out, err, returncode = self._run(
self._generate_args_with_isolated(isolated))
- if not VERBOSE:
- self.assertEqual('Success\n', out, (out, err))
+ self.assertEqual('Success\n', out, (out, err))
self.assertEqual(0, returncode)
actual = list_files_tree(self.cache)
self.assertEqual(sorted(set(expected)), actual)
]
out, err, returncode = self._run(self._generate_args_with_hash(result_hash))
- if not VERBOSE:
- self.assertEqual('', err)
- self.assertEqual('Success\n', out, out)
+ self.assertEqual('', err)
+ self.assertEqual('Success\n', out, out)
self.assertEqual(0, returncode)
actual = list_files_tree(self.cache)
self.assertEqual(sorted(set(expected)), actual)
result_hash,
]
out, err, returncode = self._run(self._generate_args_with_hash(result_hash))
- if not VERBOSE:
- self.assertEqual('', out)
- self.assertIn('No command to run\n', err)
+ self.assertEqual('', out)
+ self.assertIn('No command to run\n', err)
self.assertEqual(1, returncode)
actual = list_files_tree(self.cache)
self.assertEqual(sorted(expected), actual)
self._store('repeated_files.isolated'),
]
out, err, returncode = self._run(self._generate_args_with_hash(result_hash))
- if not VERBOSE:
- self.assertEqual('', err)
- self.assertEqual('Success\n', out)
+ self.assertEqual('', err)
+ self.assertEqual('Success\n', out)
self.assertEqual(0, returncode)
actual = list_files_tree(self.cache)
self.assertEqual(sorted(expected), actual)
]
out, err, returncode = self._run(self._generate_args_with_hash(result_hash))
- if not VERBOSE:
- self.assertEqual('', err)
- self.assertEqual('Success\n', out)
+ self.assertEqual('', err)
+ self.assertEqual('Success\n', out)
self.assertEqual(0, returncode)
actual = list_files_tree(self.cache)
self.assertEqual(sorted(expected), actual)
# Test that an entry with an invalid file size properly gets removed and
# fetched again. This test case also checks for file modes.
isolated_file = os.path.join(self.data_dir, 'file_with_size.isolated')
- isolated_hash = isolateserver.hash_file(isolated_file, ALGO)
+ isolated_hash = isolated_format.hash_file(isolated_file, ALGO)
file1_hash = self._store('file1.txt')
# Note that <tempdir>/table/<file1_hash> has 640 mode.
# Run the test once to generate the cache.
- out, err, returncode = self._run(self._generate_args_with_isolated(
+ _out, _err, returncode = self._run(self._generate_args_with_isolated(
isolated_file))
- if VERBOSE:
- print out
- print err
self.assertEqual(0, returncode)
expected = {
- '.': (040775, 040755, 040777),
- 'state.json': (0100664, 0100644, 0100666),
+ '.': (040707, 040707, 040777),
+ 'state.json': (0100606, 0100606, 0100666),
# The reason for 0100666 on Windows is that the file node had to be
# modified to delete the hardlinked node. The read only bit is reset on
# load.
os.stat(cached_file_path).st_size)
# Rerun the test and make sure the cache contains the right file afterwards.
- out, err, returncode = self._run(self._generate_args_with_isolated(
+ _out, _err, returncode = self._run(self._generate_args_with_isolated(
isolated_file))
- if VERBOSE:
- print out
- print err
self.assertEqual(0, returncode)
expected = {
'.': (040700, 040700, 040777),
# Test that an entry with an invalid file size properly gets removed and
# fetched again. This test case also checks for file modes.
isolated_file = os.path.join(self.data_dir, 'file_with_size.isolated')
- isolated_hash = isolateserver.hash_file(isolated_file, ALGO)
+ isolated_hash = isolated_format.hash_file(isolated_file, ALGO)
file1_hash = self._store('file1.txt')
# Note that <tempdir>/table/<file1_hash> has 640 mode.
# Run the test once to generate the cache.
- out, err, returncode = self._run(self._generate_args_with_isolated(
+ _out, _err, returncode = self._run(self._generate_args_with_isolated(
isolated_file))
- if VERBOSE:
- print out
- print err
self.assertEqual(0, returncode)
expected = {
- '.': (040775, 040755, 040777),
- 'state.json': (0100664, 0100644, 0100666),
+ '.': (040707, 040707, 040777),
+ 'state.json': (0100606, 0100606, 0100666),
file1_hash: (0100400, 0100400, 0100666),
isolated_hash: (0100400, 0100400, 0100444),
}
os.stat(cached_file_path).st_size)
# Rerun the test and make sure the cache contains the right file afterwards.
- out, err, returncode = self._run(self._generate_args_with_isolated(
+ _out, _err, returncode = self._run(self._generate_args_with_isolated(
isolated_file))
- if VERBOSE:
- print out
- print err
self.assertEqual(0, returncode)
expected = {
'.': (040700, 040700, 040777),
if __name__ == '__main__':
- VERBOSE = '-v' in sys.argv
- logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
- unittest.main()
+ test_utils.main()