2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 """Reads a .isolated, creates a tree of hardlinks and runs the test.
25 from third_party.depot_tools import fix_encoding
28 from utils import threading_utils
29 from utils import tools
30 from utils import zip_package
# Module-level constants describing where this script lives and how files are
# mapped into the run directory.
# NOTE(review): the embedded line numbering in this extract jumps (37, 40, 45,
# 48-49, 52, 55, 58-65 missing) — the `else:` branch setting MAIN_DIR when not
# running from a zip, and the `FLAVOR_MAPPING = {` opener, are not visible here.
35 # Absolute path to this file (can be None if running from zip on Mac).
36 THIS_FILE_PATH = os.path.abspath(__file__) if __file__ else None
38 # Directory that contains this file (might be inside zip package).
39 BASE_DIR = os.path.dirname(THIS_FILE_PATH) if __file__ else None
41 # Directory that contains currently running script file.
42 if zip_package.get_main_script_path():
43 MAIN_DIR = os.path.dirname(
44 os.path.abspath(zip_package.get_main_script_path()))
46 # This happens when 'import run_isolated' is executed at the python
47 # interactive prompt, in that case __file__ is undefined.
# Action codes consumed by link_file(); values 1..4 inclusive.
50 # Types of action accepted by link_file().
51 HARDLINK, HARDLINK_WITH_FALLBACK, SYMLINK, COPY = range(1, 5)
53 # The name of the log file to use.
54 RUN_ISOLATED_LOG_FILE = 'run_isolated.log'
56 # The name of the log to use for the run_test_cases.py command
57 RUN_TEST_CASES_LOG = 'run_test_cases.log'
# NOTE(review): only the freebsd entries of the sys.platform -> flavor mapping
# survive in this extract; the dict opener and other entries are missing.
60 # Used by get_flavor().
66 'freebsd7': 'freebsd',
67 'freebsd8': 'freebsd',
# Builds a self-contained zip of this module plus isolateserver.py and the
# third_party/utils trees, rooted at BASE_DIR.
# NOTE(review): extract is missing original lines 73, 76, 80-81 and everything
# after line 86 — the closing docstring quote and the final `return package`
# are presumably among them; confirm against the full file.
71 def get_as_zip_package(executable=True):
72 """Returns ZipPackage with this module and all its dependencies.
74 If |executable| is True will store run_isolated.py as __main__.py so that
75 zip package is directly executable be python.
77 # Building a zip package when running from another zip package is
78 # unsupported and probably unneeded.
79 assert not zip_package.is_zipped_module(sys.modules[__name__])
# BASE_DIR/THIS_FILE_PATH are None when running zipped on Mac (see constants
# above), which this assert effectively precludes.
82 package = zip_package.ZipPackage(root=BASE_DIR)
83 package.add_python_file(THIS_FILE_PATH, '__main__.py' if executable else None)
84 package.add_python_file(os.path.join(BASE_DIR, 'isolateserver.py'))
85 package.add_directory(os.path.join(BASE_DIR, 'third_party'))
86 package.add_directory(os.path.join(BASE_DIR, 'utils'))
# NOTE(review): the `def get_flavor():` header (original line 90) is missing
# from this extract; these two lines are its docstring and body. Defaults to
# 'linux' for any platform not present in FLAVOR_MAPPING.
91 """Returns the system default flavor. Copied from gyp/pylib/gyp/common.py."""
92 return FLAVOR_MAPPING.get(sys.platform, 'linux')
# Cross-platform hard link: Win32 API via ctypes on Windows, os.link() elsewhere.
# NOTE(review): original lines 100-101 are missing — presumably the raise on
# CreateHardLinkW failure and the `else:` introducing the POSIX branch.
# `unicode(...)` confirms this file targets Python 2.
95 def os_link(source, link_name):
96 """Add support for os.link() on Windows."""
97 if sys.platform == 'win32':
98 if not ctypes.windll.kernel32.CreateHardLinkW(
99 unicode(link_name), unicode(source), 0):
102 os.link(source, link_name)
def readable_copy(outfile, infile):
  """Copies |infile| to |outfile| and grants read access to everyone.

  shutil.copy2() carries over the source's metadata (timestamps, permission
  bits) first; the user/group/other read bits are then OR'ed into the
  destination's mode so the copy is world-readable regardless of the source.
  """
  shutil.copy2(infile, outfile)
  everyone_readable = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
  os.chmod(outfile, os.stat(outfile).st_mode | everyone_readable)
# Maps |infile| to |outfile| by hard link, symlink, or copy according to
# |action|; raises MappingError when the source is missing or the destination
# already exists.
# NOTE(review): this extract is missing several structural lines — presumably
# `if action == COPY:` (124-125), the `else:`/`try:` around os_link (130-131),
# `except OSError as e:` (133), and part of the fallback logging call
# (138, 140). Confirm against the full file before editing.
113 def link_file(outfile, infile, action):
114 """Links a file. The type of link depends on |action|."""
115 logging.debug('Mapping %s to %s' % (infile, outfile))
116 if action not in (HARDLINK, HARDLINK_WITH_FALLBACK, SYMLINK, COPY):
117 raise ValueError('Unknown mapping action %s' % action)
118 if not os.path.isfile(infile):
119 raise isolateserver.MappingError('%s is missing' % infile)
120 if os.path.isfile(outfile):
121 raise isolateserver.MappingError(
122 '%s already exist; insize:%d; outsize:%d' %
123 (outfile, os.stat(infile).st_size, os.stat(outfile).st_size))
126 readable_copy(outfile, infile)
127 elif action == SYMLINK and sys.platform != 'win32':
128 # On windows, symlink are converted to hardlink and fails over to copy.
129 os.symlink(infile, outfile) # pylint: disable=E1101
132 os_link(infile, outfile)
# HARDLINK is strict (re-raises as MappingError); HARDLINK_WITH_FALLBACK
# falls back to a readable copy, e.g. across file systems.
134 if action == HARDLINK:
135 raise isolateserver.MappingError(
136 'Failed to hardlink %s to %s: %s' % (infile, outfile, e))
137 # Probably a different file system.
139 'Failed to hardlink, failing back to copy %s to %s' % (
141 readable_copy(outfile, infile)
# NOTE(review): the docstring says "executable bit" but the function name and
# callers (make_writable) indicate it toggles the WRITE bit — likely a stale
# docstring; confirm before fixing. Original lines 147-150 (the mode
# computation from |read_only|), 153 and 157-158 (else branches, the plain
# os.chmod path) are missing from this extract.
144 def _set_write_bit(path, read_only):
145 """Sets or resets the executable bit on a file or directory."""
146 mode = os.lstat(path).st_mode
# lchmod, where available, changes the link itself rather than its target.
151 if hasattr(os, 'lchmod'):
152 os.lchmod(path, mode) # pylint: disable=E1101
154 if stat.S_ISLNK(mode):
155 # Skip symlink without lchmod() support.
156 logging.debug('Can\'t change +w bit on symlink %s' % path)
159 # TODO(maruel): Implement proper DACL modification on Windows.
# Walks |root| top-down and applies _set_write_bit to every file and directory
# under it. Processing dirnames after filenames within each dirpath still
# works with topdown=True because the walk yields a parent before descending.
163 def make_writable(root, read_only):
164 """Toggle the writable bit on a directory tree."""
165 assert os.path.isabs(root), root
166 for dirpath, dirnames, filenames in os.walk(root, topdown=True):
167 for filename in filenames:
168 _set_write_bit(os.path.join(dirpath, filename), read_only)
170 for dirname in dirnames:
171 _set_write_bit(os.path.join(dirpath, dirname), read_only)
# NOTE(review): the `def rmtree(root):` header (around original line 174) and
# the retry loop body (178-183, 186-188) are missing from this extract; only
# the docstring, the make_writable call and fragments of the Windows retry
# path survive. `print >>` is Python 2 syntax.
175 """Wrapper around shutil.rmtree() to retry automatically on Windows."""
176 make_writable(root, False)
177 if sys.platform == 'win32':
182 except WindowsError: # pylint: disable=E0602
184 print >> sys.stderr, (
185 'The test has subprocess outliving it. Sleep %d seconds.' % delay)
# NOTE(review): the function body (original lines 193+) is missing from this
# extract — presumably a try/os.remove/except OSError: pass.
191 def try_remove(filepath):
192 """Removes a file without crashing even if it doesn't exist."""
# NOTE(review): original line 213 (presumably `return False` for the
# mismatched-drive-letter case) and the docstring closing quote (line 203)
# are missing from this extract.
199 def is_same_filesystem(path1, path2):
200 """Returns True if both paths are on the same filesystem.
202 This is required to enable the use of hardlinks.
204 assert os.path.isabs(path1), path1
205 assert os.path.isabs(path2), path2
206 if sys.platform == 'win32':
207 # If the drive letter mismatches, assume it's a separate partition.
208 # TODO(maruel): It should look at the underlying drive, a drive letter could
209 # be a mount point to a directory on another drive.
210 assert re.match(r'^[a-zA-Z]\:\\.*', path1), path1
211 assert re.match(r'^[a-zA-Z]\:\\.*', path2), path2
212 if path1[0].lower() != path2[0].lower():
# POSIX: same device id means same file system (os.stat follows symlinks).
214 return os.stat(path1).st_dev == os.stat(path2).st_dev
def get_free_space(path):
  """Returns the number of free bytes on the partition containing |path|."""
  if sys.platform != 'win32':
    # POSIX: free block count times fundamental block size.
    stats = os.statvfs(path)  # pylint: disable=E1101
    return stats.f_bfree * stats.f_frsize
  # Windows has no statvfs(); query the Win32 API through ctypes instead.
  free_bytes = ctypes.c_ulonglong(0)
  ctypes.windll.kernel32.GetDiskFreeSpaceExW(
      ctypes.c_wchar_p(path), None, None, ctypes.pointer(free_bytes))
  return free_bytes.value
# NOTE(review): original line 231 (presumably `base_temp_dir = None`, the
# default when root_dir shares a file system with the system temp dir) is
# missing from this extract; without it the conditional below would leave
# base_temp_dir unbound in the same-filesystem case.
229 def make_temp_dir(prefix, root_dir):
230 """Returns a temporary directory on the same file system as root_dir."""
# Placing the temp dir next to root_dir keeps hardlinking possible
# (see is_same_filesystem).
232 if not is_same_filesystem(root_dir, tempfile.gettempdir()):
233 base_temp_dir = os.path.dirname(root_dir)
234 return tempfile.mkdtemp(prefix=prefix, dir=base_temp_dir)
# Plain value object bundling the three cache-trimming limits used by
# DiskCache. A value of 0 disables the corresponding limit.
# NOTE(review): the docstring delimiters (original lines 239-240, 246-247)
# are missing from this extract; lines 241-245 are the docstring interior.
237 class CachePolicies(object):
238 def __init__(self, max_cache_size, min_free_space, max_items):
241 - max_cache_size: Trim if the cache gets larger than this value. If 0, the
242 cache is effectively a leak.
243 - min_free_space: Trim if disk free space becomes lower than this value. If
244 0, it unconditionally fill the disk.
245 - max_items: Maximum number of items to keep in the cache. If 0, do not
248 self.max_cache_size = max_cache_size
249 self.min_free_space = min_free_space
250 self.max_items = max_items
# LRU content-addressed cache: one file per digest in a flat directory, LRU
# ordering persisted to state.json. Context-manager protocol loads state on
# entry and trims/saves on exit.
# NOTE(review): this extract drops many original lines throughout the class
# (e.g. 277-282, 284-289, 292-296, __enter__, the logging.info calls, the
# `try:`/`except`/`continue` scaffolding in _load/write/_delete_file, several
# method headers such as _load/_save/_trim and return statements). Comments
# below describe only what the surviving lines show.
253 class DiskCache(isolateserver.LocalCache):
254 """Stateful LRU cache in a flat hash table in a directory.
256 Saves its state as json file.
258 STATE_FILE = 'state.json'
260 def __init__(self, cache_dir, policies, algo):
263 cache_dir: directory where to place the cache.
264 policies: cache retention policies.
265 algo: hashing algorithm used.
267 super(DiskCache, self).__init__()
269 self.cache_dir = cache_dir
270 self.policies = policies
271 self.state_file = os.path.join(cache_dir, self.STATE_FILE)
273 # All protected methods (starting with '_') except _path should be called
274 # with this lock locked.
275 self._lock = threading_utils.LockWithAssert()
276 self._lru = lru.LRUDict()
283 with tools.Profiler('Setup'):
# __exit__: trims the cache and logs added/current/removed byte counts.
290 def __exit__(self, _exc_type, _exec_value, _traceback):
291 with tools.Profiler('CleanupTrimming'):
297 len(self._added), sum(self._added) / 1024)
299 '%5d (%8dkb) current',
301 sum(self._lru.itervalues()) / 1024)
303 '%5d (%8dkb) removed',
304 len(self._removed), sum(self._removed) / 1024)
307 self._free_disk / 1024)
# Returns the set of digests currently in the cache.
310 def cached_set(self):
312 return self._lru.keys_set()
# Marks |digest| as most recently used if the on-disk file passes the
# cheap size validation (hash is NOT recomputed).
314 def touch(self, digest, size):
315 # Verify an actual file is valid. Note that is doesn't compute the hash so
316 # it could still be corrupted. Do it outside the lock.
317 if not isolateserver.is_valid_file(self._path(digest), size):
320 # Update it's LRU position.
322 if digest not in self._lru:
324 self._lru.touch(digest)
327 def evict(self, digest):
329 self._lru.pop(digest)
330 self._delete_file(digest, isolateserver.UNKNOWN_FILE_SIZE)
332 def read(self, digest):
333 with open(self._path(digest), 'rb') as f:
# write(): streams |content| to disk; on any failure the incomplete file is
# deleted and the exception propagates (see comment below).
336 def write(self, digest, content):
337 path = self._path(digest)
339 size = isolateserver.file_write(path, content)
341 # There are two possible places were an exception can occur:
342 # 1) Inside |content| generator in case of network or unzipping errors.
343 # 2) Inside file_write itself in case of disk IO errors.
344 # In any case delete an incomplete file and propagate the exception to
345 # caller, it will be logged there.
349 self._add(digest, size)
# Hardlinks the cached item to |dest|, optionally forcing a file mode.
351 def link(self, digest, dest, file_mode=None):
352 link_file(dest, self._path(digest), HARDLINK)
353 if file_mode is not None:
354 os.chmod(dest, file_mode)
# _load (header missing from extract): restores the LRU from state.json and
# reconciles it with the directory contents.
357 """Loads state of the cache from json file."""
358 self._lock.assert_locked()
360 if not os.path.isdir(self.cache_dir):
361 os.makedirs(self.cache_dir)
363 # Load state of the cache.
364 if os.path.isfile(self.state_file):
366 self._lru = lru.LRUDict.load(self.state_file)
367 except ValueError as err:
368 logging.error('Failed to load cache state: %s' % (err,))
369 # Don't want to keep broken state file.
370 os.remove(self.state_file)
372 # Ensure that all files listed in the state still exist and add new ones.
373 previous = self._lru.keys_set()
375 for filename in os.listdir(self.cache_dir):
376 if filename == self.STATE_FILE:
378 if filename in previous:
379 previous.remove(filename)
# Files on disk but not in state.json: delete if the name is not a valid
# hash, otherwise re-add as oldest entries.
382 if not isolateserver.is_valid_hash(filename, self.algo):
383 logging.warning('Removing unknown file %s from cache', filename)
384 try_remove(self._path(filename))
386 # File that's not referenced in 'state.json'.
387 # TODO(vadimsh): Verify its SHA1 matches file name.
388 logging.warning('Adding unknown file %s to cache', filename)
389 unknown.append(filename)
392 # Add as oldest files. They will be deleted eventually if not accessed.
393 self._add_oldest_list(unknown)
394 logging.warning('Added back %d unknown files', len(unknown))
# Entries in state.json whose files vanished are dropped.
397 # Filter out entries that were not found.
398 logging.warning('Removed %d lost files', len(previous))
399 for filename in previous:
400 self._lru.pop(filename)
# _save (header missing from extract): persists LRU ordering to state.json.
404 """Saves the LRU ordering."""
405 self._lock.assert_locked()
406 self._lru.save(self.state_file)
# _trim (header missing from extract): enforces the three CachePolicies
# limits in order: total size, item count, then minimum free disk space.
409 """Trims anything we don't know, make sure enough free space exists."""
410 self._lock.assert_locked()
412 # Ensure maximum cache size.
413 if self.policies.max_cache_size:
414 total_size = sum(self._lru.itervalues())
415 while total_size > self.policies.max_cache_size:
416 total_size -= self._remove_lru_file()
418 # Ensure maximum number of items in the cache.
419 if self.policies.max_items and len(self._lru) > self.policies.max_items:
420 for _ in xrange(len(self._lru) - self.policies.max_items):
421 self._remove_lru_file()
423 # Ensure enough free space.
424 self._free_disk = get_free_space(self.cache_dir)
425 trimmed_due_to_space = False
427 self.policies.min_free_space and
429 self._free_disk < self.policies.min_free_space):
430 trimmed_due_to_space = True
431 self._remove_lru_file()
432 self._free_disk = get_free_space(self.cache_dir)
433 if trimmed_due_to_space:
434 total = sum(self._lru.itervalues())
436 'Trimmed due to not enough free disk space: %.1fkb free, %.1fkb '
437 'cache (%.1f%% of its maximum capacity)',
438 self._free_disk / 1024.,
440 100. * self.policies.max_cache_size / float(total),
444 def _path(self, digest):
445 """Returns the path to one item."""
446 return os.path.join(self.cache_dir, digest)
448 def _remove_lru_file(self):
449 """Removes the last recently used file and returns its size."""
450 self._lock.assert_locked()
451 digest, size = self._lru.pop_oldest()
452 self._delete_file(digest, size)
455 def _add(self, digest, size=isolateserver.UNKNOWN_FILE_SIZE):
456 """Adds an item into LRU cache marking it as a newest one."""
457 self._lock.assert_locked()
458 if size == isolateserver.UNKNOWN_FILE_SIZE:
459 size = os.stat(self._path(digest)).st_size
460 self._added.append(size)
461 self._lru.add(digest, size)
463 def _add_oldest_list(self, digests):
464 """Adds a bunch of items into LRU cache marking them as oldest ones."""
465 self._lock.assert_locked()
467 for digest in digests:
468 size = os.stat(self._path(digest)).st_size
469 self._added.append(size)
470 pairs.append((digest, size))
471 self._lru.batch_insert_oldest(pairs)
473 def _delete_file(self, digest, size=isolateserver.UNKNOWN_FILE_SIZE):
474 """Deletes cache file from the file system."""
475 self._lock.assert_locked()
477 if size == isolateserver.UNKNOWN_FILE_SIZE:
478 size = os.stat(self._path(digest)).st_size
479 os.remove(self._path(digest))
480 self._removed.append(size)
482 logging.error('Error attempting to delete a file %s:\n%s' % (digest, e))
# Fetches the .isolated tree into |outdir| via the cache, then executes the
# embedded command with subprocess.call and returns its exit code.
# NOTE(review): this extract is missing several original lines (488-490,
# 493-496, 501-502, 508, 513, 516, 519+), including the `try:` opener paired
# with the ConfigError handler, some fetch_isolated keyword arguments, the
# error return value, and the trailing OSError handling around the run.
485 def run_tha_test(isolated_hash, storage, cache, algo, outdir):
486 """Downloads the dependencies in the cache, hardlinks them into a |outdir|
487 and runs the executable.
491 settings = isolateserver.fetch_isolated(
492 isolated_hash=isolated_hash,
497 os_flavor=get_flavor(),
498 require_command=True)
499 except isolateserver.ConfigError as e:
500 print >> sys.stderr, str(e)
# read_only in the .isolated settings drives whether the mapped tree gets
# its write bits stripped before running.
503 if settings.read_only:
504 logging.info('Making files read only')
505 make_writable(outdir, True)
506 cwd = os.path.normpath(os.path.join(outdir, settings.relative_cwd))
507 logging.info('Running %s, cwd=%s' % (settings.command, cwd))
509 # TODO(csharp): This should be specified somewhere else.
510 # TODO(vadimsh): Pass it via 'env_vars' in manifest.
511 # Add a rotating log file if one doesn't already exist.
512 env = os.environ.copy()
514 env.setdefault('RUN_TEST_CASES_LOG_FILE',
515 os.path.join(MAIN_DIR, RUN_TEST_CASES_LOG))
517 with tools.Profiler('RunTest'):
518 return subprocess.call(settings.command, cwd=cwd, env=env)
520 print >> sys.stderr, 'Failed to run %s; cwd=%s' % (settings.command, cwd)
# Interior of the command-line entry point (its `def main():` header, around
# original line 523-527, is not visible in this extract). Parses options,
# builds the cache policies/storage, then runs the test in a temp dir.
# NOTE(review): many option-declaration lines (add_option calls, metavar and
# dest arguments, lines 531, 533, 535-537, 539-540, 542, 546-547, 551,
# 553-556, 558-561, 564-567, 570-575, 577, 579, 581, 585, 590, 596-597, 601,
# 605+) are missing from this extract.
528 tools.disable_buffering()
529 parser = tools.OptionParserWithLogging(
530 usage='%prog <options>',
532 log_file=RUN_ISOLATED_LOG_FILE)
534 group = optparse.OptionGroup(parser, 'Data source')
538 help='File/url describing what to map or run')
541 help='Hash of the .isolated to grab from the hash table')
543 '-I', '--isolate-server',
544 metavar='URL', default='',
545 help='Isolate server to use')
548 default='default-gzip',
549 help='namespace to use when using isolateserver, default: %default')
550 parser.add_option_group(group)
552 group = optparse.OptionGroup(parser, 'Cache management')
557 help='Cache directory, default=%default')
562 default=20*1024*1024*1024,
563 help='Trim if the cache gets larger than this value, default=%default')
568 default=2*1024*1024*1024,
569 help='Trim if disk free space becomes lower than this value, '
576 help='Trim if more than this number of items are in the cache '
578 parser.add_option_group(group)
580 options, args = parser.parse_args()
# Exactly one of --isolated / --hash must be supplied, no positional args.
582 if bool(options.isolated) == bool(options.hash):
583 logging.debug('One and only one of --isolated or --hash is required.')
584 parser.error('One and only one of --isolated or --hash is required.')
586 logging.debug('Unsupported args %s' % ' '.join(args))
587 parser.error('Unsupported args %s' % ' '.join(args))
588 if not options.isolate_server:
589 parser.error('--isolate-server is required.')
591 options.cache = os.path.abspath(options.cache)
592 policies = CachePolicies(
593 options.max_cache_size, options.min_free_space, options.max_items)
594 storage = isolateserver.get_storage(options.isolate_server, options.namespace)
595 algo = isolateserver.get_hash_algo(options.namespace)
598 # |options.cache| may not exist until DiskCache() instance is created.
599 cache = DiskCache(options.cache, policies, algo)
600 outdir = make_temp_dir('run_tha_test', options.cache)
# These are the arguments to run_tha_test; its call line (601) is missing.
602 options.isolated or options.hash, storage, cache, algo, outdir)
603 except Exception as e:
604 # Make sure any exception is logged.
# Script entry point.
# NOTE(review): the trailing call (original line 612+, presumably
# `sys.exit(main())`) is missing from this extract.
609 if __name__ == '__main__':
610 # Ensure that we are always running with the correct encoding.
611 fix_encoding.fix_encoding()