import subprocess
import sys
+import auth
import isolate_format
import isolateserver
import run_isolated
import trace_inputs
-# Import here directly so isolate is easier to use as a library.
-from run_isolated import get_flavor
-
from third_party import colorama
from third_party.depot_tools import fix_encoding
from third_party.depot_tools import subcommand
from utils import tools
-__version__ = '0.2'
+__version__ = '0.3.1'
class ExecutionError(Exception):
probably be tracked.
- untracked: list of files names that must not be tracked.
"""
- # These directories are not guaranteed to be always present on every builder.
- CHROMIUM_OPTIONAL_DIRECTORIES = (
- 'test/data/plugin',
- 'third_party/WebKit/LayoutTests',
- )
-
new_tracked = []
new_untracked = list(untracked)
return False
if ' ' in filepath:
return False
- if any(i in filepath for i in CHROMIUM_OPTIONAL_DIRECTORIES):
- return False
# Look if any element in the path is a symlink.
split = filepath.split('/')
for i in range(len(split)):
def chromium_fix(f, variables):
"""Fixes an isolate dependency with Chromium-specific fixes."""
- # Skip log in PRODUCT_DIR. Note that these are applied on '/' style path
- # separator.
- LOG_FILE = re.compile(r'^\<\(PRODUCT_DIR\)\/[^\/]+\.log$')
- # Ignored items.
- IGNORED_ITEMS = (
- # http://crbug.com/160539, on Windows, it's in chrome/.
- 'Media Cache/',
- 'chrome/Media Cache/',
- # 'First Run' is not created by the compile, but by the test itself.
- '<(PRODUCT_DIR)/First Run')
-
# Blacklist logs and other unimportant files.
- if LOG_FILE.match(f) or f in IGNORED_ITEMS:
+ # - 'First Run' is not created by the compile but by the test itself.
+ # - Skip log in PRODUCT_DIR. Note that these are applied on '/' style path
+ # separator at this point.
+ if (re.match(r'^\<\(PRODUCT_DIR\)\/[^\/]+\.log$', f) or
+ f == '<(PRODUCT_DIR)/First Run'):
logging.debug('Ignoring %s', f)
return None
config_variable_names, config_values = zip(
*sorted(config_variables.iteritems()))
out = isolate_format.Configs(None, config_variable_names)
- # TODO(maruel): Create a public interface in Configs to add a ConfigSettings.
- # pylint: disable=W0212
- out._by_config[config_values] = isolate_format.ConfigSettings(dependencies)
+ out.set_config(
+ config_values,
+ isolate_format.ConfigSettings(
+ dependencies, os.path.abspath(relative_cwd)))
return out.make_isolate_file()
new_slave = {
'algo': data['algo'],
'files': {},
- 'os': data['os'],
'version': data['version'],
}
for f in data['files'].keys():
separator instead of '/' used in .isolate file.
"""
MEMBERS = (
+ # Value of sys.platform so that the file is rejected if loaded from a
+ # different OS. While this should never happen in practice, users are ...
+ # "creative".
+ 'OS',
# Algorithm used to generate the hash. The only supported value is at the
  # time of writing 'sha-1'.
'algo',
+ # List of included .isolated files. Used to support/remember 'slave'
+ # .isolated files. Relative path to isolated_basedir.
+ 'child_isolated_files',
# Cache of the processed command. This value is saved because .isolated
# files are never loaded by isolate.py so it's the only way to load the
# command safely.
'files',
# Path of the original .isolate file. Relative path to isolated_basedir.
'isolate_file',
- # List of included .isolated files. Used to support/remember 'slave'
- # .isolated files. Relative path to isolated_basedir.
- 'child_isolated_files',
+ # GYP variables used to generate the .isolated files paths based on path
+ # variables. Frequent examples are DEPTH and PRODUCT_DIR.
+ 'path_variables',
# If the generated directory tree should be read-only.
'read_only',
# Relative cwd to use to start the command.
'relative_cwd',
- # GYP variables used to generate the .isolated files paths based on path
- # variables. Frequent examples are DEPTH and PRODUCT_DIR.
- 'path_variables',
- # Version of the file format in format 'major.minor'. Any non-breaking
- # change must update minor. Any breaking change must update major.
+ # Root directory the files are mapped from.
+ 'root_dir',
+ # Version of the saved state file format. Any breaking change must update
+ # the value.
'version',
)
+ # Bump this version whenever the saved state changes. It is also keyed on the
+ # .isolated file version so any change in the generator will invalidate .state
+ # files.
+ EXPECTED_VERSION = isolateserver.ISOLATED_FILE_VERSION + '.2'
+
def __init__(self, isolated_basedir):
"""Creates an empty SavedState.
- |isolated_basedir| is the directory where the .isolated and .isolated.state
- files are saved.
+ Arguments:
+ isolated_basedir: the directory where the .isolated and .isolated.state
+ files are saved.
"""
super(SavedState, self).__init__()
assert os.path.isabs(isolated_basedir), isolated_basedir
self.isolated_basedir = isolated_basedir
# The default algorithm used.
+ self.OS = sys.platform
self.algo = isolateserver.SUPPORTED_ALGOS['sha-1']
self.child_isolated_files = []
self.command = []
self.path_variables = {}
self.read_only = None
self.relative_cwd = None
- self.version = isolateserver.ISOLATED_FILE_VERSION
+ self.root_dir = None
+ self.version = self.EXPECTED_VERSION
+
+ def update_config(self, config_variables):
+ """Updates the saved state with only config variables."""
+ self.config_variables.update(config_variables)
- def update(
- self, isolate_file, path_variables, config_variables, extra_variables):
+ def update(self, isolate_file, path_variables, extra_variables):
"""Updates the saved state with new data to keep GYP variables and internal
reference to the original .isolate file.
"""
# .isolated.state.
assert isolate_file == self.isolate_file or not self.isolate_file, (
isolate_file, self.isolate_file)
- self.config_variables.update(config_variables)
self.extra_variables.update(extra_variables)
self.isolate_file = isolate_file
self.path_variables.update(path_variables)
'algo': isolateserver.SUPPORTED_ALGOS_REVERSE[self.algo],
'files': dict(
(filepath, strip(data)) for filepath, data in self.files.iteritems()),
- 'version': self.version,
+ # The version of the .state file is different than the one of the
+ # .isolated file.
+ 'version': isolateserver.ISOLATED_FILE_VERSION,
}
- if self.config_variables.get('OS'):
- out['os'] = self.config_variables['OS']
if self.command:
out['command'] = self.command
if self.read_only is not None:
file is saved in OS-specific format.
"""
out = super(SavedState, cls).load(data, isolated_basedir)
- if data.get('os'):
- out.config_variables['OS'] = data['os']
+ if data.get('OS') != sys.platform:
+      raise isolateserver.ConfigError('Unexpected OS %s' % data.get('OS'))
# Converts human readable form back into the proper class type.
- algo = data.get('algo', 'sha-1')
+ algo = data.get('algo')
if not algo in isolateserver.SUPPORTED_ALGOS:
raise isolateserver.ConfigError('Unknown algo \'%s\'' % out.algo)
out.algo = isolateserver.SUPPORTED_ALGOS[algo]
# Refuse the load non-exact version, even minor difference. This is unlike
# isolateserver.load_isolated(). This is because .isolated.state could have
# changed significantly even in minor version difference.
- if not re.match(r'^(\d+)\.(\d+)$', out.version):
- raise isolateserver.ConfigError('Unknown version \'%s\'' % out.version)
- if out.version != isolateserver.ISOLATED_FILE_VERSION:
+ if out.version != cls.EXPECTED_VERSION:
raise isolateserver.ConfigError(
'Unsupported version \'%s\'' % out.version)
- # The .isolate file must be valid. It could be absolute on Windows if the
- # drive containing the .isolate and the drive containing the .isolated files
- # differ.
- assert not os.path.isabs(out.isolate_file) or sys.platform == 'win32'
- assert os.path.isfile(out.isolate_filepath), out.isolate_filepath
+ # The .isolate file must be valid. If it is not present anymore, zap the
+  # value as if it was not noted, so .isolate_file can safely be overridden
+ # later.
+ if out.isolate_file and not os.path.isfile(out.isolate_filepath):
+ out.isolate_file = None
+ if out.isolate_file:
+ # It could be absolute on Windows if the drive containing the .isolate and
+    # the drive containing the .isolated files differ, e.g. .isolate is on
+ # C:\\ and .isolated is on D:\\ .
+ assert not os.path.isabs(out.isolate_file) or sys.platform == 'win32'
+ assert os.path.isfile(out.isolate_filepath), out.isolate_filepath
return out
def flatten(self):
'CompleteState.load_isolate(%s, %s, %s, %s, %s, %s)',
cwd, isolate_file, path_variables, config_variables, extra_variables,
ignore_broken_items)
- relative_base_dir = os.path.dirname(isolate_file)
- # Processes the variables.
- path_variables = normalize_path_variables(
- cwd, path_variables, relative_base_dir)
- # Update the saved state.
- self.saved_state.update(
- isolate_file, path_variables, config_variables, extra_variables)
- path_variables = self.saved_state.path_variables
+ # Config variables are not affected by the paths and must be used to
+ # retrieve the paths, so update them first.
+ self.saved_state.update_config(config_variables)
with open(isolate_file, 'r') as f:
# At that point, variables are not replaced yet in command and infiles.
# infiles may contain directory entries and is in posix style.
- command, infiles, touched, read_only = (
+ command, infiles, touched, read_only, isolate_cmd_dir = (
isolate_format.load_isolate_for_config(
os.path.dirname(isolate_file), f.read(),
self.saved_state.config_variables))
+ # Processes the variables with the new found relative root. Note that 'cwd'
+ # is used when path variables are used.
+ path_variables = normalize_path_variables(
+ cwd, path_variables, isolate_cmd_dir)
+ # Update the rest of the saved state.
+ self.saved_state.update(isolate_file, path_variables, extra_variables)
+
total_variables = self.saved_state.path_variables.copy()
total_variables.update(self.saved_state.config_variables)
total_variables.update(self.saved_state.extra_variables)
# root_dir is automatically determined by the deepest root accessed with the
# form '../../foo/bar'. Note that path variables must be taken in account
# too, add them as if they were input files.
- root_dir = isolate_format.determine_root_dir(
- relative_base_dir, infiles + touched +
+ self.saved_state.root_dir = isolate_format.determine_root_dir(
+ isolate_cmd_dir, infiles + touched +
self.saved_state.path_variables.values())
# The relative directory is automatically determined by the relative path
# between root_dir and the directory containing the .isolate file,
# isolate_base_dir.
- relative_cwd = os.path.relpath(relative_base_dir, root_dir)
+ relative_cwd = os.path.relpath(isolate_cmd_dir, self.saved_state.root_dir)
# Now that we know where the root is, check that the path_variables point
# inside it.
for k, v in self.saved_state.path_variables.iteritems():
- if not file_path.path_starts_with(
- root_dir, os.path.join(relative_base_dir, v)):
+ dest = os.path.join(isolate_cmd_dir, relative_cwd, v)
+ if not file_path.path_starts_with(self.saved_state.root_dir, dest):
raise isolateserver.MappingError(
- 'Path variable %s=%r points outside the inferred root directory %s'
- % (k, v, root_dir))
- # Normalize the files based to root_dir. It is important to keep the
- # trailing os.path.sep at that step.
+ 'Path variable %s=%r points outside the inferred root directory '
+ '%s; %s'
+ % (k, v, self.saved_state.root_dir, dest))
+ # Normalize the files based to self.saved_state.root_dir. It is important to
+ # keep the trailing os.path.sep at that step.
infiles = [
file_path.relpath(
- file_path.normpath(os.path.join(relative_base_dir, f)), root_dir)
+ file_path.normpath(os.path.join(isolate_cmd_dir, f)),
+ self.saved_state.root_dir)
for f in infiles
]
touched = [
file_path.relpath(
- file_path.normpath(os.path.join(relative_base_dir, f)), root_dir)
+ file_path.normpath(os.path.join(isolate_cmd_dir, f)),
+ self.saved_state.root_dir)
for f in touched
]
- follow_symlinks = config_variables['OS'] != 'win'
+ follow_symlinks = sys.platform != 'win32'
# Expand the directories by listing each file inside. Up to now, trailing
# os.path.sep must be kept. Do not expand 'touched'.
infiles = expand_directories_and_symlinks(
- root_dir,
+ self.saved_state.root_dir,
infiles,
lambda x: re.match(r'.*\.(git|svn|pyc)$', x),
follow_symlinks,
filepath,
self.saved_state.files[infile],
self.saved_state.read_only,
- self.saved_state.config_variables['OS'],
self.saved_state.algo)
def save_files(self):
@property
def root_dir(self):
- """Returns the absolute path of the root_dir to reference the .isolate file
- via relative_cwd.
-
- So that join(root_dir, relative_cwd, basename(isolate_file)) is equivalent
- to isolate_filepath.
- """
- if not self.saved_state.isolate_file:
- raise ExecutionError('Please specify --isolate')
- isolate_dir = os.path.dirname(self.saved_state.isolate_filepath)
- # Special case '.'.
- if self.saved_state.relative_cwd == '.':
- root_dir = isolate_dir
- else:
- if not isolate_dir.endswith(self.saved_state.relative_cwd):
- raise ExecutionError(
- ('Make sure the .isolate file is in the directory that will be '
- 'used as the relative directory. It is currently in %s and should '
- 'be in %s') % (isolate_dir, self.saved_state.relative_cwd))
- # Walk back back to the root directory.
- root_dir = isolate_dir[:-(len(self.saved_state.relative_cwd) + 1)]
- return file_path.get_native_path_case(root_dir)
-
- @property
- def resultdir(self):
- """Returns the absolute path containing the .isolated file.
-
- It is usually equivalent to the variable PRODUCT_DIR. Uses the .isolated
- path as the value.
- """
- return os.path.dirname(self.isolated_filepath)
+ return self.saved_state.root_dir
def __str__(self):
def indent(data, indent_length):
complete_state = CompleteState.load_files(options.isolated)
else:
# Constructs a dummy object that cannot be saved. Useful for temporary
- # commands like 'run'.
- complete_state = CompleteState(None, SavedState())
+ # commands like 'run'. There is no directory containing a .isolated file so
+ # specify the current working directory as a valid directory.
+ complete_state = CompleteState(None, SavedState(os.getcwd()))
if not options.isolate:
if not complete_state.saved_state.isolate_file:
rel_isolate = file_path.safe_relpath(
options.isolate, complete_state.saved_state.isolated_basedir)
if rel_isolate != complete_state.saved_state.isolate_file:
- raise ExecutionError(
- '%s and %s do not match.' % (
- options.isolate, complete_state.saved_state.isolate_file))
+ # This happens if the .isolate file was moved for example. In this case,
+ # discard the saved state.
+ logging.warning(
+ '--isolated %s != %s as saved in %s. Discarding saved state',
+ rel_isolate,
+ complete_state.saved_state.isolate_file,
+ isolatedfile_to_state(options.isolated))
+ complete_state = CompleteState(
+ options.isolated,
+ SavedState(complete_state.saved_state.isolated_basedir))
if not skip_update:
# Then load the .isolate and expands directories.
isolate_format.eval_content(prev_content),
isolate_format.extract_comment(prev_content))
new_config = isolate_format.load_isolate_as_config(isolate_dir, value, '')
- config = isolate_format.union(prev_config, new_config)
+ config = prev_config.union(new_config)
data = config.make_isolate_file()
print('Updating %s' % complete_state.saved_state.isolate_file)
with open(complete_state.saved_state.isolate_filepath, 'wb') as f:
exceptions[0][2]
-def get_remap_dir(root_dir, isolated, outdir):
- """If necessary, creates a directory aside the root directory."""
- if outdir:
- if not os.path.isdir(outdir):
- os.makedirs(outdir)
- return outdir
-
- if not os.path.isabs(root_dir):
- root_dir = os.path.join(os.path.dirname(isolated), root_dir)
- return run_isolated.make_temp_dir(
- 'isolate-%s' % datetime.date.today(), root_dir)
-
-
def create_isolate_tree(outdir, root_dir, files, relative_cwd, read_only):
"""Creates a isolated tree usable for test execution.
return cwd
-### Commands.
+def prepare_for_archival(options, cwd):
+ """Loads the isolated file and create 'infiles' for archival."""
+ complete_state = load_complete_state(
+ options, cwd, options.subdir, False)
+ # Make sure that complete_state isn't modified until save_files() is
+ # called, because any changes made to it here will propagate to the files
+ # created (which is probably not intended).
+ complete_state.save_files()
+ infiles = complete_state.saved_state.files
+ # Add all the .isolated files.
+ isolated_hash = []
+ isolated_files = [
+ options.isolated,
+ ] + complete_state.saved_state.child_isolated_files
+ for item in isolated_files:
+ item_path = os.path.join(
+ os.path.dirname(complete_state.isolated_filepath), item)
+ # Do not use isolateserver.hash_file() here because the file is
+ # likely smallish (under 500kb) and its file size is needed.
+ with open(item_path, 'rb') as f:
+ content = f.read()
+ isolated_hash.append(
+ complete_state.saved_state.algo(content).hexdigest())
+ isolated_metadata = {
+ 'h': isolated_hash[-1],
+ 's': len(content),
+ 'priority': '0'
+ }
+ infiles[item_path] = isolated_metadata
+ return complete_state, infiles, isolated_hash
-def add_subdir_flag(parser):
- parser.add_option(
- '--subdir',
- help='Filters to a subdirectory. Its behavior changes depending if it '
- 'is a relative path as a string or as a path variable. Path '
- 'variables are always keyed from the directory containing the '
- '.isolate file. Anything else is keyed on the root directory.')
+
+### Commands.
def CMDarchive(parser, args):
All the files listed in the .isolated file are put in the isolate server
cache via isolateserver.py.
"""
- add_subdir_flag(parser)
+ add_subdir_option(parser)
+ isolateserver.add_isolate_server_options(parser, False)
+ auth.add_auth_options(parser)
options, args = parser.parse_args(args)
+ auth.process_auth_options(parser, options)
+ isolateserver.process_isolate_server_options(parser, options)
if args:
parser.error('Unsupported argument: %s' % args)
-
+ cwd = os.getcwd()
with tools.Profiler('GenerateHashtable'):
success = False
try:
- complete_state = load_complete_state(
- options, os.getcwd(), options.subdir, False)
- if not options.outdir:
- options.outdir = os.path.join(
- os.path.dirname(complete_state.isolated_filepath), 'hashtable')
- # Make sure that complete_state isn't modified until save_files() is
- # called, because any changes made to it here will propagate to the files
- # created (which is probably not intended).
- complete_state.save_files()
-
- infiles = complete_state.saved_state.files
- # Add all the .isolated files.
- isolated_hash = []
- isolated_files = [
- options.isolated,
- ] + complete_state.saved_state.child_isolated_files
- for item in isolated_files:
- item_path = os.path.join(
- os.path.dirname(complete_state.isolated_filepath), item)
- # Do not use isolateserver.hash_file() here because the file is
- # likely smallish (under 500kb) and its file size is needed.
- with open(item_path, 'rb') as f:
- content = f.read()
- isolated_hash.append(
- complete_state.saved_state.algo(content).hexdigest())
- isolated_metadata = {
- 'h': isolated_hash[-1],
- 's': len(content),
- 'priority': '0'
- }
- infiles[item_path] = isolated_metadata
-
+ complete_state, infiles, isolated_hash = prepare_for_archival(
+ options, cwd)
logging.info('Creating content addressed object store with %d item',
len(infiles))
- if file_path.is_url(options.outdir):
- isolateserver.upload_tree(
- base_url=options.outdir,
- indir=complete_state.root_dir,
- infiles=infiles,
- namespace='default-gzip')
- else:
- recreate_tree(
- outdir=options.outdir,
- indir=complete_state.root_dir,
- infiles=infiles,
- action=run_isolated.HARDLINK_WITH_FALLBACK,
- as_hash=True)
- # TODO(maruel): Make the files read-only?
+ isolateserver.upload_tree(
+ base_url=options.isolate_server,
+ indir=complete_state.root_dir,
+ infiles=infiles,
+ namespace=options.namespace)
success = True
print('%s %s' % (isolated_hash[0], os.path.basename(options.isolated)))
finally:
# important so no stale swarm job is executed.
if not success and os.path.isfile(options.isolated):
os.remove(options.isolated)
- return not success
+ return int(not success)
def CMDcheck(parser, args):
"""Checks that all the inputs are present and generates .isolated."""
- add_subdir_flag(parser)
+ add_subdir_option(parser)
options, args = parser.parse_args(args)
if args:
parser.error('Unsupported argument: %s' % args)
return 0
-CMDhashtable = CMDarchive
+def CMDhashtable(parser, args):
+  """Creates a .isolated file and stores the contents in a directory.
+
+ All the files listed in the .isolated file are put in the directory with their
+ sha-1 as their file name. When using an NFS/CIFS server, the files can then be
+  shared across slaves without an isolate server.
+ """
+ add_subdir_option(parser)
+ isolateserver.add_outdir_options(parser)
+ add_skip_refresh_option(parser)
+ options, args = parser.parse_args(args)
+ if args:
+ parser.error('Unsupported argument: %s' % args)
+ cwd = os.getcwd()
+ isolateserver.process_outdir_options(parser, options, cwd)
+
+ success = False
+ try:
+ complete_state, infiles, isolated_hash = prepare_for_archival(options, cwd)
+ logging.info('Creating content addressed object store with %d item',
+ len(infiles))
+ if not os.path.isdir(options.outdir):
+ os.makedirs(options.outdir)
+
+ # TODO(maruel): Make the files read-only?
+ recreate_tree(
+ outdir=options.outdir,
+ indir=complete_state.root_dir,
+ infiles=infiles,
+ action=run_isolated.HARDLINK_WITH_FALLBACK,
+ as_hash=True)
+ success = True
+ print('%s %s' % (isolated_hash[0], os.path.basename(options.isolated)))
+ finally:
+ # If the command failed, delete the .isolated file if it exists. This is
+ # important so no stale swarm job is executed.
+ if not success and os.path.isfile(options.isolated):
+ os.remove(options.isolated)
+ return int(not success)
def CMDmerge(parser, args):
"""Reads and merges the data from the trace back into the original .isolate.
-
- Ignores --outdir.
"""
parser.require_isolated = False
add_trace_option(parser)
def CMDread(parser, args):
- """Reads the trace file generated with command 'trace'.
-
- Ignores --outdir.
- """
+ """Reads the trace file generated with command 'trace'."""
parser.require_isolated = False
add_trace_option(parser)
- parser.add_option(
- '--skip-refresh', action='store_true',
- help='Skip reading .isolate file and do not refresh the hash of '
- 'dependencies')
+ add_skip_refresh_option(parser)
parser.add_option(
'-m', '--merge', action='store_true',
help='merge the results back in the .isolate file instead of printing')
run.
"""
parser.require_isolated = False
- parser.add_option(
- '--skip-refresh', action='store_true',
- help='Skip reading .isolate file and do not refresh the hash of '
- 'dependencies')
+ isolateserver.add_outdir_options(parser)
+ add_skip_refresh_option(parser)
options, args = parser.parse_args(args)
if args:
parser.error('Unsupported argument: %s' % args)
- if options.outdir and file_path.is_url(options.outdir):
- parser.error('Can\'t use url for --outdir with mode remap.')
-
- complete_state = load_complete_state(
- options, os.getcwd(), None, options.skip_refresh)
-
- outdir = get_remap_dir(
- complete_state.root_dir, options.isolated, options.outdir)
-
- print('Remapping into %s' % outdir)
- if len(os.listdir(outdir)):
+ cwd = os.getcwd()
+ isolateserver.process_outdir_options(parser, options, cwd)
+ complete_state = load_complete_state(options, cwd, None, options.skip_refresh)
+
+ if not os.path.isdir(options.outdir):
+ os.makedirs(options.outdir)
+ print('Remapping into %s' % options.outdir)
+ if os.listdir(options.outdir):
raise ExecutionError('Can\'t remap in a non-empty directory')
create_isolate_tree(
- outdir, complete_state.root_dir, complete_state.saved_state.files,
+ options.outdir, complete_state.root_dir, complete_state.saved_state.files,
complete_state.saved_state.relative_cwd,
complete_state.saved_state.read_only)
if complete_state.isolated_filepath:
"""Runs the test executable in an isolated (temporary) directory.
All the dependencies are mapped into the temporary directory and the
- directory is cleaned up after the target exits. Warning: if --outdir is
- specified, it is deleted upon exit.
+ directory is cleaned up after the target exits.
Argument processing stops at -- and these arguments are appended to the
command line of the target to run. For example, use:
isolate.py run --isolated foo.isolated -- --gtest_filter=Foo.Bar
"""
parser.require_isolated = False
- parser.add_option(
- '--skip-refresh', action='store_true',
- help='Skip reading .isolate file and do not refresh the hash of '
- 'dependencies')
+ add_skip_refresh_option(parser)
options, args = parser.parse_args(args)
- if options.outdir and file_path.is_url(options.outdir):
- parser.error('Can\'t use url for --outdir with mode run.')
complete_state = load_complete_state(
options, os.getcwd(), None, options.skip_refresh)
raise ExecutionError('No command to run.')
cmd = tools.fix_python_path(cmd)
+ outdir = run_isolated.make_temp_dir(
+ 'isolate-%s' % datetime.date.today(),
+ os.path.dirname(complete_state.root_dir))
try:
- outdir = get_remap_dir(
- complete_state.root_dir, options.isolated, options.outdir)
# TODO(maruel): Use run_isolated.run_tha_test().
cwd = create_isolate_tree(
outdir, complete_state.root_dir, complete_state.saved_state.files,
logging.info('Running %s, cwd=%s' % (cmd, cwd))
result = subprocess.call(cmd, cwd=cwd)
finally:
- if options.outdir:
- run_isolated.rmtree(options.outdir)
+ run_isolated.rmtree(outdir)
if complete_state.isolated_filepath:
complete_state.save_files()
parser.add_option(
'-m', '--merge', action='store_true',
help='After tracing, merge the results back in the .isolate file')
- parser.add_option(
- '--skip-refresh', action='store_true',
- help='Skip reading .isolate file and do not refresh the hash of '
- 'dependencies')
+ add_skip_refresh_option(parser)
options, args = parser.parse_args(args)
complete_state = load_complete_state(
'--config-variable',
action='callback',
callback=_process_variable_arg,
- default=[('OS', get_flavor())],
+ default=[],
dest='config_variables',
metavar='FOO BAR',
help='Config variables are used to determine which conditions should be '
'paths in the .isolate file but are not considered relative paths.')
+def add_subdir_option(parser):
+ parser.add_option(
+ '--subdir',
+ help='Filters to a subdirectory. Its behavior changes depending if it '
+ 'is a relative path as a string or as a path variable. Path '
+ 'variables are always keyed from the directory containing the '
+ '.isolate file. Anything else is keyed on the root directory.')
+
+
def add_trace_option(parser):
"""Adds --trace-blacklist to the parser."""
parser.add_option(
'test case.')
+def add_skip_refresh_option(parser):
+ parser.add_option(
+ '--skip-refresh', action='store_true',
+ help='Skip reading .isolate file and do not refresh the hash of '
+ 'dependencies')
+
+
def parse_isolated_option(parser, options, cwd, require_isolated):
"""Processes --isolated."""
if options.isolated:
help='.isolate file to load the dependency data from')
add_variable_option(group)
group.add_option(
- '-o', '--outdir', metavar='DIR',
- help='Directory used to recreate the tree or store the hash table. '
- 'Defaults: run|remap: a /tmp subdirectory, others: '
- 'defaults to the directory containing --isolated')
- group.add_option(
'--ignore_broken_items', action='store_true',
default=bool(os.environ.get('ISOLATE_IGNORE_BROKEN_ITEMS')),
help='Indicates that invalid entries in the isolated file to be '
options.isolate = os.path.normpath(os.path.join(cwd, options.isolate))
options.isolate = file_path.get_native_path_case(options.isolate)
- if options.outdir and not file_path.is_url(options.outdir):
- options.outdir = unicode(options.outdir).replace('/', os.path.sep)
- # outdir doesn't need native path case since tracing is never done from
- # there.
- options.outdir = os.path.normpath(os.path.join(cwd, options.outdir))
-
return options, args