"""Class capturing a command invocation as data."""
-
-# Done first to setup python module path.
-import toolchain_env
-
import inspect
+import glob
import hashlib
+import logging
import os
import shutil
import sys
-import file_tools
-import log_tools
-import repo_tools
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+import pynacl.file_tools
+import pynacl.log_tools
+import pynacl.repo_tools
+
import substituter
COMMAND_CODE_FILES = [os.path.join(SCRIPT_DIR, f)
for f in ('command.py', 'once.py', 'substituter.py',
- 'pnacl_commands.py', 'toolchain_env.py',
- 'toolchain_main.py')]
-COMMAND_CODE_FILES += [os.path.join(NACL_DIR, 'build', f)
- for f in ('directory_storage.py', 'file_tools.py',
- 'gsd_storage.py', 'hashing_tools.py',
- 'local_storage_cache.py', 'log_tools.py',
- 'repo_tools.py')]
+ 'pnacl_commands.py', 'toolchain_main.py',)]
COMMAND_CODE_FILES += [os.path.join(NACL_DIR, 'pynacl', f)
- for f in ('platform.py',)]
+                       for f in ('platform.py', 'directory_storage.py',
+ 'file_tools.py', 'gsd_storage.py',
+ 'hashing_tools.py', 'local_storage_cache.py',
+ 'log_tools.py', 'repo_tools.py',)]
def HashBuildSystemSources():
"""Read the build source files to use in hashes for Callbacks."""
env = os.environ.copy()
paths = []
if sys.platform == 'win32':
- if Runnable.use_cygwin:
- # Use the hermetic cygwin.
- paths = [os.path.join(NACL_DIR, 'cygwin', 'bin')]
- else:
- # TODO(bradnelson): switch to something hermetic.
- mingw = os.environ.get('MINGW', r'c:\mingw')
- msys = os.path.join(mingw, 'msys', '1.0')
- if not os.path.exists(msys):
- msys = os.path.join(mingw, 'msys')
- # We need both msys (posix like build environment) and MinGW (windows
- # build of tools like gcc). We add <MINGW>/msys/[1.0/]bin to the path to
- # get sh.exe. We add <MINGW>/bin to allow direct invocation on MinGW
- # tools. We also add an msys style path (/mingw/bin) to get things like
- # gcc from inside msys.
- paths = [
- '/mingw/bin',
- os.path.join(mingw, 'bin'),
- os.path.join(msys, 'bin'),
- ]
+ # TODO(bradnelson): switch to something hermetic.
+ mingw = os.environ.get('MINGW', r'c:\mingw')
+ msys = os.path.join(mingw, 'msys', '1.0')
+ if not os.path.exists(msys):
+ msys = os.path.join(mingw, 'msys')
+ # We need both msys (posix like build environment) and MinGW (windows
+ # build of tools like gcc). We add <MINGW>/msys/[1.0/]bin to the path to
+ # get sh.exe. We add <MINGW>/bin to allow direct invocation on MinGW
+ # tools. We also add an msys style path (/mingw/bin) to get things like
+ # gcc from inside msys.
+ paths = [
+ '/mingw/bin',
+ os.path.join(mingw, 'bin'),
+ os.path.join(msys, 'bin'),
+ ]
env['PATH'] = os.pathsep.join(
paths + extra_paths + env.get('PATH', '').split(os.pathsep))
return env
class Runnable(object):
"""An object representing a single command."""
- use_cygwin = False
-
- def __init__(self, func, *args, **kwargs):
+ def __init__(self, run_cond, func, *args, **kwargs):
"""Construct a runnable which will call 'func' with 'args' and 'kwargs'.
Args:
+ run_cond: If not None, expects a function which takes a CommandOptions
+ object and returns whether or not to run the command.
func: Function which will be called by Invoke
args: Positional arguments to be passed to func
kwargs: Keyword arguments to be passed to func
When 'func' is called, its first argument will be a substitution object
which it can use to substitute %-templates in its arguments.
"""
+ self._run_cond = run_cond
self._func = func
self._args = args or []
self._kwargs = kwargs or {}
if not found_match:
print 'Function', self._func.func_name, 'in', sourcefile
raise Exception('Python Runnable objects must be implemented in one of' +
- 'the following files: ' + str(COMMAND_CODE_FILES))
+ ' the following files: ' + str(COMMAND_CODE_FILES))
# Like repr(datum), but do something stable for dictionaries.
# This only properly handles dictionaries that use simple types
return '\n'.join(values)
- def Invoke(self, subst):
- return self._func(subst, *self._args, **self._kwargs)
+ def CheckRunCond(self, cmd_options):
+ if self._run_cond and not self._run_cond(cmd_options):
+ return False
+ return True
+
+ def Invoke(self, logger, subst):
+ return self._func(logger, subst, *self._args, **self._kwargs)
-def Command(command, stdout=None, **kwargs):
+def Command(command, stdout=None, run_cond=None, **kwargs):
"""Return a Runnable which invokes 'command' with check_call.
Args:
'path_dirs', the directories therein will be added to the paths searched for
the command. Any other kwargs will be passed to check_call.
"""
- def runcmd(subst, command, stdout, **kwargs):
+ def runcmd(logger, subst, command, stdout, **kwargs):
check_call_kwargs = kwargs.copy()
command = command[:]
else:
command = [subst.Substitute(arg) for arg in command]
paths = check_call_kwargs['env']['PATH'].split(os.pathsep)
- command[0] = file_tools.Which(command[0], paths=paths)
+ command[0] = pynacl.file_tools.Which(command[0], paths=paths)
if stdout is not None:
stdout = subst.SubstituteAbsPaths(stdout)
- log_tools.CheckCall(command, stdout=stdout, **check_call_kwargs)
+ pynacl.log_tools.CheckCall(command, stdout=stdout, logger=logger,
+ **check_call_kwargs)
- return Runnable(runcmd, command, stdout, **kwargs)
+ return Runnable(run_cond, runcmd, command, stdout, **kwargs)
-def SkipForIncrementalCommand(command, **kwargs):
- """Return a command which has the skip_for_incremental property set on it.
- This will cause the command to be skipped for incremental builds, if the
- working directory is not empty.
+def SkipForIncrementalCommand(command, run_cond=None, **kwargs):
+ """Return a command which gets skipped for incremental builds.
+
+ Incremental builds are defined to be when the clobber flag is not on and
+ the working directory is not empty.
"""
- cmd = Command(command, **kwargs)
- cmd.skip_for_incremental = True
- return cmd
+ def SkipForIncrementalCondition(cmd_opts):
+ # Check if caller passed their own run_cond.
+ if run_cond and not run_cond(cmd_opts):
+ return False
+
+    # Only run when clobbering working directory or working directory is empty.
+    work_dir = cmd_opts.GetWorkDir()
+    if cmd_opts.IsClobberWorking() or not os.path.isdir(work_dir):
+      return True
+    # Listing must happen after the isdir check, or a missing work dir raises.
+    dir_list = os.listdir(work_dir)
+    return (len(dir_list) == 0 or
+            (len(dir_list) == 1 and dir_list[0].endswith('.log')))
-def Mkdir(path, parents=False):
+ return Command(command, run_cond=SkipForIncrementalCondition, **kwargs)
+
+
+def Mkdir(path, parents=False, run_cond=None):
"""Convenience method for generating mkdir commands."""
- def mkdir(subst, path):
+ def mkdir(logger, subst, path):
path = subst.SubstituteAbsPaths(path)
+ if os.path.isdir(path):
+ return
+ logger.debug('Making Directory: %s', path)
if parents:
os.makedirs(path)
else:
os.mkdir(path)
- return Runnable(mkdir, path)
+ return Runnable(run_cond, mkdir, path)
-def Copy(src, dst):
+def Copy(src, dst, run_cond=None):
"""Convenience method for generating cp commands."""
- def copy(subst, src, dst):
- shutil.copyfile(subst.SubstituteAbsPaths(src),
- subst.SubstituteAbsPaths(dst))
- return Runnable(copy, src, dst)
+ def copy(logger, subst, src, dst):
+ src = subst.SubstituteAbsPaths(src)
+ dst = subst.SubstituteAbsPaths(dst)
+ logger.debug('Copying: %s -> %s', src, dst)
+ shutil.copyfile(src, dst)
+
+ return Runnable(run_cond, copy, src, dst)
+
+def CopyRecursive(src, dst, run_cond=None):
+ """Recursively copy items in a directory tree.
-def CopyTree(src, dst, exclude=[]):
- """Copy a directory tree, excluding a list of top-level entries."""
- def copyTree(subst, src, dst, exclude):
+ If src is a file, the semantics are like shutil.copyfile+copymode.
+ If src is a directory, the semantics are like shutil.copytree, except
+ that the destination may exist (it must be a directory) and will not be
+ clobbered. There must be no files in dst which have names/subpaths which
+ match files in src.
+ """
+ def rcopy(logger, subst, src, dst):
+ src = subst.SubstituteAbsPaths(src)
+ dst = subst.SubstituteAbsPaths(dst)
+ if os.path.isfile(src):
+ shutil.copyfile(src, dst)
+ shutil.copymode(src, dst)
+ elif os.path.isdir(src):
+ logger.debug('Copying directory: %s -> %s', src, dst)
+ pynacl.file_tools.MakeDirectoryIfAbsent(dst)
+ for item in os.listdir(src):
+ rcopy(logger, subst, os.path.join(src, item), os.path.join(dst, item))
+ return Runnable(run_cond, rcopy, src, dst)
+
+def CopyTree(src, dst, exclude=[], run_cond=None):
+ """Copy a directory tree, excluding a list of top-level entries.
+
+ The destination directory will be clobbered if it exists.
+ """
+ def copyTree(logger, subst, src, dst, exclude):
src = subst.SubstituteAbsPaths(src)
dst = subst.SubstituteAbsPaths(dst)
def ignoreExcludes(dir, files):
return exclude
else:
return []
- file_tools.RemoveDirectoryIfPresent(dst)
+ logger.debug('Copying Tree: %s -> %s', src, dst)
+ pynacl.file_tools.RemoveDirectoryIfPresent(dst)
shutil.copytree(src, dst, symlinks=True, ignore=ignoreExcludes)
- return Runnable(copyTree, src, dst, exclude)
+ return Runnable(run_cond, copyTree, src, dst, exclude)
-def RemoveDirectory(path):
+def RemoveDirectory(path, run_cond=None):
"""Convenience method for generating a command to remove a directory tree."""
- def remove(subst, path):
- file_tools.RemoveDirectoryIfPresent(subst.SubstituteAbsPaths(path))
- return Runnable(remove, path)
-
-
-def Remove(path):
- """Convenience method for generating a command to remove a file."""
- def remove(subst, path):
+ def remove(logger, subst, path):
path = subst.SubstituteAbsPaths(path)
- if os.path.exists(path):
- os.remove(path)
- return Runnable(remove, path)
-
-
-def Rename(src, dst):
+ logger.debug('Removing Directory: %s', path)
+ pynacl.file_tools.RemoveDirectoryIfPresent(path)
+ return Runnable(run_cond, remove, path)
+
+
+def Remove(*args):
+ """Convenience method for generating a command to remove files."""
+ def remove(logger, subst, *args):
+ for arg in args:
+ path = subst.SubstituteAbsPaths(arg)
+ logger.debug('Removing Pattern: %s', path)
+ expanded = glob.glob(path)
+ if len(expanded) == 0:
+        logger.debug('command.Remove: argument %s (substituted from %s) '
+                     'does not match any file', path, arg)
+ for f in expanded:
+ logger.debug('Removing File: %s', f)
+ os.remove(f)
+ return Runnable(None, remove, *args)
+
+
+def Rename(src, dst, run_cond=None):
"""Convenience method for generating a command to rename a file."""
- def rename(subst, src, dst):
- os.rename(subst.SubstituteAbsPaths(src), subst.SubstituteAbsPaths(dst))
- return Runnable(rename, src, dst)
+ def rename(logger, subst, src, dst):
+ src = subst.SubstituteAbsPaths(src)
+ dst = subst.SubstituteAbsPaths(dst)
+ logger.debug('Renaming: %s -> %s', src, dst)
+ os.rename(src, dst)
+ return Runnable(run_cond, rename, src, dst)
-def WriteData(data, dst):
+def WriteData(data, dst, run_cond=None):
"""Convenience method to write a file with fixed contents."""
- def writedata(subst, dst, data):
+ def writedata(logger, subst, dst, data):
+ dst = subst.SubstituteAbsPaths(dst)
+ logger.debug('Writing Data to File: %s', dst)
-    with open(subst.SubstituteAbsPaths(dst), 'wb') as f:
+    with open(dst, 'wb') as f:
f.write(data)
- return Runnable(writedata, dst, data)
+ return Runnable(run_cond, writedata, dst, data)
-def SyncGitRepo(url, destination, revision, reclone=False, clean=False,
- pathspec=None):
- def sync(subst, url, dest, rev, reclone, clean, pathspec):
- repo_tools.SyncGitRepo(url, subst.SubstituteAbsPaths(dest), revision,
- reclone, clean, pathspec)
- return Runnable(sync, url, destination, revision, reclone, clean, pathspec)
+def SyncGitRepoCmds(url, destination, revision, clobber_invalid_repo=False,
+ reclone=False, pathspec=None, git_cache=None, push_url=None,
+                    known_mirrors=(), push_mirrors=(),
+ run_cond=None):
+ """Returns a list of commands to sync and validate a git repo.
+ Args:
+ url: Git repo URL to sync from.
+ destination: Local git repo directory to sync to.
+ revision: If not None, will sync the git repository to this revision.
+ clobber_invalid_repo: Always True for bots, but can be forced for users.
+ reclone: If True, delete the destination directory and re-clone the repo.
+ pathspec: If not None, add the path to the git checkout command, which
+ causes it to just update the working tree without switching
+ branches.
+ known_mirrors: List of tuples specifying known mirrors for a subset of the
+ git URL. IE: [('http://mirror.com/mirror', 'http://git.com')]
+ push_mirrors: List of tuples specifying known push mirrors, see
+ known_mirrors argument for the format.
+ git_cache: If not None, will use git_cache directory as a cache for the git
+ repository and share the objects with any other destination with
+ the same URL.
+ push_url: If not None, specifies what the push URL should be set to.
+ run_cond: Run condition for when to sync the git repo.
-def CleanGitWorkingDir(directory, path):
+ Returns:
+ List of commands, this is a little different from the other command funcs.
+ """
+ def update_valid_mirrors(logger, subst, url, push_url, directory,
+ known_mirrors, push_mirrors):
+ if push_url is None:
+ push_url = url
+
+ abs_dir = subst.SubstituteAbsPaths(directory)
+ git_dir = os.path.join(abs_dir, '.git')
+ if os.path.exists(git_dir):
+ fetch_list = pynacl.repo_tools.GitRemoteRepoList(abs_dir,
+ include_fetch=True,
+ include_push=False,
+ logger=logger)
+ tracked_fetch_url = dict(fetch_list).get('origin', 'None')
+
+ push_list = pynacl.repo_tools.GitRemoteRepoList(abs_dir,
+ include_fetch=False,
+ include_push=True,
+ logger=logger)
+ tracked_push_url = dict(push_list).get('origin', 'None')
+
+ if ((known_mirrors and tracked_fetch_url != url) or
+ (push_mirrors and tracked_push_url != push_url)):
+ updated_fetch_url = tracked_fetch_url
+ for mirror, url_subset in known_mirrors:
+ if mirror in updated_fetch_url:
+ updated_fetch_url = updated_fetch_url.replace(mirror, url_subset)
+
+ updated_push_url = tracked_push_url
+ for mirror, url_subset in push_mirrors:
+ if mirror in updated_push_url:
+ updated_push_url = updated_push_url.replace(mirror, url_subset)
+
+ if ((updated_fetch_url != tracked_fetch_url) or
+ (updated_push_url != tracked_push_url)):
+ logger.warn('Your git repo is using an old mirror: %s', abs_dir)
+ logger.warn('Updating git repo using known mirror:')
+ logger.warn(' [FETCH] %s -> %s',
+ tracked_fetch_url, updated_fetch_url)
+ logger.warn(' [PUSH] %s -> %s',
+ tracked_push_url, updated_push_url)
+ pynacl.repo_tools.GitSetRemoteRepo(updated_fetch_url, abs_dir,
+ push_url=updated_push_url,
+ logger=logger)
+
+ def populate_cache(logger, subst, git_cache, url):
+ if git_cache:
+ abs_git_cache = subst.SubstituteAbsPaths(git_cache)
+ logger.debug('Populating Cache: %s [%s]', abs_git_cache, url)
+ if abs_git_cache:
+ pynacl.repo_tools.PopulateGitCache(abs_git_cache, [url],
+ logger=logger)
+
+ def validate(logger, subst, url, directory):
+ abs_dir = subst.SubstituteAbsPaths(directory)
+ logger.debug('Validating Repo: %s [%s]', abs_dir, url)
+ pynacl.repo_tools.ValidateGitRepo(url,
+ subst.SubstituteAbsPaths(directory),
+ clobber_mismatch=True,
+ logger=logger)
+
+ def sync(logger, subst, url, dest, revision, reclone, pathspec, git_cache,
+ push_url):
+ abs_dest = subst.SubstituteAbsPaths(dest)
+ if git_cache:
+ git_cache = subst.SubstituteAbsPaths(git_cache)
+
+ logger.debug('Syncing Git Repo: %s [%s]', abs_dest, url)
+ try:
+ pynacl.repo_tools.SyncGitRepo(url, abs_dest, revision,
+ reclone=reclone,
+ pathspec=pathspec, git_cache=git_cache,
+ push_url=push_url, logger=logger)
+ except pynacl.repo_tools.InvalidRepoException, e:
+ remote_repos = dict(pynacl.repo_tools.GitRemoteRepoList(abs_dest,
+ logger=logger))
+ tracked_url = remote_repos.get('origin', 'None')
+    logger.error('Invalid Git Repo: %s', e)
+ logger.error('Destination Directory: %s', abs_dest)
+ logger.error('Currently Tracked Repo: %s', tracked_url)
+ logger.error('Expected Repo: %s', e.expected_repo)
+ logger.warn('Possible solutions:')
+ logger.warn(' 1. The simplest way if you have no local changes is to'
+ ' simply delete the directory and let the tool resync.')
+ logger.warn(' 2. If the tracked repo is merely a mirror, simply go to'
+ ' the directory and run "git remote set-url origin %s"',
+ e.expected_repo)
+ raise Exception('Could not validate local git repository.')
+
+ def ClobberInvalidRepoCondition(cmd_opts):
+ # Check if caller passed their own run_cond
+ if run_cond and not run_cond(cmd_opts):
+ return False
+ elif clobber_invalid_repo:
+ return True
+ return cmd_opts.IsBot()
+
+ def CleanOnBotCondition(cmd_opts):
+ # Check if caller passed their own run_cond
+ if run_cond and not run_cond(cmd_opts):
+ return False
+ return cmd_opts.IsBot()
+
+ commands = [CleanGitWorkingDir(destination, reset=True, path=None,
+ run_cond=CleanOnBotCondition)]
+ if git_cache:
+ commands.append(Runnable(run_cond, populate_cache, git_cache, url))
+
+ commands.extend([Runnable(run_cond, update_valid_mirrors, url, push_url,
+ destination, known_mirrors, push_mirrors),
+ Runnable(ClobberInvalidRepoCondition, validate, url,
+ destination),
+ Runnable(run_cond, sync, url, destination, revision, reclone,
+ pathspec, git_cache, push_url)])
+ return commands
+
+
+def CleanGitWorkingDir(directory, reset=False, path=None, run_cond=None):
"""Clean a path in a git checkout, if the checkout directory exists."""
- def clean(subst, directory, path):
+ def clean(logger, subst, directory, reset, path):
directory = subst.SubstituteAbsPaths(directory)
+ logger.debug('Cleaning Git Working Directory: %s', directory)
if os.path.exists(directory) and len(os.listdir(directory)) > 0:
- repo_tools.CleanGitWorkingDir(directory, path)
- return Runnable(clean, directory, path)
+      pynacl.repo_tools.CleanGitWorkingDir(directory, reset, path,
+                                           logger=logger)
+ return Runnable(run_cond, clean, directory, reset, path)
-def GenerateGitPatches(git_dir, info):
+def GenerateGitPatches(git_dir, info, run_cond=None):
"""Generate patches from a Git repository.
Args:
<upstream-name>[-g<commit-abbrev>]-nacl.patch: From the result of that
(or from 'upstream-base' if none above) to 'rev'.
"""
- def generatePatches(subst, git_dir, info):
- git_dir_flag = '--git-dir=' + subst.SubstituteAbsPaths(git_dir)
+  def generatePatches(logger, subst, git_dir, info):
+ git_dir = subst.SubstituteAbsPaths(git_dir)
+ git_dir_flag = '--git-dir=' + git_dir
basename = info['upstream-name']
+ logger.debug('Generating Git Patches: %s', git_dir)
+
+ patch_files = []
- def generatePatch(src_rev, dst_rev, suffix):
+ def generatePatch(description, src_rev, dst_rev, suffix):
src_prefix = '--src-prefix=' + basename + '/'
dst_prefix = '--dst-prefix=' + basename + suffix + '/'
- patch_file = subst.SubstituteAbsPaths(
- path.join('%(output)s', basename + suffix + '.patch'))
+ patch_name = basename + suffix + '.patch'
+ patch_file = subst.SubstituteAbsPaths(path.join('%(output)s', patch_name))
git_args = [git_dir_flag, 'diff',
'--patch-with-stat', '--ignore-space-at-eol', '--full-index',
'--no-ext-diff', '--no-color', '--no-renames',
'--no-textconv', '--text', src_prefix, dst_prefix,
src_rev, dst_rev]
- log_tools.CheckCall(repo_tools.GitCmd() + git_args, stdout=patch_file)
+ pynacl.log_tools.CheckCall(
+ pynacl.repo_tools.GitCmd() + git_args,
+ stdout=patch_file,
+ logger=logger,
+ )
+ patch_files.append((description, patch_name))
def revParse(args):
- output = repo_tools.CheckGitOutput([git_dir_flag] + args)
+ output = pynacl.repo_tools.CheckGitOutput([git_dir_flag] + args)
lines = output.splitlines()
if len(lines) != 1:
raise Exception('"git %s" did not yield a single commit' %
# hex digits of the commit ID is what Git usually produces
# for --abbrev-commit behavior, 'git describe', etc.
suffix = '-g' + upstream_snapshot[:7]
- generatePatch(upstream_base, upstream_snapshot, suffix)
+ generatePatch('Patch the release up to the upstream snapshot version.',
+ upstream_base, upstream_snapshot, suffix)
if rev != upstream_snapshot:
# We're using local changes, so generate a patch of those.
- generatePatch(upstream_snapshot, rev, suffix + '-nacl')
- return Runnable(generatePatches, git_dir, info)
+ generatePatch('Apply NaCl-specific changes.',
+ upstream_snapshot, rev, suffix + '-nacl')
+
+ with open(subst.SubstituteAbsPaths(path.join('%(output)s',
+ info['name'] + '.series')),
+ 'w') as f:
+ f.write("""\
+# This is a "series file" in the style used by the "quilt" tool.
+# It describes how to unpack and apply patches to produce the source
+# tree of the %(name)s component of a toolchain targeting Native Client.
+
+# Source: %(upstream-name)s.tar
+"""
+ % info)
+ for patch in patch_files:
+ f.write('\n# %s\n%s\n' % patch)
+
+ return Runnable(run_cond, generatePatches, git_dir, info)