"""Build NativeClient toolchain packages."""
-# Done first to setup python module path.
-import toolchain_env
-
import logging
import optparse
import os
import sys
import textwrap
-import file_tools
-import gsd_storage
-import log_tools
-import once
-import repo_tools
-import local_storage_cache
-
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+import pynacl.file_tools
+import pynacl.gsd_storage
+import pynacl.log_tools
+import pynacl.local_storage_cache
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
NACL_DIR = os.path.dirname(SCRIPT_DIR)
ROOT_DIR = os.path.dirname(NACL_DIR)
+BUILD_DIR = os.path.join(NACL_DIR, 'build')
+PKG_VER_DIR = os.path.join(BUILD_DIR, 'package_version')
+sys.path.append(PKG_VER_DIR)
+import archive_info
+import package_info
+
+import once
+import command_options
DEFAULT_CACHE_DIR = os.path.join(SCRIPT_DIR, 'cache')
+DEFAULT_GIT_CACHE_DIR = os.path.join(SCRIPT_DIR, 'git_cache')
DEFAULT_SRC_DIR = os.path.join(SCRIPT_DIR, 'src')
DEFAULT_OUT_DIR = os.path.join(SCRIPT_DIR, 'out')
-def PrintFlush(message):
-  """Flush stdout and print a message to stderr.
-
-  Buildbot annotator messages must be at the beginning of a line, and we want to
-  ensure that any output from the script or from subprocesses appears in the
-  correct order wrt BUILD_STEP messages. So we flush stdout before printing all
-  buildbot messages here.
-  """
-  sys.stdout.flush()
-  print >>sys.stderr, message
-def PrintAnnotatorURL(url):
+def PrintAnnotatorURL(cloud_item):
  """Print an URL in buildbot annotator form.
  Args:
-    url: A URL to print.
+    cloud_item: once.CloudStorageItem representing a memoized item in the cloud.
  """
-  PrintFlush('@@@STEP_LINK@download@%s@@@' % url)
+  # Emit a STEP_LINK line for the cached download directory and, separately,
+  # one for its build log when each is present on the cloud item.
+  if cloud_item.dir_item:
+    url = cloud_item.dir_item.url
+    pynacl.log_tools.WriteAnnotatorLine('@@@STEP_LINK@download@%s@@@' % url)
+
+  if cloud_item.log_url:
+    log_url = cloud_item.log_url
+    pynacl.log_tools.WriteAnnotatorLine('@@@STEP_LINK@log@%s@@@' % log_url)
class PackageBuilder(object):
  """Module to build a setup of packages."""
-  def __init__(self, packages, args):
+  def __init__(self, packages, package_targets, args):
    """Constructor.
    Args:
            source targets are unconditional, this is only useful as a
            convenience for commands, which may refer to the inputs by their
            key name>},
-          },
-          '<package name>': {
+          },
+          '<package name>': {
            'type': 'build',
            # Build packages are memoized, and will build only if their
            # inputs have changed. Their inputs consist of the output of
            # output will go into the root of the output directory.
            'commands':
              [<list of command.Command objects to run>],
-            # Objects that have a 'skip_for_incremental' attribute that
-            # evaluates to True will not be run on incremental builds unless
-            # the working directory is empty.
+          },
+          '<package name>': {
+            'type': 'work',
+            # Work packages have the same keys as build packages. However,
+            # they are intended to be intermediate targets, and are not
+            # memoized or included for package_version.py. Therefore they will
+            # always run, regardless of whether their inputs have changed or
+            # of whether source syncing is skipped via the command line.
+            <same keys as build-type packages>
          },
        }
+      package_targets: A dictionary with the following format. This is a
+                       description of output package targets the packages are
+                       built for. Each output package should contain a list of
+                       <package_name> referenced in the previous "packages"
+                       dictionary. This list of targets is expected to stay
+                       the same from build to build, so it should include
+                       package names even if they aren't being built. A package
+                       target is usually the platform, such as "$OS_$ARCH",
+                       while the output package is usually the toolchain name,
+                       such as "nacl_arm_newlib".
+        {
+          '<package_target>': {
+            '<output_package>':
+              [<list of package names included in output package>]
+          }
+        }
      args: sys.argv[1:] or equivalent.
    """
    self._packages = packages
+    # Mapping of package target -> {output package -> [package names]},
+    # consumed later when emitting package metadata for package_version.py.
+    self._package_targets = package_targets
    self.DecodeArgs(packages, args)
    self._build_once = once.Once(
        use_cached_results=self._options.use_cached_results,
        cache_results=self._options.cache_results,
        print_url=PrintAnnotatorURL,
-        storage=self.CreateStorage())
+        storage=self.CreateStorage(),
+        # Extra substitution paths (e.g. the git cache dir) for commands.
+        extra_paths=self.ExtraSubstitutionPaths())
    self._signature_file = None
    if self._options.emit_signatures is not None:
      if self._options.emit_signatures == '-':
  def Main(self):
    """Main entry point."""
-    file_tools.MakeDirectoryIfAbsent(self._options.source)
-    file_tools.MakeDirectoryIfAbsent(self._options.output)
-    log_tools.SetupLogging(self._options.verbose,
-                           open(os.path.join(self._options.output,
-                                             'toolchain_build.log'), 'w'))
+    pynacl.file_tools.MakeDirectoryIfAbsent(self._options.source)
+    pynacl.file_tools.MakeDirectoryIfAbsent(self._options.output)
+
+    pynacl.log_tools.SetupLogging(
+        verbose=self._options.verbose,
+        log_file=self._options.log_file,
+        quiet=self._options.quiet,
+        no_annotator=self._options.no_annotator)
    self.BuildAll()
+    # After building, write out the JSON package descriptions for the
+    # archives that were produced.
+    self.OutputPackagesInformation()
  def GetOutputDir(self, package, use_subdir):
    # The output dir of source packages is in the source directory, and can be
      dirname = self._packages[package].get('output_dirname', package)
      return os.path.join(self._options.source, dirname)
    else:
-      root = os.path.join(self._options.output, package + '_install')
-      if use_subdir and 'output_subdir' in self._packages[package]:
-        return os.path.join(root, self._packages[package]['output_subdir'])
-      return root
+      root = os.path.join(self._options.output, package + '_install')
+      if use_subdir and 'output_subdir' in self._packages[package]:
+        return os.path.join(root, self._packages[package]['output_subdir'])
+      return root
  def BuildPackage(self, package):
    """Build a single package.
    if 'type' not in package_info:
      raise Exception('package %s does not have a type' % package)
    type_text = package_info['type']
-    if type_text not in ('source', 'build'):
-      raise Execption('package %s has unrecognized type: %s' %
+    if type_text not in ('source', 'build', 'work'):
+      raise Exception('package %s has unrecognized type: %s' %
                      (package, type_text))
    is_source_target = type_text == 'source'
+    # Only 'build' packages are memoized; 'work' packages always run (see the
+    # package-type docstring on PackageBuilder.__init__).
+    is_build_target = type_text == 'build'
    if 'commands' not in package_info:
      raise Exception('package %s does not have any commands' % package)
      logging.debug('Sync skipped: not running commands for %s' % package)
      return
-    PrintFlush('@@@BUILD_STEP %s (%s)@@@' % (package, type_text))
+    pynacl.log_tools.WriteAnnotatorLine(
+        '@@@BUILD_STEP %s (%s)@@@' % (package, type_text))
    logging.debug('Building %s package %s' % (type_text, package))
    dependencies = package_info.get('dependencies', [])
        raise Exception('key "%s" found in both dependencies and inputs of '
                        'package "%s"' % (key, package))
        inputs[key] = value
-    else:
+    elif type_text != 'source':
+      # Non-source packages default to a particular input directory.
      inputs['src'] = os.path.join(self._options.source, package)
    # Add in each dependency by package name.
    for dependency in dependencies:
    work_dir = os.path.join(self._options.output, package + '_work')
    if self._options.clobber:
      logging.debug('Clobbering working directory %s' % work_dir)
-      file_tools.RemoveDirectoryIfPresent(work_dir)
-    file_tools.MakeDirectoryIfAbsent(work_dir)
+      pynacl.file_tools.RemoveDirectoryIfPresent(work_dir)
+    pynacl.file_tools.MakeDirectoryIfAbsent(work_dir)
    output = self.GetOutputDir(package, False)
    output_subdir = self.GetOutputDir(package, True)
    if not is_source_target or self._options.clobber_source:
      logging.debug('Clobbering output directory %s' % output)
-      file_tools.RemoveDirectoryIfPresent(output)
+      pynacl.file_tools.RemoveDirectoryIfPresent(output)
      os.makedirs(output_subdir)
    commands = package_info.get('commands', [])
-    if not self._options.clobber and len(os.listdir(work_dir)) > 0:
-      commands = [cmd for cmd in commands if
-                  not (hasattr(cmd, 'skip_for_incremental') and
-                       cmd.skip_for_incremental)]
+
+    # Create a command option object specifying current build.
+    cmd_options = command_options.CommandOptions(
+        work_dir=work_dir,
+        clobber_working=self._options.clobber,
+        clobber_source=self._options.clobber_source,
+        trybot=self._options.trybot,
+        buildbot=self._options.buildbot)
+
    # Do it.
    self._build_once.Run(
        package, inputs, output,
        commands=commands,
+        cmd_options=cmd_options,
        working_dir=work_dir,
-        memoize=not is_source_target,
+        # Memoize only 'build' targets; 'source' and 'work' always rerun.
+        memoize=is_build_target,
        signature_file=self._signature_file,
        subdir=output_subdir)
    if not is_source_target and self._options.install:
-      logging.debug('Installing output to %s' % self._options.install)
-      file_tools.CopyTree(output, self._options.install)
+      # NOTE(review): CygPath presumably normalizes the path for cygwin-based
+      # Windows setups -- confirm against pynacl.platform.
+      install = pynacl.platform.CygPath(self._options.install)
+      logging.debug('Installing output to %s' % install)
+      pynacl.file_tools.CopyTree(output, install)
  def BuildOrder(self, targets):
    """Find what needs to be built in what order to build all targets.
    for target in self._targets:
      self.BuildPackage(target)
+  def OutputPackagesInformation(self):
+    """Outputs packages information for the built data.
+
+    For every package target, writes one <output_package>.json file under
+    <output>/packages/<target>/ describing the archives (name, hash, url,
+    log_url) that make up the output package.  If --packages-file was given,
+    also writes the list of emitted .json files to that path.
+    """
+    packages_dir = os.path.join(self._options.output, 'packages')
+    pynacl.file_tools.RemoveDirectoryIfPresent(packages_dir)
+    os.makedirs(packages_dir)
+
+    built_packages = []
+    for target, target_dict in self._package_targets.iteritems():
+      target_dir = os.path.join(packages_dir, target)
+      pynacl.file_tools.MakeDirectoryIfAbsent(target_dir)
+      for output_package, components in target_dict.iteritems():
+        package_desc = package_info.PackageInfo()
+
+        include_package = False
+        for component in components:
+          # Components without an extension default to .tgz archives.
+          if '.' in component:
+            archive_name = component
+          else:
+            archive_name = component + '.tgz'
+          cache_item = self._build_once.GetCachedCloudItemForPackage(component)
+          if cache_item is not None and cache_item.dir_item:
+            include_package = True
+            archive_desc = archive_info.ArchiveInfo(
+                name=archive_name,
+                hash=cache_item.dir_item.hash,
+                url=cache_item.dir_item.url,
+                log_url=cache_item.log_url,
+                )
+          else:
+            # No cached archive for this component: record a bare entry.
+            # (A cache item without a dir_item previously left archive_desc
+            # unset, reusing the prior iteration's value -- or raising
+            # UnboundLocalError on the first component.)
+            archive_desc = archive_info.ArchiveInfo(name=archive_name)
+
+          package_desc.AppendArchive(archive_desc)
+
+        # Only output package file if an archive was actually included.
+        if include_package:
+          package_file = os.path.join(target_dir, output_package + '.json')
+          package_desc.SavePackageFile(package_file)
+
+          built_packages.append(package_file)
+
+    if self._options.packages_file:
+      packages_file = pynacl.platform.CygPath(self._options.packages_file)
+      pynacl.file_tools.MakeParentDirectoryIfAbsent(packages_file)
+      with open(packages_file, 'wt') as f:
+        f.write('\n'.join(built_packages))
+
  def DecodeArgs(self, packages, args):
    """Decode command line arguments to this build.
        default=False, action='store_true',
        help='Produce more output.')
    parser.add_option(
+        '-q', '--quiet', dest='quiet',
+        default=False, action='store_true',
+        help='Produce no output.')
+    parser.add_option(
        '-c', '--clobber', dest='clobber',
        default=False, action='store_true',
        help='Clobber working directories before building.')
        default=DEFAULT_SRC_DIR,
        help='Select directory containing source checkouts.')
    parser.add_option(
+        '--git-cache', dest='git_cache',
+        default=DEFAULT_GIT_CACHE_DIR,
+        help='Select directory containing the git cache for syncing.')
+    parser.add_option(
        '-o', '--output', dest='output',
        default=DEFAULT_OUT_DIR,
        help='Select directory containing build output.')
    parser.add_option(
+        '--packages-file', dest='packages_file',
+        default=None,
+        help='Output packages file describing list of package files built.')
+    parser.add_option(
        '--no-use-cached-results', dest='use_cached_results',
        default=True, action='store_false',
        help='Do not rely on cached results.')
        default=True, action='store_false',
        help='Do not use pinned revisions.')
    parser.add_option(
+        '--no-annotator', dest='no_annotator',
+        default=False, action='store_true',
+        help='Do not print annotator headings.')
+    parser.add_option(
        '--trybot', dest='trybot',
        default=False, action='store_true',
        help='Run and cache as if on trybot.')
        default=False, action='store_true',
        help='Run source target commands only')
    parser.add_option(
+        '--disable-git-cache', dest='disable_git_cache',
+        default=False, action='store_true',
+        help='Disable git cache when syncing sources')
+    parser.add_option(
        '--emit-signatures', dest='emit_signatures',
        help='Write human readable build signature for each step to FILE.',
        metavar='FILE')
    parser.add_option('--install', dest='install',
                      help='After building, copy contents of build packages' +
                      ' to the specified directory')
+    parser.add_option('--log-file', dest='log_file',
+                      default=None, action='store',
+                      help='Log all logging into a log file.')
    options, targets = parser.parse_args(args)
    if options.trybot and options.buildbot:
      print >>sys.stderr, (
      sys.exit(1)
    if options.trybot or options.buildbot:
      # Bots force verbose, non-quiet, annotated, clean builds.
      options.verbose = True
+      options.quiet = False
+      options.no_annotator = False
      options.sync_sources = True
      options.clobber = True
+      # '-' is handled specially by the emit_signatures logic in __init__.
+      options.emit_signatures = '-'
    self._options = options
    if not targets:
      if self._options.ignore_dependencies:
      A storage object (GSDStorage).
    """
    if self._options.buildbot:
+      # Buildbots read from and write to the canonical cache bucket.
-      return gsd_storage.GSDStorage(
+      return pynacl.gsd_storage.GSDStorage(
          write_bucket='nativeclient-once',
          read_buckets=['nativeclient-once'])
    elif self._options.trybot:
+      # Trybots write to a separate bucket but read from both.
-      return gsd_storage.GSDStorage(
+      return pynacl.gsd_storage.GSDStorage(
          write_bucket='nativeclient-once-try',
          read_buckets=['nativeclient-once', 'nativeclient-once-try'])
    else:
      read_buckets = []
      if self._options.use_remote_cache:
        read_buckets += ['nativeclient-once']
+      # Local builds never write to the cloud (write_bucket=None); results
+      # are kept in a local cache directory instead.
-      return local_storage_cache.LocalStorageCache(
+      return pynacl.local_storage_cache.LocalStorageCache(
          cache_path=self._options.cache,
-          storage=gsd_storage.GSDStorage(
+          storage=pynacl.gsd_storage.GSDStorage(
              write_bucket=None,
              read_buckets=read_buckets))
+
+  def ExtraSubstitutionPaths(self):
+    """Returns a dictionary of extra substitution paths allowed for commands."""
+    # An empty git_cache_dir value disables git-cache substitution.
+    git_cache_dir = ('' if self._options.disable_git_cache
+                     else self._options.git_cache)
+    return {
+      'top_srcdir': NACL_DIR,
+      'git_cache_dir': git_cache_dir,
+    }