2 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 """Perform various tasks related to updating Portage packages."""
11 import parallel_emerge
12 import portage # pylint: disable=F0401
17 from chromite.buildbot import constants
18 from chromite.lib import commandline
19 from chromite.lib import cros_build_lib
20 from chromite.lib import osutils
21 from chromite.lib import operation
22 from chromite.lib import upgrade_table as utable
23 from chromite.scripts import merge_package_status as mps
25 # pylint: disable=E1101,W0201
# Single Operation object used for all user-facing output in this module.
oper = operation.Operation('cros_portage_upgrade')

# Placeholder shown in status output when a value does not apply.
NOT_APPLICABLE = 'N/A'
WORLD_TARGET = 'world'

# Arches we care about -- we actively develop/support/ship.
STANDARD_BOARD_ARCHS = set(('amd64', 'arm', 'x86'))

# Files we do not include in our upgrades by convention.
# Entries may be fnmatch-style patterns (e.g. 'ChangeLog*').
BLACKLISTED_FILES = set(['Manifest', 'ChangeLog*'])
"""Class to accumulate package info during upgrade process.

This class is basically a formalized dictionary.

# Per-package attributes tracked (elements of __slots__; the list opener
# is elided in this excerpt):
'category', # Package category only
# TODO(mtennant): Rename 'cpv' to 'curr_cpv' or similar.
'cpv', # Current full cpv (revision included)
'cpv_cmp_upstream', # 0 = current, >0 = outdated, <0 = futuristic
'latest_upstream_cpv', # Latest (non-stable ok) upstream cpv
'overlay', # Overlay package currently in
'package', # category/package_name
'package_name', # The 'p' in 'cpv'
'package_ver', # The 'pv' in 'cpv'
'slot', # Current package slot
'stable_upstream_cpv', # Latest stable upstream cpv
'state', # One of utable.UpgradeTable.STATE_*
'upgraded_cpv', # If upgraded, it is to this cpv
'upgraded_unmasked', # Boolean. If upgraded_cpv, indicates if unmasked.
'upstream_cpv', # latest/stable upstream cpv according to request
'user_arg', # Original user arg for this pkg, if applicable
'version_rev', # Just revision (e.g. 'r1'). '' if no revision

# Any deriving classes must maintain this cumulative attribute list.
__attrlist__ = __slots__
def __init__(self, **kwargs):
  """Construct with every attribute in __attrlist__ defined.

  Each attribute named in |kwargs| takes that value; all other
  attributes are initialized to None.
  """
  for name in self.__attrlist__:
    value = kwargs.get(name)
    setattr(self, name, value)
def __eq__(self, other):
  """Equality support. Used in unittests."""

  # Instances of different concrete types are never equal.
  if type(self) != type(other):

  # NOTE(review): |no_attr| is presumably a unique sentinel defined in
  # elided lines, so a missing attribute never equals a present one.
  for attr in self.__attrlist__:
    if getattr(self, attr, no_attr) != getattr(other, attr, no_attr):
def __ne__(self, other):
  """Inequality support for completeness (inverse of equality)."""
  equal = (self == other)
  return not equal
class Upgrader(object):
  """A class to perform various tasks related to updating Portage packages."""

  # Upstream Portage source repo and the branch tracked within it.
  PORTAGE_GIT_URL = '%s/chromiumos/overlays/portage.git' % (
      constants.EXTERNAL_GOB_URL)
  ORIGIN_GENTOO = 'origin/gentoo'

  UPSTREAM_OVERLAY_NAME = 'portage'
  # Temporary upstream checkout goes under the shared cache dir when that
  # envvar is set; otherwise under /tmp.
  UPSTREAM_TMP_REPO = os.environ.get(constants.SHARED_CACHE_ENVVAR)
  if UPSTREAM_TMP_REPO is not None:
    UPSTREAM_TMP_REPO = '%s/cros_portage_upgrade' % UPSTREAM_TMP_REPO
  # NOTE(review): the 'else:' introducing this fallback is elided here.
  UPSTREAM_TMP_REPO = '/tmp'
  UPSTREAM_TMP_REPO += '/' + UPSTREAM_OVERLAY_NAME

  STABLE_OVERLAY_NAME = 'portage-stable'
  CROS_OVERLAY_NAME = 'chromiumos-overlay'
  CATEGORIES_FILE = 'profiles/categories'
  HOST_BOARD = 'amd64-host'
  # Option attribute names copied off the parsed options object in __init__.
  OPT_SLOTS = ('amend', 'csv_file', 'force', 'no_upstream_cache', 'rdeps',
               'upgrade', 'upgrade_deep', 'upstream', 'unstable_ok', 'verbose',

  EQUERY_CMD = 'equery'
  EMERGE_CMD = 'emerge'
  PORTAGEQ_CMD = 'portageq'
  # Commands that have board-specific variants (e.g. 'equery-<board>').
  BOARD_CMDS = set([EQUERY_CMD, EMERGE_CMD, PORTAGEQ_CMD])
  # Allowed instance attributes; each carries its meaning inline.
  __slots__ = ['_amend', # Boolean to use --amend with upgrade commit
               '_args', # Commandline arguments (all portage targets)
               '_curr_arch', # Architecture for current board run
               '_curr_board', # Board for current board run
               '_curr_table', # Package status for current board run
               '_cros_overlay', # Path to chromiumos-overlay repo
               '_csv_file', # File path for writing csv output
               '_deps_graph', # Dependency graph from portage
               '_force', # Force upgrade even when version already exists
               '_local_only', # Skip network traffic
               '_missing_eclass_re',# Regexp for missing eclass in equery
               '_outdated_eclass_re',# Regexp for outdated eclass in equery
               '_emptydir', # Path to temporary empty directory
               '_master_archs', # Set. Archs of tables merged into master_table
               '_master_cnt', # Number of tables merged into master_table
               '_master_table', # Merged table from all board runs
               '_no_upstream_cache', # Boolean. Delete upstream cache when done
               '_porttree', # Reference to portage porttree object
               '_rdeps', # Boolean, if True pass --root-deps=rdeps
               '_stable_repo', # Path to portage-stable
               '_stable_repo_categories', # Categories from profiles/categories
               '_stable_repo_stashed', # True if portage-stable has a git stash
               '_stable_repo_status', # git status report at start of run
               '_targets', # Processed list of portage targets
               '_upgrade', # Boolean indicating upgrade requested
               '_upgrade_cnt', # Num pkg upgrades in this run (all boards)
               '_upgrade_deep', # Boolean indicating upgrade_deep requested
               '_upstream', # Path to upstream portage repo
               '_unstable_ok', # Boolean to allow unstable upstream also
               '_verbose', # Boolean
def __init__(self, options):
  """Initialize run state from parsed commandline |options|."""
  self._args = options.packages
  self._targets = mps.ProcessTargets(self._args)

  self._master_table = None
  self._master_archs = set()
  self._upgrade_cnt = 0

  self._stable_repo = os.path.join(options.srcroot, 'third_party',
                                   self.STABLE_OVERLAY_NAME)
  # This can exist in two spots; the tree, or the cache.

  self._cros_overlay = os.path.join(options.srcroot, 'third_party',
                                    self.CROS_OVERLAY_NAME)

  # Save options needed later.
  for opt in self.OPT_SLOTS:
    setattr(self, '_' + opt, getattr(options, opt, None))

  # Lazily-initialized portage state; set up later in the run.
  self._porttree = None
  self._emptydir = None
  self._deps_graph = None

  # Pre-compiled regexps for speed.
  # NOTE(review): the continuation of this regexp literal is elided here.
  self._missing_eclass_re = re.compile(r'(\S+\.eclass) could not be '
  self._outdated_eclass_re = re.compile(r'Call stack:\n'
                                        r'(?:.*?\s+\S+,\sline.*?\n)*'
                                        r'.*?\s+(\S+\.eclass),\s+line')
def _IsInUpgradeMode(self):
  """Return True if running in upgrade mode (regular or deep)."""
  upgrade_requested = self._upgrade or self._upgrade_deep
  return upgrade_requested
def _SaveStatusOnStableRepo(self):
  """Get the 'git status' for everything in |self._stable_repo|.

  The results are saved in a dict at self._stable_repo_status where each key
  is a file path rooted at |self._stable_repo|, and the value is the status
  for that file as returned by 'git status -s'. (e.g. 'A' for 'Added').
  """
  result = self._RunGit(self._stable_repo, ['status', '-s'],
                        redirect_stdout=True)
  if result.returncode == 0:
    for line in result.output.strip().split('\n'):
      # First token is the status code, second the path.
      linesplit = line.split()
      (status, path) = linesplit[0], linesplit[1]

      # Handle a rename as separate 'D' and 'A' statuses. Example line:
      # R path/to/foo-1.ebuild -> path/to/foo-2.ebuild
      # The rename target is the 4th token and counts as added.
      statuses[linesplit[3]] = 'A'
      statuses[path] = status
    self._stable_repo_status = statuses
  raise RuntimeError('Unable to run "git status -s" in %s:\n%s' %
                     (self._stable_repo, result.output))
  # Reset stash tracking at the start of each run.
  self._stable_repo_stashed = False
def _LoadStableRepoCategories(self):
  """Load |self._stable_repo|/profiles/categories into set."""
  self._stable_repo_categories = set()
  cat_file_path = os.path.join(self._stable_repo, self.CATEGORIES_FILE)
  with open(cat_file_path, 'r') as f:
    # One category per line; the line iteration/strip is elided here.
    self._stable_repo_categories.add(line)
def _WriteStableRepoCategories(self):
  """Write |self._stable_repo_categories| to profiles/categories."""
  file_path = os.path.join(self._stable_repo, self.CATEGORIES_FILE)
  with open(file_path, 'w') as cat_file:
    cat_file.write('\n'.join(sorted(self._stable_repo_categories)))

  # Stage the rewritten file so it is included in any upgrade commit.
  self._RunGit(self._stable_repo, ['add', self.CATEGORIES_FILE])
def _CheckStableRepoOnBranch(self):
  """Raise exception if |self._stable_repo| is not on a branch now."""
  result = self._RunGit(self._stable_repo, ['branch'], redirect_stdout=True)
  if result.returncode == 0:
    for line in result.output.split('\n'):
      # 'git branch' marks the current branch with a leading '*'.
      match = re.search(r'^\*\s+(.+)$', line)
      # Found current branch, see if it is a real branch.
      branch = match.group(1)
      if branch != '(no branch)':
  raise RuntimeError('To perform upgrade, %s must be on a branch.' %
  raise RuntimeError('Unable to determine whether %s is on a branch.' %
def _PkgUpgradeRequested(self, pinfo):
  """Return True if upgrade of pkg in |pinfo| was requested by user."""
  # Deep upgrades cover every package in the dependency graph.
  if self._upgrade_deep:

  # Otherwise only packages the user named on the commandline (recorded
  # in pinfo.user_arg) are considered requested.
  return bool(pinfo.user_arg)
# NOTE(review): no 'self' parameter -- likely a @staticmethod whose
# decorator line is elided in this excerpt.
def _FindBoardArch(board):
  """Return the architecture for a given board name."""
  # Host is a special case
  if board == Upgrader.HOST_BOARD:

  # Leverage Portage 'portageq' tool to do this.
  cmd = ['portageq-%s' % board, 'envvar', 'ARCH']
  cmd_result = cros_build_lib.RunCommand(
      cmd, print_cmd=False, redirect_stdout=True)
  if cmd_result.returncode == 0:
    return cmd_result.output.strip()
def _GetPreOrderDepGraphPackage(deps_graph, package, pkglist, visited):
  """Collect packages from |deps_graph| into |pkglist| in pre-order."""
  # Each package is emitted at most once.
  if package in visited:

  # Recurse into the packages that depend on ('provides' edges) this one
  # before appending the package itself.
  for parent in deps_graph[package]['provides']:
    Upgrader._GetPreOrderDepGraphPackage(deps_graph, parent, pkglist, visited)
  pkglist.append(package)
def _GetPreOrderDepGraph(deps_graph):
  """Return packages from |deps_graph| in pre-order."""
  # Walk every package; the shared |pkglist|/|visited| accumulators are
  # initialized in elided lines.
  for package in deps_graph:
    Upgrader._GetPreOrderDepGraphPackage(deps_graph, package, pkglist,
def _CmpCpv(cpv1, cpv2):
  """Returns standard cmp result between |cpv1| and |cpv2|.

  If one cpv is None then the other is greater.
  """
  if cpv1 is None and cpv2 is None:

  # Let portage do the version-aware comparison on the split cpvs.
  return portage.versions.pkgcmp(portage.versions.pkgsplit(cpv1),
                                 portage.versions.pkgsplit(cpv2))
def _GetCatPkgFromCpv(cpv):
  """Returns category/package_name from a full |cpv|.

  If |cpv| is incomplete, may return only the package_name.

  If package_name cannot be determined, return None.
  """
  # Result is None or (cat, pn, version, rev)
  result = portage.versions.catpkgsplit(cpv)

  # This appears to be a quirk of portage? Category string == 'null'.
  if result[0] is None or result[0] == 'null':
  return '%s/%s' % (result[0], result[1])
def _GetVerRevFromCpv(cpv):
  """Returns just the version-revision string from a full |cpv|."""
  # Result is None or (cat, pn, version, rev)
  result = portage.versions.catpkgsplit(cpv)
  (version, rev) = result[2:4]
  # e.g. version '1.2.3' with rev 'r1' -> '1.2.3-r1'.
  return '%s-%s' % (version, rev)
def _GetEbuildPathFromCpv(cpv):
  """Returns the relative path to ebuild for |cpv|."""
  # Result is None or (cat, pn, version, rev)
  result = portage.versions.catpkgsplit(cpv)
  (cat, pn, _version, _rev) = result
  # e.g. 'cat/pn-1.0-r1' -> 'cat/pn/pn-1.0-r1.ebuild'
  ebuild = cpv.replace(cat + '/', '') + '.ebuild'
  return os.path.join(cat, pn, ebuild)
def _RunGit(self, cwd, command, redirect_stdout=False,
            combine_stdout_stderr=False):
  """Runs git |command| (a list of command tokens) in |cwd|.

  This leverages the cros_build_lib.RunCommand function.  The
  |redirect_stdout| and |combine_stdout_stderr| arguments are
  passed to that function.

  Returns a Result object as documented by cros_build_lib.RunCommand.
  Most usefully, the result object has a .output attribute containing
  the output from the command (if |redirect_stdout| was True).
  """
  # GIT_PAGER=cat disables the vi-like output viewer for commands
  # like 'git show'.
  full_cmd = ['git'] + command
  result = cros_build_lib.RunCommand(
      full_cmd, cwd=cwd, extra_env={'GIT_PAGER': 'cat'},
      print_cmd=self._verbose, redirect_stdout=redirect_stdout,
      combine_stdout_stderr=combine_stdout_stderr)
  return result
def _SplitEBuildPath(self, ebuild_path):
  """Split a full ebuild path into (overlay, cat, pn, pv).

  E.g. '.../overlay/cat/pn/pn-1.0.ebuild' ->
  ('overlay', 'cat', 'pn', 'pn-1.0').
  """
  # Peel components off the path from the right, one at a time.
  remainder = os.path.splitext(ebuild_path)[0]
  remainder, pv = os.path.split(remainder)
  remainder, pn = os.path.split(remainder)
  remainder, cat = os.path.split(remainder)
  overlay = os.path.split(remainder)[1]
  return (overlay, cat, pn, pv)
def _GenPortageEnvvars(self, arch, unstable_ok, portdir=None,
                       portage_configroot=None):
  """Returns dictionary of envvars for running portage tools.

  If |arch| is set, then ACCEPT_KEYWORDS will be included and set
  according to |unstable_ok|.

  PORTDIR is set to |portdir| value, if not None.
  PORTAGE_CONFIGROOT is set to |portage_configroot| value, if not None.
  """
  # '~<arch>' additionally accepts ebuilds marked unstable for |arch|.
  envvars['ACCEPT_KEYWORDS'] = arch + ' ~' + arch
  envvars['ACCEPT_KEYWORDS'] = arch

  if portdir is not None:
    envvars['PORTDIR'] = portdir
    # Since we are clearing PORTDIR, we also have to clear PORTDIR_OVERLAY
    # as most of those repos refer to the "normal" PORTDIR and will dump a
    # lot of warnings if it can't be found.
    envvars['PORTDIR_OVERLAY'] = portdir
  if portage_configroot is not None:
    envvars['PORTAGE_CONFIGROOT'] = portage_configroot
def _FindUpstreamCPV(self, pkg, unstable_ok=False):
  """Returns latest cpv in |_upstream| that matches |pkg|.

  The |pkg| argument can specify as much or as little of the full CPV
  syntax as desired, exactly as accepted by the Portage 'equery' command.
  To find whether an exact version exists upstream specify the full
  CPV. To find the latest version specify just the category and package
  name.

  Results are filtered by architecture keyword using |self._curr_arch|.
  By default, only ebuilds stable on that arch will be accepted. To
  accept unstable ebuilds, set |unstable_ok| to True.

  Returns upstream cpv, if found.
  """
  envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok,
                                    portdir=self._upstream,
                                    portage_configroot=self._emptydir)

  # Point equery to the upstream source to get latest version for keywords.
  equery = ['equery', 'which', pkg]
  cmd_result = cros_build_lib.RunCommand(
      equery, extra_env=envvars, print_cmd=self._verbose,
      error_code_ok=True, redirect_stdout=True, combine_stdout_stderr=True)

  if cmd_result.returncode == 0:
    ebuild_path = cmd_result.output.strip()
    (_overlay, cat, _pn, pv) = self._SplitEBuildPath(ebuild_path)
    # Reassemble 'cat/pv' from the ebuild path pieces.
    return os.path.join(cat, pv)
def _GetBoardCmd(self, cmd):
  """Return the board-specific version of |cmd|, if applicable."""
  # Only commands in BOARD_CMDS have per-board wrappers.
  if cmd in self.BOARD_CMDS:
    # Host "board" is a special case.
    if self._curr_board != self.HOST_BOARD:
      return '%s-%s' % (cmd, self._curr_board)
def _AreEmergeable(self, cpvlist):
  """Indicate whether cpvs in |cpvlist| can be emerged on current board.

  This essentially runs emerge with the --pretend option to verify
  that all dependencies for these package versions are satisfied.

  Returns:
    Tuple with three elements:
    [0] True if |cpvlist| can be emerged.
    [1] The emerge command that was run, joined as one string.
    [2] Output from the emerge command.
  """
  envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=False)
  emerge = self._GetBoardCmd(self.EMERGE_CMD)
  cmd = [emerge, '-p'] + ['=' + cpv for cpv in cpvlist]
  result = cros_build_lib.RunCommand(
      cmd, error_code_ok=True, extra_env=envvars, print_cmd=False,
      redirect_stdout=True, combine_stdout_stderr=True)

  return (result.returncode == 0, ' '.join(cmd), result.output)
def _FindCurrentCPV(self, pkg):
  """Returns current cpv on |_curr_board| that matches |pkg|, or None."""
  envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=False)
  equery = self._GetBoardCmd(self.EQUERY_CMD)
  cmd = [equery, '-C', 'which', pkg]
  cmd_result = cros_build_lib.RunCommand(
      cmd, error_code_ok=True, extra_env=envvars, print_cmd=False,
      redirect_stdout=True, combine_stdout_stderr=True)
  if cmd_result.returncode == 0:
    ebuild_path = cmd_result.output.strip()
    (_overlay, cat, _pn, pv) = self._SplitEBuildPath(ebuild_path)
    # Reassemble 'cat/pv' from the ebuild path pieces.
    return os.path.join(cat, pv)
def _SetUpgradedMaskBits(self, pinfo):
  """Set pinfo.upgraded_unmasked."""
  cpv = pinfo.upgraded_cpv
  envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=False)
  equery = self._GetBoardCmd('equery')
  cmd = [equery, '-qCN', 'list', '-F', '$mask|$cpv:$slot', '-op', cpv]
  result = cros_build_lib.RunCommand(
      cmd, error_code_ok=True, extra_env=envvars, print_cmd=False,
      redirect_stdout=True, combine_stdout_stderr=True)
  output = result.output
  if result.returncode:
    raise RuntimeError('equery failed on us:\n %s\noutput:\n %s'
                       % (' '.join(cmd), output))
  # Expect output like one of these cases (~ == unstable, M == masked):
  # ~|sys-fs/fuse-2.7.3:0
  # |sys-fs/fuse-2.7.3:0
  # M |sys-fs/fuse-2.7.3:0
  # M~|sys-fs/fuse-2.7.3:0
  for line in output.split('\n'):
    mask = line.split('|')[0]
    # Unmasked unless the mask field begins with 'M'.
    pinfo.upgraded_unmasked = 'M' != mask[0]
  raise RuntimeError('Unable to determine whether %s is stable from equery:\n'
                     ' %s\noutput:\n %s' % (cpv, ' '.join(cmd), output))
def _VerifyEbuildOverlay(self, cpv, expected_overlay, was_overwrite):
  """Raises exception if ebuild for |cpv| is not from |expected_overlay|.

  Essentially, this verifies that the upgraded ebuild in portage-stable
  is indeed the one being picked up, rather than some other ebuild with
  the same version in another overlay. Unless |was_overwrite| (see below).

  If |was_overwrite| then this upgrade was an overwrite of an existing
  package version (via --force) and it is possible the previous package
  is still in another overlay (e.g. chromiumos-overlay). In this case,
  the user should get rid of the other version first.
  """
  # Further explanation: this check should always pass, but might not
  # if the copy/upgrade from upstream did not work. This is just a
  envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=False)
  equery = self._GetBoardCmd(self.EQUERY_CMD)
  cmd = [equery, '-C', 'which', '--include-masked', cpv]
  result = cros_build_lib.RunCommand(
      cmd, error_code_ok=True, extra_env=envvars, print_cmd=False,
      redirect_stdout=True, combine_stdout_stderr=True)
  ebuild_path = result.output.strip()
  (overlay, _cat, _pn, _pv) = self._SplitEBuildPath(ebuild_path)
  if overlay != expected_overlay:
    # After a --force overwrite, a pre-existing ebuild in another overlay
    # can shadow the upgraded one; the user must remove it.
    raise RuntimeError('Upgraded ebuild for %s is not visible because'
                       ' existing ebuild in %s overlay takes precedence\n'
                       'Please remove that ebuild before continuing.' %
    raise RuntimeError('Upgraded ebuild for %s is not coming from %s:\n'
                       'Please show this error to the build team.' %
                       (cpv, expected_overlay, ebuild_path))
def _IdentifyNeededEclass(self, cpv):
  """Return eclass that must be upgraded for this |cpv|."""
  # Try to detect two cases:
  # 1) The upgraded package uses an eclass not in local source, yet.
  # 2) The upgraded package needs one or more eclasses to also be upgraded.

  # Use the output of 'equery which'.
  # If a needed eclass cannot be found, then the output will have lines like:
  # * ERROR: app-admin/eselect-1.2.15 failed (depend phase):
  # * bash-completion-r1.eclass could not be found by inherit()

  # If a needed eclass must be upgraded, the output might have the eclass
  # in the call stack (... used for long paths):
  # * ebuild.sh, line 2047: Called source '.../vim-7.3.189.ebuild'
  # * vim-7.3.189.ebuild, line 7: Called inherit 'vim'
  # * ebuild.sh, line 1410: Called qa_source '.../vim.eclass'
  # * ebuild.sh, line 43: Called source '.../vim.eclass'
  # * vim.eclass, line 40: Called die
  # * The specific snippet of code:
  # * die "Unknown EAPI ${EAPI}"

  envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=True)
  equery = self._GetBoardCmd(self.EQUERY_CMD)
  cmd = [equery, '-C', '--no-pipe', 'which', cpv]
  result = cros_build_lib.RunCommand(
      cmd, error_code_ok=True, extra_env=envvars, print_cmd=False,
      redirect_stdout=True, combine_stdout_stderr=True)
  if result.returncode != 0:
    output = result.output.strip()

    # _missing_eclass_re works line by line.
    for line in output.split('\n'):
      match = self._missing_eclass_re.search(line)
      eclass = match.group(1)
      oper.Notice('Determined that %s requires %s' % (cpv, eclass))

    # _outdated_eclass_re works on the entire output at once.
    match = self._outdated_eclass_re.search(output)
    eclass = match.group(1)
    oper.Notice('Making educated guess that %s requires update of %s' %
def _GiveMaskedError(self, upgraded_cpv, emerge_output):
  """Print error saying that |upgraded_cpv| is masked off.

  See if hint found in |emerge_output| to improve error message.
  """
  # Expecting emerge_output to have lines like this:
  # The following mask changes are necessary to proceed:
  # #required by ... =somecategory/somepackage (some reason)
  # # /home/mtennant/trunk/src/third_party/chromiumos-overlay/profiles\
  # /targets/chromeos/package.mask:
  upgraded_cp = Upgrader._GetCatPkgFromCpv(upgraded_cpv)
  # Look for the package.mask file responsible for masking this cat/pkg.
  regexp = re.compile(r'#\s*required by.+=\S+.*\n'
                      r'#\s*(\S+/package\.mask):\s*\n'
                      '[<>=]+%s' % upgraded_cp)
  match = regexp.search(emerge_output)
  package_mask = match.group(1)
  oper.Error('\nUpgraded package "%s" appears to be masked by a line in\n'
             'Full emerge output is above. Address mask issue, '
             'then run this again.' %
             (upgraded_cpv, package_mask))
  # Fallback message when no specific package.mask file was identified.
  oper.Error('\nUpgraded package "%s" is masked somehow (See full '
             'emerge output above). Address that and then run this '
             'again.' % upgraded_cpv)
def _PkgUpgradeStaged(self, upstream_cpv):
  """Return True if package upgrade is already staged."""
  ebuild_path = Upgrader._GetEbuildPathFromCpv(upstream_cpv)
  status = self._stable_repo_status.get(ebuild_path, None)
  # Staged means the new ebuild shows as 'A' (added) in 'git status -s'.
  if status and 'A' == status:
def _AnyChangesStaged(self):
  """Return True if any local changes are staged in stable repo.

  A file with '??' status is untracked, not staged, so it does not count.
  """
  # any() short-circuits and avoids building a throwaway list just to
  # test for emptiness (the old bool(len(...)) idiom).
  return any(status != '??' for status in self._stable_repo_status.values())
def _StashChanges(self):
  """Run 'git stash save' on stable repo.

  Only one level of stashing is expected/supported.
  """
  stash_cmd = ['stash', 'save']
  self._RunGit(self._stable_repo, stash_cmd,
               redirect_stdout=True, combine_stdout_stderr=True)
  # Remember that a stash exists so it can be popped/dropped later.
  self._stable_repo_stashed = True
def _UnstashAnyChanges(self):
  """Unstash any changes in stable repo.

  Only one level of stashing is expected/supported.
  """
  # Nothing to do unless a stash was created earlier in this run.
  if not self._stable_repo_stashed:
    return
  self._RunGit(self._stable_repo, ['stash', 'pop', '--index'],
               redirect_stdout=True, combine_stdout_stderr=True)
  self._stable_repo_stashed = False
def _DropAnyStashedChanges(self):
  """Drop any stashed changes in stable repo.

  Only one level of stashing is expected/supported.
  """
  # Nothing to do unless a stash was created earlier in this run.
  if not self._stable_repo_stashed:
    return
  self._RunGit(self._stable_repo, ['stash', 'drop'],
               redirect_stdout=True, combine_stdout_stderr=True)
  self._stable_repo_stashed = False
def _CopyUpstreamPackage(self, upstream_cpv):
  """Copy the package given by |upstream_cpv| from upstream into portage-stable.

  Returns:
    The upstream_cpv if the package was upgraded, None otherwise.
  """
  oper.Notice('Copying %s from upstream.' % upstream_cpv)
  (cat, pkgname, _version, _rev) = portage.versions.catpkgsplit(upstream_cpv)
  ebuild = upstream_cpv.replace(cat + '/', '') + '.ebuild'
  catpkgsubdir = os.path.join(cat, pkgname)
  pkgdir = os.path.join(self._stable_repo, catpkgsubdir)
  upstream_pkgdir = os.path.join(self._upstream, cat, pkgname)

  # Fail early if upstream_cpv ebuild is not found
  upstream_ebuild_path = os.path.join(upstream_pkgdir, ebuild)
  if not os.path.exists(upstream_ebuild_path):
    # Note: this should only be possible during unit tests.
    raise RuntimeError('Cannot find upstream ebuild at "%s"' %
                       upstream_ebuild_path)

  # If pkgdir already exists, remove everything in it except Manifest.
  # Note that git will remove a parent directory when it removes
  # the last item in the directory.
  if os.path.exists(pkgdir):
    items = os.listdir(pkgdir)
    items = [os.path.join(catpkgsubdir, i) for i in items if i != 'Manifest']
    args = ['rm', '-rf', '--ignore-unmatch'] + items
    self._RunGit(self._stable_repo, args, redirect_stdout=True)
    # Now delete any files that git doesn't know about.
    osutils.SafeUnlink(os.path.join(self._stable_repo, item))

  osutils.SafeMakedirs(pkgdir)

  # Grab all non-blacklisted, non-ebuilds from upstream plus the specific
  # ebuild requested.
  items = os.listdir(upstream_pkgdir)
  blacklisted = [b for b in BLACKLISTED_FILES
                 if fnmatch.fnmatch(os.path.basename(item), b)]
  if not item.endswith('.ebuild') or item == ebuild:
    src = os.path.join(upstream_pkgdir, item)
    dst = os.path.join(pkgdir, item)
    # Directories (e.g. files/) are copied recursively, symlinks preserved.
    if os.path.isdir(src):
      shutil.copytree(src, dst, symlinks=True)
      shutil.copy2(src, dst)

  # Create a new Manifest file for this package.
  self._CreateManifest(upstream_pkgdir, pkgdir, ebuild)

  # Now copy any eclasses that this package requires.
  eclass = self._IdentifyNeededEclass(upstream_cpv)
  while eclass and self._CopyUpstreamEclass(eclass):
    eclass = self._IdentifyNeededEclass(upstream_cpv)
def _StabilizeEbuild(self, ebuild_path):
  """Mark the ebuild at |ebuild_path| stable for every architecture."""
  oper.Notice('Editing %r to mark it stable for everyone' % ebuild_path)

  # Matches any KEYWORDS="..." assignment; groups 1/2 keep the quoting so
  # only the keyword list itself is replaced with "*".
  keywords_pattern = re.compile(r'^(\s*KEYWORDS=")[^"]*(")', re.MULTILINE)

  ebuild_text = osutils.ReadFile(ebuild_path)
  ebuild_text = keywords_pattern.sub(r'\1*\2', ebuild_text)
  osutils.WriteFile(ebuild_path, ebuild_text)
def _CreateManifest(self, upstream_pkgdir, pkgdir, ebuild):
  """Create a trusted Manifest from available Manifests.

  Combine the current Manifest in |pkgdir| (if it exists) with
  the Manifest from |upstream_pkgdir| to create a new trusted
  Manifest. Supplement with 'ebuild manifest' command.

  It is assumed that a Manifest exists in |upstream_pkgdir|, but
  there may not be one in |pkgdir|. The new |ebuild| in pkgdir
  should be used for 'ebuild manifest' command.

  The algorithm is this:
  1) Remove all lines in upstream Manifest that duplicate
  lines in current Manifest.
  2) Concatenate the result of 1) onto the current Manifest.
  3) Run 'ebuild manifest' to add to results.
  """
  upstream_manifest = os.path.join(upstream_pkgdir, 'Manifest')
  current_manifest = os.path.join(pkgdir, 'Manifest')

  if os.path.exists(current_manifest):
    # Determine which files have DIST entries in current_manifest.
    with open(current_manifest, 'r') as f:
      tokens = line.split()
      if len(tokens) > 1 and tokens[0] == 'DIST':

    # Find DIST lines in upstream manifest not overlapping with current.
    with open(upstream_manifest, 'r') as f:
      tokens = line.split()
      if len(tokens) > 1 and tokens[0] == 'DIST' and tokens[1] not in dists:
        new_lines.append(line)

    # Write all new_lines to current_manifest.
    with open(current_manifest, 'a') as f:
      f.writelines(new_lines)
  # Use upstream_manifest as a starting point.
  shutil.copyfile(upstream_manifest, current_manifest)

  manifest_cmd = ['ebuild', os.path.join(pkgdir, ebuild), 'manifest']
  manifest_result = cros_build_lib.RunCommand(
      manifest_cmd, error_code_ok=True, print_cmd=False,
      redirect_stdout=True, combine_stdout_stderr=True)
  if manifest_result.returncode != 0:
    raise RuntimeError('Failed "ebuild manifest" for upgraded package.\n'
                       'Output of %r:\n%s' %
                       (' '.join(manifest_cmd), manifest_result.output))
def _CopyUpstreamEclass(self, eclass):
  """Upgrades eclass in |eclass| to upstream copy.

  Does not do the copy if the eclass already exists locally and
  is identical to the upstream version.

  Returns:
    True if the copy was done.
  """
  eclass_subpath = os.path.join('eclass', eclass)
  upstream_path = os.path.join(self._upstream, eclass_subpath)
  local_path = os.path.join(self._stable_repo, eclass_subpath)
  if os.path.exists(upstream_path):
    # Identical local copy means there is nothing to do.
    if os.path.exists(local_path) and filecmp.cmp(upstream_path, local_path):
    oper.Notice('Copying %s from upstream.' % eclass)
    osutils.SafeMakedirs(os.path.dirname(local_path))
    shutil.copy2(upstream_path, local_path)
    self._RunGit(self._stable_repo, ['add', eclass_subpath])
  raise RuntimeError('Cannot find upstream "%s". Looked at "%s"' %
                     (eclass, upstream_path))
def _GetPackageUpgradeState(self, pinfo):
  """Return state value for package in |pinfo|."""
  # See whether this specific cpv exists upstream.
  cpv_exists_upstream = bool(cpv and
                             self._FindUpstreamCPV(cpv, unstable_ok=True))

  # The value in pinfo.cpv_cmp_upstream represents a comparison of cpv
  # version and the upstream version, where:
  # 0 = current, >0 = outdated, <0 = futuristic!

  # Convention is that anything not in portage overlay has been altered.
  overlay = pinfo.overlay
  locally_patched = (overlay != NOT_APPLICABLE and
                     overlay != self.UPSTREAM_OVERLAY_NAME and
                     overlay != self.STABLE_OVERLAY_NAME)
  locally_duplicated = locally_patched and cpv_exists_upstream

  # Gather status details for this package
  if pinfo.cpv_cmp_upstream is None:
    # No upstream cpv to compare to (although this might include a
    # a restriction to only stable upstream versions). This is concerning
    # if the package is coming from 'portage' or 'portage-stable' overlays.
    if locally_patched and pinfo.latest_upstream_cpv is None:
      state = utable.UpgradeTable.STATE_LOCAL_ONLY
      state = utable.UpgradeTable.STATE_UPSTREAM_ONLY
      state = utable.UpgradeTable.STATE_UNKNOWN
  elif pinfo.cpv_cmp_upstream > 0:
    # Outdated locally; severity depends on local patching/duplication.
    if locally_duplicated:
      state = utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_DUPLICATED
    elif locally_patched:
      state = utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_PATCHED
      state = utable.UpgradeTable.STATE_NEEDS_UPGRADE
  elif locally_duplicated:
    state = utable.UpgradeTable.STATE_DUPLICATED
  elif locally_patched:
    state = utable.UpgradeTable.STATE_PATCHED
    state = utable.UpgradeTable.STATE_CURRENT
# TODO(mtennant): Generate output from finished table instead.
def _PrintPackageLine(self, pinfo):
  """Print a brief one-line report of package status."""
  upstream_cpv = pinfo.upstream_cpv
  if pinfo.upgraded_cpv:
    action_stat = ' (UPGRADED)'
  # Map each state to a short human-readable suffix for the line.
  up_stat = {utable.UpgradeTable.STATE_UNKNOWN: ' no package found upstream!',
             utable.UpgradeTable.STATE_LOCAL_ONLY: ' (exists locally only)',
             utable.UpgradeTable.STATE_NEEDS_UPGRADE: ' -> %s' % upstream_cpv,
             utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_PATCHED:
             ' <-> %s' % upstream_cpv,
             utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_DUPLICATED:
             ' (locally duplicated) <-> %s' % upstream_cpv,
             utable.UpgradeTable.STATE_PATCHED: ' <- %s' % upstream_cpv,
             utable.UpgradeTable.STATE_DUPLICATED: ' (locally duplicated)',
             utable.UpgradeTable.STATE_CURRENT: ' (current)',
  oper.Info('[%s] %s%s%s' % (pinfo.overlay, pinfo.cpv,
                             up_stat, action_stat))
def _AppendPackageRow(self, pinfo):
  """Add a row to status table for the package in |pinfo|."""
  upgraded_cpv = pinfo.upgraded_cpv
  upgraded_ver = Upgrader._GetVerRevFromCpv(upgraded_cpv)

  # Assemble 'depends on' and 'required by' strings.
  depsstr = NOT_APPLICABLE
  usedstr = NOT_APPLICABLE
  if cpv and self._deps_graph:
    deps_entry = self._deps_graph[cpv]
    depslist = sorted(deps_entry['needs'].keys()) # dependencies
    depsstr = ' '.join(depslist)
    usedset = deps_entry['provides'] # used by
    usedlist = sorted([p for p in usedset])
    usedstr = ' '.join(usedlist)

  # Upstream versions fall back to N/A when not found.
  stable_up_ver = Upgrader._GetVerRevFromCpv(pinfo.stable_upstream_cpv)
  if not stable_up_ver:
    stable_up_ver = NOT_APPLICABLE
  latest_up_ver = Upgrader._GetVerRevFromCpv(pinfo.latest_upstream_cpv)
  if not latest_up_ver:
    latest_up_ver = NOT_APPLICABLE

  row = {self._curr_table.COL_PACKAGE: pinfo.package,
         self._curr_table.COL_SLOT: pinfo.slot,
         self._curr_table.COL_OVERLAY: pinfo.overlay,
         self._curr_table.COL_CURRENT_VER: pinfo.version_rev,
         self._curr_table.COL_STABLE_UPSTREAM_VER: stable_up_ver,
         self._curr_table.COL_LATEST_UPSTREAM_VER: latest_up_ver,
         self._curr_table.COL_STATE: pinfo.state,
         self._curr_table.COL_DEPENDS_ON: depsstr,
         self._curr_table.COL_USED_BY: usedstr,
         self._curr_table.COL_TARGET: ' '.join(self._targets),

  # Only include if upgrade was involved. Table may not have this column
  # if upgrade was not requested.
  row[self._curr_table.COL_UPGRADED] = upgraded_ver

  self._curr_table.AppendRow(row)
  def _UpgradePackage(self, pinfo):
    """Gathers upgrade status for pkg, performs upgrade if requested.

    The upgrade is performed only if the package is outdated and --upgrade
    was requested.

    The |pinfo| must have the following entries:
    package, category, package_name

    Regardless, the following attributes in |pinfo| are filled in:
    stable_upstream_cpv
    latest_upstream_cpv
    upstream_cpv (one of the above, depending on --stable-only option)
    upgrade_cpv (if upgrade performed)

    Returns:
      True if an upgrade was staged (upgraded_cpv was set), else False.
    """
    catpkg = pinfo.package
    pinfo.stable_upstream_cpv = self._FindUpstreamCPV(catpkg)
    pinfo.latest_upstream_cpv = self._FindUpstreamCPV(catpkg,
    # NOTE(review): the second argument to the call above is elided in this
    # listing (presumably an unstable-ok flag) -- confirm in the full file.

    # The upstream version can be either latest stable or latest overall,
    # or specified explicitly by the user at the command line. In the latter
    # case, 'upstream_cpv' will already be set.
    if not pinfo.upstream_cpv:
      if not self._unstable_ok:
        pinfo.upstream_cpv = pinfo.stable_upstream_cpv
        # NOTE(review): an else branch is elided before the next line.
        pinfo.upstream_cpv = pinfo.latest_upstream_cpv

    # Perform the actual upgrade, if requested.
    pinfo.cpv_cmp_upstream = None
    pinfo.upgraded_cpv = None
    if pinfo.upstream_cpv:
      # cpv_cmp_upstream values: 0 = current, >0 = outdated, <0 = futuristic!
      # NOTE(review): 'cpv' has no visible assignment in this listing.
      pinfo.cpv_cmp_upstream = Upgrader._CmpCpv(pinfo.upstream_cpv, cpv)

      # Determine whether upgrade of this package is requested.
      if self._PkgUpgradeRequested(pinfo):
        if self._PkgUpgradeStaged(pinfo.upstream_cpv):
          oper.Notice('Determined that %s is already staged.' %
          pinfo.upgraded_cpv = pinfo.upstream_cpv
        elif pinfo.cpv_cmp_upstream > 0:
          pinfo.upgraded_cpv = self._CopyUpstreamPackage(pinfo.upstream_cpv)
        elif pinfo.cpv_cmp_upstream == 0:
          # NOTE(review): a guard (presumably checking a --force option) is
          # elided before the next line.
          oper.Notice('Forcing upgrade of existing %s.' %
          upgraded_cpv = self._CopyUpstreamPackage(pinfo.upstream_cpv)
          pinfo.upgraded_cpv = upgraded_cpv
          oper.Warning('Not upgrading %s; it already exists in source.\n'
                       'To force upgrade of this version specify --force.' %
    elif self._PkgUpgradeRequested(pinfo):
      raise RuntimeError('Unable to find upstream package for upgrading %s.' %

    if pinfo.upgraded_cpv:
      # Deal with keywords now. We always run this logic as we sometimes will
      # stabilizing keywords other than just our own (the unsupported arches).
      self._SetUpgradedMaskBits(pinfo)
      ebuild_path = Upgrader._GetEbuildPathFromCpv(pinfo.upgraded_cpv)
      self._StabilizeEbuild(os.path.join(self._stable_repo, ebuild_path))

      # Add all new package files to git.
      self._RunGit(self._stable_repo, ['add', pinfo.package])

      # Update profiles/categories.
      self._UpdateCategories(pinfo)

      # Regenerate the cache.  In theory, this might glob too much, but
      # in practice, this should be fine for now ...
      cache_files = 'metadata/md5-cache/%s-[0-9]*' % pinfo.package
      self._RunGit(self._stable_repo, ['rm', '--ignore-unmatch', '-q', '-f',

      cmd = ['egencache', '--update', '--repo=portage-stable', pinfo.package]
      egen_result = cros_build_lib.RunCommand(cmd, print_cmd=False,
                                              redirect_stdout=True,
                                              combine_stdout_stderr=True)
      if egen_result.returncode != 0:
        raise RuntimeError('Failed to regenerate md5-cache for %r.\n'
                           'Output of %r:\n%s' %
                           (pinfo.package, ' '.join(cmd), egen_result.output))

      self._RunGit(self._stable_repo, ['add', cache_files])

    return bool(pinfo.upgraded_cpv)
1058 def _UpdateCategories(self, pinfo):
1059 """Update profiles/categories to include category in |pinfo|, if needed."""
1061 if pinfo.category not in self._stable_repo_categories:
1062 self._stable_repo_categories.add(pinfo.category)
1063 self._WriteStableRepoCategories()
1065 def _VerifyPackageUpgrade(self, pinfo):
1066 """Verify that the upgraded package in |pinfo| passes checks."""
1067 self._VerifyEbuildOverlay(pinfo.upgraded_cpv, self.STABLE_OVERLAY_NAME,
1068 pinfo.cpv_cmp_upstream == 0)
  def _PackageReport(self, pinfo):
    """Report on whatever was done with package in |pinfo|."""
    # Derive the final state (current/outdated/patched/etc.) for this package.
    pinfo.state = self._GetPackageUpgradeState(pinfo)
    # NOTE(review): two lines are elided from this listing before the next
    # comment -- confirm against the full file.

    # Print a quick summary of package status.
    self._PrintPackageLine(pinfo)

    # Add a row to status table for this package
    self._AppendPackageRow(pinfo)
  def _ExtractUpgradedPkgs(self, upgrade_lines):
    """Extracts list of packages from standard commit |upgrade_lines|."""
    # Expecting message lines like this (return just package names):
    # Upgraded sys-libs/ncurses to version 5.7-r7 on amd64, arm, x86
    # Upgraded sys-apps/less to version 441 on amd64, arm
    # Upgraded sys-apps/less to version 442 on x86
    # NOTE(review): initialization of 'pkgs' (presumably a set) is elided.
    regexp = re.compile(r'^%s\s+\S+/(\S+)\s' % UPGRADED)
    for line in upgrade_lines:
      match = regexp.search(line)
      # NOTE(review): the guard on 'match' is elided before the next line.
      pkgs.add(match.group(1))
    # NOTE(review): the return statement is elided in this listing.
  def _CreateCommitMessage(self, upgrade_lines, remaining_lines=None):
    """Create appropriate git commit message for upgrades in |upgrade_lines|.

    |remaining_lines|, if given, is appended verbatim (used when amending a
    previous commit whose extra content should be preserved).
    """
    upgrade_pkgs = self._ExtractUpgradedPkgs(upgrade_lines)
    upgrade_count = len(upgrade_pkgs)
    upgrade_str = '\n'.join(upgrade_lines)
    # Short package lists go into the summary line; longer lists get a count.
    if upgrade_count < 6:
      message = ('%s: upgraded package%s to upstream' %
                 (', '.join(upgrade_pkgs), '' if upgrade_count == 1 else 's'))
      # NOTE(review): an else branch is elided before the next line.
      message = 'Upgraded the following %d packages' % upgrade_count
    message += '\n\n' + upgrade_str + '\n'

    # NOTE(review): a guard on remaining_lines is elided before the next line.
    # Keep previous remaining lines verbatim.
    message += '\n%s\n' % '\n'.join(remaining_lines)

    # The space before <fill-in> (at least for TEST=) fails pre-submit check,
    # which is the intention here.
    message += '\nBUG= <fill-in>'
    message += '\nTEST= <fill-in>'
    # NOTE(review): the return statement is elided in this listing.
  def _AmendCommitMessage(self, upgrade_lines):
    """Create git commit message combining |upgrade_lines| with last commit."""
    # First get the body of the last commit message.
    git_cmd = ['show', '-s', '--format=%b']
    result = self._RunGit(self._stable_repo, git_cmd, redirect_stdout=True)
    body = result.output

    remaining_lines = []
    # Extract the upgrade_lines of last commit.  Everything after the
    # empty line is preserved verbatim.
    # Expecting message body like this:
    # Upgraded sys-libs/ncurses to version 5.7-r7 on amd64, arm, x86
    # Upgraded sys-apps/less to version 441 on amd64, arm, x86
    #
    # BUG=chromium-os:20923
    # TEST=trybot run of chromiumos-sdk
    # NOTE(review): initialization of 'before_break' (presumably True) is
    # elided from this listing.
    for line in body.split('\n'):
      if not before_break:
        remaining_lines.append(line)
        # NOTE(review): a branch header is elided before the next line.
        if re.search(r'^%s\s+' % UPGRADED, line):
          upgrade_lines.append(line)
          # NOTE(review): another branch header is elided before the next
          # comment block.
          # If the lines in the message body are not in the expected
          # format simply push them to the end of the new commit
          # message body, but left intact.
          oper.Warning('It looks like the existing commit message '
                       'that you are amending was not generated by\n'
                       'this utility. Appending previous commit '
                       'message to newly generated message.')
          before_break = False
          remaining_lines.append(line)
          before_break = False

    return self._CreateCommitMessage(upgrade_lines, remaining_lines)
  def _GiveEmergeResults(self, pinfolist):
    """Summarize emerge checks, raise RuntimeError if there is a problem."""
    upgraded_pinfos = [pinfo for pinfo in pinfolist if pinfo.upgraded_cpv]
    upgraded_cpvs = [pinfo.upgraded_cpv for pinfo in upgraded_pinfos]
    masked_cpvs = set([pinfo.upgraded_cpv for pinfo in upgraded_pinfos
                       if not pinfo.upgraded_unmasked])

    (ok, cmd, output) = self._AreEmergeable(upgraded_cpvs)

    # NOTE(review): branching on 'ok' and 'masked_cpvs' is elided from this
    # listing; the statements below belong to those elided branches.
    # If any of the upgraded_cpvs are masked, then emerge should have
    # failed.  Give a helpful message.  If it didn't fail then panic.
    raise RuntimeError('Emerge passed for masked package(s)!  Something '
                       'fishy here. Emerge output follows:\n%s\n'
                       'Show this to the build team.' % output)

    oper.Error('\nEmerge output for "%s" on %s follows:' %
               (cmd, self._curr_arch))
    for masked_cpv in masked_cpvs:
      self._GiveMaskedError(masked_cpv, output)
    raise RuntimeError('\nOne or more upgraded packages are masked '

    oper.Notice('Confirmed that all upgraded packages can be emerged '
                'on %s after upgrade.' % self._curr_board)
    oper.Error('Packages cannot be emerged after upgrade.  The output '
               'of "%s" follows:' % cmd)
    raise RuntimeError('Failed to complete upgrades on %s (see above). '
                       'Address the emerge errors before continuing.' %
  def _UpgradePackages(self, pinfolist):
    """Given a list of cpv pinfos, adds the upstream cpv to the pinfos."""
    self._curr_table.Clear()
    # NOTE(review): a try: header is elided before the following statements
    # (the except clause appears near the end of the method).
    upgrades_this_run = False
    for pinfo in pinfolist:
      if self._UpgradePackage(pinfo):
        self._upgrade_cnt += 1
        upgrades_this_run = True

    # The verification of upgrades needs to happen after upgrades are done.
    # The reason is that it cannot be guaranteed that pinfolist is ordered
    # such that dependencies are satisified after each individual upgrade,
    # because one or more of the packages may only exist upstream.
    for pinfo in pinfolist:
      if pinfo.upgraded_cpv:
        self._VerifyPackageUpgrade(pinfo)

      self._PackageReport(pinfo)

    if upgrades_this_run:
      self._GiveEmergeResults(pinfolist)

    if self._IsInUpgradeMode():
      # If there were any ebuilds staged before running this script, then
      # make sure they were targeted in pinfolist.  If not, abort.
      self._CheckStagedUpgrades(pinfolist)
    except RuntimeError as ex:
      # NOTE(review): one or two statements are elided at the start of this
      # handler in this listing.
      raise RuntimeError('\nTo reset all changes in %s now:\n'
                         ' cd %s; git reset --hard; cd -' %
                         (self._stable_repo, self._stable_repo))
      # Allow the changes to stay staged so that the user can attempt to address
      # the issue (perhaps an edit to package.mask is required, or another
      # package must also be upgraded).
  def _CheckStagedUpgrades(self, pinfolist):
    """Raise RuntimeError if staged upgrades are not also in |pinfolist|."""
    # This deals with the situation where a previous upgrade run staged one or
    # more package upgrades, but did not commit them because it found an error
    # of some kind.  This is ok, as long as subsequent runs continue to request
    # an upgrade of that package again (presumably with the problem fixed).
    # However, if a subsequent run does not mention that package then it should
    # abort.  The user must reset those staged changes first.

    if self._stable_repo_status:
      # NOTE(review): initialization of 'err_msgs' (presumably a list) is
      # elided from this listing.
      # Go over files with pre-existing git statuses.
      filelist = self._stable_repo_status.keys()
      ebuilds = [e for e in filelist if e.endswith('.ebuild')]

      for ebuild in ebuilds:
        status = self._stable_repo_status[ebuild]
        (_overlay, cat, pn, _pv) = self._SplitEBuildPath(ebuild)
        package = '%s/%s' % (cat, pn)

        # As long as this package is involved in an upgrade this is fine.
        matching_pinfos = [pi for pi in pinfolist if pi.package == package]
        if not matching_pinfos:
          err_msgs.append('Staged %s (status=%s) is not an upgrade target.' %

      # NOTE(review): a guard on err_msgs is elided before the next line.
      raise RuntimeError('%s\n'
                         'Add to upgrade targets or reset staged changes.' %
                         '\n'.join(err_msgs))
  def _GenParallelEmergeArgv(self, args):
    """Creates an argv for parallel_emerge using current options and |args|."""
    # --emptytree/--pretend: compute the full dependency set without
    # installing anything.
    argv = ['--emptytree', '--pretend']
    if self._curr_board and self._curr_board != self.HOST_BOARD:
      argv.append('--board=%s' % self._curr_board)
    if not self._verbose:
      argv.append('--quiet')
    # NOTE(review): a guard (presumably on a --rdeps option) is elided
    # before the next line.
    argv.append('--root-deps=rdeps')
    # NOTE(review): appending |args| and the return statement are elided
    # from this listing.
1280 def _SetPortTree(self, settings, trees):
1281 """Set self._porttree from portage |settings| and |trees|."""
1282 root = settings['ROOT']
1283 self._porttree = trees[root]['porttree']
  def _GetPortageDBAPI(self):
    """Retrieve the Portage dbapi object, if available."""
    # NOTE(review): a try: header is elided before the next line, and the
    # body of the except clause (presumably returning None when no porttree
    # has been set) is elided after it.
    return self._porttree.dbapi
    except AttributeError:
  def _CreatePInfoFromCPV(self, cpv, cpv_key=None):
    """Return a basic pinfo object created from |cpv|."""
    # NOTE(review): creation of 'pinfo' (presumably PInfo()) and the return
    # statement are elided from this listing.
    self._FillPInfoFromCPV(pinfo, cpv, cpv_key)
  def _FillPInfoFromCPV(self, pinfo, cpv, cpv_key=None):
    """Flesh out |pinfo| from |cpv|.

    If |cpv_key| is given, also store |cpv| on |pinfo| under that attribute
    name via setattr.
    """
    pkg = Upgrader._GetCatPkgFromCpv(cpv)
    (cat, pn) = pkg.split('/')
    # NOTE(review): several attribute assignments are elided from this
    # listing between the statements below.
    pinfo.upstream_cpv = None
    pinfo.package_name = pn
    pinfo.category = cat
    # NOTE(review): a guard on cpv_key is elided before the next line.
    setattr(pinfo, cpv_key, cpv)
  def _GetCurrentVersions(self, target_pinfolist):
    """Returns a list of pkg pinfos of the current package dependencies.

    The dependencies are taken from giving the 'package' values in each
    pinfo of |target_pinfolist| to (parallel_)emerge.

    The returned list is ordered such that the dependencies of any mentioned
    package occur earlier in the list.
    """
    # NOTE(review): initialization of 'emerge_args' is elided from this
    # listing.
    for pinfo in target_pinfolist:
      local_cpv = pinfo.cpv
      # Prefer an exact =cpv atom when a local cpv is known ('world' has no
      # cpv form); otherwise fall back to the plain package atom.
      if local_cpv and local_cpv != WORLD_TARGET:
        emerge_args.append('=' + local_cpv)
        # NOTE(review): an else branch is elided before the next line.
        emerge_args.append(pinfo.package)

    argv = self._GenParallelEmergeArgv(emerge_args)

    deps = parallel_emerge.DepGraphGenerator()
    deps.Initialize(argv)

    # NOTE(review): a try/except around the next call is elided from this
    # listing; the error reporting below belongs to the except clause.
    deps_tree, deps_info = deps.GenDependencyTree()
      oper.Error('Run of parallel_emerge exited with error while assembling'
                 ' package dependencies (error message should be above).\n'
                 'Command effectively was:\n%s' %
                 ' '.join(['parallel_emerge'] + argv))
      oper.Error('Address the source of the error, then run again.')

    self._SetPortTree(deps.emerge.settings, deps.emerge.trees)
    self._deps_graph = deps.GenDependencyGraph(deps_tree, deps_info)

    cpvlist = Upgrader._GetPreOrderDepGraph(self._deps_graph)
    # NOTE(review): initialization of 'pinfolist' and the loop header over
    # 'cpvlist' are elided from this listing.
    # See if this cpv was in target_pinfolist
    for pinfo in target_pinfolist:
      if cpv == pinfo.cpv:
        pinfolist.append(pinfo)
        # NOTE(review): control flow between the two appends is elided.
        pinfolist.append(self._CreatePInfoFromCPV(cpv, cpv_key='cpv'))
    # NOTE(review): the return statement is elided from this listing.
  def _FinalizeLocalPInfolist(self, orig_pinfolist):
    """Filters and fleshes out |orig_pinfolist|, returns new list.

    Each pinfo object is assumed to have entries for:
    cpv, package, package_name, category
    """
    # NOTE(review): initialization of 'pinfolist' is elided from this listing.
    for pinfo in orig_pinfolist:
      # No need to report or try to upgrade chromeos-base packages.
      if pinfo.category == 'chromeos-base':
        # NOTE(review): the body of this guard (presumably 'continue') is
        # elided.

      dbapi = self._GetPortageDBAPI()
      ebuild_path = dbapi.findname2(pinfo.cpv)[0]
      # NOTE(review): a guard on ebuild_path is elided before this comment.
      # This has only happened once.  See crosbug.com/26385.
      # In that case, this meant the package, while in the deps graph,
      # was actually to be uninstalled.  How is that possible?  The
      # package was newly added to package.provided.  So skip it.
      oper.Notice('Skipping %r from deps graph, as it appears to be'
                  ' scheduled for uninstall.' % pinfo.cpv)

      (overlay, _cat, pn, pv) = self._SplitEBuildPath(ebuild_path)
      ver_rev = pv.replace(pn + '-', '')
      slot, = dbapi.aux_get(pinfo.cpv, ['SLOT'])
      # NOTE(review): one or two assignments (presumably pinfo.slot) are
      # elided before the following lines.
      pinfo.overlay = overlay
      pinfo.version_rev = ver_rev
      pinfo.package_ver = pv

      pinfolist.append(pinfo)
    # NOTE(review): the return statement is elided from this listing.
  # TODO(mtennant): It is likely this method can be yanked now that all
  # attributes in PInfo are initialized to something (None).
  # TODO(mtennant): This should probably not return anything, since it
  # also modifies the list that is passed in.
  def _FinalizeUpstreamPInfolist(self, pinfolist):
    """Adds missing values in upstream |pinfolist|, returns list."""
    # Upstream-only packages have no local overlay/slot/version, so mark
    # those fields N/A for reporting purposes.
    for pinfo in pinfolist:
      pinfo.slot = NOT_APPLICABLE
      pinfo.overlay = NOT_APPLICABLE
      pinfo.version_rev = NOT_APPLICABLE
      pinfo.package_ver = NOT_APPLICABLE
    # NOTE(review): the trailing lines of this method (per the TODO above,
    # presumably 'return pinfolist') are elided from this listing.
  def _ResolveAndVerifyArgs(self, args, upgrade_mode):
    """Resolve |args| to full pkgs, and check validity of each.

    Each argument will be resolved to a full category/packagename, if possible,
    by looking in both the local overlays and the upstream overlay.  Any
    argument that cannot be resolved will raise a RuntimeError.

    Arguments that specify a specific version of a package are only
    allowed when |upgrade_mode| is True.

    The 'world' target is handled as a local package.

    Any errors will raise a RuntimeError.

    Return list of package pinfos, one for each argument.  Each will have:
    'user_arg' = Original command line argument package was resolved from
    'package' = Resolved category/package_name
    'package_name' = package_name
    'category' = category (None for 'world' target)
    Packages found in local overlays will also have:
    'cpv' = Current cpv ('world' for 'world' target)
    Packages found upstream will also have:
    'upstream_cpv' = Upstream cpv
    """
    # NOTE(review): initialization of 'pinfolist' and the loop header over
    # |args| are elided from this listing.
    pinfo = PInfo(user_arg=arg)

    if arg == WORLD_TARGET:
      # The 'world' target is a special case.  Consider it a valid target
      # locally, but not an upstream package.
      pinfo.package_name = arg
      pinfo.category = None
      # NOTE(review): an else branch is elided before the next line.
      catpkg = Upgrader._GetCatPkgFromCpv(arg)
      verrev = Upgrader._GetVerRevFromCpv(arg)

      if verrev and not upgrade_mode:
        raise RuntimeError('Specifying specific versions is only allowed '
                           'in upgrade mode.  Do not know what to do with '

      # Local cpv search ignores version in argument, if any.  If version is
      # in argument, though, it *must* be found upstream.
      local_arg = catpkg if catpkg else arg

      local_cpv = self._FindCurrentCPV(local_arg)
      upstream_cpv = self._FindUpstreamCPV(arg, self._unstable_ok)

      # Old-style virtual packages will resolve to their target packages,
      # which we do not want here because if the package 'virtual/foo' was
      # specified at the command line we want to try upgrading the actual
      # 'virtual/foo' package, not whatever package equery resolves it to.
      # This only matters when 'virtual/foo' is currently an old-style
      # virtual but a new-style virtual for it exists upstream which we
      # want to upgrade to.  For new-style virtuals, equery will resolve
      # 'virtual/foo' to 'virtual/foo', which is fine.
      if arg.startswith('virtual/'):
        if local_cpv and not local_cpv.startswith('virtual/'):
          # NOTE(review): the body of this guard is elided from this listing.

      if not upstream_cpv and upgrade_mode:
        # See if --unstable-ok is required for this upstream version.
        if not self._unstable_ok and self._FindUpstreamCPV(arg, True):
          raise RuntimeError('Upstream "%s" is unstable on %s.  Re-run with '
                             '--unstable-ok option?' % (arg, self._curr_arch))
          # NOTE(review): an else branch is elided before the next line.
          raise RuntimeError('Unable to find "%s" upstream on %s.' %
                             (arg, self._curr_arch))

      any_cpv = local_cpv if local_cpv else upstream_cpv
      # NOTE(review): a guard on any_cpv is elided before the next line.
      msg = ('Unable to resolve "%s" as a package either local or upstream.'
      if arg.find('/') < 0:
        msg = msg + ' Try specifying the full category/package_name.'

        raise RuntimeError(msg)

      self._FillPInfoFromCPV(pinfo, any_cpv)
      pinfo.cpv = local_cpv
      pinfo.upstream_cpv = upstream_cpv
      if local_cpv and upstream_cpv:
        oper.Notice('Resolved "%s" to "%s" (local) and "%s" (upstream).' %
                    (arg, local_cpv, upstream_cpv))
        # NOTE(review): elif branches are elided around the next two calls.
        oper.Notice('Resolved "%s" to "%s" (local).' %
        oper.Notice('Resolved "%s" to "%s" (upstream).' %
                    (arg, upstream_cpv))

    pinfolist.append(pinfo)
    # NOTE(review): the return statement is elided from this listing.
  def PrepareToRun(self):
    """Checkout upstream gentoo if necessary, and any other prep steps."""
    # A cache without .git/shallow was created by an older scheme; wipe it
    # so it is recreated below as a shallow clone.
    if not os.path.exists(os.path.join(
        self._upstream, '.git', 'shallow')):
      osutils.RmDir(self._upstream, ignore_missing=True)

    if os.path.exists(self._upstream):
      if self._local_only:
        oper.Notice('Using upstream cache as-is (no network) %s.' %
        # NOTE(review): an else branch is elided before this comment.
        # Recheck the pathway; it's possible in switching off alternates,
        # this was converted down to a depth=1 repo.
        oper.Notice('Updating previously created upstream cache at %s.' %
        self._RunGit(self._upstream, ['remote', 'set-url', 'origin',
                                      self.PORTAGE_GIT_URL])
        self._RunGit(self._upstream, ['remote', 'update'])
        self._RunGit(self._upstream, ['checkout', self.ORIGIN_GENTOO],
                     redirect_stdout=True, combine_stdout_stderr=True)
      # NOTE(review): an else branch is elided before the next line.
      if self._local_only:
        oper.Die('--local-only specified, but no local cache exists. '
                 'Re-run w/out --local-only to create cache automatically.')

      root = os.path.dirname(self._upstream)
      osutils.SafeMakedirs(root)
      # Create local copy of upstream gentoo.
      oper.Notice('Cloning origin/gentoo at %s as upstream reference.' %
      name = os.path.basename(self._upstream)
      args = ['clone', '--branch', os.path.basename(self.ORIGIN_GENTOO)]
      args += ['--depth', '1', self.PORTAGE_GIT_URL, name]
      self._RunGit(root, args)

      # Create a README file to explain its presence.
      with open(self._upstream + '-README', 'w') as f:
        f.write('Directory at %s is local copy of upstream '
                'Gentoo/Portage packages. Used by cros_portage_upgrade.\n'
                'Feel free to delete if you want the space back.\n' %

    # An empty directory is needed to trick equery later.
    self._emptydir = tempfile.mkdtemp()
  def RunCompleted(self):
    """Undo any checkout of upstream gentoo if requested."""
    if self._no_upstream_cache:
      oper.Notice('Removing upstream cache at %s as requested.'
      osutils.RmDir(self._upstream, ignore_missing=True)

      # Remove the README file, too.
      readmepath = self._upstream + '-README'
      osutils.SafeUnlink(readmepath)
      # NOTE(review): an else branch is elided before the next line.
      oper.Notice('Keeping upstream cache at %s.' % self._upstream)

    # Clean up the temporary empty directory created in PrepareToRun.
    osutils.RmDir(self._emptydir, ignore_missing=True)
1576 def CommitIsStaged(self):
1577 """Return True if upgrades are staged and ready for a commit."""
1578 return bool(self._upgrade_cnt)
    """Commit whatever has been prepared in the stable repo."""
    # Trying to create commit message body lines that look like these:
    # Upgraded foo/bar-1.2.3 to version 1.2.4 on x86
    # Upgraded foo/baz to version 2 on arm AND version 3 on amd64, x86

    commit_lines = []  # Lines for the body of the commit message
    pkg_overlays = {}  # Overlays for upgraded packages in non-portage overlays.

    # Assemble hash of COL_UPGRADED column names by arch.
    # NOTE(review): initialization of 'upgraded_cols' is elided from this
    # listing.
    for arch in self._master_archs:
      tmp_col = utable.UpgradeTable.COL_UPGRADED
      col = utable.UpgradeTable.GetColumnName(tmp_col, arch)
      upgraded_cols[arch] = col

    table = self._master_table
    # NOTE(review): the loop header over table rows is elided before the
    # next line.
      pkg = row[table.COL_PACKAGE]
      pkg_commit_line = None

      # First determine how many unique upgraded versions there are.
      upgraded_versarch = {}
      for arch in self._master_archs:
        upgraded_ver = row[upgraded_cols[arch]]
        # NOTE(review): a guard on upgraded_ver is elided before the next
        # line.
          # This package has been upgraded for this arch.
          upgraded_versarch.setdefault(upgraded_ver, []).append(arch)

          # Save the overlay this package is originally from, if the overlay
          # is not a Portage overlay (e.g. chromiumos-overlay).
          ovrly_col = utable.UpgradeTable.COL_OVERLAY
          ovrly_col = utable.UpgradeTable.GetColumnName(ovrly_col, arch)
          ovrly = row[ovrly_col]
          if (ovrly != NOT_APPLICABLE and
              ovrly != self.UPSTREAM_OVERLAY_NAME and
              ovrly != self.STABLE_OVERLAY_NAME):
            pkg_overlays[pkg] = ovrly

      if upgraded_versarch:
        pkg_commit_line = '%s %s to ' % (UPGRADED, pkg)
        pkg_commit_line += ' AND '.join(
            'version %s on %s' % (upgraded_ver, ', '.join(sorted(archlist)))
            for upgraded_ver, archlist in upgraded_versarch.iteritems())
        commit_lines.append(pkg_commit_line)

    # NOTE(review): branching (presumably on an --amend option) is elided
    # around the two commit paths below.
    message = self._AmendCommitMessage(commit_lines)
    self._RunGit(self._stable_repo, ['commit', '--amend', '-m', message])

    message = self._CreateCommitMessage(commit_lines)
    self._RunGit(self._stable_repo, ['commit', '-m', message])

    # NOTE(review): the call headers for the user notices below are elided
    # from this listing; only the string arguments remain.
        'Upgrade changes committed (see above),'
        ' but message needs edit BY YOU:\n'
        ' cd %s; git commit --amend; cd -' %

    # See if any upgraded packages are in non-portage overlays now, meaning
    # they probably require a patch and should not go into portage-stable.
      lines = ['%s [%s]' % (p, pkg_overlays[p]) for p in pkg_overlays]
          'The following packages were coming from a non-portage'
          ' overlay, which means they were probably patched.\n'
          'You should consider whether the upgraded package'
          ' needs the same patches applied now.\n'
          'If so, do not commit these changes in portage-stable.'
          ' Instead, copy them to the applicable overlay dir.\n'

        'To remove any individual file above from commit do:\n'
        ' cd %s; git reset HEAD~ <filepath>; rm <filepath>;'
        ' git commit --amend -C HEAD; cd -' %

        'If you wish to undo all the changes to %s:\n'
        ' cd %s; git reset --hard HEAD~; cd -' %
        (self.STABLE_OVERLAY_NAME, self._stable_repo))
1663 def PreRunChecks(self):
1664 """Run any board-independent validation checks before Run is called."""
1665 # Upfront check(s) if upgrade is requested.
1666 if self._upgrade or self._upgrade_deep:
1667 # Stable source must be on branch.
1668 self._CheckStableRepoOnBranch()
  def CheckBoardList(self, boards):
    """Validate list of specified |boards| before running any of them."""
    # If this is an upgrade run (i.e. --upgrade was specified), then in
    # almost all cases we want all our supported architectures to be covered.
    if self._IsInUpgradeMode():
      # NOTE(review): initialization of 'board_archs' (presumably a set) is
      # elided from this listing.
      for board in boards:
        board_archs.add(Upgrader._FindBoardArch(board))

      if not STANDARD_BOARD_ARCHS.issubset(board_archs):
        # Only proceed if user acknowledges.
        oper.Warning('You have selected boards for archs %r, which does not'
                     ' cover all standard archs %r' %
                     (sorted(board_archs), sorted(STANDARD_BOARD_ARCHS)))
        oper.Warning('If you continue with this upgrade you may break'
                     ' builds for architectures not covered by your\n'
                     'boards.  Continue only if you have a reason to limit'
                     ' this upgrade to these specific architectures.\n')
        if not cros_build_lib.BooleanPrompt(
            prompt="Do you want to continue anyway?", default=False):
          raise RuntimeError('Missing one or more of the standard archs')
  def RunBoard(self, board):
    """Runs the upgrader based on the supplied options and arguments.

    Currently just lists all package dependencies in pre-order along with
    their status (upgrading them when upgrade mode is active).
    """
    # Preserve status report for entire stable repo (output of 'git status -s').
    self._SaveStatusOnStableRepo()
    # Read contents of profiles/categories for later checks
    self._LoadStableRepoCategories()

    self._porttree = None
    self._deps_graph = None

    self._curr_board = board
    self._curr_arch = Upgrader._FindBoardArch(board)
    upgrade_mode = self._IsInUpgradeMode()
    self._curr_table = utable.UpgradeTable(self._curr_arch,
                                           upgrade=upgrade_mode,

    if self._AnyChangesStaged():
      self._StashChanges()

    # NOTE(review): a try: header is likely elided here (a cleanup call
    # appears near the end of the method) -- confirm in the full file.
    target_pinfolist = self._ResolveAndVerifyArgs(self._args, upgrade_mode)
    upstream_only_pinfolist = [pi for pi in target_pinfolist if not pi.cpv]
    if not upgrade_mode and upstream_only_pinfolist:
      # This means that not all arguments were found in local source, which is
      # only allowed in upgrade mode.
      msg = ('The following packages were not found in current overlays'
             ' (but they do exist upstream):\n%s' %
             '\n'.join([pinfo.user_arg for pinfo in upstream_only_pinfolist]))
      raise RuntimeError(msg)

    full_pinfolist = None

    # NOTE(review): a branch header (presumably on self._upgrade) is elided
    # before this comment.
    # Shallow upgrade mode only cares about targets as they were
    full_pinfolist = self._FinalizeUpstreamPInfolist(target_pinfolist)
    # Assembling dependencies only matters in status report mode or
    # if --upgrade-deep was requested.
    local_target_pinfolist = [pi for pi in target_pinfolist if pi.cpv]
    if local_target_pinfolist:
      oper.Notice('Assembling package dependencies.')
      full_pinfolist = self._GetCurrentVersions(local_target_pinfolist)
      full_pinfolist = self._FinalizeLocalPInfolist(full_pinfolist)

    # Append any command line targets that were not found in current
    # overlays.  The idea is that they will still be found upstream
    tmp_list = self._FinalizeUpstreamPInfolist(upstream_only_pinfolist)
    full_pinfolist = full_pinfolist + tmp_list

    self._UnstashAnyChanges()
    self._UpgradePackages(full_pinfolist)

    self._DropAnyStashedChanges()

    # Merge tables together after each run.
    self._master_cnt += 1
    self._master_archs.add(self._curr_arch)
    if self._master_table:
      tables = [self._master_table, self._curr_table]
      self._master_table = mps.MergeTables(tables)
      # NOTE(review): an else branch is elided before the next line.
      self._master_table = self._curr_table
      self._master_table._arch = None
  def WriteTableFiles(self, csv=None):
    """Write |self._master_table| to |csv| file, if requested."""
    # Sort the table by package name, then slot
    def PkgSlotSort(row):
      return (row[self._master_table.COL_PACKAGE],
              row[self._master_table.COL_SLOT])
    self._master_table.Sort(PkgSlotSort)

    # NOTE(review): a guard on 'csv' is elided before the next line.
    filehandle = open(csv, 'w')
    oper.Notice('Writing package status as csv to %s.' % csv)
    self._master_table.WriteCSV(filehandle)
    # NOTE(review): closing of 'filehandle' is elided from this listing.
    elif not self._IsInUpgradeMode():
      oper.Notice('Package status report file not requested (--to-csv).')
1785 def SayGoodbye(self):
1786 """Print any final messages to user."""
1787 if not self._IsInUpgradeMode():
1788 # Without this message users are confused why running a script
1789 # with 'upgrade' in the name does not actually do an upgrade.
1790 oper.Warning('Completed status report run. To run in "upgrade"'
1791 ' mode include the --upgrade option.')
def _BoardIsSetUp(board):
  """Return true if |board| has been setup."""
  sysroot = cros_build_lib.GetSysroot(board=board)
  return os.path.isdir(sysroot)
def _CreateParser():
  """Create the argument parser object for command-line args.

  Returns:
    A commandline.ArgumentParser configured with the positional package
    arguments, all supported options, and a usage epilog describing the
    two operating modes (status report vs. upgrade).
  """
  epilog = (
      'There are essentially two "modes": status report mode and '
      'upgrade mode.\nStatus report mode is the default; upgrade '
      'mode is enabled by either --upgrade or --upgrade-deep.\n'
      '\n'
      'In either mode, packages can be specified in any manner '
      'commonly accepted by Portage tools. For example:\n'
      ' category/package_name\n'
      ' package_name\n'
      ' category/package_name-version (upgrade mode only)\n'
      '\n'
      'Status report mode will report on the status of the specified '
      'packages relative to upstream,\nwithout making any changes. '
      'In this mode, the specified packages are often high-level\n'
      'targets such as "virtual/target-os". '
      'The --to-csv option is often used in this mode.\n'
      'The --unstable-ok option in this mode will make '
      'the upstream comparison (e.g. "needs update") be\n'
      'relative to the latest upstream version, stable or not.\n'
      '\n'
      'Upgrade mode will attempt to upgrade the specified '
      'packages to one of the following versions:\n'
      '1) The version specified in argument (e.g. foo/bar-1.2.3)\n'
      '2) The latest stable version upstream (the default)\n'
      '3) The latest overall version upstream (with --unstable-ok)\n'
      '\n'
      'Unlike with --upgrade, if --upgrade-deep is specified, '
      'then the package dependencies will also be upgraded.\n'
      'In upgrade mode, it is ok if the specified packages only '
      'exist upstream.\n'
      'The --force option can be used to do a package upgrade '
      'even if the local version matches the upstream version.\n'
      '\n'
      'Status report mode examples:\n'
      '> cros_portage_upgrade --board=arm-generic:x86-generic '
      '--to-csv=cros-aebl.csv virtual/target-os\n'
      '> cros_portage_upgrade --unstable-ok --board=x86-mario '
      '--to-csv=cros_test-mario virtual/target-os virtual/target-os-dev\n'
      '\n'
      'Upgrade mode examples:\n'
      '> cros_portage_upgrade --board=arm-generic:x86-generic '
      '--upgrade sys-devel/gdb virtual/yacc\n'
      '> cros_portage_upgrade --unstable-ok --board=x86-mario '
      '--upgrade-deep gdata\n'
      '> cros_portage_upgrade --board=x86-generic --upgrade '
      'media-libs/libpng-1.2.45\n'
  )

  parser = commandline.ArgumentParser(epilog=epilog)
  parser.add_argument('packages', nargs='*', default=None,
                      help='Packages to process.')
  parser.add_argument('--amend', action='store_true', default=False,
                      help='Amend existing commit when doing upgrade.')
  parser.add_argument('--board', default=None,
                      help='Target board(s), colon-separated')
  parser.add_argument('--force', action='store_true', default=False,
                      help='Force upgrade even if version already in source')
  parser.add_argument('--host', action='store_true', default=False,
                      help='Host target pseudo-board')
  parser.add_argument('--no-upstream-cache', action='store_true',
                      default=False,
                      help='Do not preserve cached upstream for future runs')
  parser.add_argument('--rdeps', action='store_true', default=False,
                      help='Use runtime dependencies only')
  # Use expanduser rather than os.environ['HOME']: the latter raises
  # KeyError when HOME is unset (e.g. some daemon/chroot contexts).
  parser.add_argument('--srcroot', type='path',
                      default=os.path.join(os.path.expanduser('~'),
                                           'trunk', 'src'),
                      help='Path to root src directory [default: %(default)s]')
  parser.add_argument('--to-csv', dest='csv_file', type='path',
                      default=None, help='File to store csv-formatted results')
  parser.add_argument('--upgrade', action='store_true', default=False,
                      help='Upgrade target package(s) only')
  parser.add_argument('--upgrade-deep', action='store_true', default=False,
                      help='Upgrade target package(s) and all dependencies')
  parser.add_argument('--upstream', type='path',
                      default=Upgrader.UPSTREAM_TMP_REPO,
                      help='Latest upstream repo location '
                           '[default: %(default)s]')
  parser.add_argument('--unstable-ok', action='store_true', default=False,
                      help='Use latest upstream ebuild, stable or not')
  parser.add_argument('--verbose', action='store_true', default=False,
                      help='Enable verbose output (for debugging)')
  parser.add_argument('-l', '--local-only', action='store_true', default=False,
                      help='Do not attempt to update local portage cache')

  return parser
1886 """Main function."""
1887 parser = _CreateParser()
1888 options = parser.parse_args(argv)
1889 # TODO: Can't freeze until options.host modification below is sorted.
1892 oper.verbose = options.verbose
1895 # Do some argument checking.
1898 if not options.board and not options.host:
1899 parser.print_usage()
1900 oper.Die('Board (or host) is required.')
1902 if not options.packages:
1903 parser.print_usage()
1904 oper.Die('No packages provided.')
1906 # The --upgrade and --upgrade-deep options are mutually exclusive.
1907 if options.upgrade_deep and options.upgrade:
1908 parser.print_usage()
1909 oper.Die('The --upgrade and --upgrade-deep options '
1910 'are mutually exclusive.')
1912 # The --force option only makes sense with --upgrade or --upgrade-deep.
1913 if options.force and not (options.upgrade or options.upgrade_deep):
1914 parser.print_usage()
1915 oper.Die('The --force option requires --upgrade or --upgrade-deep.')
1917 # If --to-csv given verify file can be opened for write.
1918 if options.csv_file:
1920 osutils.WriteFile(options.csv_file, '')
1921 except IOError as ex:
1922 parser.print_usage()
1923 oper.Die('Unable to open %s for writing: %s' % (options.csv_file,
1926 upgrader = Upgrader(options)
1927 upgrader.PreRunChecks()
1929 # Automatically handle board 'host' as 'amd64-host'.
1932 boards = options.board.split(':')
1934 # Specifying --board=host is equivalent to --host.
1935 if 'host' in boards:
1938 boards = [b for b in boards if b != 'host']
1940 # Make sure host pseudo-board is run first.
1941 if options.host and Upgrader.HOST_BOARD not in boards:
1942 boards.insert(0, Upgrader.HOST_BOARD)
1943 elif Upgrader.HOST_BOARD in boards:
1944 boards = [b for b in boards if b != Upgrader.HOST_BOARD]
1945 boards.insert(0, Upgrader.HOST_BOARD)
1947 # Check that all boards have been setup first.
1948 for board in boards:
1949 if (board != Upgrader.HOST_BOARD and not _BoardIsSetUp(board)):
1950 parser.print_usage()
1951 oper.Die('You must setup the %s board first.' % board)
1953 # If --board and --upgrade are given then in almost all cases
1954 # the user should cover all architectures.
1956 non_host_boards = [b for b in boards if b != Upgrader.HOST_BOARD]
1957 upgrader.CheckBoardList(non_host_boards)
1961 upgrader.PrepareToRun()
1963 for board in boards:
1964 oper.Notice('Running with board %s.' % board)
1965 upgrader.RunBoard(board)
1966 except RuntimeError as ex:
1971 upgrader.RunCompleted()
1974 oper.Die('Failed with above errors.')
1976 if upgrader.CommitIsStaged():
1979 # TODO(mtennant): Move stdout output to here, rather than as-we-go. That
1980 # way it won't come out for each board. Base it on contents of final table.
1981 # Make verbose-dependent?
1983 upgrader.WriteTableFiles(csv=options.csv_file)
1985 upgrader.SayGoodbye()