--- /dev/null
+Metadata-Version: 1.1
+Name: catkin_pkg
+Version: 0.4.23
+Summary: catkin package library
+Home-page: http://wiki.ros.org/catkin_pkg
+Author: Dirk Thomas
+Author-email: dthomas@osrfoundation.org
+License: BSD
+Description: Library for retrieving information about catkin packages.
+Keywords: catkin,ROS
+Platform: UNKNOWN
+Classifier: Programming Language :: Python
+Classifier: License :: OSI Approved :: BSD License
--- /dev/null
+catkin_pkg
+----------
+
+Standalone Python library for the `Catkin package system <http://ros.org/wiki/catkin>`_.
+
+
+Code & tickets
+--------------
+
++------------+--------------------------------------------------------+
+| catkin_pkg | http://github.com/ros-infrastructure/catkin_pkg |
++------------+--------------------------------------------------------+
+| Issues | http://github.com/ros-infrastructure/catkin_pkg/issues |
++------------+--------------------------------------------------------+
+
+Continuous Integration
+----------------------
+
++--------------------------------------------------------------------------+--------------------------------------------------------------------+
+| `Build Status <https://travis-ci.org/ros-infrastructure/catkin_pkg>`_  | .. image:: https://travis-ci.org/ros-infrastructure/catkin_pkg.png |
++--------------------------------------------------------------------------+--------------------------------------------------------------------+
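+
+Example
+-------
+
+A minimal usage sketch of the library API (the ``./src`` path is illustrative)::
+
+    import os
+
+    from catkin_pkg.changelog import get_changelog_from_path
+    from catkin_pkg.packages import find_packages
+
+    # crawl a directory tree for catkin packages
+    # (returned paths are relative to the given base path)
+    for path, package in find_packages('./src').items():
+        print(path, package.name)
+
+        # parse the package's REP-0132 CHANGELOG.rst, if it has one
+        changelog = get_changelog_from_path(os.path.join('./src', path))
+        if changelog is not None:
+            for version, date, content in changelog.foreach_version():
+                print('  %s (%s)' % (version, date))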
--- /dev/null
+[egg_info]
+tag_build =
+tag_date = 0
+
--- /dev/null
+#!/usr/bin/env python
+
+import os
+import sys
+
+from setuptools import setup
+
+install_requires = [
+ 'docutils',
+ 'python-dateutil',
+ 'pyparsing',
+]
+
+# argparse is part of the standard library since Python 2.7
+if sys.version_info[0] == 2 and sys.version_info[1] < 7:
+ install_requires.append('argparse')
+
+kwargs = {
+ 'name': 'catkin_pkg',
+ # same version as in:
+ # - src/catkin_pkg/__init__.py
+ # - stdeb.cfg
+ 'version': '0.4.23',
+ 'packages': ['catkin_pkg', 'catkin_pkg.cli'],
+ 'package_dir': {'': 'src'},
+ 'package_data': {'catkin_pkg': ['templates/*.in']},
+ 'entry_points': {
+ 'console_scripts': [
+ 'catkin_create_pkg = catkin_pkg.cli.create_pkg:main',
+ 'catkin_find_pkg = catkin_pkg.cli.find_pkg:main',
+ 'catkin_generate_changelog = catkin_pkg.cli.generate_changelog:main_catching_runtime_error',
+ 'catkin_package_version = catkin_pkg.cli.package_version:main',
+ 'catkin_prepare_release = catkin_pkg.cli.prepare_release:main',
+ 'catkin_tag_changelog = catkin_pkg.cli.tag_changelog:main',
+ 'catkin_test_changelog = catkin_pkg.cli.test_changelog:main',
+ ]},
+ 'author': 'Dirk Thomas',
+ 'author_email': 'dthomas@osrfoundation.org',
+ 'url': 'http://wiki.ros.org/catkin_pkg',
+ 'keywords': ['catkin', 'ROS'],
+ 'classifiers': [
+ 'Programming Language :: Python',
+ 'License :: OSI Approved :: BSD License'
+ ],
+ 'description': 'catkin package library',
+ 'long_description': 'Library for retrieving information about catkin packages.',
+ 'license': 'BSD',
+ 'install_requires': install_requires,
+}
+if 'SKIP_PYTHON_MODULES' in os.environ:
+ kwargs['packages'] = []
+ kwargs['package_dir'] = {}
+ kwargs['package_data'] = {}
+if 'SKIP_PYTHON_SCRIPTS' in os.environ:
+ kwargs['name'] += '_modules'
+ kwargs['entry_points'] = {}
+
+setup(**kwargs)
--- /dev/null
+Metadata-Version: 1.1
+Name: catkin-pkg
+Version: 0.4.23
+Summary: catkin package library
+Home-page: http://wiki.ros.org/catkin_pkg
+Author: Dirk Thomas
+Author-email: dthomas@osrfoundation.org
+License: BSD
+Description: Library for retrieving information about catkin packages.
+Keywords: catkin,ROS
+Platform: UNKNOWN
+Classifier: Programming Language :: Python
+Classifier: License :: OSI Approved :: BSD License
--- /dev/null
+README.rst
+setup.py
+src/catkin_pkg/__init__.py
+src/catkin_pkg/changelog.py
+src/catkin_pkg/changelog_generator.py
+src/catkin_pkg/changelog_generator_vcs.py
+src/catkin_pkg/cmake.py
+src/catkin_pkg/condition.py
+src/catkin_pkg/group_dependency.py
+src/catkin_pkg/group_membership.py
+src/catkin_pkg/metapackage.py
+src/catkin_pkg/package.py
+src/catkin_pkg/package_templates.py
+src/catkin_pkg/package_version.py
+src/catkin_pkg/packages.py
+src/catkin_pkg/python_setup.py
+src/catkin_pkg/rospack.py
+src/catkin_pkg/terminal_color.py
+src/catkin_pkg/tool_detection.py
+src/catkin_pkg/topological_order.py
+src/catkin_pkg/workspace_vcs.py
+src/catkin_pkg/workspaces.py
+src/catkin_pkg.egg-info/PKG-INFO
+src/catkin_pkg.egg-info/SOURCES.txt
+src/catkin_pkg.egg-info/dependency_links.txt
+src/catkin_pkg.egg-info/entry_points.txt
+src/catkin_pkg.egg-info/requires.txt
+src/catkin_pkg.egg-info/top_level.txt
+src/catkin_pkg/cli/__init__.py
+src/catkin_pkg/cli/create_pkg.py
+src/catkin_pkg/cli/find_pkg.py
+src/catkin_pkg/cli/generate_changelog.py
+src/catkin_pkg/cli/package_version.py
+src/catkin_pkg/cli/prepare_release.py
+src/catkin_pkg/cli/tag_changelog.py
+src/catkin_pkg/cli/test_changelog.py
+src/catkin_pkg/templates/CMakeLists.txt.in
+src/catkin_pkg/templates/metapackage.cmake.in
+src/catkin_pkg/templates/package.xml.in
+test/test_catkin_create_pkg.py
+test/test_changelog.py
+test/test_flake8.py
+test/test_metapackage.py
+test/test_package.py
+test/test_package_version.py
+test/test_packages.py
+test/test_templates.py
+test/test_terminal_color.py
+test/test_tool_detection.py
+test/test_topological_order.py
+test/test_workspaces.py
\ No newline at end of file
--- /dev/null
+[console_scripts]
+catkin_create_pkg = catkin_pkg.cli.create_pkg:main
+catkin_find_pkg = catkin_pkg.cli.find_pkg:main
+catkin_generate_changelog = catkin_pkg.cli.generate_changelog:main_catching_runtime_error
+catkin_package_version = catkin_pkg.cli.package_version:main
+catkin_prepare_release = catkin_pkg.cli.prepare_release:main
+catkin_tag_changelog = catkin_pkg.cli.tag_changelog:main
+catkin_test_changelog = catkin_pkg.cli.test_changelog:main
+
--- /dev/null
+docutils
+pyparsing
+python-dateutil
--- /dev/null
+catkin_pkg
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""Library for retrieving information about catkin packages."""
+
+# same version as in:
+# - setup.py
+# - stdeb.cfg
+__version__ = '0.4.23'
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2013, Open Source Robotics Foundation, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Open Source Robotics Foundation, Inc. nor
+# the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior
+# written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""
+Processes ROS changelogs so that they can be used in binary packaging.
+
+The Changelog format is described in REP-0132:
+
+http://ros.org/reps/rep-0132.html
+"""
+
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import logging
+import os
+import re
+import sys
+
+import dateutil.parser
+import docutils
+import docutils.core
+import pkg_resources
+
+_py3 = sys.version_info[0] >= 3
+
+try:
+ _unicode = unicode
+except NameError:
+ _unicode = str
+
+__author__ = 'William Woodall'
+__email__ = 'william@osrfoundation.org'
+__maintainer__ = 'William Woodall'
+
+log = logging.getLogger('changelog')
+
+CHANGELOG_FILENAME = 'CHANGELOG.rst'
+
+example_rst = """\
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Changelog for package foo
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+0.1
+===
+Free form text about this minor release.
+
+0.1.27 (forthcoming)
+--------------------
+* Great new feature
+
+0.1.26 (2012-12-26)
+-------------------
+* Utilizes caching to improve query performance (fix https://github.com/ros/ros_comm/pull/2)
+* Simplified API calls based on (https://github.com/ros/robot_model):
+
+ * Note that these changes are based on REP 192
+ * Also they fix a problem related to initialization
+
+* Fixed synchronization issue on startup
+
+.. not mentioning secret feature on purpose
+
+0.1.25 (2012-11-25)
+-------------------
+
+- Added thread safety
+- Replaced custom XML parser with `TinyXML <http://www.grinninglizard.com/tinyxml/>`_.
+- Fixed regression introduced in 0.1.22
+- New syntax for foo::
+
+ foo('bar')
+
+- Added a safety check for XML parsing
+
+----
+
+The library should now compile under ``Win32``
+
+0.1.0 (2012-10-01)
+------------------
+
+*First* public **stable** release
+
+0.0
+===
+
+0.0.1 (2012-01-31)
+------------------
+
+1. Initial release
+2. Initial bugs
+"""
+
+
+def bullet_list_class_from_docutils(bullet_list, bullet_type=None):
+ """
+ Process elements of bullet list into an encapsulating class.
+
+ :param bullet_list: ``docutils.nodes.bullet_list`` list to be processed
+ :param bullet_type: ``str`` either 'bullet' or 'enumerated'
+ :returns: ``BulletList`` object representing a docutils bullet_list
+ """
+ content = BulletList(bullet_type=bullet_type)
+ for child in bullet_list.children:
+ if isinstance(child, docutils.nodes.list_item):
+ content.bullets.append(mixed_text_from_docutils(child))
+ else:
+ log.debug("Skipped bullet_list child: '{0}'".format(child))
+ return content
+
+
+def mixed_text_from_docutils(node):
+ """
+    Convert most text-ish docutils objects to MixedText.
+
+ :param node: ``docutils.nodes.{paragraph, list_item, ...}`` text-ish
+ :returns: ``MixedText`` representing the given docutils object
+ """
+ content = MixedText()
+ for child in node.children:
+ if isinstance(child, docutils.nodes.paragraph):
+ content.texts.extend(mixed_text_from_docutils(child).texts)
+ elif isinstance(child, docutils.nodes.Text):
+ content.texts.append(child.astext())
+ elif isinstance(child, docutils.nodes.reference):
+ content.texts.append(reference_from_docutils(child))
+ elif isinstance(child, docutils.nodes.emphasis):
+ content.texts.append('*{0}*'.format(child.astext()))
+ elif isinstance(child, docutils.nodes.strong):
+ content.texts.append('**{0}**'.format(child.astext()))
+ elif isinstance(child, docutils.nodes.literal):
+ content.texts.append('``{0}``'.format(child.astext()))
+ elif isinstance(child, docutils.nodes.literal_block):
+ content.texts.append('\n\n ' + child.astext() + '\n')
+ elif isinstance(child, docutils.nodes.target):
+ pass
+ elif isinstance(child, docutils.nodes.system_message):
+ log.debug('Skipping system_message: {0}'.format(child))
+ elif isinstance(child, docutils.nodes.bullet_list):
+ content.texts.append(bullet_list_class_from_docutils(child))
+ else:
+ try:
+ # Try to add it as plain text
+ log.debug("Trying to add {0}'s child of type {1}: '{2}'"
+ .format(type(node), type(child), child))
+ content.texts.append(child.astext())
+ except AttributeError:
+ log.debug("Ignored {0} child of type {1}: '{2}'"
+ .format(type(node), type(child), child))
+ return content
+
+
+def get_changelog_from_path(path, package_name=None):
+ """
+ Changelog factory, which reads a changelog file into a class.
+
+    :param path: ``str`` path to the changelog file, or to a directory containing CHANGELOG.rst
+ :param package_name: ``str`` the package name
+ :returns: ``Changelog`` changelog class or None if file was not readable
+ """
+ changelog = Changelog(package_name)
+ if os.path.isdir(path):
+ path = os.path.join(path, CHANGELOG_FILENAME)
+ try:
+ with open(path, 'rb') as f:
+ populate_changelog_from_rst(changelog, f.read().decode('utf-8'))
+ except IOError:
+ return None
+ return changelog
+
+
+def populate_changelog_from_rst(changelog, rst):
+ """
+ Changelog factory, which converts the raw ReST into a class.
+
+ :param changelog: ``Changelog`` changelog to be populated
+ :param rst: ``str`` raw ReST changelog
+ :returns: ``Changelog`` changelog that was populated
+ """
+ document = docutils.core.publish_doctree(rst)
+ processes_changelog_children(changelog, document.children)
+ changelog.rst = rst
+ return changelog
+
+
+def processes_changelog_children(changelog, children):
+ """
+ Process docutils children into a REP-0132 changelog instance.
+
+    Recurse into sections and check whether the (sub-)titles are valid versions.
+
+ :param changelog: ``Changelog`` changelog to be populated
+    :param children: ``list`` of ``docutils.nodes`` elements to be processed
+ :returns: ``Changelog`` changelog that was populated
+ """
+ for i, child in enumerate(children):
+ if isinstance(child, docutils.nodes.section):
+ processes_changelog_children(changelog, child.children)
+ elif isinstance(child, docutils.nodes.title) or isinstance(child, docutils.nodes.subtitle):
+ version, date = None, None
+ # See if the title has a text element in it
+ if len(child.children) > 0 and isinstance(child.children[0], docutils.nodes.Text):
+ # Extract version and date from (sub-)title
+ title_text = child.children[0].rawsource
+ try:
+ version, date = version_and_date_from_title(title_text)
+ except InvalidSectionTitle:
+ # Catch invalid section titles
+ log.debug("Ignored non-compliant title: '{0}'".format(title_text))
+ continue
+ valid_section = None not in (version, date)
+ if valid_section:
+ contents = []
+ # For each remaining sibling
+ for child in children[i + 1:]:
+ # Skip sections (nesting of valid sections not allowed)
+ if isinstance(child, docutils.nodes.section):
+ log.debug("Ignored section child: '{0}'".format(child))
+ continue
+ # Skip title
+ if isinstance(child, docutils.nodes.title):
+ continue
+ # Skip comments
+ if isinstance(child, docutils.nodes.comment):
+ log.debug("Ignored section child: '{0}'".format(child))
+ continue
+ # Process other elements into the contents
+ if isinstance(child, docutils.nodes.bullet_list):
+ contents.append(bullet_list_class_from_docutils(child))
+ elif isinstance(child, docutils.nodes.enumerated_list):
+ contents.append(bullet_list_class_from_docutils(child, bullet_type='enumerated'))
+ elif isinstance(child, docutils.nodes.transition):
+ contents.append(Transition())
+ elif isinstance(child, docutils.nodes.paragraph):
+ contents.append(mixed_text_from_docutils(child))
+ else:
+ log.debug("Skipped section child: '{0}'".format(child))
+ changelog.add_version_section(version, date, contents)
+ break
+ else:
+ log.debug("Ignored non-compliant title: '{0}'".format(child))
+
+
+def reference_from_docutils(reference):
+ """
+ Turn a reference element into a ``Reference``.
+
+ :param reference: ``docutils.nodes.reference`` reference element
+ :returns: ``Reference`` simpler object representing the reference
+ """
+ name, refuri = None, None
+ for pair in reference.attlist():
+ if pair[0] == 'name':
+ name = pair[1]
+ if pair[0] == 'refuri':
+ refuri = pair[1]
+ return Reference(name, refuri)
+
+
+def version_and_date_from_title(title):
+ """
+ Split a section title into version and date if possible.
+
+ :param title: ``str`` raw section title to be processed
+ :returns: ``(str, datetime.datetime)``
+ :raises: ``InvalidSectionTitle`` for non REP-0132 section titles
+ """
+ match = re.search(r'^([0-9]+\.[0-9]+\.[0-9]+)[ ]\((.+)\)$', title)
+ if match is None:
+ raise InvalidSectionTitle(title)
+ version, date_str = match.groups()
+ try:
+ date = dateutil.parser.parse(date_str)
+ except (ValueError, TypeError) as e:
+ # Catch invalid dates
+ log.debug("Error parsing date ({0}): '{1}'".format(date_str, e))
+ raise InvalidSectionTitle(title)
+ return version, date
+
+
+class BulletList(object):
+ """Represent a bulleted list of text."""
+
+ def __init__(self, bullets=None, bullet_type=None):
+ """
+ Initialize BulletList.
+
+ :param bullets: ``list(MixedText)`` list of text bullets
+ :param bullet_type: ``str`` either 'bullet' or 'enumerated'
+ """
+ bullet_type = 'bullet' if bullet_type is None else bullet_type
+ if bullet_type not in ['bullet', 'enumerated']:
+ raise RuntimeError("Invalid bullet type: '{0}'".format(bullet_type))
+ self.bullets = bullets or []
+ self.bullet_type = bullet_type
+
+ def __iter__(self):
+ for bullet in self.bullets:
+ yield bullet
+
+ def __str__(self):
+ value = self.__unicode__()
+ if not _py3:
+ value = value.encode('ascii', 'replace')
+ return value
+
+ def __unicode__(self):
+ return self.as_txt()
+
+ def as_rst(self):
+ return self.as_txt(indent='', use_hyphen_bullet=True)
+
+ def as_txt(self, indent='', use_hyphen_bullet=False):
+ bullet = '*' if self.bullet_type == 'bullet' else '#'
+ if use_hyphen_bullet and bullet == '*':
+ bullet = '-'
+ b = self.bullet_generator(bullet)
+ i = indent
+ n = '\n' + i + ' '
+ lines = [i + next(b) + _unicode(item).replace('\n', n) for item in self]
+ return '\n'.join(lines)
+
+ def bullet_generator(self, bullet):
+ if '#' == bullet:
+ bullets = [str(i) + '. ' for i in range(1, len(self.bullets) + 1)]
+ else:
+ bullets = [bullet + ' '] * len(self.bullets)
+ for b in bullets:
+ yield b
+
+
+class Changelog(object):
+ """Represents a REP-0132 changelog."""
+
+ def __init__(self, package_name=None):
+ self.__package_name = package_name
+ self.__versions = []
+ self.__parsed_versions = []
+ self.__dates = {}
+ self.__content = {}
+ self.__rst = ''
+
+ def __str__(self):
+ value = self.__unicode__()
+ if not _py3:
+ value = value.encode('ascii', 'replace')
+ return value
+
+ def __unicode__(self):
+ msg = []
+ if self.__package_name:
+ msg.append("Changelog for package '{0}'".format(self.package_name))
+ for version, date, content in self.foreach_version(reverse=True):
+ msg.append(' ' + version + ' ({0}):'.format(date))
+ for item in content:
+ msg.extend([' ' + i for i in _unicode(item).splitlines()])
+ return '\n'.join(msg)
+
+ @property
+ def package_name(self):
+ return self.__package_name
+
+ @package_name.setter
+ def package_name(self, package_name):
+ self.__package_name = package_name
+
+ @property
+ def rst(self):
+ return self.__rst
+
+ @rst.setter
+ def rst(self, rst):
+ self.__rst = rst
+
+ def add_version_section(self, version, date, contents):
+ """
+ Add a version section.
+
+ :param version: ``str`` version as a string
+ :param date: ``datetime.datetime`` version date
+        :param contents: ``list(list([str|Reference]))`` contents as a list
+ of lists which contain a combination of ``str`` and
+ ``Reference`` objects
+ :returns: None
+ """
+ if version in self.__versions:
+ raise DuplicateVersionsException(version)
+ self.__parsed_versions.append(pkg_resources.parse_version(version))
+ self.__parsed_versions = sorted(self.__parsed_versions)
+ # Cannot go parsed -> str, so sorting must be done by comparison
+ new_versions = [None] * len(self.__parsed_versions)
+ for v in self.__versions + [version]:
+ parsed_v = pkg_resources.parse_version(v)
+            try:
+                index = self.__parsed_versions.index(parsed_v)
+            except ValueError:
+                # list.index raises ValueError when the value is missing
+                raise RuntimeError('Inconsistent internal version storage state')
+ new_versions[index] = v
+ self.__versions = new_versions
+ self.__dates[version] = date
+ self.__content[version] = contents
+
+ def foreach_version(self, reverse=False):
+ """
+ Create a generator for iterating over the versions, dates and content.
+
+ Versions are stored and iterated in order.
+
+ :param reverse: ``bool`` if True then the iteration is reversed
+ :returns: ``generator`` for iterating over versions, dates and content
+ """
+ for version in reversed(self.__versions) if reverse else self.__versions:
+ yield version, self.__dates[version], self.__content[version]
+
+ def get_date_of_version(self, version):
+ """Return date of a given version as a ``datetime.datetime``."""
+ if version not in self.__versions:
+ raise KeyError("No date for version '{0}'".format(version))
+ return self.__dates[version]
+
+ def get_content_of_version(self, version):
+ """
+ Return changelog content for a given version.
+
+ :param version: ``str`` version
+ :returns: ``list(list([str|Reference]))`` content expanded
+ """
+ if version not in self.__versions:
+ raise KeyError("No content for version '{0}'".format(version))
+ return self.__content[version]
+
+
+class DuplicateVersionsException(Exception):
+ """Raised when more than one section per version is given."""
+
+ def __init__(self, version):
+ self.version = version
+ Exception.__init__(self, "Version '{0}' is specified twice".format(version))
+
+
+class InvalidSectionTitle(Exception):
+ """raised on non REP-0132 section titles."""
+
+ def __init__(self, title):
+ self.title = title
+ msg = "Section title does not conform to REP-0132: '{0}'".format(title)
+ Exception.__init__(self, msg)
+
+
+class MixedText(object):
+ """Represents text mixed with references and nested bullets."""
+
+    def __init__(self, texts=None):
+        self.texts = list(texts) if texts is not None else []
+
+ def __iter__(self):
+ for text in self.texts:
+ yield text
+
+ def __str__(self):
+ value = self.__unicode__()
+ if not _py3:
+ value = value.encode('ascii', 'replace')
+ return value
+
+ def __unicode__(self):
+ return self.to_txt()
+
+ def to_txt(self, bullet_indent=' '):
+ lines = []
+ for t in self:
+ if isinstance(t, BulletList):
+ bullets = [bullet_indent + x for x in _unicode(t).splitlines()]
+ bullets = ['', ''] + bullets + ['']
+                lines.append('\n'.join(bullets))
+ else:
+ lines.append(_unicode(t))
+ return ''.join(lines)
+
+
+class Reference(object):
+ """Represents a piece of text with an associated link."""
+
+ def __init__(self, text, link):
+ self.text = text
+ self.link = link
+
+ def __str__(self):
+ value = self.__unicode__()
+ if not _py3:
+ value = value.encode('ascii', 'replace')
+ return value
+
+ def __unicode__(self):
+ return self.as_txt()
+
+ def as_rst(self):
+ """Self as rst (unicode)."""
+ if self.text is None:
+ return _unicode(self.link)
+ return '`{0} <{1}>`_'.format(self.text, self.link)
+
+ def as_txt(self):
+ """Self formatted for plain text (unicode)."""
+ if self.text is None:
+ return _unicode(self.link)
+ return '{0} <{1}>'.format(self.text, self.link)
+
+
+class Transition(object):
+ """Represents a trasition element from ReST."""
+
+ def __str__(self):
+ value = self.__unicode__()
+ if not _py3:
+ value = value.encode('ascii', 'replace')
+ return value
+
+ def __unicode__(self):
+ return '-' * 20
+
+ def __iter__(self):
+        yield self.__unicode__()
+
+
+def __test():
+ package_name = 'foo'
+ changelog = Changelog(package_name)
+ print(populate_changelog_from_rst(changelog, example_rst))
+
+
+if __name__ == '__main__':
+ logging.basicConfig()
+ log.setLevel(logging.DEBUG)
+ __test()
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2013, Open Source Robotics Foundation, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Open Source Robotics Foundation, Inc. nor
+# the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior
+# written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""
+Generate/update ROS changelog files.
+
+The Changelog format is described in REP-0132:
+
+http://ros.org/reps/rep-0132.html
+"""
+
+import os
+import re
+
+from catkin_pkg.changelog import CHANGELOG_FILENAME
+from catkin_pkg.changelog_generator_vcs import Tag
+
+FORTHCOMING_LABEL = 'Forthcoming'
+
+
+def get_all_changes(vcs_client, skip_merges=False, only_merges=False):
+ tags = _get_version_tags(vcs_client)
+
+ # query all log entries per tag range
+ tag2log_entries = {}
+ previous_tag = Tag(None)
+ for tag in sorted_tags(tags):
+ log_entries = vcs_client.get_log_entries(
+ from_tag=previous_tag.name, to_tag=tag.name, skip_merges=skip_merges, only_merges=only_merges)
+ tag2log_entries[previous_tag] = log_entries
+ previous_tag = tag
+ log_entries = vcs_client.get_log_entries(
+ from_tag=previous_tag.name, to_tag=None, skip_merges=skip_merges, only_merges=only_merges)
+ tag2log_entries[previous_tag] = log_entries
+ return tag2log_entries
+
+
+def get_forthcoming_changes(vcs_client, skip_merges=False, only_merges=False):
+ tags = _get_version_tags(vcs_client)
+ latest_tag_name = _get_latest_version_tag_name(vcs_client)
+
+ # query log entries since latest tag only
+ tag2log_entries = {}
+ from_tag = Tag(None)
+ to_tag = Tag(latest_tag_name)
+ for tag in sorted_tags(tags):
+ if to_tag.name is None:
+ to_tag = tag
+ # ignore non-forthcoming log entries but keep version to identify injection point of forthcoming
+ tag2log_entries[tag] = None
+ log_entries = vcs_client.get_log_entries(
+ from_tag=from_tag.name, to_tag=to_tag.name, skip_merges=skip_merges, only_merges=only_merges)
+ tag2log_entries[from_tag] = log_entries
+ return tag2log_entries
+
+
+def _get_version_tags(vcs_client):
+ # get all tags in descending order
+ tags = vcs_client.get_tags()
+    version_tags = [t for t in tags if re.match(r'^v?\d+\.\d+\.\d+$', t.name)]
+ return version_tags
+
+
+def _get_latest_version_tag_name(vcs_client):
+ # get latest tag
+ tag_name = vcs_client.get_latest_tag_name()
+    if not re.match(r'^v?\d+\.\d+\.\d+$', tag_name):
+ raise RuntimeError(
+ "The tag name '{}' doesn't match the version pattern v?x.y.z".format(tag_name))
+ return tag_name
+
+
+def generate_changelogs(base_path, packages, tag2log_entries, logger=None, vcs_client=None, skip_contributors=False):
+ for pkg_path, package in packages.items():
+ changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME)
+ if os.path.exists(changelog_path):
+ continue
+ # generate package specific changelog file
+ if logger:
+ logger.debug("- creating '%s'" % os.path.join(pkg_path, CHANGELOG_FILENAME))
+ pkg_tag2log_entries = filter_package_changes(tag2log_entries, pkg_path)
+ data = generate_changelog_file(package.name, pkg_tag2log_entries, vcs_client=vcs_client, skip_contributors=skip_contributors)
+ with open(changelog_path, 'wb') as f:
+ f.write(data.encode('utf-8'))
+
+
+def update_changelogs(base_path, packages, tag2log_entries, logger=None, vcs_client=None, skip_contributors=False):
+ for pkg_path in packages.keys():
+ # update package specific changelog file
+ if logger:
+ logger.debug("- updating '%s'" % os.path.join(pkg_path, CHANGELOG_FILENAME))
+ pkg_tag2log_entries = filter_package_changes(tag2log_entries, pkg_path)
+ changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME)
+ with open(changelog_path, 'rb') as f:
+ data = f.read().decode('utf-8')
+ data = update_changelog_file(data, pkg_tag2log_entries, vcs_client=vcs_client, skip_contributors=skip_contributors)
+ with open(changelog_path, 'wb') as f:
+ f.write(data.encode('utf-8'))
+
+
+def filter_package_changes(tag2log_entries, pkg_path):
+ pkg_tag2log_entries = {}
+ # collect all log entries relevant for this package
+ for tag, log_entries in tag2log_entries.items():
+ if log_entries is None:
+ pkg_log_entries = None
+ else:
+ pkg_log_entries = []
+ for log_entry in log_entries:
+ if log_entry.affects_path(pkg_path):
+ pkg_log_entries.append(log_entry)
+ pkg_tag2log_entries[tag] = pkg_log_entries
+ return pkg_tag2log_entries
+
+
+def generate_changelog_file(pkg_name, tag2log_entries, vcs_client=None, skip_contributors=False):
+ blocks = []
+ blocks.append(generate_package_headline(pkg_name))
+
+ for tag in sorted_tags(tag2log_entries.keys()):
+ log_entries = tag2log_entries[tag]
+ if log_entries is not None:
+ blocks.append(generate_version_block(version_from_tag(tag.name), tag.timestamp, log_entries, vcs_client=vcs_client, skip_contributors=skip_contributors))
+
+ return '\n'.join(blocks)
+
+
+def update_changelog_file(data, tag2log_entries, vcs_client=None, skip_contributors=False):
+    # materialize the generator so the remaining tags can be sliced below
+    tags = list(sorted_tags(tag2log_entries.keys()))
+ for i, tag in enumerate(tags):
+ log_entries = tag2log_entries[tag]
+ if log_entries is None:
+ continue
+ content = generate_version_content(log_entries, vcs_client=vcs_client, skip_contributors=skip_contributors)
+
+ # check if version section exists
+ match = get_version_section_match(data, version_from_tag(tag.name))
+ if match:
+ # prepend content to existing section
+ data = prepend_version_content(data, version_from_tag(tag.name), content)
+ assert data is not None
+ else:
+ # find injection point of earliest following version
+            for next_tag in tags[i + 1:]:
+ match = get_version_section_match(data, version_from_tag(next_tag.name))
+ if match:
+ block = generate_version_block(version_from_tag(tag.name), tag.timestamp, log_entries, vcs_client=vcs_client, skip_contributors=skip_contributors)
+ data = data[:match.start()] + block + '\n' + data[match.start():]
+ break
+ if not match:
+                if tag.name is None:
+                    raise RuntimeError('Could not find the section of any existing version to inject the forthcoming section before')
+                else:
+                    raise RuntimeError('Could neither find section "%s" nor any other section' % tag.name)
+ return data
+
+
+def get_version_section_match(data, version):
+ pattern = get_version_section_pattern(version)
+ matches = re.finditer(pattern, data, flags=re.MULTILINE)
+ matches = list(matches)
+ if len(matches) > 1:
+ raise RuntimeError('Found multiple matching sections')
+ return matches[0] if matches else None
+
+
+def get_version_section_pattern(version):
+ valid_section_characters = '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~'
+ headline = get_version_headline(version, None)
+ pattern = '^(' + re.escape(headline) + r'( \([0-9 \-:|+]+\))?)\r?\n([' + re.escape(valid_section_characters) + ']+)\r?\n?$'
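+    # e.g. matches a headline like '0.1.26' or '0.1.26 (2012-12-26)'
+    # followed by an underline of section characters such as '-------'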
+ return pattern
+
+
+def prepend_version_content(data, version, content):
+ pattern = get_version_section_pattern(version)
+
+ def replace_section(match):
+ headline = match.group(1)
+ section = match.group(3)
+ data = content.rstrip()
+ if data:
+ data += '\n'
+ return headline + '\n' + section + '\n' + data
+
+ data, count = re.subn(pattern, replace_section, data, flags=re.MULTILINE)
+ if count > 1:
+ raise RuntimeError('Found multiple matching sections')
+ return data if count == 1 else None
+
+
+def version_from_tag(tag_name):
+ if tag_name is None:
+ return None
+ if tag_name.startswith('v'):
+ return tag_name[1:]
+ return tag_name
+
+
+def sorted_tags(tags):
+ # first return the forthcoming tag
+ for tag in tags:
+ if not tag.name:
+ yield tag
+ # then return the tags in descending order
+ name_and_tag = [(t.name, t) for t in tags if t.name]
+ name_and_tag.sort(key=lambda x: [int(y) for y in version_from_tag(x[0]).split('.')])
+ name_and_tag.reverse()
+ for (_, tag) in name_and_tag:
+ yield tag
+
+
+def generate_package_headline(pkg_name):
+ headline = 'Changelog for package %s' % pkg_name
+ section_marker = '^' * len(headline)
+ return '%s\n%s\n%s\n' % (section_marker, headline, section_marker)
+
+
+def generate_version_block(version, timestamp, log_entries, vcs_client=None, skip_contributors=False):
+ data = generate_version_headline(version, timestamp)
+ data += generate_version_content(log_entries, vcs_client=vcs_client, skip_contributors=skip_contributors)
+ return data
+
+
+def generate_version_headline(version, timestamp):
+ headline = get_version_headline(version, timestamp)
+ return '%s\n%s\n' % (headline, '-' * len(headline))
+
+
+def get_version_headline(version, timestamp):
+ if not version:
+ return FORTHCOMING_LABEL
+ headline = version
+ if timestamp:
+ headline += ' (%s)' % timestamp
+ return headline
+
+
+def generate_version_content(log_entries, vcs_client=None, skip_contributors=False):
+ data = ''
+ all_authors = set()
+ for entry in log_entries:
+ msg = entry.msg
+ lines = msg.splitlines()
+ lines = [line.strip() for line in lines]
+ lines = [line for line in lines if line and not line.startswith('Signed-off-by:')]
+ lines = [escape_trailing_underscores(line) for line in lines]
+ data += '* %s\n' % (replace_repository_references(lines[0], vcs_client=vcs_client) if lines else '')
+ for line in lines[1:]:
+ data += ' %s\n' % replace_repository_references(line, vcs_client=vcs_client)
+ all_authors.add(entry.author)
+ if all_authors and not skip_contributors:
+ data += '* Contributors: %s\n' % ', '.join(sorted(all_authors))
+ return data
+
+
+def escape_trailing_underscores(line):
+ if line.endswith('_'):
+ line = line[:-1] + r'\_'
+    # match words ending with an underscore which is followed by a non-word character
+ # and insert a backslash before the underscore to escape it
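+    # e.g. 'use the foo_ flag' becomes 'use the foo\_ flag'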
+ line = re.sub(r'(\w+)_([^\w])', '\\1\\_\\2', line)
+ return line
+
+
+def replace_repository_references(line, vcs_client=None):
+ if vcs_client:
+ line = vcs_client.replace_repository_references(line)
+ return line
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2013, Open Source Robotics Foundation, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Open Source Robotics Foundation, Inc. nor
+# the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior
+# written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""Extract log information from repositories."""
+
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+
+
+try:
+ from shutil import which
+except ImportError:
+ # fallback for Python < 3.3
+ def which(cmd):
+        for path in (os.getenv('PATH') or '').split(os.path.pathsep):
+            file_path = os.path.join(path, cmd)
+            if os.path.isfile(file_path) and os.access(file_path, os.X_OK):
+ return file_path
+ return None
+
+
+class Tag(object):
+
+ def __init__(self, name, timestamp=None):
+ self.name = name
+ self.timestamp = timestamp
+
+
+class LogEntry(object):
+
+ def __init__(self, msg, affected_paths, author):
+ self.msg = msg
+ self.author = author
+ self._affected_paths = [p for p in affected_paths if p]
+
+ def affects_path(self, path):
+ for apath in self._affected_paths:
+ # if the path is the root of the repository
+ # it is affected by all changes
+ if path == '.':
+ return True
+ if apath.startswith(os.path.join(path, '')):
+ return True
+ return False
+
+
+class VcsClientBase(object):
+
+ def __init__(self, path):
+ self.path = path
+
+ def get_tags(self):
+ raise NotImplementedError()
+
+ def get_latest_tag_name(self):
+ raise NotImplementedError()
+
+ def get_log_entries(self, from_tag, to_tag, skip_merges=False, only_merges=False):
+ raise NotImplementedError()
+
+ def replace_repository_references(self, line):
+ return line
+
+ def _run_command(self, cmd, env=None):
+ cwd = os.path.abspath(self.path)
+ result = {'cmd': ' '.join(cmd), 'cwd': cwd}
+ try:
+ proc = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env)
+ output, _ = proc.communicate()
+ result['output'] = output.rstrip().decode('utf-8')
+ result['returncode'] = proc.returncode
+ except subprocess.CalledProcessError as e:
+ result['output'] = e.output
+ result['returncode'] = e.returncode
+ return result
+
+ def _truncate_timestamps(self, tags):
+ # truncate timestamps to shortest unique representation
+ # - date only
+ # - date including hours and minutes
+        # - date including hours, minutes and seconds
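+        # e.g. a unique '2012-12-26 14:03:05 +0100' shrinks to '2012-12-26'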
+ lengths = [10, 16, 19]
+ for length in lengths:
+ # filter tags which have not been truncated yet
+ considered_tags = [t for t in tags if len(t.timestamp) > length]
+            # group tags whose timestamps share the same truncated representation
+ grouped_by_timestamp = {}
+ for t in considered_tags:
+ truncated_timestamp = t.timestamp[:length]
+ if truncated_timestamp not in grouped_by_timestamp:
+ grouped_by_timestamp[truncated_timestamp] = []
+ grouped_by_timestamp[truncated_timestamp].append(t)
+            # truncate the timestamp of tags whose truncated representation is unique
+ for truncated_timestamp, similar_tags in grouped_by_timestamp.items():
+ if len(similar_tags) == 1:
+ similar_tags[0].timestamp = truncated_timestamp
+
+
+class GitClient(VcsClientBase):
+
+ type = 'git' # noqa: A003
+
+ def __init__(self, path):
+ super(GitClient, self).__init__(path)
+ self._executable = which('git')
+ self._repo_hosting = None
+ self._github_base_url = 'https://github.com/'
+ self._github_path = None
+ self._gitlab_base_url = 'https://gitlab.com/'
+ self._gitlab_path = None
+
+ # query author
+ def _get_author(self, hash_):
+ cmd = [self._executable, 'log', hash_, '-n', '1', '--format=format:%aN']
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch author:\n%s' % result['output'])
+ return result['output']
+
+ def get_tags(self):
+ # Get a decorated log, use the refnames to find the ancestor tags
+ cmd_tag = [self._executable, 'log', '--simplify-by-decoration', '--decorate', '--pretty=oneline']
+ result_tag = self._run_command(cmd_tag)
+ if result_tag['returncode']:
+ raise RuntimeError('Could not fetch tags:\n%s' % result_tag['output'])
+ # Parse a comma-separated list of refname decorators out of the log
+ decorations = ', '.join(re.findall(r'^[a-f0-9]+ \(([^)]*)\) .', result_tag['output'], re.MULTILINE)) + ','
+ # Extract only refnames that are tags
+ tag_names = re.findall('tag: ([^,]+)[,]', decorations)
+
+ tags = []
+ for tag_name in tag_names:
+ cmd = [self._executable, 'log', tag_name, '-n', '1', '--format=format:%ai']
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch timestamp:\n%s' % result['output'])
+ tags.append(Tag(tag_name, result['output']))
+ self._truncate_timestamps(tags)
+ return tags
+
+ def get_latest_tag_name(self):
+ cmd_describe = [self._executable, 'describe', '--abbrev=0', '--tags']
+ result_describe = self._run_command(cmd_describe)
+ if result_describe['returncode']:
+ raise RuntimeError('Could not fetch latest tag:\n%s' % result_describe['output'])
+ tag_name = result_describe['output']
+ return tag_name
+
+ def get_log_entries(self, from_tag, to_tag, skip_merges=False, only_merges=False):
+ # query all hashes in the range
+ cmd = [self._executable, 'log']
+ if from_tag or to_tag:
+ cmd.append('%s%s' % ('%s..' % to_tag if to_tag else '', from_tag if from_tag else ''))
+ cmd.append('--format=format:%H')
+ if skip_merges and only_merges:
+ raise RuntimeError('Both "skip_merges" and "only_merges" are set to True, which contradicts.')
+ if skip_merges:
+ cmd.append('--no-merges')
+ if only_merges:
+ cmd.append('--merges')
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch commit hashes:\n%s' % result['output'])
+
+ log_entries = []
+ if result['output']:
+ # query further information for each changeset
+ hashes = result['output'].splitlines()
+ for hash_ in hashes:
+ # query commit message
+ cmd = [self._executable, 'log', hash_, '-n', '1', '--format=format:%B']
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch commit message:\n%s' % result['output'])
+ if result['output'] == from_tag:
+ continue
+ msg = result['output']
+ # query affected paths
+ cmd = [self._executable, 'show', '--first-parent', hash_, '--name-only', '--format=format:""']
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch affected paths:\n%s' % result['output'])
+ affected_paths = result['output'].splitlines()
+ log_entries.append(LogEntry(msg, affected_paths, self._get_author(hash_)))
+ return log_entries
+
+ def replace_repository_references(self, line):
+ if self._repo_hosting is None:
+ self._repo_hosting = False
+ try:
+ self._determine_repo_hosting()
+ except RuntimeError:
+ pass
+ if self._repo_hosting == 'github':
+ line = self._replace_github_issue_references(line)
+ elif self._repo_hosting == 'gitlab':
+ line = self._replace_gitlab_issue_references(line)
+ return line
+
+ def _determine_repo_hosting(self):
+ cmd = [self._executable, 'config', '--get', 'remote.origin.url']
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch remote url:\n%s' % result['output'])
+
+ # detect github hosting
+ prefixes = ['git@github.com:', 'https://github.com/', 'git://github.com/']
+ for prefix in prefixes:
+ if result['output'].startswith(prefix):
+ self._repo_hosting = 'github'
+ path = result['output'][len(prefix):]
+ if path.endswith('.git'):
+ path = path[:-4]
+ self._github_path = path
+ break
+
+ # detect gitlab hosting
+ prefixes = ['git@gitlab.com:', 'https://gitlab.com/', 'git://gitlab.com/']
+ for prefix in prefixes:
+ if result['output'].startswith(prefix):
+ self._repo_hosting = 'gitlab'
+ path = result['output'][len(prefix):]
+ if path.endswith('.git'):
+ path = path[:-4]
+ self._gitlab_path = path
+ break
+
+ def _replace_github_issue_references(self, line):
+ valid_name = '[\\w._-]+'
+ issue_pattern = '#(\\d+)'
+
+ def replace_issue_number(match):
+ issue_url = self._github_base_url
+ if match.group(1):
+ path = match.group(1)
+ issue_url += path
+ else:
+ path = ''
+ issue_url += self._github_path
+ issue_number = match.group(2)
+ issue_url += '/issues/' + issue_number
+ return '`%s#%s <%s>`_' % (path, issue_number, issue_url)
+ line = re.sub(('(%s/%s)?' % (valid_name, valid_name)) + issue_pattern, replace_issue_number, line)
+ return line
+
+ def _replace_gitlab_issue_references(self, line):
+ valid_name = '[\\w._-]+'
+ issue_pattern = '#(\\d+)'
+ merge_request_pattern = '!(\\d+)'
+
+ def replace_issue_number(match):
+ issue_url = self._gitlab_base_url
+ if match.group(1):
+ path = match.group(1)
+ issue_url += path
+ else:
+ path = ''
+ issue_url += self._gitlab_path
+ issue_number = match.group(3)
+ issue_url += '/-/issues/' + issue_number
+ return '`%s#%s <%s>`_' % (path, issue_number, issue_url)
+ line = re.sub(('(%s(/%s)+)?' % (valid_name, valid_name)) + issue_pattern, replace_issue_number, line)
+
+ def replace_merge_request_number(match):
+ merge_request_url = self._gitlab_base_url
+ if match.group(1):
+ path = match.group(1)
+ merge_request_url += path
+ else:
+ path = ''
+ merge_request_url += self._gitlab_path
+ merge_request_number = match.group(3)
+ merge_request_url += '/-/merge_requests/' + merge_request_number
+ return '`%s!%s <%s>`_' % (path, merge_request_number, merge_request_url)
+ line = re.sub(('(%s(/%s)+)?' % (valid_name, valid_name)) + merge_request_pattern, replace_merge_request_number, line)
+ return line
+
+
+class HgClient(VcsClientBase):
+
+ type = 'hg' # noqa: A003
+
+ def __init__(self, path):
+ super(HgClient, self).__init__(path)
+ self._executable = which('hg')
+
+ # query author
+ def _get_author(self, hash_):
+ cmd = [self._executable, 'log', '-r', hash_, '--template', '{author}']
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch author:\n%s' % result['output'])
+ return result['output']
+
+ def get_tags(self):
+ cmd_tag = [self._executable, 'tags', '-q']
+ result_tag = self._run_command(cmd_tag)
+ if result_tag['returncode']:
+ raise RuntimeError('Could not fetch tags:\n%s' % result_tag['output'])
+ tag_names = result_tag['output'].splitlines()
+
+ tags = []
+ for tag_name in tag_names:
+ cmd = [self._executable, 'log', '-r', tag_name, '--template', '{date|isodatesec}']
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch timestamp:\n%s' % result['output'])
+ tags.append(Tag(tag_name, result['output']))
+ self._truncate_timestamps(tags)
+ return tags
+
+ def get_latest_tag_name(self):
+ cmd_log = [self._executable, 'log', '--rev', '.', '--template', '{latesttag}']
+ result_log = self._run_command(cmd_log)
+ if result_log['returncode']:
+ raise RuntimeError('Could not fetch latest tag:\n%s' % result_log['output'])
+ tag_name = result_log['output']
+ if tag_name == 'null':
+            raise RuntimeError('Could not find latest tag')
+ return tag_name
+
+ def get_log_entries(self, from_tag, to_tag, skip_merges=False, only_merges=False):
+ # query all hashes in the range
+        # ascending chronological order, since that makes empty tag names easier to handle
+ revrange = '%s:%s' % ((to_tag if to_tag else ''), (from_tag if from_tag else 'tip'))
+ if to_tag:
+ revrange += '-%s' % to_tag
+ if from_tag:
+ revrange += '-%s' % from_tag
+ cmd = [self._executable, 'log', '-r', revrange, '--template', '{rev}\n']
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch commit hashes:\n%s' % result['output'])
+
+ tmp_base = tempfile.mkdtemp('-hg-style')
+ try:
+ style_file = os.path.join(tmp_base, 'hg-changeset-files-per-line.style')
+ with open(style_file, 'w') as f:
+ f.write("changeset = '{files}'\n")
+ f.write("file = '{file}\\n'\n")
+
+ log_entries = []
+ if result['output']:
+ # query further information for each changeset
+ revs = reversed(result['output'].splitlines())
+ for rev in revs:
+ # query commit message
+ cmd = [self._executable, 'log', '-r', rev, '-l', '1', '--template', '{desc}']
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch commit message:\n%s' % result['output'])
+ if result['output'] == from_tag:
+ continue
+ msg = result['output']
+ # query affected paths
+ cmd = [self._executable, 'log', '-r', rev, '-l', '1', '--style', style_file]
+ result = self._run_command(cmd)
+ if result['returncode']:
+ raise RuntimeError('Could not fetch affected paths:\n%s' % result['output'])
+ affected_paths = result['output'].splitlines()
+ log_entries.append(LogEntry(msg, affected_paths, self._get_author(rev)))
+ finally:
+ shutil.rmtree(tmp_base)
+ return log_entries
+
+
+def get_vcs_client(base_path):
+ vcs_clients = []
+ vcs_clients.append(GitClient)
+ vcs_clients.append(HgClient)
+ client_types = [c.type for c in vcs_clients]
+ if len(client_types) != len(set(client_types)):
+ raise RuntimeError('Multiple vcs clients share the same type: %s' % ', '.join(sorted(client_types)))
+
+ for vcs_client in vcs_clients:
+ if os.path.exists(os.path.join(base_path, '.%s' % vcs_client.type)):
+ return vcs_client(base_path)
+ raise RuntimeError('Could not detect repository type - currently supports: %s' % ', '.join([c.type for c in vcs_clients]))
--- /dev/null
+"""This script creates the skeletton of a catkin package."""
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+
+from catkin_pkg.package_templates import create_package_files, PackageTemplate
+
+
+def main(argv=sys.argv[1:], parent_path=os.getcwd()):
+ parser = argparse.ArgumentParser(
+ description='Creates a new catkin package')
+ parser.add_argument('name',
+ nargs=1,
+ help='The name for the package')
+ parser.add_argument('--meta',
+ action='store_true',
+ help='Creates meta-package files')
+ parser.add_argument('dependencies',
+ nargs='*',
+ help='Catkin package Dependencies')
+ parser.add_argument('-s', '--sys-deps',
+ nargs='*',
+ help='System Dependencies')
+ parser.add_argument('-b', '--boost-comps',
+ nargs='*',
+ help='Boost Components')
+ parser.add_argument('-V', '--pkg_version',
+ action='store',
+ help='Initial Package version')
+ parser.add_argument('-D', '--description',
+ action='store',
+ help='Description')
+ parser.add_argument('-l', '--license',
+ action='append',
+                        help='Name of the license (e.g. BSD, MIT, GPLv3...)')
+ parser.add_argument('-a', '--author',
+ action='append',
+ help='A single author, may be used multiple times')
+ parser.add_argument('-m', '--maintainer',
+ action='append',
+ help='A single maintainer, may be used multiple times')
+    rosdistro_name = os.environ.get('ROS_DISTRO')
+ parser.add_argument('--rosdistro', required=rosdistro_name is None, default=rosdistro_name, help='The ROS distro (default: environment variable ROS_DISTRO if defined)')
+
+ args = parser.parse_args(argv)
+
+ try:
+ package_name = args.name[0]
+ target_path = os.path.join(parent_path, package_name)
+ package_template = PackageTemplate._create_package_template(
+ package_name=package_name,
+ description=args.description,
+ licenses=args.license or [],
+ maintainer_names=args.maintainer,
+ author_names=args.author,
+ version=args.pkg_version,
+ catkin_deps=args.dependencies,
+ system_deps=args.sys_deps,
+ boost_comps=args.boost_comps)
+ create_package_files(target_path=target_path,
+ package_template=package_template,
+ rosdistro=args.rosdistro,
+ newfiles={},
+ meta=args.meta)
+ print('Successfully created files in %s. Please adjust the values in package.xml.' % target_path)
+ except ValueError as vae:
+ parser.error(str(vae))
--- /dev/null
+"""This script finds a catkin packages."""
+
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+
+from catkin_pkg.packages import find_packages
+
+
+def main(argv=sys.argv[1:]):
+ parser = argparse.ArgumentParser(description='Find a catkin package')
+ parser.add_argument('pkg', help='The name of the package')
+ parser.add_argument('base_path', nargs='?', default=os.curdir, help='The base path to crawl for packages')
+
+ args = parser.parse_args(argv)
+
+ try:
+ packages = find_packages(args.base_path)
+ catkin_pkg = [path for path, p in packages.items() if p.name == args.pkg]
+ if catkin_pkg:
+ print(catkin_pkg[0])
+ else:
+ print("Could not find package '%s'." % args.pkg, file=sys.stderr)
+ sys.exit(2)
+ except RuntimeError as e:
+ print('ERROR: ' + str(e), file=sys.stderr)
+ sys.exit(1)
--- /dev/null
+"""This script generates REP-0132 CHANGELOG.rst files for git or hg repositories."""
+
+from __future__ import print_function
+
+import argparse
+import logging
+import os
+import sys
+
+from catkin_pkg.changelog import CHANGELOG_FILENAME
+from catkin_pkg.changelog_generator import generate_changelog_file, generate_changelogs, get_all_changes, get_forthcoming_changes, update_changelogs
+from catkin_pkg.changelog_generator_vcs import get_vcs_client
+from catkin_pkg.packages import find_packages
+
+try:
+ raw_input
+except NameError:
+ raw_input = input # noqa: A001
+
+
+def prompt_continue(msg, default):
+ """Prompt the user for continuation."""
+ if default:
+ msg += ' [Y/n]?'
+ else:
+ msg += ' [y/N]?'
+
+ while True:
+ response = raw_input(msg)
+ if not response:
+ response = 'y' if default else 'n'
+ else:
+ response = response.lower()
+
+ if response in ['y', 'n']:
+ return response == 'y'
+
+ print("Response '%s' was not recognized, please use one of the following options: y, Y, n, N" % response, file=sys.stderr)
+
+
+def main(sysargs=None):
+ parser = argparse.ArgumentParser(description='Generate a REP-0132 %s' % CHANGELOG_FILENAME)
+ group_merge = parser.add_mutually_exclusive_group()
+ parser.add_argument(
+ '-a', '--all', action='store_true', default=False,
+ help='Generate changelog for all versions instead of only the forthcoming one (only supported when no changelog file exists yet)')
+ group_merge.add_argument(
+ '--only-merges', action='store_true', default=False,
+ help='Only add merge commits to the changelog')
+ parser.add_argument(
+ '--print-root', action='store_true', default=False,
+        help='Output changelog content to the console as if there were only one package in the root of the repository')
+ parser.add_argument(
+ '--skip-contributors', action='store_true', default=False,
+ help='Skip adding the list of contributors to the changelog')
+ group_merge.add_argument(
+ '--skip-merges', action='store_true', default=False,
+ help='Skip adding merge commits to the changelog')
+ parser.add_argument(
+ '-y', '--non-interactive', action='store_true', default=False,
+ help="Run without user interaction, confirming all questions with 'yes'")
+ args = parser.parse_args(sysargs)
+
+ base_path = '.'
+ logging.basicConfig(format='%(message)s', level=logging.DEBUG)
+
+ vcs_client = get_vcs_client(base_path)
+
+ if args.print_root:
+ # printing status messages to stderr to allow piping the changelog to a file
+ if args.all:
+ print('Querying all tags and commit information...', file=sys.stderr)
+ tag2log_entries = get_all_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges)
+ print('Generating changelog output with all versions...', file=sys.stderr)
+ else:
+ print('Querying commit information since latest tag...', file=sys.stderr)
+ tag2log_entries = get_forthcoming_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges)
+ print('Generating changelog files with forthcoming version...', file=sys.stderr)
+ print('', file=sys.stderr)
+ data = generate_changelog_file('repository-level', tag2log_entries, vcs_client=vcs_client)
+ print(data)
+ return 0
+
+ # find packages
+ packages = find_packages(base_path)
+ if not packages:
+ raise RuntimeError('No packages found')
+ print('Found packages: %s' % ', '.join(sorted(p.name for p in packages.values())))
+
+ # check for missing changelogs
+ missing_changelogs = []
+ for pkg_path, package in packages.items():
+ changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME)
+ if not os.path.exists(changelog_path):
+ missing_changelogs.append(package.name)
+
+ if args.all and not missing_changelogs:
+ raise RuntimeError('All packages already have a changelog. Either remove (some of) them before using --all or invoke the script without --all.')
+
+ if args.all and len(missing_changelogs) != len(packages):
+ ignored = set([p.name for p in packages.values()]) - set(missing_changelogs)
+ print('The following packages already have a changelog file and will be ignored: %s' % ', '.join(sorted(ignored)), file=sys.stderr)
+
+ # prompt to switch to --all
+ if not args.all and missing_changelogs:
+ print('Some of the packages have no changelog file: %s' % ', '.join(sorted(missing_changelogs)))
+        print('You might consider using --all to generate the changelogs for all versions (not only the forthcoming one).')
+ if not args.non_interactive and not prompt_continue('Continue without --all option', default=False):
+ raise RuntimeError('Skipping generation, rerun the script with --all.')
+
+ if args.all:
+ print('Querying all tags and commit information...')
+ tag2log_entries = get_all_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges)
+ print('Generating changelog files with all versions...')
+ generate_changelogs(base_path, packages, tag2log_entries, logger=logging, vcs_client=vcs_client, skip_contributors=args.skip_contributors)
+ else:
+ print('Querying commit information since latest tag...')
+ tag2log_entries = get_forthcoming_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges)
+ # separate packages with/without a changelog file
+ packages_without = {pkg_path: package for pkg_path, package in packages.items() if package.name in missing_changelogs}
+ if packages_without:
+ print('Generating changelog files with forthcoming version...')
+ generate_changelogs(base_path, packages_without, tag2log_entries, logger=logging, vcs_client=vcs_client, skip_contributors=args.skip_contributors)
+ packages_with = {pkg_path: package for pkg_path, package in packages.items() if package.name not in missing_changelogs}
+ if packages_with:
+ print('Updating forthcoming section of changelog files...')
+ update_changelogs(base_path, packages_with, tag2log_entries, logger=logging, vcs_client=vcs_client, skip_contributors=args.skip_contributors)
+ print('Done.')
+ print('Please review the extracted commit messages and consolidate the changelog entries before committing the files!')
+
+
+def main_catching_runtime_error(*args, **kwargs):
+ try:
+ main(*args, **kwargs)
+ except RuntimeError as e:
+ print('ERROR: ' + str(e), file=sys.stderr)
+ sys.exit(1)
--- /dev/null
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+
+from catkin_pkg.package_version import bump_version
+from catkin_pkg.package_version import update_versions
+from catkin_pkg.packages import find_packages, verify_equal_package_versions
+
+# prefer the relative location if available, so this works before catkin is installed or when overlaying an installed version
+if os.path.exists(os.path.join(os.path.dirname(__file__), '..', 'python', 'catkin', '__init__.py')):
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'python'))
+
+
+def main():
+ parser = argparse.ArgumentParser(description='Show or bump the version number in package.xml files.')
+ parser.add_argument('path', nargs='?', default='.', help='The path to a parent folder which contains package.xml files (default: .)')
+ parser.add_argument('--bump', choices=('major', 'minor', 'patch'), help='Which part of the version number to bump?')
+ args = parser.parse_args()
+
+ try:
+ packages = find_packages(args.path)
+ if not packages:
+ print('No packages found', file=sys.stderr)
+ sys.exit(1)
+ version = verify_equal_package_versions(packages.values())
+
+ # only print the version number
+ if args.bump is None:
+ print(version)
+
+ else:
+ # bump the version number
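+            # e.g. (illustrative, assuming the usual bump semantics):
+            #   bump_version('0.4.23', 'patch') -> '0.4.24'
+            #   bump_version('0.4.23', 'minor') -> '0.5.0'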
+ new_version = bump_version(version, args.bump)
+ update_versions(packages.keys(), new_version)
+ print('%s -> %s' % (version, new_version))
+ except Exception as e:
+ sys.exit(str(e))
--- /dev/null
+from __future__ import print_function
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+
+from catkin_pkg import metapackage
+from catkin_pkg.changelog import CHANGELOG_FILENAME, get_changelog_from_path
+from catkin_pkg.package import InvalidPackage, PACKAGE_MANIFEST_FILENAME
+from catkin_pkg.package_version import bump_version
+from catkin_pkg.package_version import get_forthcoming_label, update_changelog_sections, update_versions
+from catkin_pkg.packages import find_packages, verify_equal_package_versions
+from catkin_pkg.terminal_color import disable_ANSI_colors, fmt
+from catkin_pkg.workspace_vcs import get_repository_type, vcs_remotes
+
+try:
+ from shutil import which
+except ImportError:
+ # fallback for Python < 3.3
+ def which(exe):
+ for path in os.getenv('PATH').split(os.path.pathsep):
+ file_path = os.path.join(path, exe)
+ if os.path.isfile(file_path):
+ return file_path
+ return None
+
+try:
+ raw_input
+except NameError:
+ raw_input = input # noqa: A001
+
+
+def has_changes(base_path, path, vcs_type):
+ cmd = [_find_executable(vcs_type), 'diff', path]
+ try:
+ output = subprocess.check_output(cmd, cwd=base_path)
+ except subprocess.CalledProcessError as e:
+ raise RuntimeError(fmt("@{rf}Failed to check if '@{boldon}%s@{boldoff}' has modifications: %s" % (path, str(e))))
+ return output.decode('utf-8').rstrip() != ''
+
+
+def prompt_continue(msg, default):
+ """Prompt the user for continuation."""
+ if default:
+ msg += fmt(' @{yf}[Y/n]@{reset}?')
+ else:
+ msg += fmt(' @{yf}[y/N]@{reset}?')
+
+ while True:
+ _flush_stdin()
+ try:
+ response = raw_input(msg)
+ except EOFError:
+ response = ''
+ if not response:
+ response = 'y' if default else 'n'
+ else:
+ response = response.lower()
+
+ if response in ['y', 'n']:
+ return response == 'y'
+
+ print(
+ fmt(
+ "@{yf}Response '@{boldon}%s@{boldoff}' was not recognized, please use one of the following options: %s" %
+ (response, ', '.join([('@{boldon}%s@{boldoff}' % x) for x in ['y', 'Y', 'n', 'N']]))
+ ), file=sys.stderr)
+
+
+def _flush_stdin():
+ try:
+ from termios import tcflush, TCIFLUSH
+ tcflush(sys.stdin, TCIFLUSH)
+ except ImportError:
+        # fallback: termios is not available on all platforms (e.g. Windows)
+ pass
+
+
+def get_git_branch(base_path):
+ cmd_branch = [_find_executable('git'), 'rev-parse', '--abbrev-ref', 'HEAD']
+ try:
+ branch = subprocess.check_output(cmd_branch, cwd=base_path)
+ except subprocess.CalledProcessError as e:
+ raise RuntimeError(fmt('@{rf}Could not determine git branch: %s' % str(e)))
+ return branch.decode('utf-8').rstrip()
+
+
+def get_git_remote(base_path):
+ branch = get_git_branch(base_path)
+
+ cmd_remote = [_find_executable('git'), 'config', '--get', 'branch.%s.remote' % branch]
+ try:
+ remote = subprocess.check_output(cmd_remote, cwd=base_path)
+ except subprocess.CalledProcessError as e:
+ msg = 'Could not determine git remote: %s' % str(e)
+ msg += "\n\nMay be the branch '%s' is not tracking a remote branch?" % branch
+ raise RuntimeError(fmt('@{rf}%s' % msg))
+ return remote.decode('utf-8').rstrip()
+
+
+def try_repo_push(base_path, vcs_type):
+ if vcs_type in ['git']:
+ print('Trying to push to remote repository (dry run)...')
+ cmd = [_find_executable(vcs_type), 'push']
+ if vcs_type == 'git':
+ cmd.extend(['-n'] + [get_git_remote(base_path), get_git_branch(base_path)])
+ try:
+ subprocess.check_call(cmd, cwd=base_path)
+ except (subprocess.CalledProcessError, RuntimeError) as e:
+ raise RuntimeError(fmt('@{rf}Failed to dry push to repository: %s' % str(e)))
+
+
+def check_clean_working_copy(base_path, vcs_type):
+ if vcs_type in ['bzr', 'hg', 'svn']:
+ cmd = [_find_executable(vcs_type), 'status']
+ elif vcs_type in ['git']:
+ cmd = [_find_executable(vcs_type), 'status', '-s', '-u']
+ else:
+ assert False, 'Unknown vcs type: %s' % vcs_type
+ try:
+ output = subprocess.check_output(cmd, cwd=base_path)
+ except subprocess.CalledProcessError as e:
+ raise RuntimeError(fmt('@{rf}Failed to check working copy state: %s' % str(e)))
+ output = output.decode('utf-8').rstrip()
+ if output != '':
+ print(output)
+ return False
+ return True
+
+
+def commit_files(base_path, vcs_type, packages, packages_with_changelogs, message, dry_run=False):
+ cmd = [_find_executable(vcs_type), 'commit', '-m', message]
+ cmd += [os.path.join(p, PACKAGE_MANIFEST_FILENAME) for p in packages.keys()]
+ cmd += [path for path, _, _ in packages_with_changelogs.values()]
+ if not dry_run:
+ try:
+ subprocess.check_call(cmd, cwd=base_path)
+ except subprocess.CalledProcessError as e:
+ raise RuntimeError(fmt('@{rf}Failed to commit package.xml files: %s' % str(e)))
+ return cmd
+
+
+def tag_repository(base_path, vcs_type, tag_name, has_tag_prefix, dry_run=False):
+ if vcs_type in ['bzr', 'git', 'hg']:
+ cmd = [_find_executable(vcs_type), 'tag', tag_name]
+ elif vcs_type == 'svn':
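+        # the output of vcs_remotes is assumed to start with the 5-character prefix 'URL: '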
+ svn_url = vcs_remotes(base_path, 'svn')[5:]
+ if os.path.basename(svn_url) == 'trunk':
+ # tag "trunk"
+ base_url = os.path.dirname(svn_url)
+ elif os.path.basename(os.path.dirname(svn_url)) == 'branches':
+ # tag a direct subfolder of "branches"
+ base_url = os.path.dirname(os.path.dirname(svn_url))
+ elif svn_url.rfind('/trunk/') != -1:
+ # tag any subfolder of trunk but require a tag prefix
+ if not has_tag_prefix:
+ raise RuntimeError(fmt('@{rf}When tagging a subfolder you must use --tag-prefix to make your tag name unique'))
+ base_url = svn_url[:svn_url.rfind('/trunk/')]
+ elif svn_url.rfind('/branches/') != -1:
+            # tag any subfolder of a branch but require a tag prefix
+ if not has_tag_prefix:
+ raise RuntimeError(fmt('@{rf}When tagging a subfolder you must use --tag-prefix to make your tag name unique'))
+ base_url = svn_url[:svn_url.rfind('/branches/')]
+ else:
+ raise RuntimeError(fmt("@{rf}Could not determine base URL of SVN repository '%s'" % svn_url))
+ tag_url = '%s/tags/%s' % (base_url, tag_name)
+ cmd = ['svn', 'cp', '-m', '"tagging %s"' % tag_name, svn_url, tag_url]
+ else:
+ assert False, 'Unknown vcs type: %s' % vcs_type
+ if not dry_run:
+ try:
+ subprocess.check_call(cmd, cwd=base_path)
+ except subprocess.CalledProcessError as e:
+ raise RuntimeError(fmt('@{rf}Failed to tag repository: %s' % str(e)))
+ return cmd
+
+
+def push_changes(base_path, vcs_type, tag_name, dry_run=False):
+ commands = []
+
+ # push changes to the repository
+ cmd = [_find_executable(vcs_type), 'push']
+ if vcs_type == 'git':
+ cmd.extend([get_git_remote(base_path), get_git_branch(base_path)])
+ commands.append(cmd)
+ if not dry_run:
+ try:
+ subprocess.check_call(cmd, cwd=base_path)
+ except subprocess.CalledProcessError as e:
+ raise RuntimeError(fmt('@{rf}Failed to push changes to the repository: %s\n\nYou need to manually push the changes/tag to the repository.' % str(e)))
+
+ # push tags to the repository
+ if vcs_type in ['git']:
+ cmd = [_find_executable(vcs_type), 'push', get_git_remote(base_path), tag_name]
+ commands.append(cmd)
+ if not dry_run:
+ try:
+ subprocess.check_call(cmd, cwd=base_path)
+ except subprocess.CalledProcessError as e:
+ raise RuntimeError(fmt('@{rf}Failed to push tag to the repository: %s\n\nYou need to manually push the new tag to the repository.' % str(e)))
+
+ return commands
+
+
+def _find_executable(vcs_type):
+ file_path = which(vcs_type)
+ if file_path is None:
+ raise RuntimeError(fmt('@{rf}Could not find vcs binary: %s' % vcs_type))
+ return file_path
+
+
+def main():
+ try:
+ _main()
+ except RuntimeError as e:
+ print(e, file=sys.stderr)
+ sys.exit(1)
+
+
+def _main():
+ parser = argparse.ArgumentParser(
+ description='Runs the commands to bump the version number, commit the modified %s files and create a tag in the repository.' % PACKAGE_MANIFEST_FILENAME)
+ parser.add_argument('--bump', choices=('major', 'minor', 'patch'), default='patch', help='Which part of the version number to bump? (default: %(default)s)')
+ parser.add_argument('--version', help='Specify a specific version to use')
+ parser.add_argument('--no-color', action='store_true', default=False, help='Disables colored output')
+ parser.add_argument('--no-push', action='store_true', default=False, help='Disables pushing to remote repository')
+ parser.add_argument('-t', '--tag-prefix', default='', help='Add this prefix to the created release tag')
+ parser.add_argument('-y', '--non-interactive', action='store_true', default=False, help="Run without user interaction, confirming all questions with 'yes'")
+ args = parser.parse_args()
+
+    if args.version and not re.match(r'^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)$', args.version):
+ parser.error('The passed version must follow the conventions (positive integers x.y.z with no leading zeros)')
+
+ if args.tag_prefix and ' ' in args.tag_prefix:
+ parser.error('The tag prefix must not contain spaces')
+
+ # force --no-color if stdout is non-interactive
+ if not sys.stdout.isatty():
+ args.no_color = True
+ # disable colors if asked
+ if args.no_color:
+ disable_ANSI_colors()
+
+ base_path = '.'
+
+ print(fmt('@{gf}Prepare the source repository for a release.'))
+
+ # determine repository type
+ vcs_type = get_repository_type(base_path)
+ if vcs_type is None:
+ raise RuntimeError(fmt("@{rf}Could not determine repository type of @{boldon}'%s'@{boldoff}" % base_path))
+ print(fmt('Repository type: @{boldon}%s@{boldoff}' % vcs_type))
+
+ # find packages
+ try:
+ packages = find_packages(base_path)
+ except InvalidPackage as e:
+ raise RuntimeError(fmt("@{rf}Invalid package at path @{boldon}'%s'@{boldoff}:\n %s" % (os.path.abspath(base_path), str(e))))
+ if not packages:
+ raise RuntimeError(fmt('@{rf}No packages found'))
+ print('Found packages: %s' % ', '.join([fmt('@{bf}@{boldon}%s@{boldoff}@{reset}' % p.name) for p in packages.values()]))
+
+ # complain about packages with unsupported build_type as they might require additional steps before being released
+ # complain about packages with upper case character since they won't be releasable with bloom
+ unsupported_pkg_names = []
+ invalid_pkg_names = []
+ for package in packages.values():
+ build_types = [export.content for export in package.exports if export.tagname == 'build_type']
+ build_type = build_types[0] if build_types else 'catkin'
+ if build_type not in ('catkin', 'ament_cmake'):
+ unsupported_pkg_names.append(package.name)
+ if package.name != package.name.lower():
+ invalid_pkg_names.append(package.name)
+ if unsupported_pkg_names:
+ print(
+ fmt(
+ "@{yf}Warning: the following package are not of build_type catkin or ament_cmake and may require manual steps to release': %s" %
+ ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(unsupported_pkg_names)])
+ ), file=sys.stderr)
+ if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
+ raise RuntimeError(fmt('@{rf}Aborted release, verify that unsupported packages are ready to be released or release manually.'))
+ if invalid_pkg_names:
+ print(
+ fmt(
+ "@{yf}Warning: the following package names contain upper case characters which violate both ROS and Debian naming conventions': %s" %
+ ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(invalid_pkg_names)])
+ ), file=sys.stderr)
+ if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
+ raise RuntimeError(fmt('@{rf}Aborted release, fix the names of the packages.'))
+
+ local_modifications = []
+ for pkg_path, package in packages.items():
+ # verify that the package.xml files don't have modifications pending
+ package_xml_path = os.path.join(pkg_path, PACKAGE_MANIFEST_FILENAME)
+ if has_changes(base_path, package_xml_path, vcs_type):
+ local_modifications.append(package_xml_path)
+ # verify that metapackages are valid
+ if package.is_metapackage():
+ try:
+ metapackage.validate_metapackage(pkg_path, package)
+ except metapackage.InvalidMetapackage as e:
+ raise RuntimeError(fmt(
+ "@{rf}Invalid metapackage at path '@{boldon}%s@{boldoff}':\n %s\n\nSee requirements for metapackages: %s" %
+ (os.path.abspath(pkg_path), str(e), metapackage.DEFINITION_URL)))
+
+ # fetch current version and verify that all packages have same version number
+ old_version = verify_equal_package_versions(packages.values())
+ if args.version:
+ new_version = args.version
+ else:
+ new_version = bump_version(old_version, args.bump)
+ tag_name = args.tag_prefix + new_version
+
+ if (
+ not args.non_interactive and
+ not prompt_continue(
+ fmt(
+ "Prepare release of version '@{bf}@{boldon}%s@{boldoff}@{reset}'%s" %
+ (new_version, " (tagged as '@{bf}@{boldon}%s@{boldoff}@{reset}')" % tag_name if args.tag_prefix else '')
+ ), default=True)
+ ):
+ raise RuntimeError(fmt("@{rf}Aborted release, use option '--bump' to release a different version and/or '--tag-prefix' to add a prefix to the tag name."))
+
+ # check for changelog entries
+ missing_changelogs = []
+ missing_changelogs_but_forthcoming = {}
+ for pkg_path, package in packages.items():
+ changelog_path = os.path.join(pkg_path, CHANGELOG_FILENAME)
+ if not os.path.exists(changelog_path):
+ missing_changelogs.append(package.name)
+ continue
+ # verify that the changelog files don't have modifications pending
+ if has_changes(base_path, changelog_path, vcs_type):
+ local_modifications.append(changelog_path)
+ changelog = get_changelog_from_path(changelog_path, package.name)
+ try:
+ changelog.get_content_of_version(new_version)
+ except KeyError:
+ # check that forthcoming section exists
+ forthcoming_label = get_forthcoming_label(changelog.rst)
+ if forthcoming_label:
+ missing_changelogs_but_forthcoming[package.name] = (changelog_path, changelog, forthcoming_label)
+ else:
+ missing_changelogs.append(package.name)
+
+ if local_modifications:
+ raise RuntimeError(fmt('@{rf}The following files have modifications, please commit/revert them before:' + ''.join([('\n- @{boldon}%s@{boldoff}' % path) for path in local_modifications])))
+
+ if missing_changelogs:
+ print(
+ fmt(
+ "@{yf}Warning: the following packages do not have a changelog file or entry for version '@{boldon}%s@{boldoff}': %s" %
+ (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(missing_changelogs)]))
+ ), file=sys.stderr)
+ if not args.non_interactive and not prompt_continue('Continue without changelogs', default=False):
+ raise RuntimeError(fmt("@{rf}Aborted release, populate the changelog with '@{boldon}catkin_generate_changelog@{boldoff}' and review / clean up the content."))
+
+ # verify that repository is pushable (if the vcs supports dry run of push)
+ if not args.no_push:
+ try_repo_push(base_path, vcs_type)
+
+ # check for staged changes and modified and untracked files
+ print(fmt('@{gf}Checking if working copy is clean (no staged changes, no modified files, no untracked files)...'))
+ is_clean = check_clean_working_copy(base_path, vcs_type)
+ if not is_clean:
+ print(fmt('@{yf}Warning: the working copy contains other changes. Consider reverting/committing/stashing them before preparing a release.'), file=sys.stderr)
+ if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
+ raise RuntimeError(fmt('@{rf}Aborted release, clean the working copy before trying again.'))
+
+ # for svn verify that we know how to tag that repository
+ if vcs_type in ['svn']:
+ tag_svn_cmd = tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '', dry_run=True)
+
+ # tag forthcoming changelog sections
+ update_changelog_sections(missing_changelogs_but_forthcoming, new_version)
+ print(fmt(
+ "@{gf}Rename the forthcoming section@{reset} of the following packages to version '@{bf}@{boldon}%s@{boldoff}@{reset}': %s" %
+ (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(missing_changelogs_but_forthcoming.keys())]))))
+
+ # bump version number
+ update_versions(packages.keys(), new_version)
+ print(fmt("@{gf}Bump version@{reset} of all packages from '@{bf}%s@{reset}' to '@{bf}@{boldon}%s@{boldoff}@{reset}'" % (old_version, new_version)))
+
+ pushed = None
+ if vcs_type in ['svn']:
+ # for svn everything affects the remote repository immediately
+ commands = []
+ commands.append(commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name, dry_run=True))
+ commands.append(tag_svn_cmd)
+ if not args.no_push:
+ print(fmt('@{gf}The following commands will be executed to commit the changes and tag the new version:'))
+ else:
+ print(fmt('@{gf}You can use the following commands to manually commit the changes and tag the new version:'))
+ for cmd in commands:
+ print(fmt(' @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))
+
+ if not args.no_push:
+ if not args.non_interactive:
+ # confirm before modifying repository
+ if not prompt_continue('Execute commands which will modify the repository', default=True):
+ pushed = False
+ if pushed is None:
+ commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name)
+ tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '')
+ pushed = True
+
+ else:
+ # for other vcs types the changes are first done locally
+ print(fmt('@{gf}Committing the package.xml files...'))
+ commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name)
+
+ print(fmt("@{gf}Creating tag '@{boldon}%s@{boldoff}'..." % (tag_name)))
+ tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '')
+
+ try:
+ commands = push_changes(base_path, vcs_type, tag_name, dry_run=True)
+ except RuntimeError:
+ print(fmt('@{yf}Warning: could not determine commands to push the changes and tag to the remote repository. Do you have a remote configured for the current branch?'))
+ else:
+ if not args.no_push:
+ print(fmt('@{gf}The following commands will be executed to push the changes and tag to the remote repository:'))
+ else:
+ print(fmt('@{gf}You can use the following commands to manually push the changes to the remote repository:'))
+ for cmd in commands:
+ print(fmt(' @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))
+
+ if not args.no_push:
+ if not args.non_interactive:
+ # confirm commands to push to remote repository
+ if not prompt_continue('Execute commands to push the local commits and tags to the remote repository', default=True):
+ pushed = False
+ if pushed is None:
+ push_changes(base_path, vcs_type, tag_name)
+ pushed = True
+
+ if pushed:
+ print(fmt("@{gf}The source repository has been released successfully. The next step will be '@{boldon}bloom-release@{boldoff}'."))
+ else:
+ msg = 'The release of the source repository has been prepared successfully but the changes have not been pushed yet. ' \
+ "After pushing the changes manually the next step will be '@{boldon}bloom-release@{boldoff}'."
+ if args.no_push or pushed is False:
+ print(fmt('@{yf}%s' % msg))
+ else:
+ raise RuntimeError(fmt('@{rf}%s' % msg))
--- /dev/null
+"""This script renames the forthcoming section in changelog files with the upcoming version and the current date."""
+
+from __future__ import print_function
+
+import argparse
+import datetime
+import os
+import re
+import sys
+
+from catkin_pkg.changelog import CHANGELOG_FILENAME, get_changelog_from_path
+from catkin_pkg.changelog_generator import FORTHCOMING_LABEL
+from catkin_pkg.package_version import bump_version
+from catkin_pkg.packages import find_packages, verify_equal_package_versions
+
+import docutils.core
+
+
+def get_forthcoming_label(rst):
+ document = docutils.core.publish_doctree(rst)
+ forthcoming_label = None
+ for child in document.children:
+ title = None
+ if isinstance(child, docutils.nodes.subtitle):
+ title = child
+ elif isinstance(child, docutils.nodes.section):
+ section = child
+ if len(section.children) > 0 and isinstance(section.children[0], docutils.nodes.title):
+ title = section.children[0]
+ if title and len(title.children) > 0 and isinstance(title.children[0], docutils.nodes.Text):
+ title_text = title.children[0].rawsource
+ if FORTHCOMING_LABEL.lower() in title_text.lower():
+ if forthcoming_label:
+ raise RuntimeError('Found multiple forthcoming sections')
+ forthcoming_label = title_text
+ return forthcoming_label
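+
+# Example (illustrative; assumes FORTHCOMING_LABEL == 'Forthcoming'):
+#   rst = ('Forthcoming\n-----------\n* fix\n\n'
+#          '0.1.0 (2019-01-01)\n------------------\n* initial release')
+#   get_forthcoming_label(rst)  # -> 'Forthcoming'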
+
+
+def rename_section(data, old_label, new_label):
+ valid_section_characters = '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~'
+
+ def replace_section(match):
+ section_char = match.group(2)[0]
+ return new_label + '\n' + section_char * len(new_label)
+ pattern = '^(' + re.escape(old_label) + ')\n([' + re.escape(valid_section_characters) + ']+)$'
+ data, count = re.subn(pattern, replace_section, data, flags=re.MULTILINE)
+ if count == 0:
+ raise RuntimeError('Could not find section')
+ if count > 1:
+ raise RuntimeError('Found multiple matching sections')
+ return data
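+
+# Example (illustrative):
+#   rename_section('Forthcoming\n-----------\n* fix', 'Forthcoming', '0.1.1 (2020-01-01)')
+#   # -> '0.1.1 (2020-01-01)\n------------------\n* fix'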
+
+
+def main(sysargs=None):
+ parser = argparse.ArgumentParser(description='Tag the forthcoming section in the changelog files with an upcoming version number')
+ parser.add_argument('--bump', choices=('major', 'minor', 'patch'), default='patch', help='Which part of the version number to bump? (default: %(default)s)')
+ args = parser.parse_args(sysargs)
+
+ base_path = '.'
+
+ # find packages
+ packages = find_packages(base_path)
+ if not packages:
+ raise RuntimeError('No packages found')
+ print('Found packages: %s' % ', '.join([p.name for p in packages.values()]))
+
+ # fetch current version and verify that all packages have same version number
+ old_version = verify_equal_package_versions(packages.values())
+ new_version = bump_version(old_version, args.bump)
+ print('Tag version %s' % new_version)
+
+ # check for changelog entries
+ changelogs = []
+ missing_forthcoming = []
+ already_tagged = []
+ for pkg_path, package in packages.items():
+ changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME)
+ if not os.path.exists(changelog_path):
+ missing_forthcoming.append(package.name)
+ continue
+ changelog = get_changelog_from_path(changelog_path, package.name)
+ if not changelog:
+ missing_forthcoming.append(package.name)
+ continue
+ # check that forthcoming section exists
+ forthcoming_label = get_forthcoming_label(changelog.rst)
+ if not forthcoming_label:
+ missing_forthcoming.append(package.name)
+ continue
+ # check that new_version section does not exist yet
+ try:
+ changelog.get_content_of_version(new_version)
+ already_tagged.append(package.name)
+ continue
+ except KeyError:
+ pass
+ changelogs.append((package.name, changelog_path, changelog, forthcoming_label))
+ if missing_forthcoming:
+ print('The following packages do not have a forthcoming section in their changelog file: %s' % ', '.join(sorted(missing_forthcoming)), file=sys.stderr)
+ if already_tagged:
+ print("The following packages do already have a section '%s' in their changelog file: %s" % (new_version, ', '.join(sorted(already_tagged))), file=sys.stderr)
+
+ # rename forthcoming sections to new_version including current date
+ new_changelog_data = []
+ new_label = '%s (%s)' % (new_version, datetime.date.today().isoformat())
+ for (pkg_name, changelog_path, changelog, forthcoming_label) in changelogs:
+ print("Renaming section '%s' to '%s' in package '%s'..." % (forthcoming_label, new_label, pkg_name))
+ data = rename_section(changelog.rst, forthcoming_label, new_label)
+ new_changelog_data.append((changelog_path, data))
+
+ print('Writing updated changelog files...')
+ for (changelog_path, data) in new_changelog_data:
+ with open(changelog_path, 'wb') as f:
+ f.write(data.encode('utf-8'))
--- /dev/null
+"""This script tests REP-0132 changelog files."""
+
+from __future__ import print_function
+
+import argparse
+import logging
+import os
+import sys
+
+import catkin_pkg.changelog
+from catkin_pkg.changelog import Changelog, CHANGELOG_FILENAME
+from catkin_pkg.changelog import populate_changelog_from_rst
+
+
+def main(sysargs=None):
+ parser = argparse.ArgumentParser(
+ description='Tests a REP-0132 %s' % CHANGELOG_FILENAME)
+ parser.add_argument(
+ 'changelog_file',
+ help='%s file to parse' % CHANGELOG_FILENAME,
+ default='.',
+ nargs='?')
+
+ args = parser.parse_args(sysargs)
+
+ if os.path.isdir(args.changelog_file):
+ changelog_file = os.path.join(args.changelog_file, CHANGELOG_FILENAME)
+ if not os.path.exists(changelog_file):
+ print("No {0} file in given directory: '{1}'"
+ .format(CHANGELOG_FILENAME, args.changelog_file), file=sys.stderr)
+ return 1
+ else:
+ changelog_file = args.changelog_file
+ if not os.path.exists(changelog_file):
+ print("{0} file given does not exist: '{1}'"
+ .format(CHANGELOG_FILENAME, args.changelog_file), file=sys.stderr)
+ return 1
+
+ if os.path.basename(changelog_file) != CHANGELOG_FILENAME:
+ print('WARNING: changelog file name should be %s' % CHANGELOG_FILENAME)
+
+ logging.basicConfig()
+ catkin_pkg.changelog.log.setLevel(logging.DEBUG)
+ changelog = Changelog()
+ with open(changelog_file, 'r') as f:
+ print(populate_changelog_from_rst(changelog, f.read()))
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2013, Open Source Robotics Foundation, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Open Source Robotics Foundation, Inc. nor
+# the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior
+# written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import print_function
+
+import os
+import re
+
+
+def get_metapackage_cmake_template_path():
+ """
+ Return the location of the metapackage CMakeLists.txt CMake template.
+
+ :returns: ``str`` location of the metapackage CMakeLists.txt CMake template
+ """
+ rel_path = os.path.join('templates', 'metapackage.cmake.in')
+ return os.path.join(os.path.dirname(__file__), rel_path)
+
+
+def configure_file(template_file, environment): # noqa: D402
+ """
+ Evaluate a .in template file used in CMake with configure_file().
+
+ :param template_file: path to the template, ``str``
+ :param environment: dictionary of placeholders to substitute,
+ ``dict``
+    :returns: string with evaluated template
+ :raises: KeyError for placeholders in the template which are not
+ in the environment
+ """
+ with open(template_file, 'r') as f:
+ template = f.read()
+ return configure_string(template, environment)
+
+
+def configure_string(template, environment):
+ """
+ Substitute variables enclosed by @ characters.
+
+ :param template: the template, ``str``
+ :param environment: dictionary of placeholders to substitute,
+ ``dict``
+    :returns: string with evaluated template
+ :raises: KeyError for placeholders in the template which are not
+ in the environment
+ """
+ def substitute(match):
+ var = match.group(0)[1:-1]
+ return environment[var]
+ return re.sub('@[a-zA-Z0-9_]+@', substitute, template)
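+
+# Example (illustrative):
+#   configure_string('project(@name@)', {'name': 'foo_pkg'})  # -> 'project(foo_pkg)'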
--- /dev/null
+# Copyright 2017 Open Source Robotics Foundation, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import operator
+
+import pyparsing as pp
+
+# operatorPrecedence was renamed to infixNotation in pyparsing 1.5.7
+try:
+ from pyparsing import infixNotation
+except ImportError:
+ from pyparsing import operatorPrecedence as infixNotation
+
+
+def evaluate_condition(condition, context):
+ if condition is None:
+ return True
+ expr = _get_condition_expression()
+ try:
+ parse_results = expr.parseString(condition, parseAll=True)
+ except pp.ParseException as e:
+ raise ValueError(
+ "condition '%s' failed to parse: %s" % (condition, e))
+ return parse_results[0](context)
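+
+# Example usage (illustrative):
+#   evaluate_condition(None, {})  # -> True (no condition always passes)
+#   evaluate_condition('$ROS_VERSION == 2', {'ROS_VERSION': '2'})  # -> True
+#   evaluate_condition('$ROS_DISTRO == melodic and $ROS_PYTHON_VERSION == 3',
+#                      {'ROS_DISTRO': 'noetic', 'ROS_PYTHON_VERSION': '3'})  # -> False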
+
+
+_condition_expression = None
+
+
+def _get_condition_expression():
+ global _condition_expression
+ if not _condition_expression:
+ operator = pp.Regex('==|!=|>=|>|<=|<').setName('operator')
+ operator.setParseAction(_Operator)
+
+ identifier = pp.Word('$', pp.alphanums + '_', min=2).setName('identifier')
+ identifier.setParseAction(_Identifier)
+
+ value = pp.Word(pp.alphanums + '_-').setName('value')
+ value.setParseAction(_Value)
+
+ double_quoted_value = pp.QuotedString('"').setName(
+ 'double_quoted_value')
+ double_quoted_value.setParseAction(_Value)
+ single_quoted_value = pp.QuotedString("'").setName(
+ 'single_quoted_value')
+ single_quoted_value.setParseAction(_Value)
+
+ comparison_term = identifier | value | double_quoted_value | \
+ single_quoted_value
+
+ condition = pp.Group(comparison_term + operator + comparison_term).setName('condition')
+ condition.setParseAction(_Condition)
+
+ _condition_expression = infixNotation(
+ condition, [
+ ('and', 2, pp.opAssoc.LEFT, _And),
+ ('or', 2, pp.opAssoc.LEFT, _Or),
+ ])
+ return _condition_expression
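+
+# Note: 'and' is listed before 'or' in the infixNotation table above, so it
+# binds more tightly, i.e. 'a or b and c' parses as 'a or (b and c)'.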
+
+
+class _Operator:
+ operators = {
+ '==': operator.eq,
+ '!=': operator.ne,
+ '<=': operator.le,
+ '<': operator.lt,
+ '>=': operator.ge,
+ '>': operator.gt,
+ }
+
+ def __init__(self, t):
+ self.value = t[0]
+
+ def __call__(self, arg1, arg2, context):
+ assert self.value in self.operators
+ return self.operators[self.value](arg1(context), arg2(context))
+
+ def __str__(self):
+ return self.value
+
+ __repr__ = __str__
+
+
+class _Identifier:
+
+ def __init__(self, t):
+ self.value = t[0]
+
+ def __call__(self, context):
+ return str(context.get(self.value[1:], ''))
+
+ def __str__(self):
+ return self.value
+
+ __repr__ = __str__
+
+
+class _Value:
+
+ def __init__(self, t):
+ self.value = t[0]
+
+ def __call__(self, context):
+ return self.value
+
+ def __str__(self):
+ return self.value
+
+ __repr__ = __str__
+
+
+class _Condition:
+
+ def __init__(self, t):
+ self.value = t[0]
+
+ def __call__(self, context):
+ return self.value[1](self.value[0], self.value[2], context)
+
+ def __str__(self):
+ return ' '.join(map(str, self.value))
+
+ __repr__ = __str__
+
+
+class _BinOp:
+
+ def __init__(self, t):
+ self.args = t[0][0::2]
+
+ def __call__(self, context):
+ return self.evalop(a(context) for a in self.args)
+
+ def __str__(self):
+ sep = ' %s ' % self.reprsymbol
+ return '(' + sep.join(map(str, self.args)) + ')'
+
+ __repr__ = __str__
+
+
+class _And(_BinOp):
+ reprsymbol = 'and'
+ evalop = all
+
+
+class _Or(_BinOp):
+ reprsymbol = 'or'
+ evalop = any
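+
+# Because _BinOp.__call__ feeds a generator into all()/any(), evaluation of
+# 'and'/'or' operands short-circuits from left to right.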
--- /dev/null
+# Copyright 2017 Open Source Robotics Foundation, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from catkin_pkg.condition import evaluate_condition
+
+
+class GroupDependency:
+ __slots__ = [
+ 'name',
+ 'condition',
+ 'evaluated_condition',
+ 'members',
+ ]
+
+ def __init__(self, name, condition=None, members=None):
+ self.name = name
+ self.condition = condition
+ self.members = members
+ self.evaluated_condition = None
+
+ def __eq__(self, other):
+ if not isinstance(other, GroupDependency):
+ return False
+ return all(getattr(self, attr) == getattr(other, attr)
+ for attr in self.__slots__)
+
+ def __str__(self):
+ return self.name
+
+ def evaluate_condition(self, context):
+ """
+ Evaluate the condition.
+
+ The result is also stored in the member variable `evaluated_condition`.
+
+ :param context: A dictionary with key value pairs to replace variables
+ starting with $ in the condition.
+
+ :returns: True if the condition evaluates to True, else False
+ :raises: :exc:`ValueError` if the condition fails to parse
+ """
+ self.evaluated_condition = evaluate_condition(self.condition, context)
+ return self.evaluated_condition
+
+ def extract_group_members(self, packages):
+ self.members = set()
+ for pkg in packages:
+            # all group membership conditions must have been evaluated beforehand
+            for g in pkg.member_of_groups:
+                assert g.evaluated_condition is not None
+            if self.name in (g.name for g in pkg.member_of_groups if g.evaluated_condition):
+                self.members.add(pkg.name)
--- /dev/null
+# Copyright 2017 Open Source Robotics Foundation, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from catkin_pkg.condition import evaluate_condition
+
+
+class GroupMembership:
+ __slots__ = [
+ 'name',
+ 'condition',
+ 'evaluated_condition',
+ ]
+
+ def __init__(self, name, condition=None):
+ self.name = name
+ self.condition = condition
+ self.evaluated_condition = None
+
+ def __eq__(self, other):
+ if not isinstance(other, GroupMembership):
+ return False
+ return all(getattr(self, attr) == getattr(other, attr)
+ for attr in self.__slots__)
+
+ def __str__(self):
+ return self.name
+
+ def evaluate_condition(self, context):
+ """
+ Evaluate the condition.
+
+ The result is also stored in the member variable `evaluated_condition`.
+
+ :param context: A dictionary with key value pairs to replace variables
+ starting with $ in the condition.
+
+ :returns: True if the condition evaluates to True, else False
+ :raises: :exc:`ValueError` if the condition fails to parse
+ """
+ self.evaluated_condition = evaluate_condition(self.condition, context)
+ return self.evaluated_condition
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2013, Open Source Robotics Foundation, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Open Source Robotics Foundation, Inc. nor
+# the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior
+# written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""
+Checks metapackages for compliance with REP-0127.
+
+Reference: http://ros.org/reps/rep-0127.html#metapackage
+"""
+
+from __future__ import print_function
+
+import os
+import re
+
+from catkin_pkg.cmake import configure_file
+from catkin_pkg.cmake import get_metapackage_cmake_template_path
+
+__author__ = 'William Woodall'
+__email__ = 'william@osrfoundation.org'
+__maintainer__ = 'William Woodall'
+
+DEFINITION_URL = 'http://ros.org/reps/rep-0127.html#metapackage'
+
+
+class InvalidMetapackage(Exception):
+
+ def __init__(self, msg, path, package):
+ self.path = path
+ self.package = package
+ Exception.__init__(self, "Metapackage '%s': %s" % (package.name, msg))
+
+
+def get_expected_cmakelists_txt(metapackage_name):
+ """
+ Return the expected boilerplate CMakeLists.txt file for a metapackage.
+
+ :param metapackage_name: name of the metapackage
+ :type metapackage_name: str
+ :returns: expected CMakeLists.txt file
+ :rtype: str
+ """
+ env = {
+ 'name': metapackage_name,
+ 'metapackage_arguments': ''
+ }
+ return configure_file(get_metapackage_cmake_template_path(), env)
+
+
+def has_cmakelists_txt(path):
+ """
+ Return True if the given path contains a CMakeLists.txt, otherwise False.
+
+ :param path: path to folder potentially containing CMakeLists.txt
+ :type path: str
+ :returns: True if path contains CMakeLists.txt, else False
+ :rtype: bool
+ """
+ cmakelists_txt_path = os.path.join(path, 'CMakeLists.txt')
+ return os.path.isfile(cmakelists_txt_path)
+
+
+def get_cmakelists_txt(path):
+ """
+ Fetch the CMakeLists.txt from a given path.
+
+ :param path: path to the folder containing the CMakeLists.txt
+ :type path: str
+ :returns: contents of CMakeLists.txt file in given path
+ :rtype: str
+ :raises OSError: if there is no CMakeLists.txt in given path
+ """
+ cmakelists_txt_path = os.path.join(path, 'CMakeLists.txt')
+ with open(cmakelists_txt_path, 'r') as f:
+ return f.read()
+
+
+def has_valid_cmakelists_txt(path, metapackage_name):
+ """
+ Return True if the given path contains a valid CMakeLists.txt, otherwise False.
+
+ A valid CMakeLists.txt for a metapackage is defined by REP-0127
+
+ :param path: path to folder containing CMakeLists.txt
+ :type path: str
+ :param metapackage_name: name of the metapackage being tested
+ :type metapackage_name: str
+ :returns: True if the path contains a valid CMakeLists.txt, else False
+ :rtype: bool
+ :raises OSError: if there is no CMakeLists.txt in given path
+ """
+ cmakelists_txt = get_cmakelists_txt(path)
+ expected = get_expected_cmakelists_txt(metapackage_name)
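+    # the expected template pins version 2.8.3 (presumably in its
+    # cmake_minimum_required() line); split around that token so any x.y.z
+    # version in the checked file is accepted below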
+ prefix, suffix = expected.split('2.8.3', 1)
+ if not cmakelists_txt.startswith(prefix):
+ return False
+ if not cmakelists_txt.endswith(suffix):
+ return False
+ version = cmakelists_txt[len(prefix):-len(suffix)]
+    return re.match(r'^\d+\.\d+\.\d+$', version) is not None
+
+
+def validate_metapackage(path, package):
+ """
+ Validate the given package (catkin_pkg.package.Package) as a metapackage.
+
+ This validates the metapackage against the definition from REP-0127
+
+ :param path: directory of the package being checked
+ :type path: str
+ :param package: package to be validated
+ :type package: :py:class:`catkin_pkg.package.Package`
+ :raises InvalidMetapackage: if package is not a valid metapackage
+    :raises OSError: if there is no package.xml in given path
+ """
+ # Is there actually a package at the given path, else raise
+ # Cannot do package_exists_at from catkin_pkg.packages because of circular dep
+ if not os.path.isdir(path) or not os.path.isfile(os.path.join(path, 'package.xml')):
+ raise OSError("No package.xml found at path: '%s'" % path)
+ # Is it a metapackage, else raise
+ if not package.is_metapackage():
+ raise InvalidMetapackage('No <metapackage/> tag in <export> section of package.xml', path, package)
+ # Is there a CMakeLists.txt, else raise
+ if not has_cmakelists_txt(path):
+ raise InvalidMetapackage('No CMakeLists.txt', path, package)
+ # Is the CMakeLists.txt correct, else raise
+ if not has_valid_cmakelists_txt(path, package.name):
+ expected = get_expected_cmakelists_txt(package.name)
+ expected = expected.replace('2.8.3', '<version x.y.z>')
+ raise InvalidMetapackage("""\
+Invalid CMakeLists.txt
+Expected:
+<<<%s>>>
+Got:
+<<<%s>>>""" % (expected, get_cmakelists_txt(path)), path, package
+ )
+ # Does it buildtool depend on catkin, else raise
+ if not package.has_buildtool_depend_on_catkin():
+ raise InvalidMetapackage('No buildtool dependency on catkin', path, package)
+ # Does it have only run depends, else raise
+ if package.has_invalid_metapackage_dependencies():
+ raise InvalidMetapackage(
+ 'Has build, buildtool, and/or test depends, but only run depends are allowed (except buildtool catkin)',
+ path, package)
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""Library for parsing package.xml and providing an object representation."""
+
+from __future__ import print_function
+
+from copy import deepcopy
+import os
+import re
+import sys
+import xml.dom.minidom as dom
+
+from catkin_pkg.condition import evaluate_condition
+
+PACKAGE_MANIFEST_FILENAME = 'package.xml'
+PACKAGE_MANIFEST_SCHEMA_URLS = [
+ 'http://download.ros.org/schema/package_format1.xsd',
+ 'http://download.ros.org/schema/package_format2.xsd',
+ 'http://download.ros.org/schema/package_format3.xsd',
+]
+
+
+class Package(object):
+ """Object representation of a package manifest file."""
+
+ __slots__ = [
+ 'package_format',
+ 'name',
+ 'version',
+ 'version_compatibility',
+ 'description',
+ 'maintainers',
+ 'licenses',
+ 'urls',
+ 'authors',
+ 'build_depends',
+ 'buildtool_depends',
+ 'build_export_depends',
+ 'buildtool_export_depends',
+ 'exec_depends',
+ 'test_depends',
+ 'doc_depends',
+ 'conflicts',
+ 'replaces',
+ 'group_depends',
+ 'member_of_groups',
+ 'exports',
+ 'filename'
+ ]
+
+ def __init__(self, filename=None, **kwargs):
+ """
+ Initialize Package.
+
+ :param filename: location of package.xml. Necessary if
+ converting ``${prefix}`` in ``<export>`` values, ``str``.
+ """
+        # initialize all slots ending with "s" with lists, all others with plain values
+ for attr in self.__slots__:
+ if attr.endswith('s'):
+ value = list(kwargs[attr]) if attr in kwargs else []
+ setattr(self, attr, value)
+ else:
+ value = kwargs[attr] if attr in kwargs else None
+ setattr(self, attr, value)
+ if 'depends' in kwargs:
+ for d in kwargs['depends']:
+ for slot in [self.build_depends, self.build_export_depends, self.exec_depends]:
+ if d not in slot:
+ slot.append(deepcopy(d))
+ del kwargs['depends']
+ if 'run_depends' in kwargs:
+ for d in kwargs['run_depends']:
+ for slot in [self.build_export_depends, self.exec_depends]:
+ if d not in slot:
+ slot.append(deepcopy(d))
+ del kwargs['run_depends']
+ self.filename = filename
+ self.licenses = [license_ if isinstance(license_, License) else License(license_) for license_ in self.licenses]
+ # verify that no unknown keywords are passed
+ unknown = set(kwargs.keys()).difference(self.__slots__)
+ if unknown:
+ raise TypeError('Unknown properties: %s' % ', '.join(unknown))
+
+ def __getattr__(self, name):
+ if name == 'run_depends':
+ # merge different dependencies if they are not exactly equal
+ # potentially having the same dependency name multiple times with different attributes
+            run_depends = []
+            for d in self.exec_depends + self.build_export_depends:
+                if d not in run_depends:
+                    run_depends.append(deepcopy(d))
+ return run_depends
+ raise AttributeError(name)
+
+ def __getitem__(self, key):
+ if key in self.__slots__ + ['run_depends']:
+ return getattr(self, key)
+ raise KeyError('Unknown key "%s"' % key)
+
+ def __iter__(self):
+ for slot in self.__slots__:
+ yield slot
+
+ def __str__(self):
+ data = {}
+ for attr in self.__slots__:
+ data[attr] = getattr(self, attr)
+ return str(data)
+
+ def has_buildtool_depend_on_catkin(self):
+ """
+ Return True if this Package buildtool depends on catkin, otherwise False.
+
+ :returns: True if the given package buildtool depends on catkin
+ :rtype: bool
+ """
+ return 'catkin' in (d.name for d in self.buildtool_depends)
+
+ def get_build_type(self):
+ """
+ Return value of export/build_type element, or 'catkin' if unspecified.
+
+ :returns: package build type
+ :rtype: str
+ :raises: :exc:`InvalidPackage`
+ """
+ # for backward compatibility a build type without an evaluated
+ # condition is still being considered (i.e. evaluated_condition is None)
+ build_type_exports = [
+ e.content for e in self.exports
+ if e.tagname == 'build_type' and e.evaluated_condition is not False]
+ if not build_type_exports:
+ return 'catkin'
+ if len(build_type_exports) == 1:
+ return build_type_exports[0]
+ raise InvalidPackage('Only one <build_type> element is permitted.', self.filename)
+
+ def has_invalid_metapackage_dependencies(self):
+ """
+ Return True if this package has invalid dependencies for a metapackage.
+
+    This is defined by REP-0127 as any non-run_depends dependencies other than a buildtool_depend on catkin.
+
+ :returns: True if the given package has any invalid dependencies, otherwise False
+ :rtype: bool
+ """
+ buildtool_depends = [d.name for d in self.buildtool_depends if d.name != 'catkin']
+ return len(self.build_depends + buildtool_depends + self.test_depends) > 0
+
+ def is_metapackage(self):
+ """
+        Return True if this package is a metapackage, otherwise False.
+
+ :returns: True if metapackage, else False
+ :rtype: bool
+ """
+ return 'metapackage' in (e.tagname for e in self.exports)
+
+ def evaluate_conditions(self, context):
+ """
+ Evaluate the conditions of all dependencies and memberships.
+
+ :param context: A dictionary with key value pairs to replace variables
+ starting with $ in the condition.
+ :raises: :exc:`ValueError` if any condition fails to parse
+ """
+ for attr in (
+ 'build_depends',
+ 'buildtool_depends',
+ 'build_export_depends',
+ 'buildtool_export_depends',
+ 'exec_depends',
+ 'test_depends',
+ 'doc_depends',
+ 'conflicts',
+ 'replaces',
+ 'group_depends',
+ 'member_of_groups',
+ 'exports',
+ ):
+ conditionals = getattr(self, attr)
+ for conditional in conditionals:
+ conditional.evaluate_condition(context)
+
+ def validate(self, warnings=None):
+ """
+ Make sure all standards for packages are met.
+
+        :param warnings: if None, print warnings to stderr; otherwise append them to this list
+ :raises InvalidPackage: in case validation fails
+ """
+ errors = []
+ new_warnings = []
+
+ if self.package_format:
+ if not re.match('^[1-9][0-9]*$', str(self.package_format)):
+ errors.append('The "format" attribute of the package must contain a positive integer if present')
+
+ if not self.name:
+ errors.append('Package name must not be empty')
+ # accepting upper case letters and hyphens only for backward compatibility
+ if not re.match('^[a-zA-Z0-9][a-zA-Z0-9_-]*$', self.name):
+ errors.append('Package name "%s" does not follow naming conventions' % self.name)
+ else:
+ if not re.match('^[a-z][a-z0-9_-]*$', self.name):
+ new_warnings.append(
+ 'Package name "%s" does not follow the naming conventions. It should start with '
+ 'a lower case letter and only contain lower case letters, digits, underscores, and dashes.' % self.name)
+
+ version_regexp = r'^[0-9]+\.[0-9]+\.[0-9]+$'
+ if not self.version:
+ errors.append('Package version must not be empty')
+ elif not re.match(version_regexp, self.version):
+ errors.append('Package version "%s" does not follow version conventions' % self.version)
+ elif not re.match(r'^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)$', self.version):
+ new_warnings.append('Package "%s" does not follow the version conventions. It should not contain leading zeros (unless the number is 0).' % self.name)
+ if self.version_compatibility:
+ if not re.match(version_regexp, self.version_compatibility):
+ errors.append(
+ "Package compatibility version '%s' does not follow "
+ 'version conventions' % self.version_compatibility)
+
+ if not self.description:
+ errors.append('Package description must not be empty')
+
+ if not self.maintainers:
+ errors.append("Package '{0}' must declare at least one maintainer".format(self.name))
+ for maintainer in self.maintainers:
+ try:
+ maintainer.validate()
+ except InvalidPackage as e:
+ errors.append(e.msg)
+ if not maintainer.email:
+ errors.append('Maintainers must have an email address')
+
+ if not self.licenses:
+ errors.append('The package node must contain at least one "license" tag')
+ if [license_ for license_ in self.licenses if not license_.strip()]:
+ errors.append('The license tag must neither be empty nor only contain whitespaces')
+
+ if self.authors is not None:
+ for author in self.authors:
+ try:
+ author.validate()
+ except InvalidPackage as e:
+ errors.append(e.msg)
+
+ dep_types = {
+ 'build': self.build_depends,
+ 'buildtool': self.buildtool_depends,
+ 'build_export': self.build_export_depends,
+ 'buildtool_export': self.buildtool_export_depends,
+ 'exec': self.exec_depends,
+ 'test': self.test_depends,
+ 'doc': self.doc_depends
+ }
+ for dep_type, depends in dep_types.items():
+ for depend in depends:
+ if depend.name == self.name:
+ errors.append('The package "%s" must not "%s_depend" on a package with the same name as this package' % (self.name, dep_type))
+
+ if (
+ set([d.name for d in self.group_depends]) &
+ set([g.name for g in self.member_of_groups])
+ ):
+ errors.append(
+ "The package must not 'group_depend' on a package which it "
+ 'also declares to be a member of')
+
+ if self.is_metapackage():
+ if not self.has_buildtool_depend_on_catkin():
+ # TODO escalate to error in the future, or use metapackage.validate_metapackage
+ new_warnings.append('Metapackage "%s" must buildtool_depend on catkin.' % self.name)
+ if self.has_invalid_metapackage_dependencies():
+ new_warnings.append('Metapackage "%s" should not have other dependencies besides a '
+ 'buildtool_depend on catkin and %s.' %
+ (self.name, 'run_depends' if self.package_format == 1 else 'exec_depends'))
+
+ for warning in new_warnings:
+ if warnings is None:
+ print('WARNING: ' + warning, file=sys.stderr)
+ elif warning not in warnings:
+ warnings.append(warning)
+
+ if errors:
+ raise InvalidPackage('\n'.join(errors), self.filename)
+
+
+class Dependency(object):
+ __slots__ = [
+ 'name',
+ 'version_lt', 'version_lte', 'version_eq', 'version_gte', 'version_gt',
+ 'condition',
+ 'evaluated_condition',
+ ]
+
+ def __init__(self, name, **kwargs):
+ self.evaluated_condition = None
+ for attr in self.__slots__:
+ value = kwargs[attr] if attr in kwargs else None
+ setattr(self, attr, value)
+ self.name = name
+ # verify that no unknown keywords are passed
+ unknown = set(kwargs.keys()).difference(self.__slots__)
+ if unknown:
+ raise TypeError('Unknown properties: %s' % ', '.join(unknown))
+
+ def __eq__(self, other):
+ if not isinstance(other, Dependency):
+ return False
+ return all(getattr(self, attr) == getattr(other, attr) for attr in self.__slots__ if attr != 'evaluated_condition')
+
+ def __hash__(self):
+ # exclude 'evaluated_condition' so objects that compare equal via __eq__ also hash equal
+ return hash(tuple(getattr(self, slot) for slot in self.__slots__ if slot != 'evaluated_condition'))
+
+ def __str__(self):
+ return self.name
+
+ def __repr__(self):
+ kv = []
+ for slot in self.__slots__:
+ attr = getattr(self, slot, None)
+ if attr is not None:
+ kv.append('{}={!r}'.format(slot, attr))
+ return '{}({})'.format(self.__class__.__name__, ', '.join(kv))
+
+ def evaluate_condition(self, context):
+ """
+ Evaluate the condition.
+
+ The result is also stored in the member variable `evaluated_condition`.
+
+ :param context: A dictionary with key value pairs to replace variables
+ starting with $ in the condition.
+
+ :returns: True if the condition evaluates to True, else False
+ :raises: :exc:`ValueError` if the condition fails to parse
+ """
+ self.evaluated_condition = evaluate_condition(self.condition, context)
+ return self.evaluated_condition
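+
+# Illustrative usage of conditional dependencies (a sketch; the context keys are
+# whatever variables the condition references, 'ROS_VERSION' is just an example):
+#   >>> dep = Dependency('foo', condition='$ROS_VERSION == 2')
+#   >>> dep.evaluate_condition({'ROS_VERSION': '2'})
+#   True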
+
+
+class Export(object):
+ __slots__ = ['tagname', 'attributes', 'content', 'evaluated_condition']
+
+ def __init__(self, tagname, content=None):
+ self.tagname = tagname
+ self.attributes = {}
+ self.content = content
+ self.evaluated_condition = None
+
+ def __str__(self):
+ txt = '<%s' % self.tagname
+ for key in sorted(self.attributes.keys()):
+ txt += ' %s="%s"' % (key, self.attributes[key])
+ if self.content:
+ txt += '>%s</%s>' % (self.content, self.tagname)
+ else:
+ txt += '/>'
+ return txt
+
+ def evaluate_condition(self, context):
+ """
+ Evaluate the condition.
+
+ The result is also stored in the member variable `evaluated_condition`.
+
+ :param context: A dictionary with key value pairs to replace variables
+ starting with $ in the condition.
+
+ :returns: True if the condition evaluates to True, else False
+ :raises: :exc:`ValueError` if the condition fails to parse
+ """
+ self.evaluated_condition = evaluate_condition(self.attributes.get('condition'), context)
+ return self.evaluated_condition
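+
+# Illustrative serialization (a sketch; 'architecture_independent' is an example tag):
+#   >>> export = Export('architecture_independent')
+#   >>> str(export)
+#   '<architecture_independent/>'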
+
+
+# Subclassing ``str`` to keep backward compatibility.
+class License(str):
+
+ def __new__(cls, value, file_=None):
+ obj = str.__new__(cls, str(value))
+ obj.file = file_
+ return obj
+
+
+class Person(object):
+ __slots__ = ['name', 'email']
+
+ def __init__(self, name, email=None):
+ self.name = name
+ self.email = email
+
+ def __str__(self):
+ name = self.name
+ if not isinstance(name, str):
+ name = name.encode('utf-8')
+ if self.email is not None:
+ return '%s <%s>' % (name, self.email)
+ else:
+ return '%s' % name
+
+ def validate(self):
+ if self.email is None:
+ return
+ if not re.match(r'^[-a-zA-Z0-9_%+]+(\.[-a-zA-Z0-9_%+]+)*@[-a-zA-Z0-9%]+(\.[-a-zA-Z0-9%]+)*\.[a-zA-Z]{2,}$', self.email):
+ raise InvalidPackage('Invalid email "%s" for person "%s"' % (self.email, self.name))
+
+
+class Url(object):
+ __slots__ = ['url', 'type']
+
+ def __init__(self, url, type_=None):
+ self.url = url
+ self.type = type_
+
+ def __str__(self):
+ return self.url
+
+
+def parse_package_for_distutils(path=None):
+ print('WARNING: %s/setup.py: catkin_pkg.package.parse_package_for_distutils() is deprecated. Please use catkin_pkg.python_setup.generate_distutils_setup(**kwargs) instead.' %
+ os.path.basename(os.path.abspath('.')))
+ from .python_setup import generate_distutils_setup
+ data = {}
+ if path is not None:
+ data['package_xml_path'] = path
+ return generate_distutils_setup(**data)
+
+
+class InvalidPackage(Exception):
+
+ def __init__(self, msg, package_path=None):
+ self.msg = msg
+ self.package_path = package_path
+ Exception.__init__(self, self.msg)
+
+ def __str__(self):
+ result = '' if not self.package_path else "Error(s) in package '%s':\n" % self.package_path
+ return result + Exception.__str__(self)
+
+
+def package_exists_at(path):
+ """
+ Check that a package exists at the given path.
+
+ :param path: path to a package
+ :type path: str
+ :returns: True if package exists in given path, else False
+ :rtype: bool
+ """
+ return os.path.isdir(path) and os.path.isfile(os.path.join(path, PACKAGE_MANIFEST_FILENAME))
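+
+# Illustrative usage (hypothetical path; any directory containing a package.xml qualifies):
+#   >>> package_exists_at('/opt/ros/noetic/share/roscpp')
+#   True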
+
+
+def _get_package_xml(path):
+ """
+ Get xml of package manifest.
+
+ :param path: The path of the package.xml file; it may or may not
+ include the filename
+
+ :returns: a tuple with the xml as well as the path of the read file
+ :raises: :exc:`IOError`
+ """
+ if os.path.isfile(path):
+ filename = path
+ elif package_exists_at(path):
+ filename = os.path.join(path, PACKAGE_MANIFEST_FILENAME)
+ if not os.path.isfile(filename):
+ raise IOError('Directory "%s" does not contain a "%s"' % (path, PACKAGE_MANIFEST_FILENAME))
+ else:
+ raise IOError('Path "%s" is neither a directory containing a "%s" file nor a file' % (path, PACKAGE_MANIFEST_FILENAME))
+
+ # Force utf8 encoding for python3.
+ # This way unicode files can still be processed on non-unicode locales.
+ kwargs = {}
+ if sys.version_info[0] >= 3:
+ kwargs['encoding'] = 'utf8'
+
+ with open(filename, 'r', **kwargs) as f:
+ return f.read(), filename
+
+
+def has_ros_schema_reference(path):
+ """
+ Check if the XML file contains a processing instruction referencing a ROS package manifest schema.
+
+ :param path: The path of the package.xml file; it may or may not
+ include the filename
+ :type path: str
+ :returns: True if it contains the known reference, else False
+ :rtype: bool
+ :raises: :exc:`IOError`
+ """
+ xml, _ = _get_package_xml(path)
+ return has_ros_schema_reference_string(xml)
+
+
+def has_ros_schema_reference_string(data):
+ """
+ Check if the XML data contains a processing instruction referencing a ROS package manifest schema.
+
+ :param data: package.xml contents
+ :type data: str
+ :returns: True if it contains the known reference, else False
+ :rtype: bool
+ """
+ if sys.version_info[0] == 2 and not isinstance(data, str):
+ data = data.encode('utf-8')
+ try:
+ root = dom.parseString(data)
+ except Exception:
+ # invalid XML
+ return False
+
+ for child in root.childNodes:
+ if child.nodeType == child.PROCESSING_INSTRUCTION_NODE:
+ if child.target == 'xml-model':
+ # extract schema url from "xml-model" processing instruction
+ schema_url = re.search(r'href="([A-Za-z0-9\._/:]*)"', child.data).group(1)
+ if schema_url in PACKAGE_MANIFEST_SCHEMA_URLS:
+ return True
+
+ return False
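+
+# Illustrative usage, assuming the format-3 schema URL is among the known
+# PACKAGE_MANIFEST_SCHEMA_URLS:
+#   >>> has_ros_schema_reference_string(
+#   ...     '<?xml-model href="http://download.ros.org/schema/package_format3.xsd"'
+#   ...     ' schematypens="http://www.w3.org/2001/XMLSchema"?><package/>')
+#   True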
+
+
+def parse_package(path, warnings=None):
+ """
+ Parse package manifest.
+
+ :param path: The path of the package.xml file; it may or may not
+ include the filename
+ :param warnings: Print warnings if None or return them in the given list
+
+ :returns: return :class:`Package` instance, populated with parsed fields
+ :raises: :exc:`InvalidPackage`
+ :raises: :exc:`IOError`
+ """
+ xml, filename = _get_package_xml(path)
+ return parse_package_string(xml, filename, warnings=warnings)
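+
+# Illustrative usage (assumes './my_pkg' contains a valid package.xml; values are examples):
+#   >>> pkg = parse_package('./my_pkg')
+#   >>> pkg.name, pkg.version
+#   ('my_pkg', '0.1.0')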
+
+
+def _check_known_attributes(node, known):
+ if node.hasAttributes():
+ attrs = map(str, node.attributes.keys())
+ # colon is the namespace separator in attributes, xmlns can be added to any tag
+ unknown_attrs = [attr for attr in attrs if not (attr in known or attr == 'xmlns' or ':' in attr)]
+ if unknown_attrs:
+ return ['The "%s" tag must not have the following attributes: %s' % (node.tagName, ', '.join(unknown_attrs))]
+ return []
+
+
+def parse_package_string(data, filename=None, warnings=None):
+ """
+ Parse package.xml string contents.
+
+ :param data: package.xml contents, ``str``
+ :param filename: full file path for debugging, ``str``
+ :param warnings: Print warnings if None or return them in the given list
+ :returns: return parsed :class:`Package`
+ :raises: :exc:`InvalidPackage`
+ """
+ if sys.version_info[0] == 2 and not isinstance(data, str):
+ data = data.encode('utf-8')
+ try:
+ root = dom.parseString(data)
+ except Exception as ex:
+ raise InvalidPackage('The manifest contains invalid XML:\n%s' % ex, filename)
+
+ pkg = Package(filename)
+
+ # verify unique root node
+ nodes = _get_nodes(root, 'package')
+ if len(nodes) != 1:
+ raise InvalidPackage('The manifest must contain a single "package" root tag', filename)
+ root = nodes[0]
+
+ # format attribute
+ value = _get_node_attr(root, 'format', default=1)
+ pkg.package_format = int(value)
+ assert pkg.package_format in (1, 2, 3), \
+ "Unable to handle package.xml format version '%d', please update catkin_pkg " \
+ '(e.g. on Ubuntu/Debian use: sudo apt-get update && sudo apt-get install --only-upgrade python-catkin-pkg)' % pkg.package_format
+
+ # name
+ pkg.name = _get_node_value(_get_node(root, 'name', filename))
+
+ # version and optional compatibility
+ version_node = _get_node(root, 'version', filename)
+ pkg.version = _get_node_value(version_node)
+ pkg.version_compatibility = _get_node_attr(
+ version_node, 'compatibility', default=None)
+
+ # description
+ pkg.description = _get_node_value(_get_node(root, 'description', filename), allow_xml=True, apply_str=False)
+
+ # at least one maintainer, all must have email
+ maintainers = _get_nodes(root, 'maintainer')
+ for node in maintainers:
+ pkg.maintainers.append(Person(
+ _get_node_value(node, apply_str=False),
+ _get_node_attr(node, 'email')
+ ))
+
+ # urls with optional type
+ urls = _get_nodes(root, 'url')
+ for node in urls:
+ pkg.urls.append(Url(
+ _get_node_value(node),
+ _get_node_attr(node, 'type', default='website')
+ ))
+
+ # authors with optional email
+ authors = _get_nodes(root, 'author')
+ for node in authors:
+ pkg.authors.append(Person(
+ _get_node_value(node, apply_str=False),
+ _get_node_attr(node, 'email', default=None)
+ ))
+
+ # at least one license
+ licenses = _get_nodes(root, 'license')
+ for node in licenses:
+ pkg.licenses.append(License(
+ _get_node_value(node),
+ _get_node_attr(node, 'file', default=None)
+ ))
+
+ errors = []
+ # dependencies and relationships
+ pkg.build_depends = _get_dependencies(root, 'build_depend')
+ pkg.buildtool_depends = _get_dependencies(root, 'buildtool_depend')
+ if pkg.package_format == 1:
+ run_depends = _get_dependencies(root, 'run_depend')
+ for d in run_depends:
+ pkg.build_export_depends.append(deepcopy(d))
+ pkg.exec_depends.append(deepcopy(d))
+ if pkg.package_format != 1:
+ pkg.build_export_depends = _get_dependencies(root, 'build_export_depend')
+ pkg.buildtool_export_depends = _get_dependencies(root, 'buildtool_export_depend')
+ pkg.exec_depends = _get_dependencies(root, 'exec_depend')
+ depends = _get_dependencies(root, 'depend')
+ for dep in depends:
+ # check for collisions with specific dependencies
+ same_build_depends = ['build_depend' for d in pkg.build_depends if d == dep]
+ same_build_export_depends = ['build_export_depend' for d in pkg.build_export_depends if d == dep]
+ same_exec_depends = ['exec_depend' for d in pkg.exec_depends if d == dep]
+ if same_build_depends or same_build_export_depends or same_exec_depends:
+ errors.append("The generic dependency on '%s' is redundant with: %s" % (dep.name, ', '.join(same_build_depends + same_build_export_depends + same_exec_depends)))
+ # only append non-duplicates
+ if not same_build_depends:
+ pkg.build_depends.append(deepcopy(dep))
+ if not same_build_export_depends:
+ pkg.build_export_depends.append(deepcopy(dep))
+ if not same_exec_depends:
+ pkg.exec_depends.append(deepcopy(dep))
+ pkg.doc_depends = _get_dependencies(root, 'doc_depend')
+ pkg.test_depends = _get_dependencies(root, 'test_depend')
+ pkg.conflicts = _get_dependencies(root, 'conflict')
+ pkg.replaces = _get_dependencies(root, 'replace')
+
+ # group dependencies and memberships
+ pkg.group_depends = _get_group_dependencies(root, 'group_depend')
+ pkg.member_of_groups = _get_group_memberships(root, 'member_of_group')
+
+ if pkg.package_format == 1:
+ for test_depend in pkg.test_depends:
+ same_build_depends = ['build_depend' for d in pkg.build_depends if d == test_depend]
+ same_run_depends = ['run_depend' for d in pkg.run_depends if d == test_depend]
+ if same_build_depends or same_run_depends:
+ errors.append('The test dependency on "%s" is redundant with: %s' % (test_depend.name, ', '.join(same_build_depends + same_run_depends)))
+
+ # exports
+ export_node = _get_optional_node(root, 'export', filename)
+ if export_node is not None:
+ exports = []
+ for node in [n for n in export_node.childNodes if n.nodeType == n.ELEMENT_NODE]:
+ export = Export(str(node.tagName), _get_node_value(node, allow_xml=True))
+ for key, value in node.attributes.items():
+ export.attributes[str(key)] = str(value)
+ exports.append(export)
+ pkg.exports = exports
+
+ # verify that no unsupported tags and attributes are present
+ errors += _check_known_attributes(root, ['format'])
+ depend_attributes = ['version_lt', 'version_lte', 'version_eq', 'version_gte', 'version_gt']
+ if pkg.package_format > 2:
+ depend_attributes.append('condition')
+ known = {
+ 'name': [],
+ 'version': ['compatibility'],
+ 'description': [],
+ 'maintainer': ['email'],
+ 'license': [],
+ 'url': ['type'],
+ 'author': ['email'],
+ 'build_depend': depend_attributes,
+ 'buildtool_depend': depend_attributes,
+ 'test_depend': depend_attributes,
+ 'conflict': depend_attributes,
+ 'replace': depend_attributes,
+ 'export': [],
+ }
+ if pkg.package_format == 1:
+ known.update({
+ 'run_depend': depend_attributes,
+ })
+ if pkg.package_format != 1:
+ known.update({
+ 'build_export_depend': depend_attributes,
+ 'buildtool_export_depend': depend_attributes,
+ 'depend': depend_attributes,
+ 'exec_depend': depend_attributes,
+ 'doc_depend': depend_attributes,
+ })
+ if pkg.package_format > 2:
+ known.update({
+ 'group_depend': ['condition'],
+ 'member_of_group': ['condition']
+ })
+ if pkg.package_format > 2:
+ known.update({
+ 'license': ['file'],
+ })
+ nodes = [n for n in root.childNodes if n.nodeType == n.ELEMENT_NODE]
+ unknown_tags = set([n.tagName for n in nodes if n.tagName not in known.keys()])
+ if unknown_tags:
+ errors.append('The manifest of package "%s" (with format version %d) must not contain the following tags: %s' % (pkg.name, pkg.package_format, ', '.join(unknown_tags)))
+ if 'run_depend' in unknown_tags and pkg.package_format >= 2:
+ errors.append('Please replace <run_depend> tags with <exec_depend> tags.')
+ elif 'exec_depend' in unknown_tags and pkg.package_format < 2:
+ errors.append('Either update to a newer format or replace <exec_depend> tags with <run_depend> tags.')
+ for node in [n for n in nodes if n.tagName in known.keys()]:
+ errors += _check_known_attributes(node, known[node.tagName])
+ if node.tagName not in ['description', 'export']:
+ subnodes = [n for n in node.childNodes if n.nodeType == n.ELEMENT_NODE]
+ if subnodes:
+ errors.append('The "%s" tag must not contain the following children: %s' % (node.tagName, ', '.join([n.tagName for n in subnodes])))
+
+ if errors:
+ raise InvalidPackage('Error(s):%s' % (''.join(['\n- %s' % e for e in errors])), filename)
+
+ pkg.validate(warnings=warnings)
+
+ return pkg
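+
+# Illustrative usage with a minimal, valid format-2 manifest (a sketch):
+#   >>> pkg = parse_package_string(
+#   ...     '<package format="2"><name>foo</name><version>0.1.0</version>'
+#   ...     '<description>Foo</description>'
+#   ...     '<maintainer email="me@example.com">Me</maintainer>'
+#   ...     '<license>BSD</license></package>')
+#   >>> pkg.name
+#   'foo'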
+
+
+def _get_nodes(parent, tagname):
+ return [n for n in parent.childNodes if n.nodeType == n.ELEMENT_NODE and n.tagName == tagname]
+
+
+def _get_node(parent, tagname, filename):
+ nodes = _get_nodes(parent, tagname)
+ if len(nodes) != 1:
+ raise InvalidPackage('The manifest must contain exactly one "%s" tag' % tagname, filename)
+ return nodes[0]
+
+
+def _get_optional_node(parent, tagname, filename):
+ nodes = _get_nodes(parent, tagname)
+ if len(nodes) > 1:
+ raise InvalidPackage('The manifest must not contain more than one "%s" tag' % tagname, filename)
+ return nodes[0] if nodes else None
+
+
+def _get_node_value(node, allow_xml=False, apply_str=True):
+ if allow_xml:
+ value = (''.join([n.toxml() for n in node.childNodes])).strip(' \n\r\t')
+ else:
+ value = (''.join([n.data for n in node.childNodes if n.nodeType == n.TEXT_NODE])).strip(' \n\r\t')
+ if apply_str:
+ value = str(value)
+ return value
+
+
+def _get_node_attr(node, attr, default=False):
+ """:param default: False means value is required."""
+ if node.hasAttribute(attr):
+ return str(node.getAttribute(attr))
+ if default is False:
+ raise InvalidPackage('The "%s" tag must have the attribute "%s"' % (node.tagName, attr))
+ return default
+
+
+def _get_dependencies(parent, tagname):
+ depends = []
+ for node in _get_nodes(parent, tagname):
+ depend = Dependency(_get_node_value(node))
+ for attr in ('version_lt', 'version_lte', 'version_eq', 'version_gte', 'version_gt', 'condition'):
+ setattr(depend, attr, _get_node_attr(node, attr, None))
+ depends.append(depend)
+ return depends
+
+
+def _get_group_dependencies(parent, tagname):
+ from .group_dependency import GroupDependency
+ depends = []
+ for node in _get_nodes(parent, tagname):
+ depends.append(
+ GroupDependency(
+ _get_node_value(node),
+ condition=_get_node_attr(node, 'condition', default=None)))
+ return depends
+
+
+def _get_group_memberships(parent, tagname):
+ from .group_membership import GroupMembership
+ memberships = []
+ for node in _get_nodes(parent, tagname):
+ memberships.append(
+ GroupMembership(
+ _get_node_value(node),
+ condition=_get_node_attr(node, 'condition', default=None)))
+ return memberships
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import print_function
+
+import getpass
+import os
+import string
+import sys
+
+from catkin_pkg.cmake import configure_file
+from catkin_pkg.cmake import get_metapackage_cmake_template_path
+from catkin_pkg.package import Dependency
+from catkin_pkg.package import Package
+from catkin_pkg.package import PACKAGE_MANIFEST_FILENAME
+from catkin_pkg.package import Person
+
+
+class PackageTemplate(Package):
+
+ def __init__(self, catkin_deps=None, system_deps=None, boost_comps=None, **kwargs):
+ super(PackageTemplate, self).__init__(**kwargs)
+ self.catkin_deps = catkin_deps or []
+ self.system_deps = system_deps or []
+ self.boost_comps = boost_comps or []
+ self.validate()
+
+ @staticmethod
+ def _create_package_template(package_name, description=None, licenses=None,
+ maintainer_names=None, author_names=None,
+ version=None, catkin_deps=None, system_deps=None,
+ boost_comps=None):
+ """
+ Alternative factory method mapping CLI args to arguments for the Package class.
+
+ :param package_name:
+ :param description:
+ :param licenses:
+ :param maintainer_names:
+ :param author_names:
+ :param version:
+ :param catkin_deps:
+ :param system_deps:
+ :param boost_comps:
+ """
+ # Sort so they are alphabetical
+ licenses = list(licenses or ['TODO'])
+ licenses.sort()
+ if not maintainer_names:
+ maintainer_names = [getpass.getuser()]
+ maintainer_names = list(maintainer_names or [])
+ maintainer_names.sort()
+ maintainers = []
+ for maintainer_name in maintainer_names:
+ maintainers.append(
+ Person(maintainer_name,
+ '%s@todo.todo' % maintainer_name.split()[-1])
+ )
+ author_names = list(author_names or [])
+ author_names.sort()
+ authors = []
+ for author_name in author_names:
+ authors.append(Person(author_name))
+ catkin_deps = list(catkin_deps or [])
+ catkin_deps.sort()
+ pkg_catkin_deps = []
+ depends = []
+ build_depends = []
+ exec_depends = []
+ buildtool_depends = [Dependency('catkin')]
+ for dep in list(catkin_deps): # iterate over a copy since 'catkin' entries are removed
+ if dep.lower() == 'catkin':
+ catkin_deps.remove(dep)
+ continue
+ if dep.lower() == 'genmsg':
+ sys.stderr.write('WARNING: Packages with messages or services should not depend on genmsg, but on message_generation and message_runtime\n')
+ buildtool_depends.append(Dependency('genmsg'))
+ continue
+ if dep.lower() == 'message_generation':
+ if 'message_runtime' not in catkin_deps:
+ sys.stderr.write('WARNING: Packages with messages or services should depend on both message_generation and message_runtime\n')
+ build_depends.append(Dependency('message_generation'))
+ continue
+ if dep.lower() == 'message_runtime':
+ if 'message_generation' not in catkin_deps:
+ sys.stderr.write('WARNING: Packages with messages or services should depend on both message_generation and message_runtime\n')
+ exec_depends.append(Dependency('message_runtime'))
+ continue
+ pkg_catkin_deps.append(Dependency(dep))
+ for dep in pkg_catkin_deps:
+ depends.append(dep)
+ if boost_comps:
+ if not system_deps:
+ system_deps = ['boost']
+ elif 'boost' not in system_deps:
+ system_deps.append('boost')
+ for dep in system_deps or []:
+ if not dep.lower().startswith('python-'):
+ depends.append(Dependency(dep))
+ else:
+ exec_depends.append(Dependency(dep))
+ package_temp = PackageTemplate(
+ name=package_name,
+ version=version or '0.0.0',
+ description=description or 'The %s package' % package_name,
+ buildtool_depends=buildtool_depends,
+ build_depends=build_depends,
+ depends=depends,
+ exec_depends=exec_depends,
+ catkin_deps=catkin_deps,
+ system_deps=system_deps,
+ boost_comps=boost_comps,
+ licenses=licenses,
+ authors=authors,
+ maintainers=maintainers,
+ urls=[])
+ return package_temp
+
+
+def read_template_file(filename, rosdistro):
+ template_dir = os.path.join(os.path.dirname(__file__), 'templates')
+ templates = []
+ templates.append(os.path.join(template_dir, rosdistro, '%s.in' % filename))
+ templates.append(os.path.join(template_dir, '%s.in' % filename))
+ for template in templates:
+ if os.path.isfile(template):
+ with open(template, 'r') as fhand:
+ template_contents = fhand.read()
+ return template_contents
+ raise IOError(
+ 'Could not read template for ROS distro '
+ "'{}' at '{}': ".format(rosdistro, ', '.join(templates)) +
+ 'no such file or directory'
+ )
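+
+# Illustrative usage (assumes templates for the given ROS distro are installed
+# alongside this module; 'melodic' is just an example):
+#   >>> contents = read_template_file('CMakeLists.txt', 'melodic')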
+
+
+def _safe_write_files(newfiles, target_dir):
+ """
+ Write file contents to target_dir/filepath for all entries of newfiles.
+
+ Aborts early if a file already exists at any destination path or where a directory needs to be created
+
+ :param newfiles: a dict {filepath: contents}
+ :param target_dir: a string
+ """
+ # first check no filename conflict exists
+ for filename in newfiles:
+ target_file = os.path.join(target_dir, filename)
+ if os.path.exists(target_file):
+ raise ValueError('File exists: %s' % target_file)
+ dirname = os.path.dirname(target_file)
+ while dirname != target_dir:
+ if os.path.isfile(dirname):
+ raise ValueError('Cannot create directory, file exists: %s' %
+ dirname)
+ dirname = os.path.dirname(dirname)
+
+ for filename, content in newfiles.items():
+ target_file = os.path.join(target_dir, filename)
+ dirname = os.path.dirname(target_file)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ with open(target_file, 'ab') as fhand:
+ fhand.write(content.encode())
+ print('Created file %s' % os.path.relpath(target_file, os.path.dirname(target_dir)))
+
+
+def create_package_files(target_path, package_template, rosdistro,
+ newfiles=None, meta=False):
+ """
+ Create several files from templates to start a new package.
+
+ :param target_path: parent folder where to create the package
+ :param package_template: contains the required information
+ :param rosdistro: name of the distro to look up respective template
+ :param newfiles: dict {filepath: contents} for additional files to write
+ :param meta: True to create a metapackage instead of a regular package
+ """
+ if newfiles is None:
+ newfiles = {}
+ # allow the caller to replace the default templates when the file path matches
+ manifest_path = os.path.join(target_path, PACKAGE_MANIFEST_FILENAME)
+ if manifest_path not in newfiles:
+ newfiles[manifest_path] = \
+ create_package_xml(package_template, rosdistro, meta=meta)
+ cmake_path = os.path.join(target_path, 'CMakeLists.txt')
+ if cmake_path not in newfiles:
+ newfiles[cmake_path] = create_cmakelists(package_template, rosdistro, meta=meta)
+ _safe_write_files(newfiles, target_path)
+ if 'roscpp' in package_template.catkin_deps:
+ fname = os.path.join(target_path, 'include', package_template.name)
+ os.makedirs(fname)
+ print('Created folder %s' % os.path.relpath(fname, os.path.dirname(target_path)))
+ if 'roscpp' in package_template.catkin_deps or \
+ 'rospy' in package_template.catkin_deps:
+ fname = os.path.join(target_path, 'src')
+ os.makedirs(fname)
+ print('Created folder %s' % os.path.relpath(fname, os.path.dirname(target_path)))
+
+
+class CatkinTemplate(string.Template):
+ """subclass to use @ instead of $ as markers."""
+
+ delimiter = '@'
+ escape = '@'
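+
+# Illustrative usage ('@@' yields a literal '@', per string.Template semantics):
+#   >>> CatkinTemplate('project(@name)').substitute({'name': 'foo'})
+#   'project(foo)'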
+
+
+def create_cmakelists(package_template, rosdistro, meta=False):
+ """Create CMake file contents from the template.
+
+ :param package_template: contains the required information
+ :returns: file contents as string
+ """
+ if meta:
+ template_path = get_metapackage_cmake_template_path()
+ temp_dict = {
+ 'name': package_template.name,
+ 'metapackage_arguments': '',
+ }
+ return configure_file(template_path, temp_dict)
+ else:
+ cmakelists_txt_template = read_template_file('CMakeLists.txt', rosdistro)
+ ctemp = CatkinTemplate(cmakelists_txt_template)
+ if not package_template.catkin_deps:
+ components = ''
+ else:
+ components = ' COMPONENTS\n %s\n' % '\n '.join(package_template.catkin_deps)
+ boost_find_package = \
+ ('' if not package_template.boost_comps
+ else ('find_package(Boost REQUIRED COMPONENTS %s)\n' %
+ ' '.join(package_template.boost_comps)))
+ system_find_package = ''
+ for sysdep in package_template.system_deps:
+ if sysdep == 'boost':
+ continue
+ if sysdep.startswith('python-'):
+ system_find_package += '# '
+ system_find_package += 'find_package(%s REQUIRED)\n' % sysdep
+ # provide dummy values
+ catkin_depends = (' '.join(package_template.catkin_deps)
+ if package_template.catkin_deps
+ else 'other_catkin_pkg')
+ system_depends = (' '.join(package_template.system_deps)
+ if package_template.system_deps
+ else 'system_lib')
+ message_pkgs = [pkg for pkg in package_template.catkin_deps if pkg.endswith('_msgs')]
+ if message_pkgs:
+ message_depends = '# %s' % '# '.join(message_pkgs)
+ else:
+ message_depends = '# std_msgs # Or other packages containing msgs'
+ temp_dict = {'name': package_template.name,
+ 'components': components,
+ 'include_directories': _create_include_macro(package_template),
+ 'boost_find': boost_find_package,
+ 'systems_find': system_find_package,
+ 'catkin_depends': catkin_depends,
+ 'system_depends': system_depends,
+ 'target_libraries': _create_targetlib_args(package_template),
+ 'message_dependencies': message_depends
+ }
+ return ctemp.substitute(temp_dict)
+
+
+def _create_targetlib_args(package_template):
+ result = '# ${catkin_LIBRARIES}\n'
+ if package_template.boost_comps:
+ result += '# ${Boost_LIBRARIES}\n'
+ if package_template.system_deps:
+ result += (''.join(['# ${%s_LIBRARIES}\n' %
+ sdep for sdep in package_template.system_deps]))
+ return result
+
+
+def _create_include_macro(package_template):
+ includes = ['# include']
+ includes.append((' ' if package_template.catkin_deps else '# ') + '${catkin_INCLUDE_DIRS}')
+ if package_template.boost_comps:
+ includes.append(' ${Boost_INCLUDE_DIRS}')
+ if package_template.system_deps:
+ deplist = []
+ for sysdep in package_template.system_deps:
+ if not sysdep.startswith('python-'):
+ deplist.append(sysdep)
+ if deplist:
+ todo_incl = '# TODO: Check names of system library include directories'
+ includes.append(todo_incl + (' (%s)' % ', '.join(deplist)))
+ includes.extend([' ${%s_INCLUDE_DIRS}' % sysdep for sysdep in deplist])
+ result = ''
+ if includes:
+ result += '\n'.join(includes)
+ return result
+
+
+def _create_depend_tag(dep_type,
+ name,
+ version_eq=None,
+ version_lt=None,
+ version_lte=None,
+ version_gt=None,
+ version_gte=None):
+ """Create xml snippet for package.xml."""
+ version_string = []
+ for key, var in {'version_eq': version_eq,
+ 'version_lt': version_lt,
+ 'version_lte': version_lte,
+ 'version_gt': version_gt,
+ 'version_gte': version_gte}.items():
+ if var is not None:
+ version_string.append(' %s="%s"' % (key, var))
+ result = ' <%s%s>%s</%s>\n' % (dep_type,
+ ''.join(version_string),
+ name,
+ dep_type)
+ return result
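+
+# Illustrative output (a sketch; the leading indentation comes from the format string):
+#   _create_depend_tag('build_depend', 'roscpp', version_gte='1.0.0')
+#   returns '<build_depend version_gte="1.0.0">roscpp</build_depend>\n' (indented)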
+
+
+def create_package_xml(package_template, rosdistro, meta=False):
+ """
+ Create package xml file content.
+
+ :param package_template: contains the required information
+ :returns: file contents as string
+ """
+ package_xml_template = \
+ read_template_file(PACKAGE_MANIFEST_FILENAME, rosdistro)
+ ctemp = CatkinTemplate(package_xml_template)
+ temp_dict = {}
+ for key in package_template.__slots__:
+ temp_dict[key] = getattr(package_template, key)
+
+ if package_template.version_compatibility:
+ temp_dict['version_compatibility'] = \
+ ' compatibility="%s"' % package_template.version_compatibility
+ else:
+ temp_dict['version_compatibility'] = ''
+
+ if not package_template.description:
+ temp_dict['description'] = 'The %s package ...' % package_template.name
+
+ licenses = []
+ for plicense in package_template.licenses:
+ licenses.append(' <license>%s</license>\n' % plicense)
+ temp_dict['licenses'] = ''.join(licenses)
+
+ def get_person_tag(tagname, person):
+ email_string = (
+ '' if person.email is None else 'email="%s"' % person.email
+ )
+ return ' <%s %s>%s</%s>\n' % (tagname, email_string,
+ person.name, tagname)
+
+ maintainers = []
+ for maintainer in package_template.maintainers:
+ maintainers.append(get_person_tag('maintainer', maintainer))
+ temp_dict['maintainers'] = ''.join(maintainers)
+
+ urls = []
+ for url in package_template.urls:
+ type_string = ('' if url.type is None
+ else 'type="%s"' % url.type)
+ urls.append(' <url %s >%s</url>\n' % (type_string, url.url))
+ temp_dict['urls'] = ''.join(urls)
+
+ authors = []
+ for author in package_template.authors:
+ authors.append(get_person_tag('author', author))
+ temp_dict['authors'] = ''.join(authors)
+
+ dependencies = []
+ dep_map = {
+ 'build_depend': package_template.build_depends,
+ 'build_export_depend': package_template.build_export_depends,
+ 'buildtool_depend': package_template.buildtool_depends,
+ 'exec_depend': package_template.exec_depends,
+ 'test_depend': package_template.test_depends,
+ 'conflict': package_template.conflicts,
+ 'replace': package_template.replaces
+ }
+ for dep_type in ['buildtool_depend', 'build_depend', 'build_export_depend',
+ 'exec_depend', 'test_depend', 'conflict', 'replace']:
+ for dep in sorted(dep_map[dep_type], key=lambda x: x.name):
+ if 'depend' in dep_type:
+ dep_tag = _create_depend_tag(
+ dep_type,
+ dep.name,
+ dep.version_eq,
+ dep.version_lt,
+ dep.version_lte,
+ dep.version_gt,
+ dep.version_gte
+ )
+ dependencies.append(dep_tag)
+ else:
+ dependencies.append(_create_depend_tag(dep_type,
+ dep.name))
+ temp_dict['dependencies'] = ''.join(dependencies)
+
+ exports = []
+ if package_template.exports is not None:
+ for export in package_template.exports:
+ if export.content is not None:
+ print('WARNING: Create package does not know how to '
+ 'serialize exports with content: '
+ '%s, %s, ' % (export.tagname, export.attributes) +
+ '%s' % (export.content),
+ file=sys.stderr)
+ else:
+ attribs = [' %s="%s"' % (k, v) for (k, v) in export.attributes.items()]
+ line = ' <%s%s/>\n' % (export.tagname, ''.join(attribs))
+ exports.append(line)
+
+ if meta:
+ exports.append(' <metapackage/>')
+ temp_dict['exports'] = ''.join(exports)
+
+ temp_dict['components'] = package_template.catkin_deps
+
+ return ctemp.substitute(temp_dict)
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# Copyright (c) 2013, Open Source Robotics Foundation, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Open Source Robotics Foundation, Inc. nor
+# the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior
+# written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import print_function
+
+import datetime
+import os
+import re
+
+from catkin_pkg.changelog_generator import FORTHCOMING_LABEL
+import docutils.core
+
+
+def bump_version(version, bump='patch'):
+ """
+ Increase the version number.
+
+ :param str version: must be in version format "int.int.int"
+ :param str bump: one of 'patch', 'minor', 'major'
+ :returns: version with the given part increased, and all inferior parts reset to 0
+ :rtype: str
+ :raises ValueError: if the version string is not in the format x.y.z
+ """
+ # split the version number
+ match = re.match(r'^(\d+)\.(\d+)\.(\d+)$', version)
+ if match is None:
+ raise ValueError('Invalid version string, must be int.int.int: "%s"' % version)
+ new_version = match.groups()
+ new_version = [int(x) for x in new_version]
+ # find the desired index
+ idx = {'major': 0, 'minor': 1, 'patch': 2}[bump]
+ # increment the desired part
+ new_version[idx] += 1
+ # reset all parts behind the bumped part
+ new_version = new_version[:idx + 1] + [0 for x in new_version[idx + 1:]]
+ return '%d.%d.%d' % tuple(new_version)
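+
+# Illustrative usage:
+#   >>> bump_version('0.4.23')  # the default bumps the patch part
+#   '0.4.24'
+#   >>> bump_version('0.4.23', bump='minor')
+#   '0.5.0'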
+
+
+def _replace_version(package_str, new_version):
+ """
+ Replace the version tag in contents if there is only one instance.
+
+ :param str package_str: contents of package.xml
+ :param str new_version: version number
+ :returns: new package.xml
+ :rtype: str
+ :raises RuntimeError:
+ """
+ # try to replace the contents
+ new_package_str, number_of_subs = re.subn('<version([^<>]*)>[^<>]*</version>', r'<version\g<1>>%s</version>' % new_version, package_str)
+ if number_of_subs != 1:
+ raise RuntimeError('Illegal number of version tags: %s' % (number_of_subs))
+ return new_package_str
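+
+# Illustrative usage:
+#   >>> _replace_version('<package><version>1.0.0</version></package>', '1.0.1')
+#   '<package><version>1.0.1</version></package>'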
+
+
+def _check_for_version_comment(package_str, new_version):
+ """
+ Check if a comment is present behind the version tag and return it.
+
+ :param str package_str: contents of package.xml
+ :param str new_version: version number
+ :returns: comment if available, else None
+ :rtype: str or None
+ """
+ version_tag = '>%s</version>' % new_version
+ pattern = '%s[ \t]*%s *(.+) *%s' % (re.escape(version_tag), re.escape('<!--'), re.escape('-->'))
+ comment = re.search(pattern, package_str)
+ if comment:
+ comment = comment.group(1)
+ return comment
+
+
+def update_versions(paths, new_version):
+ """
+ Bulk replace of version: searches for package.xml files directly in the given folders and replaces the version tag within.
+
+ :param list paths: folder names
+ :param str new_version: version string "int.int.int"
+ :raises RuntimeError: if any one package.xml cannot be updated
+ """
+ files = {}
+ for path in paths:
+ package_path = os.path.join(path, 'package.xml')
+ with open(package_path, 'r') as f:
+ package_str = f.read()
+ try:
+ new_package_str = _replace_version(package_str, new_version)
+ comment = _check_for_version_comment(new_package_str, new_version)
+ if comment:
+ print('NOTE: The package manifest "%s" contains a comment besides the version tag:\n %s' % (path, comment))
+ except RuntimeError as rue:
+ raise RuntimeError('Could not bump version number in file %s: %s' % (package_path, str(rue)))
+ files[package_path] = new_package_str
+ # if all replacements successful, write back modified package.xml
+ for package_path, new_package_str in files.items():
+ with open(package_path, 'w') as f:
+ f.write(new_package_str)
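+
+# Illustrative usage (hypothetical folders; rewrites <folder>/package.xml in place):
+#   >>> update_versions(['pkg_a', 'pkg_b'], '0.5.0')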
+
+
+def get_forthcoming_label(rst):
+ document = docutils.core.publish_doctree(rst)
+ forthcoming_label = None
+ for child in document.children:
+ title = None
+ if isinstance(child, docutils.nodes.subtitle):
+ title = child
+ elif isinstance(child, docutils.nodes.section):
+ section = child
+ if len(section.children) > 0 and isinstance(section.children[0], docutils.nodes.title):
+ title = section.children[0]
+ if title and len(title.children) > 0 and isinstance(title.children[0], docutils.nodes.Text):
+ title_text = title.children[0].rawsource
+ if FORTHCOMING_LABEL.lower() in title_text.lower():
+ if forthcoming_label:
+ raise RuntimeError('Found multiple forthcoming sections')
+ forthcoming_label = title_text
+ return forthcoming_label
+
+
+def update_changelog_sections(changelogs, new_version):
+ # rename forthcoming sections to new_version including current date
+ new_changelog_data = {}
+ new_label = '%s (%s)' % (new_version, datetime.date.today().isoformat())
+ for pkg_name, (changelog_path, changelog, forthcoming_label) in changelogs.items():
+ data = rename_section(changelog.rst, forthcoming_label, new_label)
+ new_changelog_data[changelog_path] = data
+
+ for changelog_path, data in new_changelog_data.items():
+ with open(changelog_path, 'wb') as f:
+ f.write(data.encode('utf-8'))
+
+
+def rename_section(data, old_label, new_label):
+ valid_section_characters = '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~'
+
+ def replace_section(match):
+ section_char = match.group(2)[0]
+ return new_label + '\n' + section_char * len(new_label)
+ pattern = '^(' + re.escape(old_label) + ')\r?\n([' + re.escape(valid_section_characters) + ']+)\r?$'
+ data, count = re.subn(pattern, replace_section, data, flags=re.MULTILINE)
+ if count == 0:
+ raise RuntimeError('Could not find section')
+ if count > 1:
+ raise RuntimeError('Found multiple matching sections')
+ return data
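+
+# Illustrative usage on a changelog snippet (dates and labels are examples):
+#   >>> rename_section('Forthcoming\n-----------\n* fix', 'Forthcoming', '0.5.0 (2020-05-14)')
+#   '0.5.0 (2020-05-14)\n------------------\n* fix'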
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""Library to find packages in the filesystem."""
+
+import multiprocessing
+import os
+
+from .package import _get_package_xml
+from .package import PACKAGE_MANIFEST_FILENAME
+from .package import parse_package_string
+
+
+def find_package_paths(basepath, exclude_paths=None, exclude_subspaces=False):
+ """
+ Crawls the filesystem to find package manifest files.
+
+ When a subfolder contains any of the following files it is ignored:
+ - ``AMENT_IGNORE``
+ - ``CATKIN_IGNORE``
+ - ``COLCON_IGNORE``
+
+ :param basepath: The path to search in, ``str``
+ :param exclude_paths: A list of paths which should not be searched, ``list``
+ :param exclude_subspaces: Whether subfolders containing a .catkin file
+ should be skipped, ``bool``
+ :returns: A list of relative paths containing package manifest files, ``list``
+ """
+ paths = []
+ real_exclude_paths = [os.path.realpath(p) for p in exclude_paths] if exclude_paths is not None else []
+ for dirpath, dirnames, filenames in os.walk(basepath, followlinks=True):
+ if set(dirnames + filenames) & {'AMENT_IGNORE', 'CATKIN_IGNORE', 'COLCON_IGNORE'} or \
+ os.path.realpath(dirpath) in real_exclude_paths or \
+ (exclude_subspaces and '.catkin' in filenames):
+ del dirnames[:]
+ continue
+ elif PACKAGE_MANIFEST_FILENAME in filenames:
+ paths.append(os.path.relpath(dirpath, basepath))
+ del dirnames[:]
+ continue
+ # filter out hidden directories in-place
+ dirnames[:] = [d for d in dirnames if not d.startswith('.')]
+ return paths
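+
+# Illustrative usage (hypothetical workspace layout):
+#   >>> find_package_paths('src')
+#   ['pkg_a', 'nested/pkg_b']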
+
+
+def find_packages(basepath, exclude_paths=None, exclude_subspaces=False, warnings=None):
+ """
+ Crawls the filesystem to find package manifest files and parses them.
+
+ :param basepath: The path to search in, ``str``
+ :param exclude_paths: A list of paths which should not be searched, ``list``
+ :param exclude_subspaces: Whether subfolders containing a .catkin file
+ should be skipped, ``bool``
+ :param warnings: Print warnings if None or return them in the given list
+ :returns: A dict mapping relative paths to ``Package`` objects ``dict``
+ :raises: :exc:`RuntimeError` if multiple packages have the same name
+ """
+ packages = find_packages_allowing_duplicates(basepath, exclude_paths=exclude_paths, exclude_subspaces=exclude_subspaces, warnings=warnings)
+ package_paths_by_name = {}
+ for path, package in packages.items():
+ if package.name not in package_paths_by_name:
+ package_paths_by_name[package.name] = set()
+ package_paths_by_name[package.name].add(path)
+ duplicates = dict([(name, paths) for name, paths in package_paths_by_name.items() if len(paths) > 1])
+ if duplicates:
+ duplicates = ['Multiple packages found with the same name "%s":%s' % (name, ''.join(['\n- %s' % path_ for path_ in sorted(duplicates[name])])) for name in sorted(duplicates.keys())]
+ raise RuntimeError('\n'.join(duplicates))
+ return packages
+
+
+class _PackageParser(object):
+
+ def __init__(self, capture_warnings):
+ self.capture_warnings = capture_warnings
+
+ def __call__(self, xml_and_path_and_filename):
+ xml, path, filename = xml_and_path_and_filename
+ warnings = [] if self.capture_warnings else None
+ parsed_package = parse_package_string(xml, filename=filename, warnings=warnings)
+ return (path, parsed_package), warnings
+
+
+def find_packages_allowing_duplicates(basepath, exclude_paths=None, exclude_subspaces=False, warnings=None):
+ """
+ Crawls the filesystem to find package manifest files and parses them.
+
+ :param basepath: The path to search in, ``str``
+ :param exclude_paths: A list of paths which should not be searched, ``list``
+ :param exclude_subspaces: Whether subfolders containing a .catkin file
+ should be skipped, ``bool``
+ :param warnings: Print warnings if None or return them in the given list
+ :returns: A dict mapping relative paths to ``Package`` objects ``dict``
+ """
+ package_paths = find_package_paths(basepath, exclude_paths=exclude_paths, exclude_subspaces=exclude_subspaces)
+
+ xmls = {}
+ for path in package_paths:
+ xmls[path] = _get_package_xml(os.path.join(basepath, path))
+
+ data = [(v[0], k, v[1]) for k, v in xmls.items()]
+
+ if not data:
+ return {}
+
+ parallel = len(data) > 100
+ if parallel:
+ try:
+ pool = multiprocessing.Pool()
+ except OSError:
+ # On chroot environment, multiprocessing is not available
+ # https://stackoverflow.com/questions/6033599/oserror-38-errno-38-with-multiprocessing
+ parallel = False
+
+ if not parallel:
+ # use sequential loop
+ parsed_packages = {}
+ for xml, path, filename in data:
+ parsed_package = parse_package_string(
+ xml, filename=filename, warnings=warnings)
+ parsed_packages[path] = parsed_package
+ return parsed_packages
+
+ # use multiprocessing pool
+ parser = _PackageParser(warnings is not None)
+ try:
+ path_parsed_packages, warnings_lists = zip(*pool.map(parser, data))
+ finally:
+ pool.close()
+ pool.join()
+ if parser.capture_warnings:
+ # map() is lazy on Python 3, so extend the list explicitly
+ for warnings_list in warnings_lists:
+ warnings.extend(warnings_list)
+ return dict(path_parsed_packages)
+
+
+def verify_equal_package_versions(packages):
+ """
+ Verify that all packages have the same version number.
+
+ :param packages: The list of ``Package`` objects, ``list``
+ :returns: The version number
+ :raises: :exc:`RuntimeError` if the version is not equal in all packages
+ """
+ version = None
+ for package in packages:
+ if version is None:
+ version = package.version
+ elif package.version != version:
+ raise RuntimeError('Two packages have different version numbers (%s != %s):\n- %s\n- %s' % (package.version, version, package.filename, list(packages)[0].filename))
+ return version
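+
+# Illustrative usage together with find_packages (a sketch; the version is an example):
+#   >>> packages = find_packages('src')
+#   >>> verify_equal_package_versions(packages.values())
+#   '0.5.0'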
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""Library for providing the relevant information from the package manifest for the Python setup.py file."""
+
+from __future__ import print_function
+
+import os
+import sys
+
+from .package import InvalidPackage, parse_package
+
+
+def generate_distutils_setup(package_xml_path=os.path.curdir, **kwargs):
+ """
+ Extract the information relevant for distutils from the package manifest.
+
+ The following keys will be set:
+
+ The "name" and "version" are taken from the eponymous tags.
+
+ A single maintainer will set the keys "maintainer" and
+ "maintainer_email" while multiple maintainers are merged into the
+ "maintainer" fields (including their emails). Authors are handled
+ likewise.
+
+ The first URL of type "website" (or without a type) is used for
+ the "url" field.
+
+ The "description" is taken from the eponymous tag if it does not
+ exceed 200 characters. If it does "description" contains the
+ truncated text while "description_long" contains the complete.
+
+ All licenses are merged into the "license" field.
+
+ :param kwargs: All keyword arguments are passed through. The above
+ mentioned keys are verified to be identical if passed as a
+ keyword argument
+
+ :returns: return dict populated with parsed fields and passed
+ keyword arguments
+ :raises: :exc:`InvalidPackage`
+ :raises: :exc:`IOError`
+ """
+ package = parse_package(package_xml_path)
+
+ data = {}
+ data['name'] = package.name
+ data['version'] = package.version
+
+ # either set one author with one email or join all in a single field
+ if len(package.authors) == 1 and package.authors[0].email is not None:
+ data['author'] = package.authors[0].name
+ data['author_email'] = package.authors[0].email
+ else:
+ data['author'] = ', '.join([('%s <%s>' % (a.name, a.email) if a.email is not None else a.name) for a in package.authors])
+
+ # either set one maintainer with one email or join all in a single field
+ if len(package.maintainers) == 1:
+ data['maintainer'] = package.maintainers[0].name
+ data['maintainer_email'] = package.maintainers[0].email
+ else:
+ data['maintainer'] = ', '.join(['%s <%s>' % (m.name, m.email) for m in package.maintainers])
+
+ # either set the first URL with the type 'website' or the first URL of any type
+ websites = [url.url for url in package.urls if url.type == 'website']
+ if websites:
+ data['url'] = websites[0]
+ elif package.urls:
+ data['url'] = package.urls[0].url
+
+ if len(package.description) <= 200:
+ data['description'] = package.description
+ else:
+ data['description'] = package.description[:197] + '...'
+ data['long_description'] = package.description
+
+ data['license'] = ', '.join(package.licenses)
+
+ # pass keyword arguments and verify equality if generated and passed in
+ for k, v in kwargs.items():
+ if k in data:
+ if v != data[k]:
+ raise InvalidPackage('The keyword argument "%s" does not match the information from package.xml: "%s" != "%s"' % (k, v, data[k]), package_xml_path)
+ else:
+ data[k] = v
+
+ return data
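+
+# Illustrative setup.py usage following the common catkin pattern (package names
+# and directories are examples):
+#   from distutils.core import setup
+#   from catkin_pkg.python_setup import generate_distutils_setup
+#
+#   d = generate_distutils_setup(
+#       packages=['my_pkg'],
+#       package_dir={'': 'src'},
+#   )
+#   setup(**d)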
+
+
+def get_global_bin_destination():
+ return 'bin'
+
+
+def get_global_etc_destination():
+ return 'etc'
+
+
+def get_global_include_destination():
+ return 'include'
+
+
+def get_global_lib_destination():
+ return 'lib'
+
+
+def get_global_libexec_destination():
+ return 'lib'
+
+
+def get_global_python_destination():
+ dest = 'lib/python%u.%u/' % (sys.version_info[0], sys.version_info[1])
+ if '--install-layout=deb' not in sys.argv[1:]:
+ dest += 'site-packages'
+ else:
+ dest += 'dist-packages'
+ return dest
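+
+# Illustrative results (depend on the running interpreter and install layout):
+#   CPython 3.8, default layout       -> 'lib/python3.8/site-packages'
+#   CPython 3.8, --install-layout=deb -> 'lib/python3.8/dist-packages'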
+
+
+def get_global_share_destination():
+ return 'share'
+
+
+def get_package_bin_destination(pkgname):
+ return os.path.join(get_global_libexec_destination(), pkgname)
+
+
+def get_package_etc_destination(pkgname):
+ return os.path.join(get_global_etc_destination(), pkgname)
+
+
+def get_package_include_destination(pkgname):
+ return os.path.join(get_global_include_destination(), pkgname)
+
+
+def get_package_lib_destination(_pkgname):
+ return get_global_lib_destination()
+
+
+def get_package_python_destination(pkgname):
+ return os.path.join(get_global_python_destination(), pkgname)
+
+
+def get_package_share_destination(pkgname):
+ return os.path.join(get_global_share_destination(), pkgname)
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""API provided for rospack to reorder include/library paths according to the chained workspaces."""
+
+from __future__ import print_function
+
+from .workspaces import get_spaces, order_paths
+
+
+def reorder_paths(paths):
+ paths_to_order = paths.split(' ') if paths else []
+ ordered_paths = order_paths(paths_to_order, get_spaces())
+ return ' '.join(ordered_paths)
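+
+# Illustrative usage (hypothetical paths; the result order follows the chained workspaces):
+#   >>> reorder_paths('/opt/ros/noetic/include /home/user/ws/devel/include')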
--- /dev/null
+cmake_minimum_required(VERSION 3.0.2)
+project(@name)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED@components)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+@boost_find@systems_find
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+## * add a build_depend tag for "message_generation"
+## * add a build_depend and an exec_depend tag for each package in MSG_DEP_SET
+## * If MSG_DEP_SET isn't empty, the following dependency has been pulled in
+## but can be declared for certainty nonetheless:
+## * add an exec_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+## * add "message_generation" and every package in MSG_DEP_SET to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * add "message_runtime" and every package in MSG_DEP_SET to
+## catkin_package(CATKIN_DEPENDS ...)
+## * uncomment the add_*_files sections below as needed
+## and list every .msg/.srv/.action file to be processed
+## * uncomment the generate_messages entry below
+## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+# add_message_files(
+# FILES
+# Message1.msg
+# Message2.msg
+# )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+# FILES
+# Service1.srv
+# Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+# FILES
+# Action1.action
+# Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+# generate_messages(
+# DEPENDENCIES
+@message_dependencies
+# )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+##   * add a build_depend and an exec_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+## * add "dynamic_reconfigure" to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * uncomment the "generate_dynamic_reconfigure_options" section below
+## and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+# cfg/DynReconf1.cfg
+# cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin packages that dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+# INCLUDE_DIRS include
+# LIBRARIES @{name}
+# CATKIN_DEPENDS @catkin_depends
+# DEPENDS @system_depends
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+@include_directories
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+# src/${PROJECT_NAME}/@name.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries are built,
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/@{name}_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names; the following renames the
+## target back to the shorter version for convenience,
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+@target_libraries# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# catkin_install_python(PROGRAMS
+# scripts/my_python_script
+# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
+# install(TARGETS ${PROJECT_NAME}_node
+# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark libraries for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
+# install(TARGETS ${PROJECT_NAME}
+# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+# FILES_MATCHING PATTERN "*.h"
+# PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+# # myfile1
+# # myfile2
+# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_@name.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
--- /dev/null
+cmake_minimum_required(VERSION 2.8.3)
+project(@name@)
+find_package(catkin REQUIRED)
+catkin_metapackage(@metapackage_arguments@)
--- /dev/null
+<?xml version="1.0"?>
+<package format="2">
+ <name>@name</name>
+ <version@version_compatibility>@version</version>
+ <description>@description</description>
+
+ <!-- One maintainer tag required, multiple allowed, one person per tag -->
+ <!-- Example: -->
+ <!-- <maintainer email="jane.doe@@example.com">Jane Doe</maintainer> -->
+@maintainers
+
+ <!-- One license tag required, multiple allowed, one license per tag -->
+ <!-- Commonly used license strings: -->
+ <!-- BSD, MIT, Boost Software License, GPLv2, GPLv3, LGPLv2.1, LGPLv3 -->
+@licenses
+
+ <!-- Url tags are optional, but multiple are allowed, one per tag -->
+ <!-- Optional attribute type can be: website, bugtracker, or repository -->
+ <!-- Example: -->
+ <!-- <url type="website">http://wiki.ros.org/@name</url> -->
+@urls
+
+ <!-- Author tags are optional, multiple are allowed, one per tag -->
+ <!-- Authors do not have to be maintainers, but could be -->
+ <!-- Example: -->
+ <!-- <author email="jane.doe@@example.com">Jane Doe</author> -->
+@authors
+
+ <!-- The *depend tags are used to specify dependencies -->
+ <!-- Dependencies can be catkin packages or system dependencies -->
+ <!-- Examples: -->
+ <!-- Use depend as a shortcut for packages that are both build and exec dependencies -->
+ <!-- <depend>roscpp</depend> -->
+ <!-- Note that this is equivalent to the following: -->
+ <!-- <build_depend>roscpp</build_depend> -->
+ <!-- <exec_depend>roscpp</exec_depend> -->
+ <!-- Use build_depend for packages you need at compile time: -->
+ <!-- <build_depend>message_generation</build_depend> -->
+ <!-- Use build_export_depend for packages you need in order to build against this package: -->
+ <!-- <build_export_depend>message_generation</build_export_depend> -->
+ <!-- Use buildtool_depend for build tool packages: -->
+ <!-- <buildtool_depend>catkin</buildtool_depend> -->
+ <!-- Use exec_depend for packages you need at runtime: -->
+ <!-- <exec_depend>message_runtime</exec_depend> -->
+ <!-- Use test_depend for packages you need only for testing: -->
+ <!-- <test_depend>gtest</test_depend> -->
+ <!-- Use doc_depend for packages you need only for building documentation: -->
+ <!-- <doc_depend>doxygen</doc_depend> -->
+@dependencies
+
+ <!-- The export tag contains other, unspecified, tags -->
+ <export>
+ <!-- Other tools can request additional information be placed here -->
+@exports
+ </export>
+</package>
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""Module to enable color terminal output."""
+
+from __future__ import print_function
+
+import os
+import string
+
+_ansi = {}
+
+
+def ansi(key):
+    """Return the escape sequence for a given ANSI color key."""
+ global _ansi
+ return _ansi[key]
+
+
+def enable_ANSI_colors():
+    """Populate the global module dictionary `_ansi` with ANSI escape sequences."""
+ global _ansi
+ color_order = [
+ 'black', 'red', 'green', 'yellow', 'blue', 'purple', 'cyan', 'white'
+ ]
+ short_colors = {
+ 'black': 'k', 'red': 'r', 'green': 'g', 'yellow': 'y', 'blue': 'b',
+ 'purple': 'p', 'cyan': 'c', 'white': 'w'
+ }
+ _ansi = {
+ 'escape': '\033', 'reset': 0, '|': 0,
+ 'boldon': 1, '!': 1, 'italicson': 3, '/': 3, 'ulon': 4, '_': 4,
+ 'invon': 7, 'boldoff': 22, 'italicsoff': 23,
+ 'uloff': 24, 'invoff': 27
+ }
+
+ # Convert plain numbers to escapes
+ for key in _ansi:
+ if key != 'escape':
+ _ansi[key] = '{0}[{1}m'.format(_ansi['escape'], _ansi[key])
+
+ # Foreground
+ for index, color in enumerate(color_order):
+ _ansi[color] = '{0}[{1}m'.format(_ansi['escape'], 30 + index)
+ _ansi[color + 'f'] = _ansi[color]
+ _ansi[short_colors[color] + 'f'] = _ansi[color + 'f']
+
+ # Background
+ for index, color in enumerate(color_order):
+ _ansi[color + 'b'] = '{0}[{1}m'.format(_ansi['escape'], 40 + index)
+ _ansi[short_colors[color] + 'b'] = _ansi[color + 'b']
+
+ # Fmt sanitizers
+ _ansi['atexclimation'] = '@!'
+ _ansi['atfwdslash'] = '@/'
+ _ansi['atunderscore'] = '@_'
+ _ansi['atbar'] = '@|'
+
+
+def disable_ANSI_colors():
+ """Set all the ANSI escape sequences to empty strings, effectively disabling console colors."""
+ global _ansi
+ for key in _ansi:
+ _ansi[key] = ''
+
+
+# Default to ansi colors on
+enable_ANSI_colors()
+if os.name in ['nt']:
+ disable_ANSI_colors()
+
+
+class ColorTemplate(string.Template):
+ delimiter = '@'
+
+
+def sanitize(msg):
+    """Sanitize the given msg; use this before adding color annotations."""
+ msg = msg.replace('@', '@@')
+ msg = msg.replace('{', '{{')
+ msg = msg.replace('}', '}}')
+ msg = msg.replace('@@!', '@{atexclimation}')
+ msg = msg.replace('@@/', '@{atfwdslash}')
+ msg = msg.replace('@@_', '@{atunderscore}')
+ msg = msg.replace('@@|', '@{atbar}')
+ return msg
+
+
+def fmt(msg):
+ """Replace color annotations with ansi escape sequences."""
+ global _ansi
+ msg = msg.replace('@!', '@{boldon}')
+ msg = msg.replace('@/', '@{italicson}')
+ msg = msg.replace('@_', '@{ulon}')
+ msg = msg.replace('@|', '@{reset}')
+ t = ColorTemplate(msg)
+ return t.substitute(_ansi) + ansi('reset')
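+
+# Illustrative usage sketch (the message text and the `user_input` variable
+# are hypothetical):
+#
+#   print(fmt('@{red}Error:@| something @!important@| happened'))
+#   # sanitize() escapes markup in untrusted text before annotating it:
+#   print(fmt('@{yellow}' + sanitize(user_input) + '@|'))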
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2015, Open Source Robotics Foundation, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""
+Common functions that can be used to mark spaces, e.g. build and devel, to indicate which tools previously built the space.
+
+This allows the tools to detect cross-tool talk and avoid it where appropriate.
+"""
+
+
+from __future__ import print_function
+
+import os
+
+SPACE_BUILT_BY_MARKER_FILENAME = '.built_by'
+
+
+def get_previous_tool_used_on_the_space(space_path):
+ """
+ Return the tool used to build the space at the given path, or None.
+
+    Returns None if the path does not exist or if there is no built-by marker file.
+
+ :param str space_path: path to the space in question.
+ :returns: str identifying the tool used to build the space or None.
+ """
+ if os.path.isdir(space_path):
+ marker_path = os.path.join(space_path, SPACE_BUILT_BY_MARKER_FILENAME)
+ if os.path.isfile(marker_path):
+ with open(marker_path, 'r') as f:
+ return f.read().strip()
+ return None
+
+
+def mark_space_as_built_by(space_path, tool_name):
+ """
+ Place a marker file in the space at the given path, telling who built it.
+
+ The path to the marker is created if necessary.
+
+ :param str space_path: path to the space which should be marked.
+ :param str tool_name: name of the tool doing the marking.
+ :raises: OSError, others, when trying to create the folder.
+ """
+ if not os.path.isdir(space_path):
+ # Might fail if it's a file already or for permissions.
+ os.makedirs(space_path)
+ marker_path = os.path.join(space_path, SPACE_BUILT_BY_MARKER_FILENAME)
+ with open(marker_path, 'w') as f:
+ f.write(tool_name)
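+
+# Illustrative usage sketch (paths and tool name are hypothetical):
+#
+#   mark_space_as_built_by('/tmp/ws/build', 'catkin_make')
+#   assert get_previous_tool_used_on_the_space('/tmp/ws/build') == 'catkin_make'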
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import print_function
+
+import copy
+import os
+import sys
+
+from .packages import find_packages
+from .workspaces import get_spaces
+
+
+class _PackageDecorator(object):
+
+ def __init__(self, package, path):
+ self.package = package
+ self.path = path
+ self.is_metapackage = 'metapackage' in (e.tagname for e in self.package.exports)
+ message_generators = [e.content for e in self.package.exports if e.tagname == 'message_generator']
+ self.message_generator = message_generators[0] if message_generators else None
+ # a set containing this package name, direct build depends
+ # and recursive run_depends of these build_depends
+ self.depends_for_topological_order = None
+ # a set containing this package name and recursive run_depends
+ self._recursive_run_depends_for_topological_order = None
+
+ def __getattr__(self, name):
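+        # Delegate attribute lookup to the wrapped Package, but keep dunder
+        # lookups local so that copy.deepcopy() and pickling work as expected.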
+ if name.startswith('__'):
+ raise AttributeError(name)
+ return getattr(self.package, name)
+
+ def calculate_depends_for_topological_order(self, packages):
+ """
+ Set self.depends_for_topological_order to the recursive dependencies required for topological order.
+
+        It contains this package name, all direct build-, buildtool- and
+        test dependencies and their recursive runtime dependencies.
+ The set only contains packages which are in the passed packages
+ dictionary.
+
+ :param packages: dict of name to ``_PackageDecorator``
+ """
+ self.depends_for_topological_order = set()
+ all_depends = self.package.build_depends + self.package.buildtool_depends + self.package.test_depends
+ names = [d.name for d in all_depends if d.evaluated_condition]
+
+ # collect all group dependencies
+ for group_depend in self.package.group_depends:
+ if group_depend.evaluated_condition:
+ assert group_depend.members is not None, \
+                    'Group members need to be determined beforehand'
+ names += group_depend.members
+
+ # skip external dependencies, meaning names that are not known packages
+ for name in [n for n in names if n in packages.keys()]:
+ if not self.is_metapackage and packages[name].is_metapackage:
+ print('WARNING: package "%s" should not depend on metapackage "%s" but on its packages instead' % (self.name, name), file=sys.stderr)
+ if name in self.depends_for_topological_order:
+ # avoid function call to improve performance
+ # check within the loop since the set changes every cycle
+ continue
+ packages[name]._add_recursive_run_depends(packages, self.depends_for_topological_order)
+
+ def _add_recursive_run_depends(self, packages, depends_for_topological_order):
+ """
+ Modify depends_for_topological_order argument by adding run_depends of self recursively.
+
+ Only packages which are in the passed packages are added and recursed into.
+
+ :param packages: dict of name to ``_PackageDecorator``
+ :param depends_for_topological_order: set to be extended
+ """
+ if self._recursive_run_depends_for_topological_order is None:
+ self._recursive_run_depends_for_topological_order = set()
+ self._recursive_run_depends_for_topological_order.add(self.package.name)
+ package_names = packages.keys()
+ names = [d.name for d in self.package.run_depends if d.evaluated_condition]
+
+ for group_depend in self.package.group_depends:
+ if group_depend.evaluated_condition:
+ assert group_depend.members is not None, \
+                        'Group members need to be determined beforehand'
+ names += group_depend.members
+
+ for name in [n for n in names
+ if (n in package_names and
+ n not in self._recursive_run_depends_for_topological_order)]:
+ packages[name]._add_recursive_run_depends(packages,
+ self._recursive_run_depends_for_topological_order)
+
+ depends_for_topological_order.update(self._recursive_run_depends_for_topological_order)
+
+
+def topological_order(root_dir, whitelisted=None, blacklisted=None, underlay_workspaces=None):
+ """
+    Crawl the filesystem to find packages and use their dependencies to return a topologically ordered list.
+
+ When a circular dependency is detected, the last item in the returned list
+ is a tuple with None and a string giving a superset of the guilty packages.
+
+ :param root_dir: The path to search in, ``str``
+ :param whitelisted: A list of whitelisted package names, ``list``
+ :param blacklisted: A list of blacklisted package names, ``list``
+ :param underlay_workspaces: A list of underlay workspaces of packages which might provide dependencies in case of partial workspaces, ``list``
+ :returns: A list of tuples containing the relative path and a ``Package`` object, ``list``
+ """
+ packages = find_packages(root_dir)
+
+ # find packages in underlayed workspaces
+ underlay_packages = {}
+ if underlay_workspaces:
+ for workspace in reversed(underlay_workspaces):
+ # since underlay workspace might be a devel space
+ # consider spaces stored in the .catkin file
+ spaces = get_spaces([workspace])
+ for space in spaces:
+ for path, package in find_packages(space).items():
+ underlay_packages[package.name] = (path, package)
+
+ return topological_order_packages(packages, whitelisted=whitelisted, blacklisted=blacklisted, underlay_packages=dict(underlay_packages.values()))
+
+
+def topological_order_packages(packages, whitelisted=None, blacklisted=None, underlay_packages=None):
+ """
+ Topologically orders packages.
+
+ evaluate_conditions() will be called for each package.
+
+    If group dependencies haven't determined their members yet,
+    extract_group_members() will be called for each group dependency to do so.
+
+    Packages which have message generators are returned first and then
+    the rest, based on direct build-/buildtool_depends and indirect
+    recursive run_depends.
+
+ When a circular dependency is detected, the last item in the returned list
+ is a tuple with None and a string giving a superset of the guilty packages.
+
+ :param packages: A dict mapping relative paths to ``Package`` objects ``dict``
+ :param whitelisted: A list of whitelisted package names, ``list``
+ :param blacklisted: A list of blacklisted package names, ``list``
+ :param underlay_packages: A dict mapping relative paths to ``Package`` objects ``dict``
+ :returns: A list of tuples containing the relative path and a ``Package`` object, ``list``
+ """
+ decorators_by_name = {}
+ for path, package in packages.items():
+ # skip non-whitelisted packages
+ if whitelisted and package.name not in whitelisted:
+ continue
+ # skip blacklisted packages
+ if blacklisted and package.name in blacklisted:
+ continue
+ if package.name in decorators_by_name:
+ path_with_same_name = decorators_by_name[package.name].path
+ raise RuntimeError('Two packages with the same name "%s" in the workspace:\n- %s\n- %s' % (package.name, path_with_same_name, path))
+ decorators_by_name[package.name] = _PackageDecorator(package, path)
+
+ underlay_decorators_by_name = {}
+ if underlay_packages:
+ for path, package in underlay_packages.items():
+ # skip overlayed packages
+ if package.name in decorators_by_name:
+ continue
+ underlay_decorators_by_name[package.name] = _PackageDecorator(package, path)
+ decorators_by_name.update(underlay_decorators_by_name)
+
+ # evaluate conditions and determine group membership
+ pkgs = [d.package for d in decorators_by_name.values()]
+ for pkg in pkgs:
+ pkg.evaluate_conditions(os.environ)
+ for pkg in pkgs:
+ for group_depend in pkg.group_depends:
+ if group_depend.evaluated_condition:
+ group_depend.extract_group_members(pkgs)
+
+ # calculate transitive dependencies
+ for decorator in decorators_by_name.values():
+ decorator.calculate_depends_for_topological_order(decorators_by_name)
+
+ tuples = _sort_decorated_packages(decorators_by_name)
+ # remove underlay packages from result
+ return [(path, package) for path, package in tuples if path is None or package.name not in underlay_decorators_by_name]
+
+
+def _reduce_cycle_set(packages_orig):
+ """
+    Iteratively remove packages from the set that are definitely not part of any cycle.
+
+ When there is a cycle in the package dependencies,
+ _sort_decorated_packages only knows the set of packages containing
+ the cycle.
+
+    :param packages_orig: A dict mapping package name to ``_PackageDecorator`` objects ``dict``
+ :returns: A list of package names from the input which could not easily be detected as not being part of a cycle.
+ """
+ assert(packages_orig)
+ packages = copy.copy(packages_orig)
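+    # Fixed-point iteration: repeatedly drop packages that no remaining
+    # package depends on; when the depended-upon set stops changing, the
+    # names left over form a superset of the cycle.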
+ last_depended = None
+ while len(packages) > 0:
+ depended = set()
+ for name, decorator in packages.items():
+ if decorator.depends_for_topological_order:
+ depended = depended.union(decorator.depends_for_topological_order)
+ for name in list(packages.keys()):
+ if name not in depended:
+ del packages[name]
+ if last_depended:
+ if last_depended == depended:
+ return packages.keys()
+ last_depended = depended
+
+
+def _sort_decorated_packages(packages_orig):
+ """
+    Sort packages according to dependency ordering.
+
+    Message generators and their recursive dependencies are considered
+    first, then the rest of the packages.
+    When a cycle is detected, the returned list ends with a tuple of None
+    and a string giving a superset of the guilty packages.
+
+    :param packages_orig: A dict mapping package name to ``_PackageDecorator`` objects ``dict``
+    :returns: A list of tuples containing the relative path and a ``Package`` object ``list``
+ """
+ packages = copy.deepcopy(packages_orig)
+
+ # mark all packages which are (recursively) dependent on by message generators
+ dependency_names_to_follow = set([name for name, decorator in packages.items() if decorator.message_generator])
+ not_marked_package_names = set(packages.keys()) - dependency_names_to_follow
+ while dependency_names_to_follow:
+ pkg_name = dependency_names_to_follow.pop()
+ for name in packages[pkg_name].depends_for_topological_order:
+ if name in not_marked_package_names:
+ # mark package
+ packages[name].message_generator = True
+ not_marked_package_names.remove(name)
+ # queue for recursion
+ dependency_names_to_follow.add(name)
+
+ ordered_packages = []
+ while len(packages) > 0:
+ # find all packages without build dependencies
+ message_generators = []
+ non_message_generators = []
+ for name, decorator in packages.items():
+ if not decorator.depends_for_topological_order:
+ if decorator.message_generator:
+ message_generators.append(name)
+ else:
+ non_message_generators.append(name)
+ # first choose message generators
+ if message_generators:
+ names = message_generators
+ elif non_message_generators:
+ names = non_message_generators
+ else:
+            # in case of a circular dependency, append a tuple with None as
+            # the path and a string listing the remaining package names to
+            # indicate the cycle
+ ordered_packages.append([None, ', '.join(sorted(_reduce_cycle_set(packages)))])
+ break
+
+ # alphabetic order only for convenience
+ names.sort()
+
+        # add the first candidate to the ordered list
+        # do not add all candidates at once since removing the depends of the first might affect the next candidates
+ name = names[0]
+ ordered_packages.append([packages[name].path, packages[name].package])
+ # remove package from further processing
+ del packages[name]
+ for package in packages.values():
+ if name in package.depends_for_topological_order:
+ package.depends_for_topological_order.remove(name)
+ return ordered_packages
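+
+# Illustrative usage sketch (the workspace path is hypothetical):
+#
+#   for path, package in topological_order('/tmp/ws/src'):
+#       if path is None:
+#           # `package` is a string naming a superset of the cyclic packages
+#           raise RuntimeError('circular dependency among: ' + package)
+#       print(path, package.name)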
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import print_function
+
+import os
+import subprocess
+
+
+def get_repository_type(path):
+ for vcs_type in ['bzr', 'git', 'hg', 'svn']:
+ if os.path.isdir(os.path.join(path, '.%s' % vcs_type)):
+ return vcs_type
+ return None
+
+
+def vcs_remotes(path, vcs_type=None):
+ if vcs_type is None:
+ vcs_type = get_repository_type(path)
+ if vcs_type == 'git':
+ output = subprocess.check_output(['git', 'remote', '-v'], cwd=path)
+ return output.decode('utf-8').rstrip()
+ elif vcs_type == 'hg':
+ output = subprocess.check_output(['hg', 'paths'], cwd=path)
+ return output.decode('utf-8').rstrip()
+ elif vcs_type == 'svn':
+ output = subprocess.check_output(['svn', 'info'], cwd=path)
+ output = output.decode('utf-8').rstrip()
+ for line in output.split(os.linesep):
+ if line.startswith('URL: '):
+ return line
+ raise RuntimeError('Could not determine URL of svn working copy')
+ else:
+ raise RuntimeError('"remotes" command not supported for vcs type "%s"' % vcs_type)
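+
+# Illustrative usage sketch (assumes the current directory is a git checkout):
+#
+#   vcs_type = get_repository_type('.')  # -> 'git'
+#   print(vcs_remotes('.', vcs_type))    # output of `git remote -v`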
--- /dev/null
+# Software License Agreement (BSD License)
+#
+# Copyright (c) 2012, Willow Garage, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Willow Garage, Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""Library to provided logic for chained workspaces."""
+
+from __future__ import print_function
+
+import os
+
+CATKIN_WORKSPACE_MARKER_FILE = '.catkin_workspace'
+
+
+def get_spaces(paths=None):
+ """
+    Return a list of spaces based on the CMAKE_PREFIX_PATH or the passed in list of workspaces.
+
+    It resolves the source space for each devel space and ignores non-catkin paths.
+
+    :param paths: list of prefix paths to check; if None the CMAKE_PREFIX_PATH environment variable is used
+ """
+ if paths is None:
+ if 'CMAKE_PREFIX_PATH' not in os.environ:
+            raise RuntimeError('The environment variable CMAKE_PREFIX_PATH is not set and no list of paths was passed.')
+ paths = os.environ['CMAKE_PREFIX_PATH'].split(os.pathsep) if os.environ['CMAKE_PREFIX_PATH'] else []
+
+ spaces = []
+ for path in paths:
+ marker = os.path.join(path, '.catkin')
+ # ignore non catkin paths
+ if not os.path.exists(marker):
+ continue
+ spaces.append(path)
+
+ # append source spaces
+ with open(marker, 'r') as f:
+ data = f.read()
+ if data:
+ spaces += data.split(';')
+ return spaces
+
+
+def order_paths(paths_to_order, prefix_paths):
+ """
+    Return a list containing all items of paths_to_order, ordered by the list of prefix_paths, compared as strings.
+
+ :param paths_to_order: list of paths
+ :param prefix_paths: list of prefixes, must not end with '/'
+ """
+    # ordered_paths contains one list per prefix, plus one more list for paths which do not match any of the prefix_paths
+ ordered_paths = [[] for _ in range(len(prefix_paths) + 1)]
+
+ for path in paths_to_order:
+ # put each directory into the slot where it matches the prefix, or last otherwise
+ index = 0
+ for prefix in prefix_paths:
+ if _is_equal_or_in_parents(prefix, path):
+ break
+ index += 1
+ ordered_paths[index].append(path)
+
+ # flatten list of lists
+ return [j for i in ordered_paths for j in i]
+
+
+def _is_equal_or_in_parents(dir_, path):
+ dir_ = os.path.normcase(os.path.realpath(dir_))
+ path = os.path.normcase(os.path.realpath(path))
+ return path == dir_ or path.startswith(dir_ + os.sep)
+
+
+def ensure_workspace_marker(base_path):
+ """
+    Create the workspace marker file at the given path if it does not exist yet.
+
+    :param base_path: target folder
+ """
+ if not os.path.exists(os.path.join(base_path, CATKIN_WORKSPACE_MARKER_FILE)):
+ with open(os.path.join(base_path, CATKIN_WORKSPACE_MARKER_FILE), 'a') as fhand:
+ fhand.write('# This file currently only serves to mark the location of a catkin workspace for tool integration\n')
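+
+# Illustrative usage sketch (assumes CMAKE_PREFIX_PATH lists catkin spaces;
+# the include paths are hypothetical):
+#
+#   spaces = get_spaces()
+#   ordered = order_paths(['/usr/include', '/overlay_ws/devel/include'], spaces)
+#   # paths located under one of the spaces come first, in space order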
--- /dev/null
+import os
+import shutil
+import tempfile
+import unittest
+
+try:
+ from catkin_pkg.package_templates import PackageTemplate
+except ImportError as impe:
+ raise ImportError(
+ 'Please adjust your pythonpath before running this test: %s' % str(impe))
+
+
+from catkin_pkg.cli.create_pkg import main
+
+
+class CreatePkgTest(unittest.TestCase):
+
+ def test_create_package_template(self):
+ template = PackageTemplate._create_package_template('foopackage')
+ self.assertEqual('foopackage', template.name)
+ self.assertEqual('0.0.0', template.version)
+ self.assertEqual('The foopackage package', template.description)
+ self.assertEqual([], template.catkin_deps)
+ self.assertEqual([], template.authors)
+ self.assertEqual(1, len(template.maintainers))
+ self.assertIsNotNone(template.maintainers[0].email)
+ self.assertEqual([], template.urls)
+ # with args
+ template = PackageTemplate._create_package_template(
+ 'foopackage',
+ description='foo_desc',
+ licenses=['a', 'b'],
+ maintainer_names=['John Doe', 'Jim Daniels'],
+ author_names=['Harry Smith'],
+ version='1.2.3',
+ catkin_deps=['foobar', 'baz'])
+ self.assertEqual('foopackage', template.name)
+ self.assertEqual('1.2.3', template.version)
+ self.assertEqual('foo_desc', template.description)
+ self.assertEqual(['baz', 'foobar'], template.catkin_deps)
+ self.assertEqual(1, len(template.authors))
+ self.assertEqual('Jim Daniels', template.maintainers[0].name)
+ self.assertEqual('John Doe', template.maintainers[1].name)
+ self.assertEqual('Harry Smith', template.authors[0].name)
+ self.assertEqual(2, len(template.maintainers))
+ self.assertEqual([], template.urls)
+
+ def test_main(self):
+ try:
+ root_dir = tempfile.mkdtemp()
+ main(['--rosdistro', 'groovy', 'foo'], root_dir)
+ self.assertTrue(os.path.isdir(os.path.join(root_dir, 'foo')))
+ self.assertTrue(os.path.isfile(os.path.join(root_dir, 'foo', 'CMakeLists.txt')))
+ self.assertTrue(os.path.isfile(os.path.join(root_dir, 'foo', 'package.xml')))
+ finally:
+ shutil.rmtree(root_dir)
--- /dev/null
+# coding=utf-8
+
+import unittest
+
+from catkin_pkg.changelog import BulletList
+from catkin_pkg.changelog import Changelog
+from catkin_pkg.changelog import example_rst
+from catkin_pkg.changelog import InvalidSectionTitle
+from catkin_pkg.changelog import MixedText
+from catkin_pkg.changelog import populate_changelog_from_rst
+from catkin_pkg.changelog import Transition
+from catkin_pkg.changelog import version_and_date_from_title
+
+
+class TestSectionTitleParsing(unittest.TestCase):
+ """Tests the section title parsing."""
+
+ def test_version_and_date_from_title(self):
+ title = '0.1.26 (2012-12-26)'
+ assert '0.1.26' == version_and_date_from_title(title)[0]
+ title = '0.1'
+ self.assertRaises(InvalidSectionTitle, version_and_date_from_title, title)
+ title = '0.1.27 (forthcoming)'
+ self.assertRaises(InvalidSectionTitle, version_and_date_from_title, title)
+ title = ' 0.1.26 (2012-12-26)'
+ self.assertRaises(InvalidSectionTitle, version_and_date_from_title, title)
+ title = '0.1.26 (2012-12-26) '
+ self.assertRaises(InvalidSectionTitle, version_and_date_from_title, title)
+        # TODO: Add some more sophisticated date entries
+
+
+def check_0_1_26(content):
+ assert len(content) == 1
+ assert type(content[0]) == BulletList
+ assert len(content[0].bullets) == 3
+
+
+def check_0_1_25(content):
+ assert len(content) == 3
+ assert type(content[0]) == BulletList
+ assert len(content[0].bullets) == 5
+ mtext = content[0].bullets[3]
+ assert type(mtext) == MixedText
+ assert len(mtext.texts) == 2
+ assert type(content[1]) == Transition
+ assert type(content[2]) == MixedText
+
+
+def check_0_1_0(content):
+ assert len(content) == 1
+ assert type(content[0]) == MixedText
+ assert len(content[0].texts) == 4
+
+
+def check_0_0_1(content):
+ assert len(content) == 1
+ assert type(content[0]) == BulletList
+ assert content[0].bullet_type == 'enumerated'
+
+
+def test_Changelog():
+ # Example is from REP-0132
+ changelog = Changelog('foo')
+ populate_changelog_from_rst(changelog, example_rst)
+ expected_versions = ['0.1.26', '0.1.25', '0.1.0', '0.0.1']
+ versions = []
+ content_checks = {
+ '0.1.26': check_0_1_26,
+ '0.1.25': check_0_1_25,
+ '0.1.0': check_0_1_0,
+ '0.0.1': check_0_0_1
+ }
+ for version, date, content in changelog.foreach_version():
+ versions.append(version)
+ if version in content_checks:
+ content_checks[version](content)
+ assert sorted(expected_versions) == sorted(versions)
+
+
+single_version_rst = """\
+0.0.1 (2012-01-31)
+------------------
+
+* Initial release
+* Initial bugs
+* Contributors: Sömeöne with UTF-8 in their name
+"""
+
+
+def test_single_version_Changelog():
+ changelog = Changelog('foo')
+ populate_changelog_from_rst(changelog, single_version_rst)
+ expected_versions = ['0.0.1']
+ versions = []
+ for version, date, content in changelog.foreach_version():
+ versions.append(version)
+ assert sorted(expected_versions) == sorted(versions)
+ str(changelog)
+
+
+single_version_with_header_rst = """\
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Changelog for package foo
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+0.0.1 (2012-01-31)
+------------------
+
+* Initial release
+* Initial bugs
+"""
+
+
+def test_single_version_with_header_Changelog():
+ changelog = Changelog('foo')
+ populate_changelog_from_rst(changelog, single_version_with_header_rst)
+ expected_versions = ['0.0.1']
+ versions = []
+ for version, date, content in changelog.foreach_version():
+ versions.append(version)
+ assert sorted(expected_versions) == sorted(versions)
--- /dev/null
+# Copyright 2018 Open Source Robotics Foundation, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+import sys
+
+from flake8.api.legacy import get_style_guide
+
+
+def test_flake8():
+ # Configure flake8 using the .flake8 file in the root of this repository.
+ style_guide = get_style_guide()
+
+ style_guide.options.exclude += ['*/doc/_build']
+
+ stdout = sys.stdout
+ sys.stdout = sys.stderr
+ # implicitly calls report_errors()
+ report = style_guide.check_files([
+ os.path.dirname(os.path.dirname(__file__)),
+ ])
+ sys.stdout = stdout
+
+ if report.total_errors:
+ # output summary with per-category counts
+ print()
+ report._application.formatter.show_statistics(report._stats)
+ print(
+ 'flake8 reported %d errors' % report.total_errors,
+ file=sys.stderr)
+
+ assert not report.total_errors, \
+ 'flake8 reported %d errors' % report.total_errors
--- /dev/null
+from __future__ import print_function
+
+import contextlib
+import os
+import re
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from io import StringIO
+import sys
+import unittest
+
+from catkin_pkg.metapackage import get_expected_cmakelists_txt
+from catkin_pkg.metapackage import InvalidMetapackage
+from catkin_pkg.metapackage import validate_metapackage
+from catkin_pkg.packages import find_packages
+
+
+test_data_dir = os.path.join(os.path.dirname(__file__), 'data', 'metapackages')
+
+test_expectations = {
+ # Test name: [ExceptionType or None, ExceptionRegex or None, WarningRegex or None]
+ 'invalid_cmake': [InvalidMetapackage, 'Invalid CMakeLists.txt', None],
+ 'invalid_depends': [InvalidMetapackage, 'Has build, buildtool, and/or test depends', None],
+ 'leftover_files': [None, None, None],
+ 'no_buildtool_depend_catkin': [InvalidMetapackage, 'No buildtool dependency on catkin', None],
+ 'no_cmake': [InvalidMetapackage, 'No CMakeLists.txt', None],
+ 'no_metapackage_tag': [InvalidMetapackage, 'No <metapackage/> tag in <export>', None],
+ 'NonConformingName': [None, None, None],
+ 'valid_metapackage': [None, None, None],
+ 'valid_metapackage_format2': [None, None, None],
+}
+
+test_expected_warnings = [
+ 'Metapackage "invalid_depends" should not have other dependencies besides '
+ 'a buildtool_depend on catkin and run_depends.',
+ 'Metapackage "no_buildtool_depend_catkin" must buildtool_depend on '
+ 'catkin.',
+ 'Package name "NonConformingName" does not follow the naming conventions. '
+ 'It should start with a lower case letter and only contain lower case '
+ 'letters, digits and underscores.']
+
+
+@contextlib.contextmanager
+def assert_warning(warnreg):
+ orig_stdout = sys.stdout
+ orig_stderr = sys.stderr
+ try:
+ out = StringIO()
+ sys.stdout = out
+ sys.stderr = sys.stdout
+ yield
+ finally:
+ if warnreg is not None:
+ out = out.getvalue()
+ assert re.search(warnreg, out) is not None, "'%s' does not match warning '%s'" % (warnreg, out)
+ else:
+ print(out)
+ sys.stdout = orig_stdout
+ sys.stderr = orig_stderr
+
+
+def _validate_metapackage(path, package):
+ try:
+ validate_metapackage(path, package)
+ except Exception:
+ # print('on package ' + package.name, file=sys.stderr)
+ raise
+
+
+class TestMetapackageValidation(unittest.TestCase):
+ """Tests the metapackage validator."""
+
+ if sys.version_info[0] == 2:
+ assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
+
+ def test_validate_metapackage(self):
+ pkgs_dict = find_packages(test_data_dir)
+ for path, package in pkgs_dict.items():
+ path = os.path.join(test_data_dir, path)
+ assert package.name in test_expectations, 'Unknown test %s' % package.name
+ exc, excreg, warnreg = test_expectations[package.name]
+ with assert_warning(warnreg):
+ if exc is not None:
+ if excreg is not None:
+ with self.assertRaisesRegex(exc, excreg):
+ _validate_metapackage(path, package)
+ else:
+ with self.assertRaises(exc):
+ _validate_metapackage(path, package)
+ else:
+ _validate_metapackage(path, package)
+
+ def test_collect_warnings(self):
+ """Tests warnings collection."""
+ warnings = []
+ find_packages(test_data_dir, warnings=warnings)
+
+        self.assertEqual(sorted(warnings), sorted(test_expected_warnings))
+
+
+def test_get_expected_cmakelists_txt():
+ expected = """\
+cmake_minimum_required(VERSION 2.8.3)
+project(example)
+find_package(catkin REQUIRED)
+catkin_metapackage()
+"""
+ assert expected == get_expected_cmakelists_txt('example')
--- /dev/null
+import os.path
+# Redirect stderr to stdout to suppress output in tests
+import sys
+import unittest
+
+import xml.dom.minidom as dom
+
+from catkin_pkg.package import (
+ _check_known_attributes,
+ _get_package_xml,
+ Dependency,
+ Export,
+ has_ros_schema_reference_string,
+ InvalidPackage,
+ License,
+ Package,
+ parse_package,
+ parse_package_string,
+ Person,
+)
+
+from mock import Mock
+
+sys.stderr = sys.stdout
+
+test_data_dir = os.path.join(os.path.dirname(__file__), 'data', 'package')
+
+
+class PackageTest(unittest.TestCase):
+
+ if sys.version_info[0] == 2:
+ assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
+
+ def get_maintainer(self):
+ maint = Mock()
+ maint.email = 'foo@bar.com'
+ maint.name = 'John Foo'
+ return maint
+
+ def get_group_dependency(self, name):
+ group = Mock()
+ group.name = name
+ return group
+
+ def test_init(self):
+ maint = self.get_maintainer()
+ pack = Package(name='foo',
+ version='0.0.0',
+ maintainers=[maint],
+ licenses=['BSD'])
+ self.assertEqual(None, pack.filename)
+ self.assertEqual('0.0.0', pack.version)
+ self.assertEqual(None, pack.version_compatibility)
+ self.assertEqual([], pack.urls)
+ self.assertEqual([], pack.authors)
+ self.assertEqual([maint], pack.maintainers)
+ self.assertEqual(['BSD'], pack.licenses)
+ self.assertEqual([None], [license_.file for license_ in pack.licenses])
+ self.assertEqual([], pack.build_depends)
+ self.assertEqual([], pack.buildtool_depends)
+ self.assertEqual([], pack.run_depends)
+ self.assertEqual([], pack.test_depends)
+ self.assertEqual([], pack.conflicts)
+ self.assertEqual([], pack.replaces)
+ self.assertEqual([], pack.exports)
+ self.assertEqual([], pack.group_depends)
+ self.assertEqual([], pack.member_of_groups)
+ pack = Package('foo',
+ name='bar',
+ version='0.0.0',
+ licenses=['BSD'],
+ maintainers=[self.get_maintainer()])
+ self.assertEqual('foo', pack.filename)
+
+ self.assertRaises(TypeError, Package, unknownattribute=42)
+
+ def test_init_dependency(self):
+ dep = Dependency('foo',
+ version_lt=1,
+ version_lte=2,
+ version_eq=3,
+ version_gte=4,
+ version_gt=5,
+ condition='$foo == 23 and $bar != 42')
+ self.assertEqual('foo', dep.name)
+ self.assertEqual(1, dep.version_lt)
+ self.assertEqual(2, dep.version_lte)
+ self.assertEqual(3, dep.version_eq)
+ self.assertEqual(4, dep.version_gte)
+ self.assertEqual(5, dep.version_gt)
+ self.assertFalse(dep.evaluate_condition({'foo': 23, 'bar': 42}))
+ self.assertFalse(dep.evaluated_condition)
+ self.assertTrue(dep.evaluate_condition({'foo': 23, 'bar': 43}))
+ self.assertTrue(dep.evaluated_condition)
+ self.assertRaises(TypeError, Dependency, 'foo', unknownattribute=42)
+
+ d = {}
+ d[dep] = None
+ dep2 = Dependency('foo',
+ version_lt=1,
+ version_lte=2,
+ version_eq=3,
+ version_gte=4,
+ version_gt=5,
+ condition='$foo == 23 and $bar != 42')
+ dep2.evaluate_condition({'foo': 23, 'bar': 43})
+ d[dep2] = None
+ self.assertEqual(len(d), 1)
+ dep3 = Dependency('foo',
+ version_lt=1,
+ version_lte=2,
+ version_eq=3,
+ version_gte=4,
+ version_gt=6)
+ d[dep3] = None
+ self.assertEqual(len(d), 2)
+
+ dep = Dependency('foo', condition='foo > bar and bar < baz')
+ self.assertTrue(dep.evaluate_condition({}))
+
+ dep = Dependency('foo', condition='foo <= bar or bar >= baz')
+ self.assertFalse(dep.evaluate_condition({}))
+
+ dep = Dependency('foo', condition='$foo == ""')
+ self.assertTrue(dep.evaluate_condition({}))
+ self.assertFalse(dep.evaluate_condition({'foo': 'foo'}))
+
+ dep = Dependency('foo', condition='$foo == "foo \' bar"')
+ self.assertTrue(dep.evaluate_condition({'foo': "foo ' bar"}))
+ self.assertFalse(dep.evaluate_condition({}))
+
+ dep = Dependency('foo', condition="$foo == ''")
+ self.assertTrue(dep.evaluate_condition({}))
+ self.assertFalse(dep.evaluate_condition({'foo': 'foo'}))
+
+ dep = Dependency('foo', condition="$foo == 'foo \" bar'")
+ self.assertTrue(dep.evaluate_condition({'foo': 'foo " bar'}))
+ self.assertFalse(dep.evaluate_condition({}))
+
+ # Testing for more than 1 conditions
+ dep = Dependency('foo', condition='foo > bar and bar < baz and foo > bar')
+ self.assertTrue(dep.evaluate_condition({}))
+
+ dep = Dependency('foo', condition='foo <= bar and bar >= baz and foo > bar')
+ self.assertFalse(dep.evaluate_condition({}))
+
+ dep = Dependency('foo', condition='foo > bar or bar < baz or foo <= bar')
+ self.assertTrue(dep.evaluate_condition({}))
+
+ dep = Dependency('foo', condition='foo <= bar or bar >= baz or foo <= bar')
+ self.assertFalse(dep.evaluate_condition({}))
+
+ dep = Dependency('foo', condition='foo <= bar and bar < baz or foo > bar')
+ self.assertTrue(dep.evaluate_condition({}))
+
+ dep = Dependency('foo', condition='foo <= bar or bar < baz and foo < bar')
+ self.assertFalse(dep.evaluate_condition({}))
+
+ dep = Dependency('foo', condition='foo > bar and bar >= baz or foo > bar')
+ self.assertTrue(dep.evaluate_condition({}))
+
+ dep = Dependency('foo', condition='foo <= bar or bar >= baz and foo < bar')
+ self.assertFalse(dep.evaluate_condition({}))
+
+ def test_dependency_repr(self):
+ dep = Dependency('foo', condition='$foo == 2')
+ assert repr(dep) == "Dependency(name='foo', condition='$foo == 2')"
+ self.assertTrue(dep.evaluate_condition({'foo': 2}))
+ assert repr(dep) == "Dependency(name='foo', condition='$foo == 2', evaluated_condition=True)"
+ self.assertFalse(dep.evaluate_condition({'foo': 3}))
+ assert repr(dep) == "Dependency(name='foo', condition='$foo == 2', evaluated_condition=False)"
+
+ def test_init_kwargs_string(self):
+ pack = Package('foo',
+ name='bar',
+ package_format='1',
+ version='0.0.1',
+ version_compatibility='0.0.0',
+ description='pdesc',
+ licenses=['BSD'],
+ maintainers=[self.get_maintainer()])
+ self.assertEqual('foo', pack.filename)
+ self.assertEqual('bar', pack.name)
+ self.assertEqual('1', pack.package_format)
+ self.assertEqual('0.0.0', pack.version_compatibility)
+ self.assertEqual('0.0.1', pack.version)
+ self.assertEqual('pdesc', pack.description)
+
+ def test_init_kwargs_object(self):
+ mmain = [self.get_maintainer(), self.get_maintainer()]
+ mlis = ['MIT', License('BSD', 'LICENSE')]
+ mauth = [self.get_maintainer(), self.get_maintainer()]
+ murl = [Mock(), Mock()]
+ mbuilddep = [Mock(), Mock()]
+ mbuildtooldep = [Mock(), Mock()]
+ mrundep = [Mock(), Mock()]
+ mtestdep = [Mock(), Mock()]
+ mconf = [Mock(), Mock()]
+ mrepl = [Mock(), Mock()]
+ mexp = [Mock(), Mock()]
+ mgroup = [
+ self.get_group_dependency('group1'),
+ self.get_group_dependency('group2')]
+ mmember = ['member1', 'member2']
+ pack = Package(package_format='3',
+ name='bar',
+ version='0.0.0',
+ maintainers=mmain,
+ licenses=mlis,
+ urls=murl,
+ authors=mauth,
+ build_depends=mbuilddep,
+ buildtool_depends=mbuildtooldep,
+ run_depends=mrundep,
+ test_depends=mtestdep,
+ conflicts=mconf,
+ replaces=mrepl,
+ group_depends=mgroup,
+ member_of_groups=mmember,
+ exports=mexp)
+ self.assertEqual(mmain, pack.maintainers)
+ self.assertEqual(mlis, pack.licenses)
+ self.assertEqual([None, 'LICENSE'], [license_.file for license_ in pack.licenses])
+ self.assertEqual(murl, pack.urls)
+ self.assertEqual(mauth, pack.authors)
+ self.assertEqual(mbuilddep, pack.build_depends)
+ self.assertEqual(mbuildtooldep, pack.buildtool_depends)
+        # since run_depends are stored as build_export_depends as well as exec_depends
+        # and the dependency objects are cloned, only the doubled count can be checked
+ self.assertEqual(2 * len(mrundep), len(pack.run_depends))
+ self.assertEqual(mtestdep, pack.test_depends)
+ self.assertEqual(mconf, pack.conflicts)
+ self.assertEqual(mrepl, pack.replaces)
+ self.assertEqual(mexp, pack.exports)
+ self.assertEqual(mgroup, pack.group_depends)
+ self.assertEqual(mmember, pack.member_of_groups)
+
+ def test_validate_package(self):
+ maint = self.get_maintainer()
+ pack = Package('foo',
+ name='bar_2go',
+ package_format='1',
+ version='0.0.1',
+ description='pdesc',
+ licenses=['BSD'],
+ maintainers=[maint])
+ pack.validate()
+
+ # names that should error
+ pack.name = 'bar bza'
+ self.assertRaises(InvalidPackage, Package.validate, pack)
+ pack.name = 'foo%'
+ self.assertRaises(InvalidPackage, Package.validate, pack)
+
+ # names that should throw warnings
+ pack.name = '2bar'
+ warnings = []
+ pack.validate(warnings=warnings)
+ self.assertIn('naming conventions', warnings[0])
+
+ pack.name = 'bar-bza'
+ warnings = []
+ pack.validate(warnings=warnings)
+ self.assertEqual(warnings, [])
+
+ pack.name = 'BAR'
+ warnings = []
+ pack.validate(warnings=warnings)
+ self.assertIn('naming conventions', warnings[0])
+
+ # dashes are permitted for a non-catkin package
+ pack.exports.append(Export('build_type', 'other'))
+ pack.name = 'bar-bza'
+ warnings = []
+ pack.validate(warnings=warnings)
+ self.assertEqual(warnings, [])
+ pack.exports.pop()
+
+ # check authors emails
+ pack.name = 'bar'
+ auth1 = Mock()
+ auth2 = Mock()
+ auth2.validate.side_effect = InvalidPackage('foo')
+ pack.authors = [auth1, auth2]
+ self.assertRaises(InvalidPackage, Package.validate, pack)
+ pack.authors = []
+ pack.validate()
+
+ # check maintainer required with email
+ pack.maintainers = []
+ self.assertRaises(InvalidPackage, Package.validate, pack)
+ pack.maintainers = [maint]
+ maint.email = None
+ self.assertRaises(InvalidPackage, Package.validate, pack)
+ maint.email = 'foo@bar.com'
+
+ for dep_type in [pack.build_depends, pack.buildtool_depends, pack.build_export_depends, pack.buildtool_export_depends, pack.exec_depends, pack.test_depends, pack.doc_depends]:
+ pack.validate()
+ depend = Dependency(pack.name)
+ dep_type.append(depend)
+ self.assertRaises(InvalidPackage, Package.validate, pack)
+ dep_type.remove(depend)
+
+ def test_invalid_package_exception(self):
+ try:
+ raise InvalidPackage('foo')
+ except InvalidPackage as e:
+ self.assertEqual('foo', str(e))
+ self.assertEqual(None, e.package_path)
+ try:
+ raise InvalidPackage('foo', package_path='./bar')
+ except InvalidPackage as e:
+ self.assertEqual("Error(s) in package './bar':\nfoo", str(e))
+ self.assertEqual('./bar', e.package_path)
+
+ def test_validate_person(self):
+ auth1 = Person('foo')
+ auth1.email = 'foo@bar.com'
+ auth1.validate()
+ auth1.email = 'foo-bar@bar.com'
+ auth1.validate()
+ auth1.email = 'foo+bar@bar.com'
+ auth1.validate()
+
+ auth1.email = 'foo[at]bar.com'
+ self.assertRaises(InvalidPackage, Person.validate, auth1)
+ auth1.email = 'foo bar.com'
+ self.assertRaises(InvalidPackage, Person.validate, auth1)
+ auth1.email = 'foo<bar.com'
+ self.assertRaises(InvalidPackage, Person.validate, auth1)
+
+ def test_check_known_attributes(self):
+
+ def create_node(tag, attrs):
+ data = '<%s %s/>' % (tag, ' '.join(('%s="%s"' % p) for p in attrs.items()))
+ return dom.parseString(data).firstChild
+
+ try:
+ create_node('tag', {'key': 'value'})
+ except Exception as e:
+ self.fail('create_node() raised %s "%s" unexpectedly!' % (type(e), str(e)))
+
+ self.assertRaisesRegex(Exception, 'unbound prefix: line 1, column 0', create_node, 'tag', {'ns:key': 'value'})
+
+ try:
+ create_node('tag', {'ns:key': 'value', 'xmlns:ns': 'urn:ns'})
+ except Exception as e:
+ self.fail('create_node() raised %s "%s" unexpectedly!' % (type(e), str(e)))
+
+ def check(attrs, known, res=[]):
+ self.assertEqual(res, _check_known_attributes(create_node('tag', attrs), known))
+
+ expected_err = ['The "tag" tag must not have the following attributes: attr2']
+
+ check({}, [])
+ check({}, ['attr'])
+ check({'attr': 'value'}, ['attr'])
+ check({'attr2': 'value'}, ['attr'], expected_err)
+
+ check({'xmlns': 'urn:ns'}, ['attr'])
+ check({'xmlns:ns': 'urn:ns'}, ['attr'])
+ check({'xmlns:ns': 'urn:ns', 'ns:attr': 'value'}, ['attr'])
+ check({'xmlns:ns': 'urn:ns', 'ns:attr': 'value', 'attr2': 'value'}, ['attr'], expected_err)
+
+ def test_parse_package_valid(self):
+ filename = os.path.join(test_data_dir, 'valid_package.xml')
+ package = parse_package(filename)
+ assert package.filename == filename
+ assert not package.is_metapackage()
+ assert package.name == 'valid_package'
+ assert package.description == 'valid_package description'
+ assert package.version == '0.1.0'
+ assert package.licenses == ['BSD']
+ assert [x.name for x in package.run_depends] == ['foo', 'bar', 'baz']
+
+ def test_parse_package_invalid(self):
+ filename = os.path.join(test_data_dir, 'invalid_package.xml')
+ self.assertRaises(InvalidPackage, parse_package, filename)
+
+ def test_parse_package_string(self):
+ filename = os.path.join(test_data_dir, 'valid_package.xml')
+ xml = _get_package_xml(filename)[0]
+
+ assert isinstance(xml, str)
+ parse_package_string(xml)
+
+ if sys.version_info[0] == 2:
+ xml = xml.decode('utf-8')
+ assert not isinstance(xml, str)
+ else:
+ xml = xml.encode('utf-8')
+ assert isinstance(xml, bytes)
+ parse_package_string(xml)
+
+ def test_has_ros_schema_reference_string(self):
+ self.assertFalse(
+ has_ros_schema_reference_string(
+ """\
+<?xml version="1.0"?>
+<?xml-model href="http://some.url/to/a_wrong_schema.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
+<package/>
+"""))
+ self.assertFalse(
+ has_ros_schema_reference_string(
+ """\
+<?xml version="1.0"?>
+<package/>
+"""))
+ for format_version in (1, 2, 3):
+ self.assertTrue(
+ has_ros_schema_reference_string(
+ """\
+<?xml version="1.0"?>
+<?xml-model href="http://download.ros.org/schema/package_format%d.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
+<package/>
+""" % format_version))
--- /dev/null
+import datetime
+import os
+import shutil
+import tempfile
+import unittest
+
+from catkin_pkg.package_version import _replace_version
+from catkin_pkg.package_version import bump_version
+from catkin_pkg.package_version import update_changelog_sections
+from catkin_pkg.package_version import update_versions
+
+import mock
+
+from .util import in_temporary_directory
+
+
+class PackageVersionTest(unittest.TestCase):
+
+ def test_bump_version(self):
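+ """Verify bump_version: 'patch' is the default part, bumping a higher part resets the lower ones, and malformed versions raise ValueError."""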
+ self.assertEqual('0.0.1', bump_version('0.0.0'))
+ self.assertEqual('1.0.1', bump_version('1.0.0'))
+ self.assertEqual('0.1.1', bump_version('0.1.0'))
+ self.assertEqual('0.0.1', bump_version('0.0.0', 'patch'))
+ self.assertEqual('1.0.1', bump_version('1.0.0', 'patch'))
+ self.assertEqual('0.1.1', bump_version('0.1.0', 'patch'))
+ self.assertEqual('1.0.0', bump_version('0.0.0', 'major'))
+ self.assertEqual('1.0.0', bump_version('0.0.1', 'major'))
+ self.assertEqual('1.0.0', bump_version('0.1.1', 'major'))
+ self.assertEqual('0.1.0', bump_version('0.0.0', 'minor'))
+ self.assertEqual('0.1.0', bump_version('0.0.1', 'minor'))
+ self.assertEqual('1.1.0', bump_version('1.0.1', 'minor'))
+ self.assertRaises(ValueError, bump_version, '0.0.asd')
+ self.assertRaises(ValueError, bump_version, '0.0')
+ self.assertRaises(ValueError, bump_version, '0')
+ self.assertRaises(ValueError, bump_version, '0.0.-1')
+
+ def test_replace_version(self):
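+ """Verify that _replace_version rewrites the single <version> tag and raises RuntimeError when zero or multiple tags are present."""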
+ self.assertEqual('<package><version>0.1.1</version></package>',
+ _replace_version('<package><version>0.1.0</version></package>', '0.1.1'))
+ self.assertEqual("<package><version abi='0.1.0'>0.1.1</version></package>",
+ _replace_version("<package><version abi='0.1.0'>0.1.0</version></package>", '0.1.1'))
+ self.assertRaises(RuntimeError, _replace_version, '<package></package>', '0.1.1')
+ self.assertRaises(RuntimeError, _replace_version, '<package><version>0.1.1</version><version>0.1.1</version></package>', '0.1.1')
+
+ def test_update_versions(self):
+ root_dir = tempfile.mkdtemp()
+ try:
+ sub_dir = os.path.join(root_dir, 'sub')
+ with open(os.path.join(root_dir, 'package.xml'), 'w') as fhand:
+ fhand.write('<package><version>2.3.4</version></package>')
+ os.makedirs(sub_dir)
+ with open(os.path.join(sub_dir, 'package.xml'), 'w') as fhand:
+ fhand.write('<package><version>1.5.4</version></package>')
+
+ update_versions([root_dir, sub_dir], '7.6.5')
+
+ with open(os.path.join(root_dir, 'package.xml'), 'r') as fhand:
+ contents = fhand.read()
+ self.assertEqual('<package><version>7.6.5</version></package>', contents)
+ with open(os.path.join(sub_dir, 'package.xml'), 'r') as fhand:
+ contents = fhand.read()
+ self.assertEqual('<package><version>7.6.5</version></package>', contents)
+ finally:
+ shutil.rmtree(root_dir)
+
+ @in_temporary_directory
+ def test_update_changelog_unicode(self, directory=None):
+ """Test that updating the changelog does not throw an exception on unicode characters."""
+ temp_file = os.path.join(directory, 'changelog')
+ missing_changelogs_but_forthcoming = {}
+ # Mock the Changelog object from catkin_pkg
+ mock_changelog = mock.Mock()
+ # Create a changelog entry with a unicode char.
+ mock_changelog.rst = ('^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n'
+ 'Changelog for package fake_pkg\n'
+ '^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n'
+ '\n'
+ 'Forthcoming\n'
+ '-----------\n'
+ '* This is my changelog entry\n'
+ u'* This is a line that has unicode\xfc\n'
+ '\n'
+ '0.0.9 (2017-01-30)\n'
+ '------------------\n'
+ '* This is old version.\n')
+
+ # Create a tuple with the expected entries.
+ missing_changelogs_but_forthcoming['fake_pkg'] = (temp_file, mock_changelog, 'Forthcoming')
+ # Should not raise an exception
+ update_changelog_sections(missing_changelogs_but_forthcoming, '1.0.0')
+
+ # Generate the dynamic lines: the version line using the current
+ # system date, an underline of '-'s matching its length,
+ # and the utf-8 encoded data expected to be read back.
+ ver_line = '1.0.0 (%s)' % datetime.date.today().isoformat()
+ ver_line = ver_line.encode('utf-8')
+ dash_line = '-' * len(ver_line)
+ dash_line = dash_line.encode('utf-8')
+ unicode_line = u'* This is a line that has unicode\xfc'.encode('utf-8')
+ expected = [b'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
+ b'Changelog for package fake_pkg',
+ b'^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^',
+ b'',
+ ver_line,
+ dash_line,
+ b'* This is my changelog entry',
+ unicode_line,
+ b'',
+ b'0.0.9 (2017-01-30)',
+ b'------------------',
+ b'* This is old version.']
+
+ # Open the written file and compare each line read back
+ # to the expected one.
+ with open(temp_file, 'rb') as verify_file:
+ content = verify_file.read().splitlines()
+ for line_written, line_expected in zip(content, expected):
+ self.assertEqual(line_written.strip(), line_expected)
--- /dev/null
+import os
+
+from catkin_pkg.package import InvalidPackage
+from catkin_pkg.packages import find_package_paths
+from catkin_pkg.packages import find_packages
+from catkin_pkg.packages import find_packages_allowing_duplicates
+
+from .util import in_temporary_directory
+
+
+def _create_pkg_in_dir(path, version='0.1.0'):
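+ """Write a minimal package.xml into path, naming the package after the directory's basename."""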
+ path = os.path.abspath(path)
+ os.makedirs(path)
+
+ template = """\
+<?xml version="1.0"?>
+<package>
+ <name>{0}</name>
+ <version>{1}</version>
+ <description>Package {0}</description>
+ <license>BSD</license>
+
+ <maintainer email="foo@bar.com">Foo Bar</maintainer>
+</package>
+""".format(path.split('/')[-1], version)
+
+ with open(os.path.join(path, 'package.xml'), 'w+') as f:
+ f.write(template)
+
+
+@in_temporary_directory
+def test_package_paths_with_hidden_directories():
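+ """Hidden directories (leading dot) must be skipped when searching for package paths."""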
+ _create_pkg_in_dir('.test1')
+ _create_pkg_in_dir('.test2')
+ _create_pkg_in_dir('test3') # not hidden
+ _create_pkg_in_dir('.test4')
+
+ res = find_package_paths('.')
+ assert res == ['test3']
+
+
+@in_temporary_directory
+def test_find_packages_allowing_duplicates_with_no_packages():
+ res = find_packages_allowing_duplicates('.')
+ assert isinstance(res, dict)
+ assert not res
+
+
+@in_temporary_directory
+def test_find_packages_invalid_version():
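+ """An invalid version must raise InvalidPackage mentioning both the version and the package path."""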
+ version = ':{version}'
+ path = 'src/foo'
+ _create_pkg_in_dir(path, version)
+ try:
+ find_packages(path.split('/')[0])
+ assert False, 'Must raise'
+ except InvalidPackage as e:
+ exception_message = str(e)
+ assert version in exception_message
+ assert path in exception_message
--- /dev/null
+import os
+import shutil
+import tempfile
+import unittest
+
+from catkin_pkg.package import Dependency, Export, PACKAGE_MANIFEST_FILENAME, parse_package, Url
+from catkin_pkg.package_templates import _create_include_macro, _create_targetlib_args, _safe_write_files, \
+ create_cmakelists, create_package_files, create_package_xml, PackageTemplate
+from catkin_pkg.python_setup import generate_distutils_setup
+
+from mock import MagicMock, Mock
+
+
+def u(line):
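+ """Return a unicode string on Python 2; on Python 3, where unicode is undefined, return line unchanged."""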
+ try:
+ return unicode(line)
+ except NameError:
+ return line
+
+
+class TemplateTest(unittest.TestCase):
+
+ def get_maintainer(self):
+ maint = Mock()
+ maint.email = 'foo@bar.com'
+ maint.name = 'John Foo'
+ return maint
+
+ def test_safe_write_files(self):
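+ """Verify that _safe_write_files creates the given files and refuses to overwrite existing ones."""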
+ file1 = os.path.join('foo', 'bar')
+ file2 = os.path.join('foo', 'baz')
+ newfiles = {file1: 'foobar', file2: 'barfoo'}
+ rootdir = tempfile.mkdtemp()
+ try:
+ _safe_write_files(newfiles, rootdir)
+ self.assertTrue(os.path.isfile(os.path.join(rootdir, file1)))
+ self.assertTrue(os.path.isfile(os.path.join(rootdir, file2)))
+ self.assertRaises(ValueError, _safe_write_files, newfiles, rootdir)
+ finally:
+ shutil.rmtree(rootdir)
+
+ def test_create_cmakelists(self):
+ mock_pack = MagicMock()
+ mock_pack.name = 'foo'
+ mock_pack.catkin_deps = []
+ result = create_cmakelists(mock_pack, 'groovy')
+ self.assertTrue('project(foo)' in result, result)
+ self.assertTrue('find_package(catkin REQUIRED)' in result, result)
+
+ mock_pack.catkin_deps = ['bar', 'baz']
+ result = create_cmakelists(mock_pack, 'groovy')
+ self.assertTrue('project(foo)' in result, result)
+ expected = """find_package(catkin REQUIRED COMPONENTS
+ bar
+ baz
+)"""
+
+ self.assertTrue(expected in result, result)
+
+ def test_create_package_xml(self):
+ maint = self.get_maintainer()
+ pack = PackageTemplate(name='foo',
+ description='foo',
+ version='0.0.0',
+ maintainers=[maint],
+ licenses=['BSD'])
+
+ result = create_package_xml(pack, 'groovy')
+ self.assertTrue('<name>foo</name>' in result, result)
+
+ def test_create_targetlib_args(self):
+ mock_pack = MagicMock()
+ mock_pack.name = 'foo'
+ mock_pack.catkin_deps = []
+ mock_pack.boost_comps = []
+ mock_pack.system_deps = []
+ statement = _create_targetlib_args(mock_pack)
+ self.assertEqual('# ${catkin_LIBRARIES}\n', statement)
+ mock_pack.catkin_deps = ['roscpp', 'rospy']
+ mock_pack.boost_comps = []
+ mock_pack.system_deps = []
+ statement = _create_targetlib_args(mock_pack)
+ self.assertEqual('# ${catkin_LIBRARIES}\n', statement)
+ mock_pack.catkin_deps = ['roscpp']
+ mock_pack.boost_comps = ['thread', 'filesystem']
+ mock_pack.system_deps = []
+ statement = _create_targetlib_args(mock_pack)
+ self.assertEqual('# ${catkin_LIBRARIES}\n# ${Boost_LIBRARIES}\n', statement)
+ mock_pack.catkin_deps = ['roscpp']
+ mock_pack.boost_comps = []
+ mock_pack.system_deps = ['log4cxx', 'BZip2']
+ statement = _create_targetlib_args(mock_pack)
+ self.assertEqual('# ${catkin_LIBRARIES}\n# ${log4cxx_LIBRARIES}\n# ${BZip2_LIBRARIES}\n', statement)
+ mock_pack.catkin_deps = ['roscpp']
+ mock_pack.boost_comps = ['thread', 'filesystem']
+ mock_pack.system_deps = ['log4cxx', 'BZip2']
+ statement = _create_targetlib_args(mock_pack)
+ self.assertEqual('# ${catkin_LIBRARIES}\n# ${Boost_LIBRARIES}\n# ${log4cxx_LIBRARIES}\n# ${BZip2_LIBRARIES}\n', statement)
+
+ def test_create_include_macro(self):
+ mock_pack = MagicMock()
+ mock_pack.name = 'foo'
+ mock_pack.catkin_deps = []
+ mock_pack.boost_comps = []
+ mock_pack.system_deps = []
+ statement = _create_include_macro(mock_pack)
+ self.assertEqual('# include\n# ${catkin_INCLUDE_DIRS}', statement)
+ mock_pack.catkin_deps = ['roscpp', 'rospy']
+ mock_pack.boost_comps = []
+ mock_pack.system_deps = []
+ statement = _create_include_macro(mock_pack)
+ self.assertEqual('# include\n ${catkin_INCLUDE_DIRS}', statement)
+ mock_pack.catkin_deps = ['roscpp']
+ mock_pack.boost_comps = ['thread', 'filesystem']
+ mock_pack.system_deps = []
+ statement = _create_include_macro(mock_pack)
+ self.assertEqual('# include\n ${catkin_INCLUDE_DIRS}\n ${Boost_INCLUDE_DIRS}', statement)
+ mock_pack.catkin_deps = ['roscpp']
+ mock_pack.boost_comps = []
+ mock_pack.system_deps = ['log4cxx', 'BZip2']
+ statement = _create_include_macro(mock_pack)
+ self.assertEqual('# include\n ${catkin_INCLUDE_DIRS}\n# TODO: Check names of system library include directories (log4cxx, BZip2)\n'
+ ' ${log4cxx_INCLUDE_DIRS}\n ${BZip2_INCLUDE_DIRS}', statement)
+ mock_pack.catkin_deps = ['roscpp']
+ mock_pack.boost_comps = ['thread', 'filesystem']
+ mock_pack.system_deps = ['log4cxx', 'BZip2']
+ statement = _create_include_macro(mock_pack)
+ self.assertEqual('# include\n ${catkin_INCLUDE_DIRS}\n ${Boost_INCLUDE_DIRS}\n# TODO: Check names of system library include directories (log4cxx, BZip2)\n'
+ ' ${log4cxx_INCLUDE_DIRS}\n ${BZip2_INCLUDE_DIRS}', statement)
+
+ def test_create_package(self):
+ maint = self.get_maintainer()
+ pack = PackageTemplate(name='bar',
+ description='bar',
+ package_format='1',
+ version='0.0.1',
+ version_compatibility='0.0.0',
+ maintainers=[maint],
+ licenses=['BSD'])
+ rootdir = tempfile.mkdtemp()
+ try:
+ file1 = os.path.join(rootdir, 'CMakeLists.txt')
+ file2 = os.path.join(rootdir, PACKAGE_MANIFEST_FILENAME)
+ create_package_files(rootdir, pack, 'groovy', {file1: ''})
+ self.assertTrue(os.path.isfile(file1))
+ self.assertTrue(os.path.isfile(file2))
+ finally:
+ shutil.rmtree(rootdir)
+
+ def test_create_package_template(self):
+ template = PackageTemplate._create_package_template(
+ package_name='bar2',
+ catkin_deps=['dep1', 'dep2'])
+ self.assertEqual('dep1', template.build_depends[0].name)
+ self.assertEqual('dep2', template.build_depends[1].name)
+
+ def test_parse_generated(self):
+ maint = self.get_maintainer()
+ pack = PackageTemplate(name='bar',
+ package_format=2,
+ version='0.0.1',
+ version_compatibility='0.0.0',
+ urls=[Url('foo')],
+ description='pdesc',
+ maintainers=[maint],
+ licenses=['BSD'])
+ rootdir = tempfile.mkdtemp()
+ try:
+ file1 = os.path.join(rootdir, 'CMakeLists.txt')
+ file2 = os.path.join(rootdir, PACKAGE_MANIFEST_FILENAME)
+ create_package_files(rootdir, pack, 'groovy')
+ self.assertTrue(os.path.isfile(file1))
+ self.assertTrue(os.path.isfile(file2))
+
+ pack_result = parse_package(file2)
+ self.assertEqual(pack.name, pack_result.name)
+ self.assertEqual(pack.package_format, pack_result.package_format)
+ self.assertEqual(pack.version, pack_result.version)
+ self.assertEqual(pack.version_compatibility, pack_result.version_compatibility)
+ self.assertEqual(pack.description, pack_result.description)
+ self.assertEqual(pack.maintainers[0].name, pack_result.maintainers[0].name)
+ self.assertEqual(pack.maintainers[0].email, pack_result.maintainers[0].email)
+ self.assertEqual(pack.authors, pack_result.authors)
+ self.assertEqual(pack.urls[0].url, pack_result.urls[0].url)
+ self.assertEqual('website', pack_result.urls[0].type)
+ self.assertEqual(pack.licenses, pack_result.licenses)
+ self.assertEqual(pack.build_depends, pack_result.build_depends)
+ self.assertEqual(pack.buildtool_depends, pack_result.buildtool_depends)
+ self.assertEqual(pack.run_depends, pack_result.run_depends)
+ self.assertEqual(pack.test_depends, pack_result.test_depends)
+ self.assertEqual(pack.conflicts, pack_result.conflicts)
+ self.assertEqual(pack.replaces, pack_result.replaces)
+ self.assertEqual(pack.exports, pack_result.exports)
+
+ rdict = generate_distutils_setup(package_xml_path=file2)
+ self.assertEqual({'name': 'bar',
+ 'maintainer': u('John Foo'),
+ 'maintainer_email': 'foo@bar.com',
+ 'description': 'pdesc',
+ 'license': 'BSD',
+ 'version': '0.0.1',
+ 'author': '',
+ 'url': 'foo'}, rdict)
+ finally:
+ shutil.rmtree(rootdir)
+
+ def test_parse_generated_multi(self):
+ # test with multiple attributes filled
+ maint = self.get_maintainer()
+ pack = PackageTemplate(name='bar',
+ package_format=2,
+ version='0.0.1',
+ version_compatibility='0.0.0',
+ description='pdesc',
+ maintainers=[maint, maint],
+ authors=[maint, maint],
+ licenses=['BSD', 'MIT'],
+ urls=[Url('foo', 'bugtracker'), Url('bar')],
+ build_depends=[Dependency('dep1')],
+ buildtool_depends=[Dependency('dep2'),
+ Dependency('dep3')],
+ run_depends=[Dependency('dep4', version_lt='4')],
+ test_depends=[Dependency('dep5',
+ version_gt='4',
+ version_lt='4')],
+ conflicts=[Dependency('dep6')],
+ replaces=[Dependency('dep7'),
+ Dependency('dep8')],
+ exports=[Export('architecture_independent'),
+ Export('meta_package')])
+
+ def assertEqualDependencies(deplist1, deplist2):
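+ """Compare two dependency lists field by field, returning True when they match."""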
+ if len(deplist1) != len(deplist2):
+ return False
+ for depx, depy in zip(deplist1, deplist2):
+ for attr in ['name', 'version_lt', 'version_lte',
+ 'version_eq', 'version_gte', 'version_gt']:
+ if getattr(depx, attr) != getattr(depy, attr):
+ return False
+ return True
+
+ rootdir = tempfile.mkdtemp()
+ try:
+ file1 = os.path.join(rootdir, 'CMakeLists.txt')
+ file2 = os.path.join(rootdir, PACKAGE_MANIFEST_FILENAME)
+ create_package_files(rootdir, pack, 'groovy')
+ self.assertTrue(os.path.isfile(file1))
+ self.assertTrue(os.path.isfile(file2))
+
+ pack_result = parse_package(file2)
+ self.assertEqual(pack.name, pack_result.name)
+ self.assertEqual(pack.package_format, pack_result.package_format)
+ self.assertEqual(pack.version, pack_result.version)
+ self.assertEqual(pack.version_compatibility, pack_result.version_compatibility)
+ self.assertEqual(pack.description, pack_result.description)
+ self.assertEqual(len(pack.maintainers), len(pack_result.maintainers))
+ self.assertEqual(len(pack.authors), len(pack_result.authors))
+ self.assertEqual(len(pack.urls), len(pack_result.urls))
+ self.assertEqual(pack.urls[0].url, pack_result.urls[0].url)
+ self.assertEqual(pack.urls[0].type, pack_result.urls[0].type)
+ self.assertEqual(pack.licenses, pack_result.licenses)
+ self.assertTrue(assertEqualDependencies(pack.build_depends,
+ pack_result.build_depends))
+ self.assertTrue(assertEqualDependencies(pack.buildtool_depends,
+ pack_result.buildtool_depends))
+ self.assertTrue(assertEqualDependencies(pack.run_depends,
+ pack_result.run_depends))
+ self.assertTrue(assertEqualDependencies(pack.test_depends,
+ pack_result.test_depends))
+ self.assertTrue(assertEqualDependencies(pack.conflicts,
+ pack_result.conflicts))
+ self.assertTrue(assertEqualDependencies(pack.replaces,
+ pack_result.replaces))
+ self.assertEqual(pack.exports[0].tagname, pack_result.exports[0].tagname)
+ self.assertEqual(pack.exports[1].tagname, pack_result.exports[1].tagname)
+
+ rdict = generate_distutils_setup(package_xml_path=file2)
+ self.assertEqual({'name': 'bar',
+ 'maintainer': u('John Foo <foo@bar.com>, John Foo <foo@bar.com>'),
+ 'description': 'pdesc',
+ 'license': 'BSD, MIT',
+ 'version': '0.0.1',
+ 'author': u('John Foo <foo@bar.com>, John Foo <foo@bar.com>'),
+ 'url': 'bar'}, rdict)
+ finally:
+ shutil.rmtree(rootdir)
--- /dev/null
+import unittest
+
+from catkin_pkg.terminal_color import ansi, enable_ANSI_colors, fmt, sanitize
+
+try:
+ char = unichr
+except NameError:
+ char = chr
+
+
+class TerminalColorTest(unittest.TestCase):
+
+ def test_terminal_colors(self):
+ # since other tests might disable ANSI colors
+ # we need to ensure they are enabled
+ enable_ANSI_colors()
+ assert ansi('reset') != ''
+ test = '@_This is underlined@|'
+ rslt = '\033[4mThis is underlined\033[0m\033[0m'
+ assert fmt(test) == rslt
+ test = 'This has bad stuff @! @/ @_ @| OK!'
+ test = sanitize(test)
+ rslt = 'This has bad stuff @! @/ @_ @| OK!\033[0m'
+ assert fmt(test) == rslt
+ test = char(2018)
+ test = sanitize(test)
+ rslt = char(2018)
+ assert test == rslt
--- /dev/null
+import os
+
+from catkin_pkg.tool_detection import get_previous_tool_used_on_the_space
+from catkin_pkg.tool_detection import mark_space_as_built_by
+
+from .util import in_temporary_directory
+
+
+@in_temporary_directory
+def test_get_previous_tool_used_on_the_space():
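+ """Unmarked or missing spaces yield None; after mark_space_as_built_by the tool name is read back."""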
+ res = get_previous_tool_used_on_the_space('folder_that_does_not_exist')
+ assert res is None, res
+ os.makedirs('build')
+ res = get_previous_tool_used_on_the_space('build')
+ assert res is None, res
+ mark_space_as_built_by('build', 'foo')
+ res = get_previous_tool_used_on_the_space('build')
+ assert res == 'foo', res
--- /dev/null
+from __future__ import print_function
+
+import sys
+import unittest
+
+from mock import Mock
+
+try:
+ from catkin_pkg.topological_order import topological_order_packages, _PackageDecorator, \
+ _sort_decorated_packages
+except ImportError as e:
+ raise ImportError('Please adjust your PYTHONPATH before running this test: %s' % str(e))
+
+
+def create_mock(name, build_depends, run_depends, path):
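+ """Create a package-like mock exposing the dependency attributes inspected during topological ordering."""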
+ m = Mock()
+ m.name = name
+ m.build_depends = build_depends
+ m.buildtool_depends = []
+ m.run_depends = run_depends
+ m.test_depends = []
+ m.group_depends = []
+ m.exports = []
+ m.path = path
+ return m
+
+
+class TopologicalOrderTest(unittest.TestCase):
+
+ if sys.version_info[0] == 2:
+ assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
+
+ def test_topological_order_packages(self):
+ mc = create_mock('c', [], [], 'pc')
+ md = create_mock('d', [], [], 'pd')
+ ma = create_mock('a', [mc], [md], 'pa')
+ mb = create_mock('b', [ma], [], 'pb')
+
+ packages = {ma.path: ma,
+ mb.path: mb,
+ mc.path: mc,
+ md.path: md}
+
+ ordered_packages = topological_order_packages(packages, blacklisted=['c'])
+ # d before b because of the run dependency from a to d
+ # a before d only because of alphabetical order; a run dependency on d must not influence the order
+ self.assertEqual(['pa', 'pd', 'pb'], [path for path, _ in ordered_packages])
+
+ ordered_packages = topological_order_packages(packages, whitelisted=['a', 'b', 'c'])
+ # c before a because of the run dependency from a to c
+ self.assertEqual(['pc', 'pa', 'pb'], [path for path, _ in ordered_packages])
+
+ def test_topological_order_packages_with_duplicates(self):
+ pkg1 = create_mock('pkg', [], [], 'path/to/pkg1')
+ pkg2_dep = create_mock('pkg_dep', [], [], 'path/to/pkg2_dep')
+ pkg2 = create_mock('pkg', [pkg2_dep], [], 'path/to/pkg2')
+ with self.assertRaisesRegex(RuntimeError, 'Two packages with the same name "pkg" in the workspace'):
+ topological_order_packages({
+ pkg1.path: pkg1,
+ pkg2_dep.path: pkg2_dep,
+ pkg2.path: pkg2,
+ })
+
+ def test_package_decorator_init(self):
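+ """Verify that _PackageDecorator exposes name, path, the metapackage flag and the message_generator export content."""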
+
+ mockproject = Mock()
+
+ mockexport = Mock()
+ mockexport.tagname = 'message_generator'
+ mockexport.content = 'foolang'
+ mockproject.exports = [mockexport]
+
+ pd = _PackageDecorator(mockproject, 'foo/bar')
+ self.assertEqual(mockproject.name, pd.name)
+ self.assertEqual('foo/bar', pd.path)
+ self.assertFalse(pd.is_metapackage)
+ self.assertEqual(mockexport.content, pd.message_generator)
+ self.assertIsNotNone(str(pd))
+
+ def test_calculate_depends_for_topological_order(self):
+ def create_mock(name, run_depends):
+ m = Mock()
+ m.name = name
+ m.build_depends = []
+ m.buildtool_depends = []
+ m.run_depends = run_depends
+ m.group_depends = []
+ m.exports = []
+ return m
+
+ mockproject1 = _PackageDecorator(create_mock('n1', []), 'p1')
+ mockproject2 = _PackageDecorator(create_mock('n2', []), 'p2')
+ mockproject3 = _PackageDecorator(create_mock('n3', []), 'p3')
+ mockproject4 = _PackageDecorator(create_mock('n4', []), 'p4')
+ mockproject5 = _PackageDecorator(create_mock('n5', [mockproject4]), 'p5')
+ mockproject6 = _PackageDecorator(create_mock('n6', [mockproject5]), 'p6')
+ mockproject7 = _PackageDecorator(create_mock('n7', []), 'p7')
+
+ mockproject = Mock()
+ mockproject.build_depends = [mockproject1, mockproject2]
+ mockproject.buildtool_depends = [mockproject3, mockproject6]
+ mockproject.run_depends = [mockproject7]
+ mockproject.test_depends = []
+ mockproject.group_depends = []
+ mockproject.exports = []
+
+ pd = _PackageDecorator(mockproject, 'foo/bar')
+ # 2 and 3 as external dependencies
+ packages = {mockproject1.name: mockproject1,
+ mockproject4.name: mockproject4,
+ mockproject5.name: mockproject5,
+ mockproject6.name: mockproject6}
+
+ pd.calculate_depends_for_topological_order(packages)
+ self.assertEqual(set([mockproject1.name, mockproject4.name, mockproject5.name, mockproject6.name]),
+ pd.depends_for_topological_order)
+
+ def test_sort_decorated_packages(self):
+ projects = {}
+ sprojects = _sort_decorated_packages(projects)
+ self.assertEqual([], sprojects)
+
+ def create_mock(path):
+ m = Mock()
+ m.path = path
+ m.depends_for_topological_order = set()
+ m.message_generator = False
+ return m
+
+ mock1 = create_mock('mock1')
+ mock2 = create_mock('mock2')
+ mock3 = create_mock('mock3')
+ mock3.message_generator = True
+
+ projects = {'mock3': mock3, 'mock2': mock2, 'mock1': mock1}
+ sprojects = _sort_decorated_packages(projects)
+
+ # mock3 first since it is a message generator
+ # mock1 before mock2 due to alphabetic order
+ self.assertEqual(['mock3', 'mock1', 'mock2'], [path for path, _ in sprojects])
+
+ def test_sort_decorated_packages_favoring_message_generators(self):
+ def create_mock(path):
+ m = Mock()
+ m.path = path
+ m.depends_for_topological_order = set()
+ m.message_generator = False
+ return m
+
+ mock1 = create_mock('mock1')
+ mock2 = create_mock('mock2')
+ mock3 = create_mock('mock3')
+ mock3.depends_for_topological_order = set(['mock2'])
+ mock3.message_generator = True
+
+ projects = {'mock3': mock3, 'mock2': mock2, 'mock1': mock1}
+ sprojects = _sort_decorated_packages(projects)
+
+ # mock2 first since it is the dependency of a message generator
+ # mock3 next since it is a message generator
+ # mock1 last, despite having no dependencies and coming first alphabetically
+ self.assertEqual(['mock2', 'mock3', 'mock1'], [path for path, _ in sprojects])
+
+ def test_sort_decorated_packages_cycles(self):
+ def create_mock(path, depend):
+ m = Mock()
+ m.path = path
+ m.depends_for_topological_order = set([depend])
+ m.message_generator = False
+ return m
+
+ # creating a cycle for cycle detection
+ mock1 = create_mock('mock1', 'mock2')
+ mock2 = create_mock('mock2', 'mock3')
+ mock3 = create_mock('mock3', 'mock4')
+ mock4 = create_mock('mock4', 'mock2')
+
+ projects = {'mock3': mock3, 'mock2': mock2, 'mock1': mock1, 'mock4': mock4}
+ sprojects = _sort_decorated_packages(projects)
+ self.assertEqual([[None, 'mock2, mock3, mock4']], sprojects)
+
+ # remove cycle
+ mock4.depends_for_topological_order = set()
+ sprojects = _sort_decorated_packages(projects)
+
+ # mock4 first since it has no dependencies
+ # then mock3 since it only had mock4 as a dependency
+ # then mock2 since it only had mock3 as a dependency
+ # then mock1 since it only had mock2 as a dependency
+ self.assertEqual(['mock4', 'mock3', 'mock2', 'mock1'], [path for path, _ in sprojects])
+
+ def test_topological_order_packages_with_underlay(self):
+ def create_mock(name, build_depends, path):
+ m = Mock()
+ m.name = name
+ m.build_depends = build_depends
+ m.buildtool_depends = []
+ m.run_depends = []
+ m.test_depends = []
+ m.group_depends = []
+ m.exports = []
+ m.path = path
+ return m
+
+ mc = create_mock('c', [], 'pc')
+ mb = create_mock('b', [mc], 'pb')
+ ma = create_mock('a', [mb], 'pa')
+
+ packages = {ma.path: ma,
+ mc.path: mc}
+ underlay_packages = {mb.path: mb}
+
+ ordered_packages = topological_order_packages(packages, underlay_packages=underlay_packages)
+ # c before a because of the indirect dependency via b which is part of an underlay
+ self.assertEqual(['pc', 'pa'], [path for path, _ in ordered_packages])
+
+ def test_topological_order_packages_cycles(self):
+ def create_mock(name, build_depends, path):
+ m = Mock()
+ m.name = name
+ m.build_depends = build_depends
+ m.buildtool_depends = []
+ m.test_depends = []
+ m.run_depends = []
+ m.group_depends = []
+ m.exports = []
+ m.path = path
+ return m
+
+ mc = create_mock('c', [], 'pc')
+ mb = create_mock('b', [mc], 'pb')
+ ma = create_mock('a', [mb], 'pa')
+ mc.build_depends = [ma]
+
+ packages = {ma.path: ma,
+ mb.path: mb,
+ mc.path: mc}
+
+ ordered_packages = topological_order_packages(packages)
+ self.assertEqual([(None, 'a, b, c')], ordered_packages)
--- /dev/null
+from __future__ import print_function
+
+import os
+import shutil
+import tempfile
+import unittest
+
+try:
+ from catkin_pkg.workspaces import ensure_workspace_marker, get_spaces, order_paths,\
+ CATKIN_WORKSPACE_MARKER_FILE
+except ImportError as e:
+ raise ImportError('Please adjust your PYTHONPATH before running this test: %s' % str(e))
+
+
+class WorkspacesTest(unittest.TestCase):
+
+ def test_ensure_workspace_marker(self):
+ root_dir = tempfile.mkdtemp()
+ try:
+ ensure_workspace_marker(root_dir)
+ self.assertTrue(os.path.exists(os.path.join(root_dir, CATKIN_WORKSPACE_MARKER_FILE)))
+ # assert no exception on revisit
+ ensure_workspace_marker(root_dir)
+ finally:
+ shutil.rmtree(root_dir)
+
+ def test_get_spaces(self):
+ self.assertEqual([], get_spaces([]))
+ root_dir = tempfile.mkdtemp()
+ try:
+ self.assertEqual([], get_spaces([root_dir]))
+ with open(os.path.join(root_dir, '.catkin'), 'a') as fhand:
+ fhand.write('')
+ self.assertEqual([root_dir], get_spaces([root_dir]))
+ finally:
+ shutil.rmtree(root_dir)
+
+ def test_order_paths(self):
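+ """Verify that order_paths lists paths under the given prefixes first, in prefix order."""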
+ self.assertEqual([], order_paths([], []))
+ self.assertEqual(['bar', 'baz'], order_paths(['bar', 'baz'], ['foo']))
+ self.assertEqual(['foo', 'bar'], order_paths(['bar', 'foo'], ['foo']))
+ self.assertEqual(['baz', 'foo', 'bar'], order_paths(['bar', 'foo', 'baz'], ['baz', 'foo']))
+ self.assertEqual(['foo' + os.sep + 'bim', 'bar'], order_paths(['bar', 'foo' + os.sep + 'bim'], ['foo']))
+
+ def test_order_paths_with_symlink(self):
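+ """Verify that order_paths matches prefixes through symlinks in either direction."""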
+ root_dir = tempfile.mkdtemp()
+ try:
+ foo = os.path.join(root_dir, 'foo')
+ foo_inc = os.path.join(foo, 'include')
+ foo_ln = os.path.join(root_dir, 'foo_symlink')
+ try:
+ os.symlink(foo, foo_ln)
+ except (AttributeError, OSError):
+ self.skipTest('requires symlink availability')
+
+ self.assertEqual([foo, 'bar'], order_paths(['bar', foo], [foo_ln]))
+ self.assertEqual([foo_ln, 'bar'], order_paths(['bar', foo_ln], [foo]))
+ self.assertEqual([foo_inc, 'bar'], order_paths(['bar', foo_inc], [foo_ln]))
+ finally:
+ shutil.rmtree(root_dir)