--- /dev/null
+pull_request_rules:
+- name: auto-merge
+  conditions:
+  - base=master
+  - label=auto-merge
+  - status-success=continuous-integration/appveyor/pr
+  - status-success=continuous-integration/travis-ci/pr
+  - status-success=deploy/netlify
+  actions:
+    merge:
+      method: merge
+v40.7.0
+-------
+
+* #1551: File inputs for the `license` field in `setup.cfg` files now explicitly raise an error (illustrated below).
+* #1180: Add support for non-ASCII in setup.cfg (#1062). Add support for native strings on some parameters (#1136).
+* #1499: ``setuptools.package_index`` no longer relies on the deprecated ``urllib.parse.splituser`` per Python #27485.
+* #1544: Added tests for PackageIndex.download (for git URLs).
+* #1625: In PEP 517 build_meta builder, ensure that sdists are built as gztar per the spec.
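
For illustration only (not part of the upstream release notes), a minimal sketch of the
#1551 behaviour using the private helper introduced in the patch below; its placement on
``ConfigHandler`` is assumed from the ``setuptools.config`` layout::

    from setuptools.config import ConfigHandler

    parse = ConfigHandler._exclude_files_parser('license')
    parse('MIT')            # returned unchanged
    parse('file: LICENSE')  # raises ValueError: files are not accepted for 'license'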
+
+
v40.6.3
-------
<h3>Questions? Suggestions? Contributions?</h3>
-<p>Visit the <a href="https://github.com/pypa/setuptools">Setuptools project page</a> </p>
+<p>Visit the <a href="{{ package_url }}">Project page</a> </p>
<h3 class="donation">Professional support</h3>
<p>
Professionally-supported {{ project }} is available with the
-<a href="https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme">Tidelift Subscription</a>.
+<a href="https://tidelift.com/subscription/pkg/pypi-{{ project }}?utm_source=pypi-{{ project }}&utm_medium=readme">Tidelift Subscription</a>.
</p>
changelog entry.
``setuptools`` uses `towncrier <https://pypi.org/project/towncrier/>`_
-for changelog managment, so when making a PR, please add a news fragment in the
-``changelog.d/`` folder. Changelog files are written in Restructured Text and
+for changelog management, so when making a PR, please add a news fragment in the
+``changelog.d/`` folder. Changelog files are written in reStructuredText and
should be a 1 or 2 sentence description of the substantive changes in the PR.
They should be named ``<pr_number>.<category>.rst``, where the categories are:
- ``breaking``: Any backwards-compatibility breaking change
- ``doc``: A change to the documentation
- ``misc``: Changes internal to the repo like CI, test and build changes
-- ``deprecation``: For deprecations of an existing feature of behavior
+- ``deprecation``: For deprecations of an existing feature or behavior
A pull request may have more than one of these components, for example a code
change may introduce a new feature that deprecates an old feature, in which
$ cat changelog.d/1288.change.rst
Add support for maintainer in PKG-INFO
+-------------------
+Auto-Merge Requests
+-------------------
+
+To support running all code through CI, even lightweight contributions,
+the project employs Mergify to auto-merge pull requests tagged as
+auto-merge.
+
+Use ``hub pull-request -l auto-merge`` to create such a pull request
+from the command line after pushing a new branch.
+
-------
Testing
-------
sphinx!=1.8.0
rst.linker>=1.9
-jaraco.packaging>=3.2
+jaraco.packaging>=6.1
setuptools>=34
but their use is not advised.
============================== ================= ================= =============== =====
-Key                            Aliases           Type              Minumum Version Notes
+Key                            Aliases           Type              Minimum Version Notes
============================== ================= ================= =============== =====
name                                             str
version                                          attr:, file:, str 39.2.0          (1)
maintainer                                       str
maintainer_email               maintainer-email  str
classifiers                    classifier        file:, list-comma
-license                                          file:, str
+license                                          str
description                    summary           file:, str
long_description               long-description  file:, str
long_description_content_type                    str               38.6.0
[build-system]
requires = ["wheel"]
+build-backend = "setuptools.build_meta"
[tool.towncrier]
package = "setuptools"
[bumpversion]
-current_version = 40.6.3
+current_version = 40.7.0
commit = True
tag = True
setup_params = dict(
name="setuptools",
- version="40.6.3",
+ version="40.7.0",
description=(
"Easily download, build, install, upgrade, and uninstall "
"Python packages"
import functools
import distutils.core
import distutils.filelist
+import re
+from distutils.errors import DistutilsOptionError
from distutils.util import convert_path
from fnmatch import fnmatchcase
from ._deprecation_warning import SetuptoolsDeprecationWarning
-from setuptools.extern.six import PY3
+from setuptools.extern.six import PY3, string_types
from setuptools.extern.six.moves import filter, map
import setuptools.version
_Command.__init__(self, dist)
vars(self).update(kw)
+ def _ensure_stringlike(self, option, what, default=None):
+ val = getattr(self, option)
+ if val is None:
+ setattr(self, option, default)
+ return default
+ elif not isinstance(val, string_types):
+ raise DistutilsOptionError("'%s' must be a %s (got `%s`)"
+ % (option, what, val))
+ return val
+
+ def ensure_string_list(self, option):
+ r"""Ensure that 'option' is a list of strings. If 'option' is
+ currently a string, we split it either on /,\s*/ or /\s+/, so
+ "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
+ ["foo", "bar", "baz"].
+ """
+ val = getattr(self, option)
+ if val is None:
+ return
+ elif isinstance(val, string_types):
+ setattr(self, option, re.split(r',\s*|\s+', val))
+ else:
+ if isinstance(val, list):
+ ok = all(isinstance(v, string_types) for v in val)
+ else:
+ ok = False
+ if not ok:
+ raise DistutilsOptionError(
+ "'%s' must be a list of strings (got %r)"
+ % (option, val))
+
def reinitialize_command(self, command, reinit_subcommands=0, **kw):
cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
vars(cmd).update(kw)
return dist_infos[0]
+def _file_with_extension(directory, extension):
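+ # Return the one file in `directory` that ends with `extension`; exactly one must exist.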
+ matching = (
+ f for f in os.listdir(directory)
+ if f.endswith(extension)
+ )
+ file, = matching
+ return file
+
+
def build_wheel(wheel_directory, config_settings=None,
metadata_directory=None):
config_settings = _fix_config(config_settings)
shutil.rmtree(wheel_directory)
shutil.copytree('dist', wheel_directory)
- wheels = [f for f in os.listdir(wheel_directory)
- if f.endswith('.whl')]
-
- assert len(wheels) == 1
- return wheels[0]
+ return _file_with_extension(wheel_directory, '.whl')
def build_sdist(sdist_directory, config_settings=None):
config_settings = _fix_config(config_settings)
sdist_directory = os.path.abspath(sdist_directory)
- sys.argv = sys.argv[:1] + ['sdist'] + \
+ sys.argv = sys.argv[:1] + ['sdist', '--formats', 'gztar'] + \
config_settings["--global-option"] + \
["--dist-dir", sdist_directory]
_run_setup()
- sdists = [f for f in os.listdir(sdist_directory)
- if f.endswith('.tar.gz')]
-
- assert len(sdists) == 1
- return sdists[0]
+ return _file_with_extension(sdist_directory, '.tar.gz')
from setuptools.extern import six
-from pkg_resources import Distribution, PathMetadata, normalize_path
+import pkg_resources
from setuptools.command.easy_install import easy_install
from setuptools import namespaces
import setuptools
if self.egg_path is None:
self.egg_path = os.path.abspath(ei.egg_base)
- target = normalize_path(self.egg_base)
- egg_path = normalize_path(os.path.join(self.install_dir,
- self.egg_path))
+ target = pkg_resources.normalize_path(self.egg_base)
+ egg_path = pkg_resources.normalize_path(
+ os.path.join(self.install_dir, self.egg_path))
if egg_path != target:
raise DistutilsOptionError(
"--egg-path must be a relative path from the install"
)
# Make a distribution for the package's source
- self.dist = Distribution(
+ self.dist = pkg_resources.Distribution(
target,
- PathMetadata(target, os.path.abspath(ei.egg_info)),
+ pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
project_name=ei.egg_name
)
path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
if path_to_setup != os.curdir:
path_to_setup = '../' * (path_to_setup.count('/') + 1)
- resolved = normalize_path(
+ resolved = pkg_resources.normalize_path(
os.path.join(install_dir, egg_path, path_to_setup)
)
- if resolved != normalize_path(os.curdir):
+ if resolved != pkg_resources.normalize_path(os.curdir):
raise DistutilsOptionError(
"Can't get a consistent path to setup script from"
- " installation directory", resolved, normalize_path(os.curdir))
+ " installation directory", resolved,
+ pkg_resources.normalize_path(os.curdir))
return path_to_setup
def install_for_development(self):
self.reinitialize_command('build_py', inplace=0)
self.run_command('build_py')
bpy_cmd = self.get_finalized_command("build_py")
- build_path = normalize_path(bpy_cmd.build_lib)
+ build_path = pkg_resources.normalize_path(bpy_cmd.build_lib)
# Build extensions
self.reinitialize_command('egg_info', egg_base=build_path)
self.egg_path = build_path
self.dist.location = build_path
# XXX
- self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info)
+ self.dist._provider = pkg_resources.PathMetadata(
+ build_path, ei_cmd.egg_info)
else:
# Without 2to3 inplace works fine:
self.run_command('egg_info')
name as the 'requirement' so that scripts will work across
multiple versions.
+ >>> from pkg_resources import Distribution
>>> dist = Distribution(project_name='foo', version='1.0')
>>> str(dist.as_requirement())
'foo==1.0'
import os
import hashlib
import getpass
-import platform
from base64 import standard_b64encode
from setuptools.extern.six.moves.urllib.error import HTTPError
from setuptools.extern.six.moves.urllib.parse import urlparse
+
class upload(orig.upload):
"""
Override default upload behavior to obtain password
'version': meta.get_version(),
# file content
- 'content': (os.path.basename(filename),content),
+ 'content': (os.path.basename(filename), content),
'filetype': command,
'pyversion': pyversion,
'md5_digest': hashlib.md5(content).hexdigest(),
value = value.lower()
return value in ('1', 'true', 'yes')
+ @classmethod
+ def _exclude_files_parser(cls, key):
+ """Returns a parser function to make sure field inputs
+ are not files.
+
+ Parses a value after getting the key so error messages are
+ more informative.
+
+ :param key: name of the config field being parsed; used in the error message.
+ :rtype: callable
+ """
+ def parser(value):
+ exclude_directive = 'file:'
+ if value.startswith(exclude_directive):
+ raise ValueError(
+ 'Only strings are accepted for the {0} field, '
+ 'files are not accepted'.format(key))
+ return value
+ return parser
+
@classmethod
def _parse_file(cls, value):
"""Represents value as a string, allowing including text
directory with setup.py.
Examples:
- file: LICENSE
file: README.rst, CHANGELOG.md, src/file.txt
:param str value:
section_parser_method = getattr(
self,
- # Dots in section names are tranlsated into dunderscores.
+ # Dots in section names are translated into dunderscores.
('parse_section%s' % method_postfix).replace('.', '__'),
None)
def _deprecated_config_handler(self, func, msg, warning_class):
""" this function will wrap around parameters that are deprecated
-
- :param msg: deprecation message
+
+ :param msg: deprecation message
:param warning_class: class of warning exception to be raised
:param func: function to be wrapped around
"""
def config_handler(*args, **kwargs):
warnings.warn(msg, warning_class)
return func(*args, **kwargs)
-
+
return config_handler
parse_list = self._parse_list
parse_file = self._parse_file
parse_dict = self._parse_dict
+ exclude_files_parser = self._exclude_files_parser
return {
'platforms': parse_list,
'keywords': parse_list,
'provides': parse_list,
- 'requires': self._deprecated_config_handler(parse_list,
- "The requires parameter is deprecated, please use " +
+ 'requires': self._deprecated_config_handler(
+ parse_list,
+ "The requires parameter is deprecated, please use "
"install_requires for runtime dependencies.",
DeprecationWarning),
'obsoletes': parse_list,
'classifiers': self._get_parser_compound(parse_file, parse_list),
- 'license': parse_file,
+ 'license': exclude_files_parser('license'),
'description': parse_file,
'long_description': parse_file,
'version': self._parse_version,
# -*- coding: utf-8 -*-
__all__ = ['Distribution']
+import io
+import sys
import re
import os
import warnings
import distutils.core
import distutils.cmd
import distutils.dist
+from distutils.errors import DistutilsOptionError
+from distutils.util import strtobool
+from distutils.debug import DEBUG
+from distutils.fancy_getopt import translate_longopt
import itertools
-
from collections import defaultdict
from email import message_from_file
from setuptools import windows_support
from setuptools.monkey import get_unpatched
from setuptools.config import parse_configuration
+from .unicode_utils import detect_encoding
import pkg_resources
-from .py36compat import Distribution_parse_config_files
__import__('setuptools.extern.packaging.specifiers')
__import__('setuptools.extern.packaging.version')
_Distribution = get_unpatched(distutils.core.Distribution)
-class Distribution(Distribution_parse_config_files, _Distribution):
+class Distribution(_Distribution):
"""Distribution with support for features, tests, and package data
This is an enhanced version of 'distutils.dist.Distribution' that
req.marker = None
return req
+ def _parse_config_files(self, filenames=None):
+ """
+ Adapted from distutils.dist.Distribution.parse_config_files,
+ this method provides the same functionality in subtly-improved
+ ways.
+ """
+ from setuptools.extern.six.moves.configparser import ConfigParser
+
+ # Ignore install directory options if we have a venv
+ if six.PY3 and sys.prefix != sys.base_prefix:
+ ignore_options = [
+ 'install-base', 'install-platbase', 'install-lib',
+ 'install-platlib', 'install-purelib', 'install-headers',
+ 'install-scripts', 'install-data', 'prefix', 'exec-prefix',
+ 'home', 'user', 'root']
+ else:
+ ignore_options = []
+
+ ignore_options = frozenset(ignore_options)
+
+ if filenames is None:
+ filenames = self.find_config_files()
+
+ if DEBUG:
+ self.announce("Distribution.parse_config_files():")
+
+ parser = ConfigParser()
+ for filename in filenames:
+ with io.open(filename, 'rb') as fp:
+ encoding = detect_encoding(fp)
+ if DEBUG:
+ self.announce(" reading %s [%s]" % (
+ filename, encoding or 'locale')
+ )
+ reader = io.TextIOWrapper(fp, encoding=encoding)
+ (parser.read_file if six.PY3 else parser.readfp)(reader)
+ for section in parser.sections():
+ options = parser.options(section)
+ opt_dict = self.get_option_dict(section)
+
+ for opt in options:
+ if opt != '__name__' and opt not in ignore_options:
+ val = parser.get(section, opt)
+ opt = opt.replace('-', '_')
+ opt_dict[opt] = (filename, val)
+
+ # Make the ConfigParser forget everything (so we retain
+ # the original filenames that options come from)
+ parser.__init__()
+
+ # If there was a "global" section in the config file, use it
+ # to set Distribution options.
+
+ if 'global' in self.command_options:
+ for (opt, (src, val)) in self.command_options['global'].items():
+ alias = self.negative_opt.get(opt)
+ try:
+ if alias:
+ setattr(self, alias, not strtobool(val))
+ elif opt in ('verbose', 'dry_run'): # ugh!
+ setattr(self, opt, strtobool(val))
+ else:
+ setattr(self, opt, val)
+ except ValueError as msg:
+ raise DistutilsOptionError(msg)
+
+ def _set_command_options(self, command_obj, option_dict=None):
+ """
+ Set the options for 'command_obj' from 'option_dict'. Basically
+ this means copying elements of a dictionary ('option_dict') to
+ attributes of an instance ('command').
+
+ 'command_obj' must be a Command instance. If 'option_dict' is not
+ supplied, uses the standard option dictionary for this command
+ (from 'self.command_options').
+
+ (Adopted from distutils.dist.Distribution._set_command_options)
+ """
+ command_name = command_obj.get_command_name()
+ if option_dict is None:
+ option_dict = self.get_option_dict(command_name)
+
+ if DEBUG:
+ self.announce(" setting options for '%s' command:" % command_name)
+ for (option, (source, value)) in option_dict.items():
+ if DEBUG:
+ self.announce(" %s = %s (from %s)" % (option, value,
+ source))
+ try:
+ bool_opts = [translate_longopt(o)
+ for o in command_obj.boolean_options]
+ except AttributeError:
+ bool_opts = []
+ try:
+ neg_opt = command_obj.negative_opt
+ except AttributeError:
+ neg_opt = {}
+
+ try:
+ is_string = isinstance(value, six.string_types)
+ if option in neg_opt and is_string:
+ setattr(command_obj, neg_opt[option], not strtobool(value))
+ elif option in bool_opts and is_string:
+ setattr(command_obj, option, strtobool(value))
+ elif hasattr(command_obj, option):
+ setattr(command_obj, option, value)
+ else:
+ raise DistutilsOptionError(
+ "error in %s: command '%s' has no such option '%s'"
+ % (source, command_name, option))
+ except ValueError as msg:
+ raise DistutilsOptionError(msg)
+
def parse_config_files(self, filenames=None, ignore_option_errors=False):
"""Parses configuration files from various levels
and loads configuration.
"""
- _Distribution.parse_config_files(self, filenames=filenames)
+ self._parse_config_files(filenames=filenames)
parse_configuration(self, self.command_options,
ignore_option_errors=ignore_option_errors)
def _download_svn(self, url, filename):
warnings.warn("SVN download support is deprecated", UserWarning)
+ def splituser(host):
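+ # local stand-in for the deprecated urllib.parse.splituser; returns (user, host)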
+ user, delim, host = host.rpartition('@')
+ return user, host
url = url.split('#', 1)[0] # remove any fragment for svn's sake
creds = ''
if url.lower().startswith('svn:') and '@' in url:
scheme, netloc, path, p, q, f = urllib.parse.urlparse(url)
if not netloc and path.startswith('//') and '/' in path[2:]:
netloc, path = path[2:].split('/', 1)
- auth, host = urllib.parse.splituser(netloc)
+ auth, host = splituser(netloc)
if auth:
if ':' in auth:
user, pw = auth.split(':', 1)
def open_with_auth(url, opener=urllib.request.urlopen):
"""Open a urllib2 request, handling HTTP authentication"""
- scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)
+ parsed = urllib.parse.urlparse(url)
+ scheme, netloc, path, params, query, frag = parsed
# Double scheme does not raise on Mac OS X as revealed by a
# failing test. We would expect "nonnumeric port". Refs #20.
if netloc.endswith(':'):
raise http_client.InvalidURL("nonnumeric port: ''")
- if scheme in ('http', 'https'):
- auth, host = urllib.parse.splituser(netloc)
+ if scheme in ('http', 'https') and parsed.username:
+ auth = ':'.join((parsed.username, parsed.password))
else:
auth = None
if auth:
auth = "Basic " + _encode_auth(auth)
- parts = scheme, host, path, params, query, frag
+ parts = scheme, parsed.hostname, path, params, query, frag
new_url = urllib.parse.urlunparse(parts)
request = urllib.request.Request(new_url)
request.add_header("Authorization", auth)
# Put authentication info back into request URL if same host,
# so that links found on the page will work
s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url)
- if s2 == scheme and h2 == host:
+ if s2 == scheme and h2 == parsed.hostname:
parts = s2, netloc, path2, param2, query2, frag2
fp.url = urllib.parse.urlunparse(parts)
def get_darwin_arches(major, minor, machine):
"""Return a list of supported arches (including group arches) for
- the given major, minor and machine architecture of an macOS machine.
+ the given major, minor and machine architecture of a macOS machine.
"""
arches = []
+++ /dev/null
-import sys
-from distutils.errors import DistutilsOptionError
-from distutils.util import strtobool
-from distutils.debug import DEBUG
-
-
-class Distribution_parse_config_files:
- """
- Mix-in providing forward-compatibility for functionality to be
- included by default on Python 3.7.
-
- Do not edit the code in this class except to update functionality
- as implemented in distutils.
- """
- def parse_config_files(self, filenames=None):
- from configparser import ConfigParser
-
- # Ignore install directory options if we have a venv
- if sys.prefix != sys.base_prefix:
- ignore_options = [
- 'install-base', 'install-platbase', 'install-lib',
- 'install-platlib', 'install-purelib', 'install-headers',
- 'install-scripts', 'install-data', 'prefix', 'exec-prefix',
- 'home', 'user', 'root']
- else:
- ignore_options = []
-
- ignore_options = frozenset(ignore_options)
-
- if filenames is None:
- filenames = self.find_config_files()
-
- if DEBUG:
- self.announce("Distribution.parse_config_files():")
-
- parser = ConfigParser(interpolation=None)
- for filename in filenames:
- if DEBUG:
- self.announce(" reading %s" % filename)
- parser.read(filename)
- for section in parser.sections():
- options = parser.options(section)
- opt_dict = self.get_option_dict(section)
-
- for opt in options:
- if opt != '__name__' and opt not in ignore_options:
- val = parser.get(section,opt)
- opt = opt.replace('-', '_')
- opt_dict[opt] = (filename, val)
-
- # Make the ConfigParser forget everything (so we retain
- # the original filenames that options come from)
- parser.__init__()
-
- # If there was a "global" section in the config file, use it
- # to set Distribution options.
-
- if 'global' in self.command_options:
- for (opt, (src, val)) in self.command_options['global'].items():
- alias = self.negative_opt.get(opt)
- try:
- if alias:
- setattr(self, alias, not strtobool(val))
- elif opt in ('verbose', 'dry_run'): # ugh!
- setattr(self, opt, strtobool(val))
- else:
- setattr(self, opt, val)
- except ValueError as msg:
- raise DistutilsOptionError(msg)
-
-
-if sys.version_info < (3,):
- # Python 2 behavior is sufficient
- class Distribution_parse_config_files:
- pass
-
-
-if False:
- # When updated behavior is available upstream,
- # disable override here.
- class Distribution_parse_config_files:
- pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
- http://tools.ietf.org/html/rfc6125#section-6.4.3
+ https://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
def build_files(file_defs, prefix=""):
"""
- Build a set of files/directories, as described by the file_defs dictionary.
+ Build a set of files/directories, as described by the
+ file_defs dictionary.
- Each key/value pair in the dictionary is interpreted as a filename/contents
- pair. If the contents value is a dictionary, a directory is created, and the
+ Each key/value pair in the dictionary is interpreted as
+ a filename/contents
+ pair. If the contents value is a dictionary, a directory
+ is created, and the
dictionary interpreted as the files within it, recursively.
For example:
s.stop()
"""
- def __init__(self, server_address=('', 0),
+ def __init__(
+ self, server_address=('', 0),
RequestHandlerClass=SimpleHTTPServer.SimpleHTTPRequestHandler):
- BaseHTTPServer.HTTPServer.__init__(self, server_address,
- RequestHandlerClass)
+ BaseHTTPServer.HTTPServer.__init__(
+ self, server_address, RequestHandlerClass)
self._run = True
def start(self):
A simple HTTP Server that records the requests made to it.
"""
- def __init__(self, server_address=('', 0),
+ def __init__(
+ self, server_address=('', 0),
RequestHandlerClass=RequestRecorder):
- BaseHTTPServer.HTTPServer.__init__(self, server_address,
- RequestHandlerClass)
+ BaseHTTPServer.HTTPServer.__init__(
+ self, server_address, RequestHandlerClass)
threading.Thread.__init__(self)
self.setDaemon(True)
self.requests = []
import pytest
-import os
-import shutil
import mock
from distutils.errors import DistutilsSetupError
# with that out of the way, let's see if the crude dependency
# system works
cmd.compiler = mock.MagicMock(spec=cmd.compiler)
- mock_newer.return_value = ([],[])
+ mock_newer.return_value = ([], [])
obj_deps = {'': ('global.h',), 'example.c': ('example.h',)}
- libs = [('example', {'sources': ['example.c'] ,'obj_deps': obj_deps})]
+ libs = [('example', {'sources': ['example.c'], 'obj_deps': obj_deps})]
cmd.build_libraries(libs)
- assert [['example.c', 'global.h', 'example.h']] in mock_newer.call_args[0]
+ assert [['example.c', 'global.h', 'example.h']] in \
+ mock_newer.call_args[0]
assert not cmd.compiler.compile.called
assert cmd.compiler.create_static_lib.call_count == 1
sdist_name = build_backend.build_sdist(sdist_into_directory)
assert os.path.isfile(os.path.join(sdist_into_directory, sdist_name))
- # if the setup.py changes subsequent call of the build meta should still succeed, given the
+ # if setup.py changes, subsequent calls of the build meta
+ # backend should still succeed, given that the
# sdist_directory the frontend specifies is empty
with open(os.path.abspath("setup.py"), 'rt') as file_handler:
content = file_handler.read()
with open(os.path.abspath("setup.py"), 'wt') as file_handler:
- file_handler.write(content.replace("version='0.0.0'", "version='0.0.1'"))
+ file_handler.write(
+ content.replace("version='0.0.0'", "version='0.0.1'"))
shutil.rmtree(sdist_into_directory)
os.makedirs(sdist_into_directory)
sdist_name = build_backend.build_sdist("out_sdist")
- assert os.path.isfile(os.path.join(os.path.abspath("out_sdist"), sdist_name))
+ assert os.path.isfile(
+ os.path.join(os.path.abspath("out_sdist"), sdist_name))
def test_build_sdist_setup_py_exists(tmpdir_cwd):
with tarfile.open(os.path.join("temp", targz_path)) as tar:
assert not any('setup.py' in name for name in tar.getnames())
+
+def test_build_sdist_builds_targz_even_if_zip_indicated(tmpdir_cwd):
+ files = {
+ 'setup.py': DALS("""
+ __import__('setuptools').setup(
+ name='foo',
+ version='0.0.0',
+ py_modules=['hello']
+ )"""),
+ 'hello.py': '',
+ 'setup.cfg': DALS("""
+ [sdist]
+ formats=zip
+ """)
+ }
+
+ build_files(files)
+ build_sdist("temp")
+# -*- coding: UTF-8 -*-
+from __future__ import unicode_literals
+
import contextlib
import pytest
+
from distutils.errors import DistutilsOptionError, DistutilsFileError
from mock import patch
from setuptools.dist import Distribution, _Distribution
from setuptools.config import ConfigHandler, read_configuration
+from setuptools.extern.six.moves.configparser import InterpolationMissingOptionError
+from setuptools.tests import is_ascii
from . import py2_only, py3_only
+from .textwrap import DALS
+
class ErrConfigHandler(ConfigHandler):
"""Erroneous handler. Fails to implement required methods."""
dir_package = dir_package.mkdir(dir_name)
init_file = None
if not ns:
- init_file = dir_package.join('__init__.py')
- init_file.write('')
+ init_file = dir_package.join('__init__.py')
+ init_file.write('')
return dir_package, init_file
-def fake_env(tmpdir, setup_cfg, setup_py=None, package_path='fake_package'):
+def fake_env(tmpdir, setup_cfg, setup_py=None, encoding='ascii', package_path='fake_package'):
if setup_py is None:
setup_py = (
tmpdir.join('setup.py').write(setup_py)
config = tmpdir.join('setup.cfg')
- config.write(setup_cfg)
+ config.write(setup_cfg.encode(encoding), mode='wb')
package_dir, init_file = make_package_dir(package_path, tmpdir)
assert metadata.download_url == 'http://test.test.com/test/'
assert metadata.maintainer_email == 'test@test.com'
+ def test_license_cfg(self, tmpdir):
+ fake_env(
+ tmpdir,
+ DALS("""
+ [metadata]
+ name=foo
+ version=0.0.1
+ license=Apache 2.0
+ """)
+ )
+
+ with get_dist(tmpdir) as dist:
+ metadata = dist.metadata
+
+ assert metadata.name == "foo"
+ assert metadata.version == "0.0.1"
+ assert metadata.license == "Apache 2.0"
+
def test_file_mixed(self, tmpdir):
fake_env(
tmpdir.join('fake_package', 'version.txt').write('1.2.3\n4.5.6\n')
with pytest.raises(DistutilsOptionError):
with get_dist(tmpdir) as dist:
- _ = dist.metadata.version
+ dist.metadata.version
def test_version_with_package_dir_simple(self, tmpdir):
assert metadata.description == 'Some description'
assert metadata.requires == ['some', 'requirement']
+ def test_interpolation(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'description = %(message)s\n'
+ )
+ with pytest.raises(InterpolationMissingOptionError):
+ with get_dist(tmpdir):
+ pass
+
+ skip_if_not_ascii = pytest.mark.skipif(not is_ascii, reason='Test not supported with this locale')
+
+ @skip_if_not_ascii
+ def test_non_ascii_1(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '[metadata]\n'
+ 'description = éà ïôñ\n',
+ encoding='utf-8'
+ )
+ with pytest.raises(UnicodeDecodeError):
+ with get_dist(tmpdir):
+ pass
+
+ def test_non_ascii_2(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '# -*- coding: invalid\n'
+ )
+ with pytest.raises(LookupError):
+ with get_dist(tmpdir):
+ pass
+
+ def test_non_ascii_3(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '\n'
+ '# -*- coding: invalid\n'
+ )
+ with get_dist(tmpdir):
+ pass
+
+ @skip_if_not_ascii
+ def test_non_ascii_4(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '# -*- coding: utf-8\n'
+ '[metadata]\n'
+ 'description = éà ïôñ\n',
+ encoding='utf-8'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.description == 'éà ïôñ'
+
+ @skip_if_not_ascii
+ def test_non_ascii_5(self, tmpdir):
+ fake_env(
+ tmpdir,
+ '# vim: set fileencoding=iso-8859-15 :\n'
+ '[metadata]\n'
+ 'description = éà ïôñ\n',
+ encoding='iso-8859-15'
+ )
+ with get_dist(tmpdir) as dist:
+ assert dist.metadata.description == 'éà ïôñ'
+
class TestOptions:
'tests_require = mock==0.7.2; pytest\n'
'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n'
'dependency_links = http://some.com/here/1, '
- 'http://some.com/there/2\n'
+ 'http://some.com/there/2\n'
'python_requires = >=1.0, !=2.8\n'
'py_modules = module1, module2\n'
)
dir_sub_two, _ = make_package_dir('sub_two', dir_package, ns=True)
with get_dist(tmpdir) as dist:
- assert set(dist.packages) == {
+ assert set(dist.packages) == {
'fake_package', 'fake_package.sub_two', 'fake_package.sub_one'
}
tmpdir,
'[options.entry_points]\n'
'group1 = point1 = pack.module:func, '
- '.point2 = pack.module2:func_rest [rest]\n'
+ '.point2 = pack.module2:func_rest [rest]\n'
'group2 = point3 = pack.module:func2\n'
)
]
assert sorted(dist.data_files) == sorted(expected)
+
saved_dist_init = _Distribution.__init__
+
+
class TestExternalSetters:
# During creation of the setuptools Distribution() object, we call
# the init of the parent distutils Distribution object via
class TestGetModuleConstant:
- def test_basic(self):
- """
- Invoke get_module_constant on a module in
- the test package.
- """
- mod_name = 'setuptools.tests.mod_with_constant'
- val = depends.get_module_constant(mod_name, 'value')
- assert val == 'three, sir!'
- assert 'setuptools.tests.mod_with_constant' not in sys.modules
+ def test_basic(self):
+ """
+ Invoke get_module_constant on a module in
+ the test package.
+ """
+ mod_name = 'setuptools.tests.mod_with_constant'
+ val = depends.get_module_constant(mod_name, 'value')
+ assert val == 'three, sir!'
+ assert 'setuptools.tests.mod_with_constant' not in sys.modules
import pytest
+
def test_dist_fetch_build_egg(tmpdir):
"""
Check multiple calls to `Distribution.fetch_build_egg` work as expected.
'classifiers': [
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
- 'License :: OSI Approved :: MIT License'
- ]})),
+ 'License :: OSI Approved :: MIT License',
+ ]})),
('Metadata version 1.1: Download URL', merge_dicts(base_attrs, {
'download_url': 'https://example.com'
})),
('Metadata Version 1.2: Requires-Python', merge_dicts(base_attrs, {
'python_requires': '>=3.7'
})),
- pytest.param('Metadata Version 1.2: Project-Url',
+ pytest.param(
+ 'Metadata Version 1.2: Project-Url',
merge_dicts(base_attrs, {
'project_urls': {
'Foo': 'https://example.bar'
}
}), marks=pytest.mark.xfail(
reason="Issue #1578: project_urls not read"
- )),
+ )),
('Metadata Version 2.1: Long Description Content Type',
merge_dicts(base_attrs, {
'long_description_content_type': 'text/x-rst; charset=UTF-8'
})),
- pytest.param('Metadata Version 2.1: Provides Extra',
+ pytest.param(
+ 'Metadata Version 2.1: Provides Extra',
merge_dicts(base_attrs, {
'provides_extras': ['foo', 'bar']
- }), marks=pytest.mark.xfail(reason="provides_extras not read")),
+ }), marks=pytest.mark.xfail(reason="provides_extras not read")),
('Missing author, missing author e-mail',
{'name': 'foo', 'version': '1.0.0'}),
('Missing author',
import io
import zipfile
import mock
-from setuptools.command.easy_install import EasyInstallDeprecationWarning, ScriptWriter, WindowsScriptWriter
+from setuptools.command.easy_install import (
+ EasyInstallDeprecationWarning, ScriptWriter, WindowsScriptWriter,
+)
import time
from setuptools.extern import six
from setuptools.extern.six.moves import urllib
cmd.easy_install(sdist_script)
assert (target / 'mypkg_script').exists()
-
def test_dist_get_script_args_deprecated(self):
with pytest.warns(EasyInstallDeprecationWarning):
ScriptWriter.get_script_args(None, None)
with pytest.warns(EasyInstallDeprecationWarning):
WindowsScriptWriter.get_writer()
+
@pytest.mark.filterwarnings('ignore:Unbuilt egg')
class TestPTHFileWriter:
def test_add_from_cwd_site_sets_dirty(self):
-import datetime
import sys
import ast
import os
import stat
import time
-from setuptools.command.egg_info import egg_info, manifest_maker, EggInfoDeprecationWarning, get_pkg_info_revision
+from setuptools.command.egg_info import (
+ egg_info, manifest_maker, EggInfoDeprecationWarning, get_pkg_info_revision,
+)
from setuptools.dist import Distribution
from setuptools.extern.six.moves import map
]
assert sorted(actual) == expected
+ def test_license_is_a_string(self, tmpdir_cwd, env):
+ setup_config = DALS("""
+ [metadata]
+ name=foo
+ version=0.0.1
+ license=file:MIT
+ """)
+
+ setup_script = DALS("""
+ from setuptools import setup
+
+ setup()
+ """)
+
+ build_files({'setup.py': setup_script,
+ 'setup.cfg': setup_config})
+
+ # This command should fail with a ValueError, but because it's
+ # currently configured to use a subprocess, the actual traceback
+ # object is lost and we need to parse it from stderr
+ with pytest.raises(AssertionError) as exc:
+ self._run_egg_info_command(tmpdir_cwd, env)
+
+ # Hopefully this is not too fragile: the only argument to the
+ # assertion error should be a traceback, ending with:
+ # ValueError: ....
+ #
+ # assert not 1
+ tb = exc.value.args[0].split('\n')
+ assert tb[-3].lstrip().startswith('ValueError')
+
def test_rebuilt(self, tmpdir_cwd, env):
"""Ensure timestamps are updated when the command is re-run."""
self._create_project()
data_stream=1,
env=environ,
)
- if code:
- raise AssertionError(data)
+ assert not code, data
+
if output:
assert output in data
def test_get_pkg_info_revision_deprecated(self):
pytest.warns(EggInfoDeprecationWarning, get_pkg_info_revision)
+
+ EGG_INFO_TESTS = (
+ # Check for issue #1136: invalid string type when
+ # reading declarative `setup.cfg` under Python 2.
+ {
+ 'setup.py': DALS(
+ """
+ from setuptools import setup
+ setup(
+ name="foo",
+ )
+ """),
+ 'setup.cfg': DALS(
+ """
+ [options]
+ package_dir =
+ = src
+ """),
+ 'src': {},
+ },
+ # Check Unicode can be used in `setup.py` under Python 2.
+ {
+ 'setup.py': DALS(
+ """
+ # -*- coding: utf-8 -*-
+ from __future__ import unicode_literals
+ from setuptools import setup, find_packages
+ setup(
+ name="foo",
+ package_dir={'': 'src'},
+ )
+ """),
+ 'src': {},
+ }
+ )
+
+ @pytest.mark.parametrize('package_files', EGG_INFO_TESTS)
+ def test_egg_info(self, tmpdir_cwd, env, package_files):
+ """
+ """
+ build_files(package_files)
+ code, data = environment.run_setup_py(
+ cmd=['egg_info'],
+ data_stream=1,
+ )
+ assert not code, data
from setuptools.extern.six import PY3
from setuptools import find_packages
if PY3:
- from setuptools import find_namespace_packages
+ from setuptools import find_namespace_packages
-# modeled after CPython's test.support.can_symlink
+# modeled after CPython's test.support.can_symlink
def can_symlink():
TESTFN = tempfile.mktemp()
symlink_path = TESTFN + "can_symlink"
def test_pep420_ns_package_no_includes(self):
packages = find_namespace_packages(
self.dist_dir, exclude=['pkg.subpkg.assets'])
- self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])
+ self._assert_packages(
+ packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])
@py3_only
def test_pep420_ns_package_no_includes_or_excludes(self):
packages = find_namespace_packages(self.dist_dir)
- expected = ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets']
+ expected = [
+ 'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets']
self._assert_packages(packages, expected)
@py3_only
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
packages = find_namespace_packages(self.dist_dir)
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
-
@pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only')
def test_executable_with_spaces_escaping_unix(self, tmpdir):
"""
- Ensure that shebang on Unix is not quoted, even when a value with spaces
+ Ensure that shebang on Unix is not quoted, even when
+ a value with spaces
is specified using --executable.
"""
expected = '#!%s\n' % self.unix_spaces_exe
@pytest.mark.skipif(sys.platform != 'win32', reason='Windows only')
def test_executable_arg_escaping_win32(self, tmpdir):
"""
- Ensure that shebang on Windows is quoted when getting a path with spaces
+ Ensure that shebang on Windows is quoted when
+ getting a path with spaces
from --executable, that is itself properly quoted.
"""
expected = '#!"%s"\n' % self.win32_exe
import glob
import os
import sys
+import re
+import subprocess
+import functools
+import tarfile
+import zipfile
from setuptools.extern.six.moves import urllib
import pytest
assert os.path.exists(os.path.join(pyuri.location, 'pyuri', 'uri.regex'))
-import re
-import subprocess
-import functools
-import tarfile, zipfile
+build_deps = ['appdirs', 'packaging', 'pyparsing', 'six']
-build_deps = ['appdirs', 'packaging', 'pyparsing', 'six']
@pytest.mark.parametrize("build_dep", build_deps)
-@pytest.mark.skipif(sys.version_info < (3, 6), reason='run only on late versions')
+@pytest.mark.skipif(
+ sys.version_info < (3, 6), reason='run only on late versions')
def test_build_deps_on_distutils(request, tmpdir_factory, build_dep):
"""
All setuptools build dependencies must build without
breaker.write('raise ImportError()')
cmd = [sys.executable, 'setup.py', 'install', '--prefix', install_dir]
env = dict(os.environ, PYTHONPATH=pkg_dir)
- output = subprocess.check_output(cmd, cwd=pkg_dir, env=env, stderr=subprocess.STDOUT)
+ output = subprocess.check_output(
+ cmd, cwd=pkg_dir, env=env, stderr=subprocess.STDOUT)
return output.decode('utf-8')
def download_and_extract(request, req, target):
- cmd = [sys.executable, '-m', 'pip', 'download', '--no-deps',
- '--no-binary', ':all:', req]
+ cmd = [
+ sys.executable, '-m', 'pip', 'download', '--no-deps',
+ '--no-binary', ':all:', req,
+ ]
output = subprocess.check_output(cmd, encoding='utf-8')
filename = re.search('Saved (.*)', output).group(1)
request.addfinalizer(functools.partial(os.remove, filename))
from setuptools.dist import Distribution
from setuptools.extern import six
from setuptools.tests.textwrap import DALS
-from . import py3_only
import pytest
# Glob matching
('*.txt', ['foo.txt', 'bar.txt'], ['foo/foo.txt']),
- ('dir/*.txt', ['dir/foo.txt', 'dir/bar.txt', 'dir/.txt'], ['notdir/foo.txt']),
+ (
+ 'dir/*.txt',
+ ['dir/foo.txt', 'dir/bar.txt', 'dir/.txt'], ['notdir/foo.txt']),
('*/*.py', ['bin/start.py'], []),
('docs/page-?.txt', ['docs/page-9.txt'], ['docs/page-10.txt']),
def test_exclude(self):
"""Include everything in app/ except the text files"""
- l = make_local_path
+ ml = make_local_path
self.make_manifest(
"""
include app/*
exclude app/*.txt
""")
- files = default_files | set([l('app/c.rst')])
+ files = default_files | set([ml('app/c.rst')])
assert files == self.get_files()
def test_include_multiple(self):
"""Include with multiple patterns."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest("include app/*.txt app/static/*")
files = default_files | set([
- l('app/a.txt'), l('app/b.txt'),
- l('app/static/app.js'), l('app/static/app.js.map'),
- l('app/static/app.css'), l('app/static/app.css.map')])
+ ml('app/a.txt'), ml('app/b.txt'),
+ ml('app/static/app.js'), ml('app/static/app.js.map'),
+ ml('app/static/app.css'), ml('app/static/app.css.map')])
assert files == self.get_files()
def test_graft(self):
"""Include the whole app/static/ directory."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest("graft app/static")
files = default_files | set([
- l('app/static/app.js'), l('app/static/app.js.map'),
- l('app/static/app.css'), l('app/static/app.css.map')])
+ ml('app/static/app.js'), ml('app/static/app.js.map'),
+ ml('app/static/app.css'), ml('app/static/app.css.map')])
assert files == self.get_files()
def test_graft_glob_syntax(self):
"""Include the whole app/static/ directory."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest("graft */static")
files = default_files | set([
- l('app/static/app.js'), l('app/static/app.js.map'),
- l('app/static/app.css'), l('app/static/app.css.map')])
+ ml('app/static/app.js'), ml('app/static/app.js.map'),
+ ml('app/static/app.css'), ml('app/static/app.css.map')])
assert files == self.get_files()
def test_graft_global_exclude(self):
"""Exclude all *.map files in the project."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest(
"""
graft app/static
global-exclude *.map
""")
files = default_files | set([
- l('app/static/app.js'), l('app/static/app.css')])
+ ml('app/static/app.js'), ml('app/static/app.css')])
assert files == self.get_files()
def test_global_include(self):
"""Include all *.rst, *.js, and *.css files in the whole tree."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest(
"""
global-include *.rst *.js *.css
""")
files = default_files | set([
- '.hidden.rst', 'testing.rst', l('app/c.rst'),
- l('app/static/app.js'), l('app/static/app.css')])
+ '.hidden.rst', 'testing.rst', ml('app/c.rst'),
+ ml('app/static/app.js'), ml('app/static/app.css')])
assert files == self.get_files()
def test_graft_prune(self):
"""Include all files in app/, except for the whole app/static/ dir."""
- l = make_local_path
+ ml = make_local_path
self.make_manifest(
"""
graft app
prune app/static
""")
files = default_files | set([
- l('app/a.txt'), l('app/b.txt'), l('app/c.rst')])
+ ml('app/a.txt'), ml('app/b.txt'), ml('app/c.rst')])
assert files == self.get_files()
def test_process_template_line(self):
# testing all MANIFEST.in template patterns
file_list = FileList()
- l = make_local_path
+ ml = make_local_path
# simulated file list
self.make_files([
'buildout.cfg',
# filelist does not filter out VCS directories,
# it's sdist that does
- l('.hg/last-message.txt'),
- l('global/one.txt'),
- l('global/two.txt'),
- l('global/files.x'),
- l('global/here.tmp'),
- l('f/o/f.oo'),
- l('dir/graft-one'),
- l('dir/dir2/graft2'),
- l('dir3/ok'),
- l('dir3/sub/ok.txt'),
+ ml('.hg/last-message.txt'),
+ ml('global/one.txt'),
+ ml('global/two.txt'),
+ ml('global/files.x'),
+ ml('global/here.tmp'),
+ ml('f/o/f.oo'),
+ ml('dir/graft-one'),
+ ml('dir/dir2/graft2'),
+ ml('dir3/ok'),
+ ml('dir3/sub/ok.txt'),
])
MANIFEST_IN = DALS("""\
'buildout.cfg',
'four.txt',
'ok',
- l('.hg/last-message.txt'),
- l('dir/graft-one'),
- l('dir/dir2/graft2'),
- l('f/o/f.oo'),
- l('global/one.txt'),
- l('global/two.txt'),
+ ml('.hg/last-message.txt'),
+ ml('dir/graft-one'),
+ ml('dir/dir2/graft2'),
+ ml('f/o/f.oo'),
+ ml('global/one.txt'),
+ ml('global/two.txt'),
]
file_list.sort()
assert False, "Should have thrown an error"
def test_include(self):
- l = make_local_path
+ ml = make_local_path
# include
file_list = FileList()
- self.make_files(['a.py', 'b.txt', l('d/c.py')])
+ self.make_files(['a.py', 'b.txt', ml('d/c.py')])
file_list.process_template_line('include *.py')
file_list.sort()
self.assertWarnings()
def test_exclude(self):
- l = make_local_path
+ ml = make_local_path
# exclude
file_list = FileList()
- file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+ file_list.files = ['a.py', 'b.txt', ml('d/c.py')]
file_list.process_template_line('exclude *.py')
file_list.sort()
- assert file_list.files == ['b.txt', l('d/c.py')]
+ assert file_list.files == ['b.txt', ml('d/c.py')]
self.assertNoWarnings()
file_list.process_template_line('exclude *.rb')
file_list.sort()
- assert file_list.files == ['b.txt', l('d/c.py')]
+ assert file_list.files == ['b.txt', ml('d/c.py')]
self.assertWarnings()
def test_global_include(self):
- l = make_local_path
+ ml = make_local_path
# global-include
file_list = FileList()
- self.make_files(['a.py', 'b.txt', l('d/c.py')])
+ self.make_files(['a.py', 'b.txt', ml('d/c.py')])
file_list.process_template_line('global-include *.py')
file_list.sort()
- assert file_list.files == ['a.py', l('d/c.py')]
+ assert file_list.files == ['a.py', ml('d/c.py')]
self.assertNoWarnings()
file_list.process_template_line('global-include *.rb')
file_list.sort()
- assert file_list.files == ['a.py', l('d/c.py')]
+ assert file_list.files == ['a.py', ml('d/c.py')]
self.assertWarnings()
def test_global_exclude(self):
- l = make_local_path
+ ml = make_local_path
# global-exclude
file_list = FileList()
- file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+ file_list.files = ['a.py', 'b.txt', ml('d/c.py')]
file_list.process_template_line('global-exclude *.py')
file_list.sort()
self.assertWarnings()
def test_recursive_include(self):
- l = make_local_path
+ ml = make_local_path
# recursive-include
file_list = FileList()
- self.make_files(['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')])
+ self.make_files(['a.py', ml('d/b.py'), ml('d/c.txt'), ml('d/d/e.py')])
file_list.process_template_line('recursive-include d *.py')
file_list.sort()
- assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
self.assertNoWarnings()
file_list.process_template_line('recursive-include e *.py')
file_list.sort()
- assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
self.assertWarnings()
def test_recursive_exclude(self):
- l = make_local_path
+ ml = make_local_path
# recursive-exclude
file_list = FileList()
- file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')]
+ file_list.files = ['a.py', ml('d/b.py'), ml('d/c.txt'), ml('d/d/e.py')]
file_list.process_template_line('recursive-exclude d *.py')
file_list.sort()
- assert file_list.files == ['a.py', l('d/c.txt')]
+ assert file_list.files == ['a.py', ml('d/c.txt')]
self.assertNoWarnings()
file_list.process_template_line('recursive-exclude e *.py')
file_list.sort()
- assert file_list.files == ['a.py', l('d/c.txt')]
+ assert file_list.files == ['a.py', ml('d/c.txt')]
self.assertWarnings()
def test_graft(self):
- l = make_local_path
+ ml = make_local_path
# graft
file_list = FileList()
- self.make_files(['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')])
+ self.make_files(['a.py', ml('d/b.py'), ml('d/d/e.py'), ml('f/f.py')])
file_list.process_template_line('graft d')
file_list.sort()
- assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
self.assertNoWarnings()
file_list.process_template_line('graft e')
file_list.sort()
- assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')]
self.assertWarnings()
def test_prune(self):
- l = make_local_path
+ ml = make_local_path
# prune
file_list = FileList()
- file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')]
+ file_list.files = ['a.py', ml('d/b.py'), ml('d/d/e.py'), ml('f/f.py')]
file_list.process_template_line('prune d')
file_list.sort()
- assert file_list.files == ['a.py', l('f/f.py')]
+ assert file_list.files == ['a.py', ml('f/f.py')]
self.assertNoWarnings()
file_list.process_template_line('prune e')
file_list.sort()
- assert file_list.files == ['a.py', l('f/f.py')]
+ assert file_list.files == ['a.py', ml('f/f.py')]
self.assertWarnings()
for k in hive if k.startswith(key.lower())
)
- return mock.patch.multiple(distutils.msvc9compiler.Reg,
+ return mock.patch.multiple(
+ distutils.msvc9compiler.Reg,
read_keys=read_keys, read_values=read_values)
"""
key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir'
- key_64 = r'software\wow6432node\microsoft\devdiv\vcforpython\9.0\installdir'
+ key_64 = key_32.replace(r'\microsoft', r'\wow6432node\microsoft')
def test_patched(self):
"Test the module is actually patched"
from __future__ import absolute_import, unicode_literals
-import os
import sys
import subprocess
from setuptools.extern import six
from setuptools.extern.six.moves import urllib, http_client
+import mock
+import pytest
import pkg_resources
import setuptools.package_index
hosts=('www.example.com',)
)
- url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
+ url = (
+ 'url:%20https://svn.plone.org/svn'
+ '/collective/inquant.contentmirror.plone/trunk'
+ )
try:
v = index.open_url(url)
except Exception as v:
index.opener = _urlopen
url = 'http://example.com'
try:
- v = index.open_url(url)
- except Exception as v:
- assert 'line' in str(v)
+ index.open_url(url)
+ except Exception as exc:
+ assert 'line' in str(exc)
else:
raise AssertionError('Should have raise here!')
index.open_url(url)
except distutils.errors.DistutilsError as error:
msg = six.text_type(error)
- assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
+ assert (
+ 'nonnumeric port' in msg
+ or 'getaddrinfo failed' in msg
+ or 'Name or service not known' in msg
+ )
return
raise RuntimeError("Did not raise")
assert dists[0].version == ''
assert dists[1].version == vc
+ def test_download_git_with_rev(self, tmpdir):
+ url = 'git+https://github.example/group/project@master#egg=foo'
+ index = setuptools.package_index.PackageIndex()
+
+ with mock.patch("os.system") as os_system_mock:
+ result = index.download(url, str(tmpdir))
+
+ os_system_mock.assert_called()
+
+ expected_dir = str(tmpdir / 'project@master')
+ expected = (
+ 'git clone --quiet '
+ 'https://github.example/group/project {expected_dir}'
+ ).format(**locals())
+ first_call_args = os_system_mock.call_args_list[0][0]
+ assert first_call_args == (expected,)
+
+ tmpl = '(cd {expected_dir} && git checkout --quiet master)'
+ expected = tmpl.format(**locals())
+ assert os_system_mock.call_args_list[1][0] == (expected,)
+ assert result == expected_dir
+
+ def test_download_git_no_rev(self, tmpdir):
+ url = 'git+https://github.example/group/project#egg=foo'
+ index = setuptools.package_index.PackageIndex()
+
+ with mock.patch("os.system") as os_system_mock:
+ result = index.download(url, str(tmpdir))
+
+ os_system_mock.assert_called()
+
+ expected_dir = str(tmpdir / 'project')
+ expected = (
+ 'git clone --quiet '
+ 'https://github.example/group/project {expected_dir}'
+ ).format(**locals())
+ os_system_mock.assert_called_once_with(expected)
+
+ def test_download_svn(self, tmpdir):
+ url = 'svn+https://svn.example/project#egg=foo'
+ index = setuptools.package_index.PackageIndex()
+
+ with pytest.warns(UserWarning):
+ with mock.patch("os.system") as os_system_mock:
+ result = index.download(url, str(tmpdir))
+
+ os_system_mock.assert_called()
+
+ expected_dir = str(tmpdir / 'project')
+ expected = (
+ 'svn checkout -q '
+ 'svn+https://svn.example/project {expected_dir}'
+ ).format(**locals())
+ os_system_mock.assert_called_once_with(expected)
+
class TestContentCheckers:
def test_md5(self):
if sys.version_info < (3, 3):
config_vars.update({'Py_UNICODE_SIZE': 2})
mock_gcf = self.mock_get_config_var(**config_vars)
- with patch('setuptools.pep425tags.sysconfig.get_config_var', mock_gcf):
+ with patch(
+ 'setuptools.pep425tags.sysconfig.get_config_var',
+ mock_gcf):
abi_tag = pep425tags.get_abi_tag()
assert abi_tag == base + flags
"""
It should be possible to execute a setup.py with a Byte Order Mark
"""
- target = pkg_resources.resource_filename(__name__,
+ target = pkg_resources.resource_filename(
+ __name__,
'script-with-bom.py')
namespace = types.ModuleType('namespace')
setuptools.sandbox._execfile(target, vars(namespace))
from json import __version__
assert dep.get_module_constant('json', '__version__') == __version__
assert dep.get_module_constant('sys', 'version') == sys.version
- assert dep.get_module_constant('setuptools.tests.test_setuptools', '__doc__') == __doc__
+ assert dep.get_module_constant(
+ 'setuptools.tests.test_setuptools', '__doc__') == __doc__
@needs_bytecode
def testRequire(self):
self.req = Require('Distutils', '1.0.3', 'distutils')
self.dist = makeSetup(
features={
- 'foo': Feature("foo", standard=True, require_features=['baz', self.req]),
+ 'foo': Feature(
+ "foo", standard=True, require_features=['baz', self.req]),
'bar': Feature("bar", standard=True, packages=['pkg.bar'],
py_modules=['bar_et'], remove=['bar.ext'],
),
('with-dwim', None, 'include DWIM') in dist.feature_options
)
assert (
- ('without-dwim', None, 'exclude DWIM (default)') in dist.feature_options
+ ('without-dwim', None, 'exclude DWIM (default)')
+ in dist.feature_options
)
assert (
('with-bar', None, 'include bar (default)') in dist.feature_options
from distutils import log
import os
-import sys
import pytest
from distutils import log
from distutils.errors import DistutilsError
-from distutils.version import StrictVersion
import pytest
-import distutils.command
import glob
import os
import sys
"""
Quick and dirty test to ensure all external dependencies are vendored.
"""
- for command in ('upload',):#sorted(distutils.command.__all__):
+ for command in ('upload',): # sorted(distutils.command.__all__):
bare_virtualenv.run(' && '.join((
'cd {source}',
'python setup.py {command} -h',
}),
)
+
@pytest.mark.parametrize(
('filename', 'info'), WHEEL_INFO_TESTS,
ids=[t[0] for t in WHEEL_INFO_TESTS]
)
+
@pytest.mark.parametrize(
'params', WHEEL_INSTALL_TESTS,
ids=list(params['id'] for params in WHEEL_INSTALL_TESTS),
'arg 4\\',
'arg5 a\\\\b',
]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+ proc = subprocess.Popen(
+ cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii'))
actual = stdout.decode('ascii').replace('\r\n', '\n')
expected = textwrap.dedent(r"""
with (tmpdir / 'foo-script.py').open('w') as f:
f.write(self.prep_script(tmpl))
cmd = [str(tmpdir / 'foo.exe')]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
+ proc = subprocess.Popen(
+ cmd,
+ stdout=subprocess.PIPE,
+ stdin=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
stdout, stderr = proc.communicate()
actual = stdout.decode('ascii').replace('\r\n', '\n')
expected = textwrap.dedent(r"""
str(tmpdir / 'test_output.txt'),
'Test Argument',
]
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
+ proc = subprocess.Popen(
+ cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
stdout, stderr = proc.communicate()
assert not stdout
assert not stderr
import unicodedata
import sys
+import re
from setuptools.extern import six
return string.encode(enc)
except UnicodeEncodeError:
return None
+
+
+CODING_RE = re.compile(br'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)')
+
+
+def detect_encoding(fp):
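+ # Look for a PEP 263 style coding cookie on the first line only,
+ # then rewind so the caller can re-read the stream from the start.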
+ first_line = fp.readline()
+ fp.seek(0)
+ m = CODING_RE.match(first_line)
+ if m is None:
+ return None
+ return m.group(1).decode('ascii')
import re
import zipfile
-from pkg_resources import Distribution, PathMetadata, parse_version
+import pkg_resources
+import setuptools
+from pkg_resources import parse_version
from setuptools.extern.packaging.utils import canonicalize_name
from setuptools.extern.six import PY3
-from setuptools import Distribution as SetuptoolsDistribution
from setuptools import pep425tags
from setuptools.command.egg_info import write_requirements
return next((True for t in self.tags() if t in supported_tags), False)
def egg_name(self):
- return Distribution(
+ return pkg_resources.Distribution(
project_name=self.project_name, version=self.version,
platform=(None if self.platform == 'any' else get_platform()),
).egg_name() + '.egg'
zf.extractall(destination_eggdir)
# Convert metadata.
dist_info = os.path.join(destination_eggdir, dist_info)
- dist = Distribution.from_location(
+ dist = pkg_resources.Distribution.from_location(
destination_eggdir, dist_info,
- metadata=PathMetadata(destination_eggdir, dist_info),
+ metadata=pkg_resources.PathMetadata(destination_eggdir, dist_info),
)
# Note: Evaluate and strip markers now,
os.path.join(egg_info, 'METADATA'),
os.path.join(egg_info, 'PKG-INFO'),
)
- setup_dist = SetuptoolsDistribution(
+ setup_dist = setuptools.Distribution(
attrs=dict(
install_requires=install_requires,
extras_require=extras_require,
pytest-flake8<=1.0.0; python_version=="3.4"
virtualenv>=13.0.0
pytest-virtualenv>=1.2.7
-pytest>=3.0.2
+# pytest pinned to <4 due to #1638
+pytest>=3.7,<4
wheel
coverage>=4.5.1
pytest-cov>=2.5.1
paver; python_version>="3.6"
futures; python_version=="2.7"
+pip==18.1 # Temporary workaround for #1644.
# Changed from default (`python -m pip ...`)
# to prevent the current working directory
# from being added to `sys.path`.
-install_command={envbindir}/pip install {opts} {packages}
+install_command=python -c 'import sys; sys.path.remove(""); from pkg_resources import load_entry_point; load_entry_point("pip", "console_scripts", "pip")()' install {opts} {packages}
# Same as above.
list_dependencies_command={envbindir}/pip freeze
setenv=COVERAGE_FILE={toxworkdir}/.coverage.{envname}