path: root/setuptools/_vendor
author     Jason R. Coombs <jaraco@jaraco.com>  2020-09-04 10:22:54 -0400
committer  Jason R. Coombs <jaraco@jaraco.com>  2020-09-04 10:22:54 -0400
commit     fc4d9828768ceebd2f9337481450c88376c013e9 (patch)
tree       6dff622c50739ca19fdfbecf1008a42589d37b57 /setuptools/_vendor
parent     7bb73a477de24069002516eb6eb1d755bed9d65b (diff)
parent     03d36b9edb53e266a0b4b836e1e3178f989a0781 (diff)
download   python-setuptools-git-feature/implicit-bootstrap.tar.gz
Merge branch 'master' into feature/implicit-bootstrap
Diffstat (limited to 'setuptools/_vendor')
-rw-r--r--  setuptools/_vendor/packaging/__about__.py     |    4
-rw-r--r--  setuptools/_vendor/packaging/_compat.py       |    9
-rw-r--r--  setuptools/_vendor/packaging/_structures.py   |   26
-rw-r--r--  setuptools/_vendor/packaging/_typing.py       |   48
-rw-r--r--  setuptools/_vendor/packaging/markers.py       |   54
-rw-r--r--  setuptools/_vendor/packaging/py.typed         |    0
-rw-r--r--  setuptools/_vendor/packaging/requirements.py  |   11
-rw-r--r--  setuptools/_vendor/packaging/specifiers.py    |  190
-rw-r--r--  setuptools/_vendor/packaging/tags.py          |  569
-rw-r--r--  setuptools/_vendor/packaging/utils.py         |   18
-rw-r--r--  setuptools/_vendor/packaging/version.py       |  151
-rw-r--r--  setuptools/_vendor/six.py                     |  868
-rw-r--r--  setuptools/_vendor/vendored.txt               |    3
13 files changed, 889 insertions(+), 1062 deletions(-)
diff --git a/setuptools/_vendor/packaging/__about__.py b/setuptools/_vendor/packaging/__about__.py
index dc95138d..4d998578 100644
--- a/setuptools/_vendor/packaging/__about__.py
+++ b/setuptools/_vendor/packaging/__about__.py
@@ -18,10 +18,10 @@ __title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
-__version__ = "19.2"
+__version__ = "20.4"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
-__license__ = "BSD or Apache License, Version 2.0"
+__license__ = "BSD-2-Clause or Apache-2.0"
__copyright__ = "Copyright 2014-2019 %s" % __author__
diff --git a/setuptools/_vendor/packaging/_compat.py b/setuptools/_vendor/packaging/_compat.py
index 25da473c..e54bd4ed 100644
--- a/setuptools/_vendor/packaging/_compat.py
+++ b/setuptools/_vendor/packaging/_compat.py
@@ -5,6 +5,11 @@ from __future__ import absolute_import, division, print_function
import sys
+from ._typing import TYPE_CHECKING
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import Any, Dict, Tuple, Type
+
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
@@ -18,14 +23,16 @@ else:
def with_metaclass(meta, *bases):
+ # type: (Type[Any], Tuple[Type[Any], ...]) -> Any
"""
Create a base class with a metaclass.
"""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
- class metaclass(meta):
+ class metaclass(meta): # type: ignore
def __new__(cls, name, this_bases, d):
+ # type: (Type[Any], str, Tuple[Any], Dict[Any, Any]) -> Any
return meta(name, bases, d)
return type.__new__(metaclass, "temporary_class", (), {})
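
Usage sketch for with_metaclass (illustrative, not taken from this commit; BaseThing is a hypothetical class, and the import path assumes the standalone packaging distribution -- inside setuptools the module is reachable via setuptools.extern.packaging._compat):

    import abc
    from packaging._compat import with_metaclass

    class BaseThing(with_metaclass(abc.ABCMeta, object)):  # hypothetical example class
        @abc.abstractmethod
        def describe(self):
            """Subclasses must provide a description."""

    # BaseThing() raises TypeError until describe() is overridden, on both
    # Python 2 and Python 3; specifiers.py below uses the same pattern for
    # BaseSpecifier.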
diff --git a/setuptools/_vendor/packaging/_structures.py b/setuptools/_vendor/packaging/_structures.py
index 68dcca63..800d5c55 100644
--- a/setuptools/_vendor/packaging/_structures.py
+++ b/setuptools/_vendor/packaging/_structures.py
@@ -4,65 +4,83 @@
from __future__ import absolute_import, division, print_function
-class Infinity(object):
+class InfinityType(object):
def __repr__(self):
+ # type: () -> str
return "Infinity"
def __hash__(self):
+ # type: () -> int
return hash(repr(self))
def __lt__(self, other):
+ # type: (object) -> bool
return False
def __le__(self, other):
+ # type: (object) -> bool
return False
def __eq__(self, other):
+ # type: (object) -> bool
return isinstance(other, self.__class__)
def __ne__(self, other):
+ # type: (object) -> bool
return not isinstance(other, self.__class__)
def __gt__(self, other):
+ # type: (object) -> bool
return True
def __ge__(self, other):
+ # type: (object) -> bool
return True
def __neg__(self):
+ # type: (object) -> NegativeInfinityType
return NegativeInfinity
-Infinity = Infinity()
+Infinity = InfinityType()
-class NegativeInfinity(object):
+class NegativeInfinityType(object):
def __repr__(self):
+ # type: () -> str
return "-Infinity"
def __hash__(self):
+ # type: () -> int
return hash(repr(self))
def __lt__(self, other):
+ # type: (object) -> bool
return True
def __le__(self, other):
+ # type: (object) -> bool
return True
def __eq__(self, other):
+ # type: (object) -> bool
return isinstance(other, self.__class__)
def __ne__(self, other):
+ # type: (object) -> bool
return not isinstance(other, self.__class__)
def __gt__(self, other):
+ # type: (object) -> bool
return False
def __ge__(self, other):
+ # type: (object) -> bool
return False
def __neg__(self):
+ # type: (object) -> InfinityType
return Infinity
-NegativeInfinity = NegativeInfinity()
+NegativeInfinity = NegativeInfinityType()
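
For context (illustrative sketch, not part of the patch; import path assumes the standalone packaging distribution), the renamed singleton types keep the same sentinel behaviour that version.py relies on when building comparison keys:

    from packaging._structures import Infinity, NegativeInfinity

    # The singletons compare greater/less than any other object.
    assert Infinity > (99, "z") and NegativeInfinity < (0,)
    assert -Infinity is NegativeInfinity and -NegativeInfinity is Infinity

    # version.py pads comparison keys with them, e.g. a version without a
    # pre-release segment gets Infinity in the "pre" slot so 1.0 > 1.0rc1.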
diff --git a/setuptools/_vendor/packaging/_typing.py b/setuptools/_vendor/packaging/_typing.py
new file mode 100644
index 00000000..77a8b918
--- /dev/null
+++ b/setuptools/_vendor/packaging/_typing.py
@@ -0,0 +1,48 @@
+"""For neatly implementing static typing in packaging.
+
+`mypy` - the static type analysis tool we use - uses the `typing` module, which
+provides core functionality fundamental to mypy's functioning.
+
+Generally, `typing` would be imported at runtime and used in that fashion -
+it acts as a no-op at runtime and does not have any run-time overhead by
+design.
+
+As it turns out, `typing` is not vendorable - it uses separate sources for
+Python 2/Python 3. Thus, this codebase can not expect it to be present.
+To work around this, mypy allows the typing import to be behind a False-y
+optional to prevent it from running at runtime and type-comments can be used
+to remove the need for the types to be accessible directly during runtime.
+
+This module provides the False-y guard in a nicely named fashion so that a
+curious maintainer can reach here to read this.
+
+In packaging, all static-typing related imports should be guarded as follows:
+
+ from packaging._typing import TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from typing import ...
+
+Ref: https://github.com/python/mypy/issues/3216
+"""
+
+__all__ = ["TYPE_CHECKING", "cast"]
+
+# The TYPE_CHECKING constant defined by the typing module is False at runtime
+# but True while type checking.
+if False: # pragma: no cover
+ from typing import TYPE_CHECKING
+else:
+ TYPE_CHECKING = False
+
+# typing's cast syntax requires calling typing.cast at runtime, but we don't
+# want to import typing at runtime. Here, we inform the type checkers that
+# we're importing `typing.cast` as `cast` and re-implement typing.cast's
+# runtime behavior in a block that is ignored by type checkers.
+if TYPE_CHECKING: # pragma: no cover
+ # not executed at runtime
+ from typing import cast
+else:
+ # executed at runtime
+ def cast(type_, value): # noqa
+ return value
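
An illustrative sketch of the guard pattern and the cast shim described above (not part of the patch; head is a hypothetical function):

    from packaging._typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:  # only evaluated by the type checker, never at runtime
        from typing import List

    def head(items):  # hypothetical example function
        # type: (List[int]) -> int
        # cast() is a no-op at runtime but narrows the type for mypy.
        return cast(int, items[0])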
diff --git a/setuptools/_vendor/packaging/markers.py b/setuptools/_vendor/packaging/markers.py
index 4bdfdb24..03fbdfcc 100644
--- a/setuptools/_vendor/packaging/markers.py
+++ b/setuptools/_vendor/packaging/markers.py
@@ -13,8 +13,14 @@ from setuptools.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
from setuptools.extern.pyparsing import Literal as L # noqa
from ._compat import string_types
+from ._typing import TYPE_CHECKING
from .specifiers import Specifier, InvalidSpecifier
+if TYPE_CHECKING: # pragma: no cover
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+ Operator = Callable[[str, str], bool]
+
__all__ = [
"InvalidMarker",
@@ -46,30 +52,37 @@ class UndefinedEnvironmentName(ValueError):
class Node(object):
def __init__(self, value):
+ # type: (Any) -> None
self.value = value
def __str__(self):
+ # type: () -> str
return str(self.value)
def __repr__(self):
+ # type: () -> str
return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
def serialize(self):
+ # type: () -> str
raise NotImplementedError
class Variable(Node):
def serialize(self):
+ # type: () -> str
return str(self)
class Value(Node):
def serialize(self):
+ # type: () -> str
return '"{0}"'.format(self)
class Op(Node):
def serialize(self):
+ # type: () -> str
return str(self)
@@ -85,13 +98,13 @@ VARIABLE = (
| L("python_version")
| L("sys_platform")
| L("os_name")
- | L("os.name")
+ | L("os.name") # PEP-345
| L("sys.platform") # PEP-345
| L("platform.version") # PEP-345
| L("platform.machine") # PEP-345
| L("platform.python_implementation") # PEP-345
- | L("python_implementation") # PEP-345
- | L("extra") # undocumented setuptools legacy
+ | L("python_implementation") # undocumented setuptools legacy
+ | L("extra") # PEP-508
)
ALIASES = {
"os.name": "os_name",
@@ -131,6 +144,7 @@ MARKER = stringStart + MARKER_EXPR + stringEnd
def _coerce_parse_result(results):
+ # type: (Union[ParseResults, List[Any]]) -> List[Any]
if isinstance(results, ParseResults):
return [_coerce_parse_result(i) for i in results]
else:
@@ -138,6 +152,8 @@ def _coerce_parse_result(results):
def _format_marker(marker, first=True):
+ # type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str
+
assert isinstance(marker, (list, tuple, string_types))
# Sometimes we have a structure like [[...]] which is a single item list
@@ -172,10 +188,11 @@ _operators = {
"!=": operator.ne,
">=": operator.ge,
">": operator.gt,
-}
+} # type: Dict[str, Operator]
def _eval_op(lhs, op, rhs):
+ # type: (str, Op, str) -> bool
try:
spec = Specifier("".join([op.serialize(), rhs]))
except InvalidSpecifier:
@@ -183,7 +200,7 @@ def _eval_op(lhs, op, rhs):
else:
return spec.contains(lhs)
- oper = _operators.get(op.serialize())
+ oper = _operators.get(op.serialize()) # type: Optional[Operator]
if oper is None:
raise UndefinedComparison(
"Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
@@ -192,13 +209,18 @@ def _eval_op(lhs, op, rhs):
return oper(lhs, rhs)
-_undefined = object()
+class Undefined(object):
+ pass
+
+
+_undefined = Undefined()
def _get_env(environment, name):
- value = environment.get(name, _undefined)
+ # type: (Dict[str, str], str) -> str
+ value = environment.get(name, _undefined) # type: Union[str, Undefined]
- if value is _undefined:
+ if isinstance(value, Undefined):
raise UndefinedEnvironmentName(
"{0!r} does not exist in evaluation environment.".format(name)
)
@@ -207,7 +229,8 @@ def _get_env(environment, name):
def _evaluate_markers(markers, environment):
- groups = [[]]
+ # type: (List[Any], Dict[str, str]) -> bool
+ groups = [[]] # type: List[List[bool]]
for marker in markers:
assert isinstance(marker, (list, tuple, string_types))
@@ -234,6 +257,7 @@ def _evaluate_markers(markers, environment):
def format_full_version(info):
+ # type: (sys._version_info) -> str
version = "{0.major}.{0.minor}.{0.micro}".format(info)
kind = info.releaselevel
if kind != "final":
@@ -242,9 +266,13 @@ def format_full_version(info):
def default_environment():
+ # type: () -> Dict[str, str]
if hasattr(sys, "implementation"):
- iver = format_full_version(sys.implementation.version)
- implementation_name = sys.implementation.name
+ # Ignoring the `sys.implementation` reference for type checking due to
+ # mypy not liking that the attribute doesn't exist in Python 2.7 when
+ # run with the `--py27` flag.
+ iver = format_full_version(sys.implementation.version) # type: ignore
+ implementation_name = sys.implementation.name # type: ignore
else:
iver = "0"
implementation_name = ""
@@ -266,6 +294,7 @@ def default_environment():
class Marker(object):
def __init__(self, marker):
+ # type: (str) -> None
try:
self._markers = _coerce_parse_result(MARKER.parseString(marker))
except ParseException as e:
@@ -275,12 +304,15 @@ class Marker(object):
raise InvalidMarker(err_str)
def __str__(self):
+ # type: () -> str
return _format_marker(self._markers)
def __repr__(self):
+ # type: () -> str
return "<Marker({0!r})>".format(str(self))
def evaluate(self, environment=None):
+ # type: (Optional[Dict[str, str]]) -> bool
"""Evaluate a marker.
Return the boolean from evaluating the given marker against the
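
Illustrative usage of the Marker class touched above (not taken from this commit; import path assumes the standalone packaging distribution):

    from packaging.markers import Marker

    marker = Marker('python_version >= "3.6" and os_name == "posix"')
    marker.evaluate()  # evaluates against default_environment() of the running interpreter
    marker.evaluate({"python_version": "2.7", "os_name": "nt"})  # -> False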
diff --git a/setuptools/_vendor/packaging/py.typed b/setuptools/_vendor/packaging/py.typed
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/setuptools/_vendor/packaging/py.typed
diff --git a/setuptools/_vendor/packaging/requirements.py b/setuptools/_vendor/packaging/requirements.py
index 8a0c2cb9..5d50c7d7 100644
--- a/setuptools/_vendor/packaging/requirements.py
+++ b/setuptools/_vendor/packaging/requirements.py
@@ -9,11 +9,15 @@ import re
from setuptools.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
from setuptools.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
from setuptools.extern.pyparsing import Literal as L # noqa
-from setuptools.extern.six.moves.urllib import parse as urlparse
+from urllib import parse as urlparse
+from ._typing import TYPE_CHECKING
from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+if TYPE_CHECKING: # pragma: no cover
+ from typing import List
+
class InvalidRequirement(ValueError):
"""
@@ -89,6 +93,7 @@ class Requirement(object):
# TODO: Can we normalize the name and extra name?
def __init__(self, requirement_string):
+ # type: (str) -> None
try:
req = REQUIREMENT.parseString(requirement_string)
except ParseException as e:
@@ -116,7 +121,8 @@ class Requirement(object):
self.marker = req.marker if req.marker else None
def __str__(self):
- parts = [self.name]
+ # type: () -> str
+ parts = [self.name] # type: List[str]
if self.extras:
parts.append("[{0}]".format(",".join(sorted(self.extras))))
@@ -135,4 +141,5 @@ class Requirement(object):
return "".join(parts)
def __repr__(self):
+ # type: () -> str
return "<Requirement({0!r})>".format(str(self))
diff --git a/setuptools/_vendor/packaging/specifiers.py b/setuptools/_vendor/packaging/specifiers.py
index 743576a0..fe09bb1d 100644
--- a/setuptools/_vendor/packaging/specifiers.py
+++ b/setuptools/_vendor/packaging/specifiers.py
@@ -9,8 +9,27 @@ import itertools
import re
from ._compat import string_types, with_metaclass
+from ._typing import TYPE_CHECKING
+from .utils import canonicalize_version
from .version import Version, LegacyVersion, parse
+if TYPE_CHECKING: # pragma: no cover
+ from typing import (
+ List,
+ Dict,
+ Union,
+ Iterable,
+ Iterator,
+ Optional,
+ Callable,
+ Tuple,
+ FrozenSet,
+ )
+
+ ParsedVersion = Union[Version, LegacyVersion]
+ UnparsedVersion = Union[Version, LegacyVersion, str]
+ CallableOperator = Callable[[ParsedVersion, str], bool]
+
class InvalidSpecifier(ValueError):
"""
@@ -18,9 +37,10 @@ class InvalidSpecifier(ValueError):
"""
-class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
+class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): # type: ignore
@abc.abstractmethod
def __str__(self):
+ # type: () -> str
"""
Returns the str representation of this Specifier like object. This
should be representative of the Specifier itself.
@@ -28,12 +48,14 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod
def __hash__(self):
+ # type: () -> int
"""
Returns a hash value for this Specifier like object.
"""
@abc.abstractmethod
def __eq__(self, other):
+ # type: (object) -> bool
"""
Returns a boolean representing whether or not the two Specifier like
objects are equal.
@@ -41,6 +63,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod
def __ne__(self, other):
+ # type: (object) -> bool
"""
Returns a boolean representing whether or not the two Specifier like
objects are not equal.
@@ -48,6 +71,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractproperty
def prereleases(self):
+ # type: () -> Optional[bool]
"""
Returns whether or not pre-releases as a whole are allowed by this
specifier.
@@ -55,6 +79,7 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@prereleases.setter
def prereleases(self, value):
+ # type: (bool) -> None
"""
Sets whether or not pre-releases as a whole are allowed by this
specifier.
@@ -62,12 +87,14 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod
def contains(self, item, prereleases=None):
+ # type: (str, Optional[bool]) -> bool
"""
Determines if the given item is contained within this specifier.
"""
@abc.abstractmethod
def filter(self, iterable, prereleases=None):
+ # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion]
"""
Takes an iterable of items and filters them so that only items which
are contained within this specifier are allowed in it.
@@ -76,19 +103,24 @@ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
class _IndividualSpecifier(BaseSpecifier):
- _operators = {}
+ _operators = {} # type: Dict[str, str]
def __init__(self, spec="", prereleases=None):
+ # type: (str, Optional[bool]) -> None
match = self._regex.search(spec)
if not match:
raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
- self._spec = (match.group("operator").strip(), match.group("version").strip())
+ self._spec = (
+ match.group("operator").strip(),
+ match.group("version").strip(),
+ ) # type: Tuple[str, str]
# Store whether or not this Specifier should accept prereleases
self._prereleases = prereleases
def __repr__(self):
+ # type: () -> str
pre = (
", prereleases={0!r}".format(self.prereleases)
if self._prereleases is not None
@@ -98,26 +130,35 @@ class _IndividualSpecifier(BaseSpecifier):
return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre)
def __str__(self):
+ # type: () -> str
return "{0}{1}".format(*self._spec)
+ @property
+ def _canonical_spec(self):
+ # type: () -> Tuple[str, Union[Version, str]]
+ return self._spec[0], canonicalize_version(self._spec[1])
+
def __hash__(self):
- return hash(self._spec)
+ # type: () -> int
+ return hash(self._canonical_spec)
def __eq__(self, other):
+ # type: (object) -> bool
if isinstance(other, string_types):
try:
- other = self.__class__(other)
+ other = self.__class__(str(other))
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
- return self._spec == other._spec
+ return self._canonical_spec == other._canonical_spec
def __ne__(self, other):
+ # type: (object) -> bool
if isinstance(other, string_types):
try:
- other = self.__class__(other)
+ other = self.__class__(str(other))
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
@@ -126,52 +167,67 @@ class _IndividualSpecifier(BaseSpecifier):
return self._spec != other._spec
def _get_operator(self, op):
- return getattr(self, "_compare_{0}".format(self._operators[op]))
+ # type: (str) -> CallableOperator
+ operator_callable = getattr(
+ self, "_compare_{0}".format(self._operators[op])
+ ) # type: CallableOperator
+ return operator_callable
def _coerce_version(self, version):
+ # type: (UnparsedVersion) -> ParsedVersion
if not isinstance(version, (LegacyVersion, Version)):
version = parse(version)
return version
@property
def operator(self):
+ # type: () -> str
return self._spec[0]
@property
def version(self):
+ # type: () -> str
return self._spec[1]
@property
def prereleases(self):
+ # type: () -> Optional[bool]
return self._prereleases
@prereleases.setter
def prereleases(self, value):
+ # type: (bool) -> None
self._prereleases = value
def __contains__(self, item):
+ # type: (str) -> bool
return self.contains(item)
def contains(self, item, prereleases=None):
+ # type: (UnparsedVersion, Optional[bool]) -> bool
+
# Determine if prereleases are to be allowed or not.
if prereleases is None:
prereleases = self.prereleases
# Normalize item to a Version or LegacyVersion, this allows us to have
# a shortcut for ``"2.0" in Specifier(">=2")
- item = self._coerce_version(item)
+ normalized_item = self._coerce_version(item)
# Determine if we should be supporting prereleases in this specifier
# or not, if we do not support prereleases than we can short circuit
# logic if this version is a prereleases.
- if item.is_prerelease and not prereleases:
+ if normalized_item.is_prerelease and not prereleases:
return False
# Actually do the comparison to determine if this item is contained
# within this Specifier or not.
- return self._get_operator(self.operator)(item, self.version)
+ operator_callable = self._get_operator(self.operator) # type: CallableOperator
+ return operator_callable(normalized_item, self.version)
def filter(self, iterable, prereleases=None):
+ # type: (Iterable[UnparsedVersion], Optional[bool]) -> Iterable[UnparsedVersion]
+
yielded = False
found_prereleases = []
@@ -230,32 +286,43 @@ class LegacySpecifier(_IndividualSpecifier):
}
def _coerce_version(self, version):
+ # type: (Union[ParsedVersion, str]) -> LegacyVersion
if not isinstance(version, LegacyVersion):
version = LegacyVersion(str(version))
return version
def _compare_equal(self, prospective, spec):
+ # type: (LegacyVersion, str) -> bool
return prospective == self._coerce_version(spec)
def _compare_not_equal(self, prospective, spec):
+ # type: (LegacyVersion, str) -> bool
return prospective != self._coerce_version(spec)
def _compare_less_than_equal(self, prospective, spec):
+ # type: (LegacyVersion, str) -> bool
return prospective <= self._coerce_version(spec)
def _compare_greater_than_equal(self, prospective, spec):
+ # type: (LegacyVersion, str) -> bool
return prospective >= self._coerce_version(spec)
def _compare_less_than(self, prospective, spec):
+ # type: (LegacyVersion, str) -> bool
return prospective < self._coerce_version(spec)
def _compare_greater_than(self, prospective, spec):
+ # type: (LegacyVersion, str) -> bool
return prospective > self._coerce_version(spec)
-def _require_version_compare(fn):
+def _require_version_compare(
+ fn # type: (Callable[[Specifier, ParsedVersion, str], bool])
+):
+ # type: (...) -> Callable[[Specifier, ParsedVersion, str], bool]
@functools.wraps(fn)
def wrapped(self, prospective, spec):
+ # type: (Specifier, ParsedVersion, str) -> bool
if not isinstance(prospective, Version):
return False
return fn(self, prospective, spec)
@@ -373,6 +440,8 @@ class Specifier(_IndividualSpecifier):
@_require_version_compare
def _compare_compatible(self, prospective, spec):
+ # type: (ParsedVersion, str) -> bool
+
# Compatible releases have an equivalent combination of >= and ==. That
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
# implement this in terms of the other specifiers instead of
@@ -400,56 +469,75 @@ class Specifier(_IndividualSpecifier):
@_require_version_compare
def _compare_equal(self, prospective, spec):
+ # type: (ParsedVersion, str) -> bool
+
# We need special logic to handle prefix matching
if spec.endswith(".*"):
# In the case of prefix matching we want to ignore local segment.
prospective = Version(prospective.public)
# Split the spec out by dots, and pretend that there is an implicit
# dot in between a release segment and a pre-release segment.
- spec = _version_split(spec[:-2]) # Remove the trailing .*
+ split_spec = _version_split(spec[:-2]) # Remove the trailing .*
# Split the prospective version out by dots, and pretend that there
# is an implicit dot in between a release segment and a pre-release
# segment.
- prospective = _version_split(str(prospective))
+ split_prospective = _version_split(str(prospective))
# Shorten the prospective version to be the same length as the spec
# so that we can determine if the specifier is a prefix of the
# prospective version or not.
- prospective = prospective[: len(spec)]
+ shortened_prospective = split_prospective[: len(split_spec)]
# Pad out our two sides with zeros so that they both equal the same
# length.
- spec, prospective = _pad_version(spec, prospective)
+ padded_spec, padded_prospective = _pad_version(
+ split_spec, shortened_prospective
+ )
+
+ return padded_prospective == padded_spec
else:
# Convert our spec string into a Version
- spec = Version(spec)
+ spec_version = Version(spec)
# If the specifier does not have a local segment, then we want to
# act as if the prospective version also does not have a local
# segment.
- if not spec.local:
+ if not spec_version.local:
prospective = Version(prospective.public)
- return prospective == spec
+ return prospective == spec_version
@_require_version_compare
def _compare_not_equal(self, prospective, spec):
+ # type: (ParsedVersion, str) -> bool
return not self._compare_equal(prospective, spec)
@_require_version_compare
def _compare_less_than_equal(self, prospective, spec):
- return prospective <= Version(spec)
+ # type: (ParsedVersion, str) -> bool
+
+ # NB: Local version identifiers are NOT permitted in the version
+ # specifier, so local version labels can be universally removed from
+ # the prospective version.
+ return Version(prospective.public) <= Version(spec)
@_require_version_compare
def _compare_greater_than_equal(self, prospective, spec):
- return prospective >= Version(spec)
+ # type: (ParsedVersion, str) -> bool
+
+ # NB: Local version identifiers are NOT permitted in the version
+ # specifier, so local version labels can be universally removed from
+ # the prospective version.
+ return Version(prospective.public) >= Version(spec)
@_require_version_compare
- def _compare_less_than(self, prospective, spec):
+ def _compare_less_than(self, prospective, spec_str):
+ # type: (ParsedVersion, str) -> bool
+
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
- spec = Version(spec)
+ spec = Version(spec_str)
# Check to see if the prospective version is less than the spec
# version. If it's not we can short circuit and just return False now
@@ -471,10 +559,12 @@ class Specifier(_IndividualSpecifier):
return True
@_require_version_compare
- def _compare_greater_than(self, prospective, spec):
+ def _compare_greater_than(self, prospective, spec_str):
+ # type: (ParsedVersion, str) -> bool
+
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
- spec = Version(spec)
+ spec = Version(spec_str)
# Check to see if the prospective version is greater than the spec
# version. If it's not we can short circuit and just return False now
@@ -502,10 +592,13 @@ class Specifier(_IndividualSpecifier):
return True
def _compare_arbitrary(self, prospective, spec):
+ # type: (Version, str) -> bool
return str(prospective).lower() == str(spec).lower()
@property
def prereleases(self):
+ # type: () -> bool
+
# If there is an explicit prereleases set for this, then we'll just
# blindly use that.
if self._prereleases is not None:
@@ -530,6 +623,7 @@ class Specifier(_IndividualSpecifier):
@prereleases.setter
def prereleases(self, value):
+ # type: (bool) -> None
self._prereleases = value
@@ -537,7 +631,8 @@ _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
def _version_split(version):
- result = []
+ # type: (str) -> List[str]
+ result = [] # type: List[str]
for item in version.split("."):
match = _prefix_regex.search(item)
if match:
@@ -548,6 +643,7 @@ def _version_split(version):
def _pad_version(left, right):
+ # type: (List[str], List[str]) -> Tuple[List[str], List[str]]
left_split, right_split = [], []
# Get the release segment of our versions
@@ -567,14 +663,16 @@ def _pad_version(left, right):
class SpecifierSet(BaseSpecifier):
def __init__(self, specifiers="", prereleases=None):
- # Split on , to break each indidivual specifier into it's own item, and
+ # type: (str, Optional[bool]) -> None
+
+ # Split on , to break each individual specifier into it's own item, and
# strip each item to remove leading/trailing whitespace.
- specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+ split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
# Parsed each individual specifier, attempting first to make it a
# Specifier and falling back to a LegacySpecifier.
parsed = set()
- for specifier in specifiers:
+ for specifier in split_specifiers:
try:
parsed.add(Specifier(specifier))
except InvalidSpecifier:
@@ -588,6 +686,7 @@ class SpecifierSet(BaseSpecifier):
self._prereleases = prereleases
def __repr__(self):
+ # type: () -> str
pre = (
", prereleases={0!r}".format(self.prereleases)
if self._prereleases is not None
@@ -597,12 +696,15 @@ class SpecifierSet(BaseSpecifier):
return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
def __str__(self):
+ # type: () -> str
return ",".join(sorted(str(s) for s in self._specs))
def __hash__(self):
+ # type: () -> int
return hash(self._specs)
def __and__(self, other):
+ # type: (Union[SpecifierSet, str]) -> SpecifierSet
if isinstance(other, string_types):
other = SpecifierSet(other)
elif not isinstance(other, SpecifierSet):
@@ -626,9 +728,8 @@ class SpecifierSet(BaseSpecifier):
return specifier
def __eq__(self, other):
- if isinstance(other, string_types):
- other = SpecifierSet(other)
- elif isinstance(other, _IndividualSpecifier):
+ # type: (object) -> bool
+ if isinstance(other, (string_types, _IndividualSpecifier)):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
@@ -636,9 +737,8 @@ class SpecifierSet(BaseSpecifier):
return self._specs == other._specs
def __ne__(self, other):
- if isinstance(other, string_types):
- other = SpecifierSet(other)
- elif isinstance(other, _IndividualSpecifier):
+ # type: (object) -> bool
+ if isinstance(other, (string_types, _IndividualSpecifier)):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
@@ -646,13 +746,17 @@ class SpecifierSet(BaseSpecifier):
return self._specs != other._specs
def __len__(self):
+ # type: () -> int
return len(self._specs)
def __iter__(self):
+ # type: () -> Iterator[FrozenSet[_IndividualSpecifier]]
return iter(self._specs)
@property
def prereleases(self):
+ # type: () -> Optional[bool]
+
# If we have been given an explicit prerelease modifier, then we'll
# pass that through here.
if self._prereleases is not None:
@@ -670,12 +774,16 @@ class SpecifierSet(BaseSpecifier):
@prereleases.setter
def prereleases(self, value):
+ # type: (bool) -> None
self._prereleases = value
def __contains__(self, item):
+ # type: (Union[ParsedVersion, str]) -> bool
return self.contains(item)
def contains(self, item, prereleases=None):
+ # type: (Union[ParsedVersion, str], Optional[bool]) -> bool
+
# Ensure that our item is a Version or LegacyVersion instance.
if not isinstance(item, (LegacyVersion, Version)):
item = parse(item)
@@ -701,7 +809,13 @@ class SpecifierSet(BaseSpecifier):
# will always return True, this is an explicit design decision.
return all(s.contains(item, prereleases=prereleases) for s in self._specs)
- def filter(self, iterable, prereleases=None):
+ def filter(
+ self,
+ iterable, # type: Iterable[Union[ParsedVersion, str]]
+ prereleases=None, # type: Optional[bool]
+ ):
+ # type: (...) -> Iterable[Union[ParsedVersion, str]]
+
# Determine if we're forcing a prerelease or not, if we're not forcing
# one for this particular filter call, then we'll use whatever the
# SpecifierSet thinks for whether or not we should support prereleases.
@@ -719,8 +833,8 @@ class SpecifierSet(BaseSpecifier):
# which will filter out any pre-releases, unless there are no final
# releases, and which will filter out LegacyVersion in general.
else:
- filtered = []
- found_prereleases = []
+ filtered = [] # type: List[Union[ParsedVersion, str]]
+ found_prereleases = [] # type: List[Union[ParsedVersion, str]]
for item in iterable:
# Ensure that we have some kind of Version class for this item.
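
Illustrative sketch of the SpecifierSet behaviour exercised by the code above (not taken from this commit):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=1.0,!=1.3.4.*,<2.0")
    "1.6" in spec               # True
    spec.contains("1.3.4.1")    # False -- excluded by the !=1.3.4.* prefix match
    list(spec.filter(["1.0", "1.3.4", "1.6", "2.1", "1.5a1"]))
    # ['1.0', '1.6'] -- the pre-release is dropped unless prereleases=True is passed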
diff --git a/setuptools/_vendor/packaging/tags.py b/setuptools/_vendor/packaging/tags.py
index ec9942f0..9064910b 100644
--- a/setuptools/_vendor/packaging/tags.py
+++ b/setuptools/_vendor/packaging/tags.py
@@ -13,12 +13,37 @@ except ImportError: # pragma: no cover
EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
del imp
+import logging
+import os
import platform
import re
+import struct
import sys
import sysconfig
import warnings
+from ._typing import TYPE_CHECKING, cast
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import (
+ Dict,
+ FrozenSet,
+ IO,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Sequence,
+ Tuple,
+ Union,
+ )
+
+ PythonVersion = Sequence[int]
+ MacVersion = Tuple[int, int]
+ GlibcVersion = Tuple[int, int]
+
+
+logger = logging.getLogger(__name__)
INTERPRETER_SHORT_NAMES = {
"python": "py", # Generic.
@@ -26,34 +51,48 @@ INTERPRETER_SHORT_NAMES = {
"pypy": "pp",
"ironpython": "ip",
"jython": "jy",
-}
+} # type: Dict[str, str]
_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
class Tag(object):
+ """
+ A representation of the tag triple for a wheel.
+
+ Instances are considered immutable and thus are hashable. Equality checking
+ is also supported.
+ """
__slots__ = ["_interpreter", "_abi", "_platform"]
def __init__(self, interpreter, abi, platform):
+ # type: (str, str, str) -> None
self._interpreter = interpreter.lower()
self._abi = abi.lower()
self._platform = platform.lower()
@property
def interpreter(self):
+ # type: () -> str
return self._interpreter
@property
def abi(self):
+ # type: () -> str
return self._abi
@property
def platform(self):
+ # type: () -> str
return self._platform
def __eq__(self, other):
+ # type: (object) -> bool
+ if not isinstance(other, Tag):
+ return NotImplemented
+
return (
(self.platform == other.platform)
and (self.abi == other.abi)
@@ -61,16 +100,26 @@ class Tag(object):
)
def __hash__(self):
+ # type: () -> int
return hash((self._interpreter, self._abi, self._platform))
def __str__(self):
+ # type: () -> str
return "{}-{}-{}".format(self._interpreter, self._abi, self._platform)
def __repr__(self):
+ # type: () -> str
return "<{self} @ {self_id}>".format(self=self, self_id=id(self))
def parse_tag(tag):
+ # type: (str) -> FrozenSet[Tag]
+ """
+ Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
+
+ Returning a set is required due to the possibility that the tag is a
+ compressed tag set.
+ """
tags = set()
interpreters, abis, platforms = tag.split("-")
for interpreter in interpreters.split("."):
@@ -80,20 +129,54 @@ def parse_tag(tag):
return frozenset(tags)
+def _warn_keyword_parameter(func_name, kwargs):
+ # type: (str, Dict[str, bool]) -> bool
+ """
+ Backwards-compatibility with Python 2.7 to allow treating 'warn' as keyword-only.
+ """
+ if not kwargs:
+ return False
+ elif len(kwargs) > 1 or "warn" not in kwargs:
+ kwargs.pop("warn", None)
+ arg = next(iter(kwargs.keys()))
+ raise TypeError(
+ "{}() got an unexpected keyword argument {!r}".format(func_name, arg)
+ )
+ return kwargs["warn"]
+
+
+def _get_config_var(name, warn=False):
+ # type: (str, bool) -> Union[int, str, None]
+ value = sysconfig.get_config_var(name)
+ if value is None and warn:
+ logger.debug(
+ "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+ )
+ return value
+
+
def _normalize_string(string):
+ # type: (str) -> str
return string.replace(".", "_").replace("-", "_")
-def _cpython_interpreter(py_version):
- # TODO: Is using py_version_nodot for interpreter version critical?
- return "cp{major}{minor}".format(major=py_version[0], minor=py_version[1])
+def _abi3_applies(python_version):
+ # type: (PythonVersion) -> bool
+ """
+ Determine if the Python version supports abi3.
+
+ PEP 384 was first implemented in Python 3.2.
+ """
+ return len(python_version) > 1 and tuple(python_version) >= (3, 2)
-def _cpython_abis(py_version):
+def _cpython_abis(py_version, warn=False):
+ # type: (PythonVersion, bool) -> List[str]
+ py_version = tuple(py_version) # To allow for version comparison.
abis = []
- version = "{}{}".format(*py_version[:2])
+ version = _version_nodot(py_version[:2])
debug = pymalloc = ucs4 = ""
- with_debug = sysconfig.get_config_var("Py_DEBUG")
+ with_debug = _get_config_var("Py_DEBUG", warn)
has_refcount = hasattr(sys, "gettotalrefcount")
# Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
# extension modules is the best option.
@@ -102,11 +185,11 @@ def _cpython_abis(py_version):
if with_debug or (with_debug is None and (has_refcount or has_ext)):
debug = "d"
if py_version < (3, 8):
- with_pymalloc = sysconfig.get_config_var("WITH_PYMALLOC")
+ with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
if with_pymalloc or with_pymalloc is None:
pymalloc = "m"
if py_version < (3, 3):
- unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE")
+ unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
if unicode_size == 4 or (
unicode_size is None and sys.maxunicode == 0x10FFFF
):
@@ -124,86 +207,148 @@ def _cpython_abis(py_version):
return abis
-def _cpython_tags(py_version, interpreter, abis, platforms):
+def cpython_tags(
+ python_version=None, # type: Optional[PythonVersion]
+ abis=None, # type: Optional[Iterable[str]]
+ platforms=None, # type: Optional[Iterable[str]]
+ **kwargs # type: bool
+):
+ # type: (...) -> Iterator[Tag]
+ """
+ Yields the tags for a CPython interpreter.
+
+ The tags consist of:
+ - cp<python_version>-<abi>-<platform>
+ - cp<python_version>-abi3-<platform>
+ - cp<python_version>-none-<platform>
+ - cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2.
+
+ If python_version only specifies a major version then user-provided ABIs and
+ the 'none' ABI tag will be used.
+
+ If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
+ their normal position and not at the beginning.
+ """
+ warn = _warn_keyword_parameter("cpython_tags", kwargs)
+ if not python_version:
+ python_version = sys.version_info[:2]
+
+ interpreter = "cp{}".format(_version_nodot(python_version[:2]))
+
+ if abis is None:
+ if len(python_version) > 1:
+ abis = _cpython_abis(python_version, warn)
+ else:
+ abis = []
+ abis = list(abis)
+ # 'abi3' and 'none' are explicitly handled later.
+ for explicit_abi in ("abi3", "none"):
+ try:
+ abis.remove(explicit_abi)
+ except ValueError:
+ pass
+
+ platforms = list(platforms or _platform_tags())
for abi in abis:
for platform_ in platforms:
yield Tag(interpreter, abi, platform_)
- for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
- yield tag
+ if _abi3_applies(python_version):
+ for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
+ yield tag
for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms):
yield tag
- # PEP 384 was first implemented in Python 3.2.
- for minor_version in range(py_version[1] - 1, 1, -1):
- for platform_ in platforms:
- interpreter = "cp{major}{minor}".format(
- major=py_version[0], minor=minor_version
- )
- yield Tag(interpreter, "abi3", platform_)
-
-def _pypy_interpreter():
- return "pp{py_major}{pypy_major}{pypy_minor}".format(
- py_major=sys.version_info[0],
- pypy_major=sys.pypy_version_info.major,
- pypy_minor=sys.pypy_version_info.minor,
- )
+ if _abi3_applies(python_version):
+ for minor_version in range(python_version[1] - 1, 1, -1):
+ for platform_ in platforms:
+ interpreter = "cp{version}".format(
+ version=_version_nodot((python_version[0], minor_version))
+ )
+ yield Tag(interpreter, "abi3", platform_)
def _generic_abi():
+ # type: () -> Iterator[str]
abi = sysconfig.get_config_var("SOABI")
if abi:
- return _normalize_string(abi)
- else:
- return "none"
+ yield _normalize_string(abi)
-def _pypy_tags(py_version, interpreter, abi, platforms):
- for tag in (Tag(interpreter, abi, platform) for platform in platforms):
- yield tag
- for tag in (Tag(interpreter, "none", platform) for platform in platforms):
- yield tag
+def generic_tags(
+ interpreter=None, # type: Optional[str]
+ abis=None, # type: Optional[Iterable[str]]
+ platforms=None, # type: Optional[Iterable[str]]
+ **kwargs # type: bool
+):
+ # type: (...) -> Iterator[Tag]
+ """
+ Yields the tags for a generic interpreter.
+ The tags consist of:
+ - <interpreter>-<abi>-<platform>
-def _generic_tags(interpreter, py_version, abi, platforms):
- for tag in (Tag(interpreter, abi, platform) for platform in platforms):
- yield tag
- if abi != "none":
- tags = (Tag(interpreter, "none", platform_) for platform_ in platforms)
- for tag in tags:
- yield tag
+ The "none" ABI will be added if it was not explicitly provided.
+ """
+ warn = _warn_keyword_parameter("generic_tags", kwargs)
+ if not interpreter:
+ interp_name = interpreter_name()
+ interp_version = interpreter_version(warn=warn)
+ interpreter = "".join([interp_name, interp_version])
+ if abis is None:
+ abis = _generic_abi()
+ platforms = list(platforms or _platform_tags())
+ abis = list(abis)
+ if "none" not in abis:
+ abis.append("none")
+ for abi in abis:
+ for platform_ in platforms:
+ yield Tag(interpreter, abi, platform_)
def _py_interpreter_range(py_version):
+ # type: (PythonVersion) -> Iterator[str]
"""
- Yield Python versions in descending order.
+ Yields Python versions in descending order.
After the latest version, the major-only version will be yielded, and then
- all following versions up to 'end'.
+ all previous versions of that major version.
"""
- yield "py{major}{minor}".format(major=py_version[0], minor=py_version[1])
+ if len(py_version) > 1:
+ yield "py{version}".format(version=_version_nodot(py_version[:2]))
yield "py{major}".format(major=py_version[0])
- for minor in range(py_version[1] - 1, -1, -1):
- yield "py{major}{minor}".format(major=py_version[0], minor=minor)
+ if len(py_version) > 1:
+ for minor in range(py_version[1] - 1, -1, -1):
+ yield "py{version}".format(version=_version_nodot((py_version[0], minor)))
-def _independent_tags(interpreter, py_version, platforms):
+def compatible_tags(
+ python_version=None, # type: Optional[PythonVersion]
+ interpreter=None, # type: Optional[str]
+ platforms=None, # type: Optional[Iterable[str]]
+):
+ # type: (...) -> Iterator[Tag]
"""
- Return the sequence of tags that are consistent across implementations.
+ Yields the sequence of tags that are compatible with a specific version of Python.
The tags consist of:
- py*-none-<platform>
- - <interpreter>-none-any
+ - <interpreter>-none-any # ... if `interpreter` is provided.
- py*-none-any
"""
- for version in _py_interpreter_range(py_version):
+ if not python_version:
+ python_version = sys.version_info[:2]
+ platforms = list(platforms or _platform_tags())
+ for version in _py_interpreter_range(python_version):
for platform_ in platforms:
yield Tag(version, "none", platform_)
- yield Tag(interpreter, "none", "any")
- for version in _py_interpreter_range(py_version):
+ if interpreter:
+ yield Tag(interpreter, "none", "any")
+ for version in _py_interpreter_range(python_version):
yield Tag(version, "none", "any")
def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
+ # type: (str, bool) -> str
if not is_32bit:
return arch
@@ -214,6 +359,7 @@ def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
def _mac_binary_formats(version, cpu_arch):
+ # type: (MacVersion, str) -> List[str]
formats = [cpu_arch]
if cpu_arch == "x86_64":
if version < (10, 4):
@@ -240,32 +386,42 @@ def _mac_binary_formats(version, cpu_arch):
return formats
-def _mac_platforms(version=None, arch=None):
- version_str, _, cpu_arch = platform.mac_ver()
+def mac_platforms(version=None, arch=None):
+ # type: (Optional[MacVersion], Optional[str]) -> Iterator[str]
+ """
+ Yields the platform tags for a macOS system.
+
+ The `version` parameter is a two-item tuple specifying the macOS version to
+ generate platform tags for. The `arch` parameter is the CPU architecture to
+ generate platform tags for. Both parameters default to the appropriate value
+ for the current system.
+ """
+ version_str, _, cpu_arch = platform.mac_ver() # type: ignore
if version is None:
- version = tuple(map(int, version_str.split(".")[:2]))
+ version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
+ else:
+ version = version
if arch is None:
arch = _mac_arch(cpu_arch)
- platforms = []
+ else:
+ arch = arch
for minor_version in range(version[1], -1, -1):
compat_version = version[0], minor_version
binary_formats = _mac_binary_formats(compat_version, arch)
for binary_format in binary_formats:
- platforms.append(
- "macosx_{major}_{minor}_{binary_format}".format(
- major=compat_version[0],
- minor=compat_version[1],
- binary_format=binary_format,
- )
+ yield "macosx_{major}_{minor}_{binary_format}".format(
+ major=compat_version[0],
+ minor=compat_version[1],
+ binary_format=binary_format,
)
- return platforms
# From PEP 513.
def _is_manylinux_compatible(name, glibc_version):
+ # type: (str, GlibcVersion) -> bool
# Check for presence of _manylinux module.
try:
- import _manylinux
+ import _manylinux # noqa
return bool(getattr(_manylinux, name + "_compatible"))
except (ImportError, AttributeError):
@@ -276,14 +432,50 @@ def _is_manylinux_compatible(name, glibc_version):
def _glibc_version_string():
+ # type: () -> Optional[str]
# Returns glibc version string, or None if not using glibc.
- import ctypes
+ return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _glibc_version_string_confstr():
+ # type: () -> Optional[str]
+ """
+ Primary implementation of glibc_version_string using os.confstr.
+ """
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+ # to be broken or missing. This strategy is used in the standard library
+ # platform module.
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
+ try:
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
+ version_string = os.confstr( # type: ignore[attr-defined] # noqa: F821
+ "CS_GNU_LIBC_VERSION"
+ )
+ assert version_string is not None
+ _, version = version_string.split() # type: Tuple[str, str]
+ except (AssertionError, AttributeError, OSError, ValueError):
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+ return None
+ return version
+
+
+def _glibc_version_string_ctypes():
+ # type: () -> Optional[str]
+ """
+ Fallback implementation of glibc_version_string using ctypes.
+ """
+ try:
+ import ctypes
+ except ImportError:
+ return None
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
# manpage says, "If filename is NULL, then the returned handle is for the
# main program". This way we can let the linker do the work to figure out
# which libc our process is actually using.
- process_namespace = ctypes.CDLL(None)
+ #
+ # Note: typeshed is wrong here so we are ignoring this line.
+ process_namespace = ctypes.CDLL(None) # type: ignore
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
@@ -293,7 +485,7 @@ def _glibc_version_string():
# Call gnu_get_libc_version, which returns a string like "2.5"
gnu_get_libc_version.restype = ctypes.c_char_p
- version_str = gnu_get_libc_version()
+ version_str = gnu_get_libc_version() # type: str
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
@@ -303,6 +495,7 @@ def _glibc_version_string():
# Separated out from have_compatible_glibc for easier unit testing.
def _check_glibc_version(version_str, required_major, minimum_minor):
+ # type: (str, int, int) -> bool
# Parse string and check against requested version.
#
# We use a regexp instead of str.split because we want to discard any
@@ -324,81 +517,235 @@ def _check_glibc_version(version_str, required_major, minimum_minor):
def _have_compatible_glibc(required_major, minimum_minor):
+ # type: (int, int) -> bool
version_str = _glibc_version_string()
if version_str is None:
return False
return _check_glibc_version(version_str, required_major, minimum_minor)
+# Python does not provide platform information at sufficient granularity to
+# identify the architecture of the running executable in some cases, so we
+# determine it dynamically by reading the information from the running
+# process. This only applies on Linux, which uses the ELF format.
+class _ELFFileHeader(object):
+ # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
+ class _InvalidELFFileHeader(ValueError):
+ """
+ An invalid ELF file header was found.
+ """
+
+ ELF_MAGIC_NUMBER = 0x7F454C46
+ ELFCLASS32 = 1
+ ELFCLASS64 = 2
+ ELFDATA2LSB = 1
+ ELFDATA2MSB = 2
+ EM_386 = 3
+ EM_S390 = 22
+ EM_ARM = 40
+ EM_X86_64 = 62
+ EF_ARM_ABIMASK = 0xFF000000
+ EF_ARM_ABI_VER5 = 0x05000000
+ EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+ def __init__(self, file):
+ # type: (IO[bytes]) -> None
+ def unpack(fmt):
+ # type: (str) -> int
+ try:
+ (result,) = struct.unpack(
+ fmt, file.read(struct.calcsize(fmt))
+ ) # type: (int, )
+ except struct.error:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ return result
+
+ self.e_ident_magic = unpack(">I")
+ if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_class = unpack("B")
+ if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_data = unpack("B")
+ if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
+ raise _ELFFileHeader._InvalidELFFileHeader()
+ self.e_ident_version = unpack("B")
+ self.e_ident_osabi = unpack("B")
+ self.e_ident_abiversion = unpack("B")
+ self.e_ident_pad = file.read(7)
+ format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
+ format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
+ format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
+ format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
+ self.e_type = unpack(format_h)
+ self.e_machine = unpack(format_h)
+ self.e_version = unpack(format_i)
+ self.e_entry = unpack(format_p)
+ self.e_phoff = unpack(format_p)
+ self.e_shoff = unpack(format_p)
+ self.e_flags = unpack(format_i)
+ self.e_ehsize = unpack(format_h)
+ self.e_phentsize = unpack(format_h)
+ self.e_phnum = unpack(format_h)
+ self.e_shentsize = unpack(format_h)
+ self.e_shnum = unpack(format_h)
+ self.e_shstrndx = unpack(format_h)
+
+
+def _get_elf_header():
+ # type: () -> Optional[_ELFFileHeader]
+ try:
+ with open(sys.executable, "rb") as f:
+ elf_header = _ELFFileHeader(f)
+ except (IOError, OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
+ return None
+ return elf_header
+
+
+def _is_linux_armhf():
+ # type: () -> bool
+ # hard-float ABI can be detected from the ELF header of the running
+ # process
+ # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+ elf_header = _get_elf_header()
+ if elf_header is None:
+ return False
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+ result &= elf_header.e_machine == elf_header.EM_ARM
+ result &= (
+ elf_header.e_flags & elf_header.EF_ARM_ABIMASK
+ ) == elf_header.EF_ARM_ABI_VER5
+ result &= (
+ elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
+ ) == elf_header.EF_ARM_ABI_FLOAT_HARD
+ return result
+
+
+def _is_linux_i686():
+ # type: () -> bool
+ elf_header = _get_elf_header()
+ if elf_header is None:
+ return False
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
+ result &= elf_header.e_machine == elf_header.EM_386
+ return result
+
+
+def _have_compatible_manylinux_abi(arch):
+ # type: (str) -> bool
+ if arch == "armv7l":
+ return _is_linux_armhf()
+ if arch == "i686":
+ return _is_linux_i686()
+ return True
+
+
def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
+ # type: (bool) -> Iterator[str]
linux = _normalize_string(distutils.util.get_platform())
- if linux == "linux_x86_64" and is_32bit:
- linux = "linux_i686"
- manylinux_support = (
- ("manylinux2014", (2, 17)), # CentOS 7 w/ glibc 2.17 (PEP 599)
- ("manylinux2010", (2, 12)), # CentOS 6 w/ glibc 2.12 (PEP 571)
- ("manylinux1", (2, 5)), # CentOS 5 w/ glibc 2.5 (PEP 513)
- )
+ if is_32bit:
+ if linux == "linux_x86_64":
+ linux = "linux_i686"
+ elif linux == "linux_aarch64":
+ linux = "linux_armv7l"
+ manylinux_support = []
+ _, arch = linux.split("_", 1)
+ if _have_compatible_manylinux_abi(arch):
+ if arch in {"x86_64", "i686", "aarch64", "armv7l", "ppc64", "ppc64le", "s390x"}:
+ manylinux_support.append(
+ ("manylinux2014", (2, 17))
+ ) # CentOS 7 w/ glibc 2.17 (PEP 599)
+ if arch in {"x86_64", "i686"}:
+ manylinux_support.append(
+ ("manylinux2010", (2, 12))
+ ) # CentOS 6 w/ glibc 2.12 (PEP 571)
+ manylinux_support.append(
+ ("manylinux1", (2, 5))
+ ) # CentOS 5 w/ glibc 2.5 (PEP 513)
manylinux_support_iter = iter(manylinux_support)
for name, glibc_version in manylinux_support_iter:
if _is_manylinux_compatible(name, glibc_version):
- platforms = [linux.replace("linux", name)]
+ yield linux.replace("linux", name)
break
- else:
- platforms = []
# Support for a later manylinux implies support for an earlier version.
- platforms += [linux.replace("linux", name) for name, _ in manylinux_support_iter]
- platforms.append(linux)
- return platforms
+ for name, _ in manylinux_support_iter:
+ yield linux.replace("linux", name)
+ yield linux
def _generic_platforms():
- platform = _normalize_string(distutils.util.get_platform())
- return [platform]
+ # type: () -> Iterator[str]
+ yield _normalize_string(distutils.util.get_platform())
-def _interpreter_name():
- name = platform.python_implementation().lower()
+def _platform_tags():
+ # type: () -> Iterator[str]
+ """
+ Provides the platform tags for this installation.
+ """
+ if platform.system() == "Darwin":
+ return mac_platforms()
+ elif platform.system() == "Linux":
+ return _linux_platforms()
+ else:
+ return _generic_platforms()
+
+
+def interpreter_name():
+ # type: () -> str
+ """
+ Returns the name of the running interpreter.
+ """
+ try:
+ name = sys.implementation.name # type: ignore
+ except AttributeError: # pragma: no cover
+ # Python 2.7 compatibility.
+ name = platform.python_implementation().lower()
return INTERPRETER_SHORT_NAMES.get(name) or name
-def _generic_interpreter(name, py_version):
- version = sysconfig.get_config_var("py_version_nodot")
- if not version:
- version = "".join(map(str, py_version[:2]))
- return "{name}{version}".format(name=name, version=version)
+def interpreter_version(**kwargs):
+ # type: (bool) -> str
+ """
+ Returns the version of the running interpreter.
+ """
+ warn = _warn_keyword_parameter("interpreter_version", kwargs)
+ version = _get_config_var("py_version_nodot", warn=warn)
+ if version:
+ version = str(version)
+ else:
+ version = _version_nodot(sys.version_info[:2])
+ return version
+
+
+def _version_nodot(version):
+ # type: (PythonVersion) -> str
+ if any(v >= 10 for v in version):
+ sep = "_"
+ else:
+ sep = ""
+ return sep.join(map(str, version))
-def sys_tags():
+def sys_tags(**kwargs):
+ # type: (bool) -> Iterator[Tag]
"""
Returns the sequence of tag triples for the running interpreter.
The order of the sequence corresponds to priority order for the
interpreter, from most to least important.
"""
- py_version = sys.version_info[:2]
- interpreter_name = _interpreter_name()
- if platform.system() == "Darwin":
- platforms = _mac_platforms()
- elif platform.system() == "Linux":
- platforms = _linux_platforms()
- else:
- platforms = _generic_platforms()
+ warn = _warn_keyword_parameter("sys_tags", kwargs)
- if interpreter_name == "cp":
- interpreter = _cpython_interpreter(py_version)
- abis = _cpython_abis(py_version)
- for tag in _cpython_tags(py_version, interpreter, abis, platforms):
- yield tag
- elif interpreter_name == "pp":
- interpreter = _pypy_interpreter()
- abi = _generic_abi()
- for tag in _pypy_tags(py_version, interpreter, abi, platforms):
+ interp_name = interpreter_name()
+ if interp_name == "cp":
+ for tag in cpython_tags(warn=warn):
yield tag
else:
- interpreter = _generic_interpreter(interpreter_name, py_version)
- abi = _generic_abi()
- for tag in _generic_tags(interpreter, py_version, abi, platforms):
+ for tag in generic_tags():
yield tag
- for tag in _independent_tags(interpreter, py_version, platforms):
+
+ for tag in compatible_tags():
yield tag
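
Illustrative usage of the new public helpers in tags.py, which replace the old private _cpython_tags/_pypy_tags/_independent_tags trio (sketch, not part of the patch; import path assumes the standalone packaging distribution):

    from packaging import tags

    # A compressed tag set expands into every combination.
    expanded = {str(t) for t in tags.parse_tag("cp37-cp37m.abi3-manylinux1_x86_64")}
    # {'cp37-cp37m-manylinux1_x86_64', 'cp37-abi3-manylinux1_x86_64'}

    # Tags supported by the running interpreter, most preferred first.
    best = next(tags.sys_tags())

    # Tags for an explicit target instead of the current interpreter.
    for t in tags.cpython_tags(python_version=(3, 8), abis=["cp38"],
                               platforms=["manylinux1_x86_64"]):
        print(t)
    # cp38-cp38-manylinux1_x86_64, cp38-abi3-manylinux1_x86_64,
    # cp38-none-manylinux1_x86_64, then cp37-abi3 ... down to cp32-abi3.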
diff --git a/setuptools/_vendor/packaging/utils.py b/setuptools/_vendor/packaging/utils.py
index 88418786..19579c1a 100644
--- a/setuptools/_vendor/packaging/utils.py
+++ b/setuptools/_vendor/packaging/utils.py
@@ -5,28 +5,36 @@ from __future__ import absolute_import, division, print_function
import re
+from ._typing import TYPE_CHECKING, cast
from .version import InvalidVersion, Version
+if TYPE_CHECKING: # pragma: no cover
+ from typing import NewType, Union
+
+ NormalizedName = NewType("NormalizedName", str)
_canonicalize_regex = re.compile(r"[-_.]+")
def canonicalize_name(name):
+ # type: (str) -> NormalizedName
# This is taken from PEP 503.
- return _canonicalize_regex.sub("-", name).lower()
+ value = _canonicalize_regex.sub("-", name).lower()
+ return cast("NormalizedName", value)
-def canonicalize_version(version):
+def canonicalize_version(_version):
+ # type: (str) -> Union[Version, str]
"""
- This is very similar to Version.__str__, but has one subtle differences
+ This is very similar to Version.__str__, but has one subtle difference
with the way it handles the release segment.
"""
try:
- version = Version(version)
+ version = Version(_version)
except InvalidVersion:
# Legacy versions cannot be normalized
- return version
+ return _version
parts = []
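[editor's note] canonicalize_version() (the parameter rename to _version avoids shadowing the local Version instance) normalizes parseable versions and passes invalid ones through untouched. A short sketch, assuming packaging==20.4 installed from PyPI:

    from packaging.utils import canonicalize_version

    print(canonicalize_version("1.4.0"))          # "1.4" -- trailing ".0" release parts are dropped
    print(canonicalize_version("not a version"))  # returned verbatim: legacy versions cannot be normalized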
diff --git a/setuptools/_vendor/packaging/version.py b/setuptools/_vendor/packaging/version.py
index 95157a1f..00371e86 100644
--- a/setuptools/_vendor/packaging/version.py
+++ b/setuptools/_vendor/packaging/version.py
@@ -7,8 +7,35 @@ import collections
import itertools
import re
-from ._structures import Infinity
-
+from ._structures import Infinity, NegativeInfinity
+from ._typing import TYPE_CHECKING
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
+
+ from ._structures import InfinityType, NegativeInfinityType
+
+ InfiniteTypes = Union[InfinityType, NegativeInfinityType]
+ PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
+ SubLocalType = Union[InfiniteTypes, int, str]
+ LocalType = Union[
+ NegativeInfinityType,
+ Tuple[
+ Union[
+ SubLocalType,
+ Tuple[SubLocalType, str],
+ Tuple[NegativeInfinityType, SubLocalType],
+ ],
+ ...,
+ ],
+ ]
+ CmpKey = Tuple[
+ int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+ ]
+ LegacyCmpKey = Tuple[int, Tuple[str, ...]]
+ VersionComparisonMethod = Callable[
+ [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
+ ]
__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
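[editor's note] The type aliases above only exist under TYPE_CHECKING; at runtime the module still exports the same names. A reminder of how parse() picks between the two version classes, assuming packaging==20.4 installed from PyPI (LegacyVersion was removed from packaging only in much later releases):

    from packaging.version import LegacyVersion, Version, parse

    assert isinstance(parse("1.0.post1"), Version)            # PEP 440 compliant
    assert isinstance(parse("french toast"), LegacyVersion)   # anything unparseable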
@@ -19,6 +46,7 @@ _Version = collections.namedtuple(
def parse(version):
+ # type: (str) -> Union[LegacyVersion, Version]
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
@@ -37,28 +65,38 @@ class InvalidVersion(ValueError):
class _BaseVersion(object):
+ _key = None # type: Union[CmpKey, LegacyCmpKey]
+
def __hash__(self):
+ # type: () -> int
return hash(self._key)
def __lt__(self, other):
+ # type: (_BaseVersion) -> bool
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
+ # type: (_BaseVersion) -> bool
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
+ # type: (object) -> bool
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
+ # type: (_BaseVersion) -> bool
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
+ # type: (_BaseVersion) -> bool
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
+ # type: (object) -> bool
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
+ # type: (object, VersionComparisonMethod) -> Union[bool, NotImplemented]
if not isinstance(other, _BaseVersion):
return NotImplemented
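[editor's note] All six rich comparisons funnel through _compare() on the precomputed _key, so ordering follows PEP 440 rather than string order. A small sketch, assuming packaging==20.4 installed from PyPI:

    from packaging.version import Version

    assert Version("1.0.dev1") < Version("1.0a1") < Version("1.0") < Version("1.0.post1")
    assert Version("1.10") > Version("1.9")          # numeric, not lexicographic
    assert (Version("1.0") == "1.0") is False        # non-version operands fall back to NotImplemented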
@@ -67,57 +105,71 @@ class _BaseVersion(object):
class LegacyVersion(_BaseVersion):
def __init__(self, version):
+ # type: (str) -> None
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
def __str__(self):
+ # type: () -> str
return self._version
def __repr__(self):
+ # type: () -> str
return "<LegacyVersion({0})>".format(repr(str(self)))
@property
def public(self):
+ # type: () -> str
return self._version
@property
def base_version(self):
+ # type: () -> str
return self._version
@property
def epoch(self):
+ # type: () -> int
return -1
@property
def release(self):
+ # type: () -> None
return None
@property
def pre(self):
+ # type: () -> None
return None
@property
def post(self):
+ # type: () -> None
return None
@property
def dev(self):
+ # type: () -> None
return None
@property
def local(self):
+ # type: () -> None
return None
@property
def is_prerelease(self):
+ # type: () -> bool
return False
@property
def is_postrelease(self):
+ # type: () -> bool
return False
@property
def is_devrelease(self):
+ # type: () -> bool
return False
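[editor's note] LegacyVersion deliberately reports degenerate metadata: an epoch of -1, no release/pre/post/dev/local segments, and never a pre-, post-, or dev-release. A tiny illustration, assuming packaging==20.4 installed from PyPI:

    from packaging.version import LegacyVersion

    v = LegacyVersion("1.0-final")
    assert (v.epoch, v.release, v.local) == (-1, None, None)
    assert not (v.is_prerelease or v.is_postrelease or v.is_devrelease)
    assert str(v) == "1.0-final"   # the original string is preserved verbatim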
@@ -133,6 +185,7 @@ _legacy_version_replacement_map = {
def _parse_version_parts(s):
+ # type: (str) -> Iterator[str]
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
@@ -150,6 +203,8 @@ def _parse_version_parts(s):
def _legacy_cmpkey(version):
+ # type: (str) -> LegacyCmpKey
+
    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the de facto standard originally implemented by setuptools,
@@ -158,7 +213,7 @@ def _legacy_cmpkey(version):
    # This scheme is taken from pkg_resources.parse_version in setuptools, prior to
    # its adoption of the packaging library.
- parts = []
+ parts = [] # type: List[str]
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
@@ -171,9 +226,8 @@ def _legacy_cmpkey(version):
parts.pop()
parts.append(part)
- parts = tuple(parts)
- return epoch, parts
+ return epoch, tuple(parts)
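[editor's note] Because the comparison key starts with that hard-coded epoch of -1, every legacy version sorts before every PEP 440 version, whose epochs are >= 0. For example, assuming packaging==20.4 installed from PyPI:

    from packaging.version import parse

    assert parse("banana") < parse("0.0.0")
    assert sorted(["2.0", "banana", "1.0"], key=parse) == ["banana", "1.0", "2.0"]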
# Deliberately not anchored to the start and end of the string, to make it
@@ -215,6 +269,8 @@ class Version(_BaseVersion):
_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
def __init__(self, version):
+ # type: (str) -> None
+
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
@@ -243,9 +299,11 @@ class Version(_BaseVersion):
)
def __repr__(self):
+ # type: () -> str
return "<Version({0})>".format(repr(str(self)))
def __str__(self):
+ # type: () -> str
parts = []
# Epoch
@@ -275,26 +333,35 @@ class Version(_BaseVersion):
@property
def epoch(self):
- return self._version.epoch
+ # type: () -> int
+ _epoch = self._version.epoch # type: int
+ return _epoch
@property
def release(self):
- return self._version.release
+ # type: () -> Tuple[int, ...]
+ _release = self._version.release # type: Tuple[int, ...]
+ return _release
@property
def pre(self):
- return self._version.pre
+ # type: () -> Optional[Tuple[str, int]]
+ _pre = self._version.pre # type: Optional[Tuple[str, int]]
+ return _pre
@property
def post(self):
+ # type: () -> Optional[Tuple[str, int]]
return self._version.post[1] if self._version.post else None
@property
def dev(self):
+ # type: () -> Optional[Tuple[str, int]]
return self._version.dev[1] if self._version.dev else None
@property
def local(self):
+ # type: () -> Optional[str]
if self._version.local:
return ".".join(str(x) for x in self._version.local)
else:
@@ -302,10 +369,12 @@ class Version(_BaseVersion):
@property
def public(self):
+ # type: () -> str
return str(self).split("+", 1)[0]
@property
def base_version(self):
+ # type: () -> str
parts = []
# Epoch
@@ -319,18 +388,41 @@ class Version(_BaseVersion):
@property
def is_prerelease(self):
+ # type: () -> bool
return self.dev is not None or self.pre is not None
@property
def is_postrelease(self):
+ # type: () -> bool
return self.post is not None
@property
def is_devrelease(self):
+ # type: () -> bool
return self.dev is not None
+ @property
+ def major(self):
+ # type: () -> int
+ return self.release[0] if len(self.release) >= 1 else 0
+
+ @property
+ def minor(self):
+ # type: () -> int
+ return self.release[1] if len(self.release) >= 2 else 0
+
+ @property
+ def micro(self):
+ # type: () -> int
+ return self.release[2] if len(self.release) >= 3 else 0
+
+
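[editor's note] The major/minor/micro properties are new in this packaging release; missing release components default to 0 and the epoch is not counted. A quick sketch, assuming packaging==20.4 installed from PyPI:

    from packaging.version import Version

    v = Version("3.4")
    assert (v.major, v.minor, v.micro) == (3, 4, 0)
    assert Version("2!1").major == 1   # the epoch (2) is separate from the release tuple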
+def _parse_letter_version(
+ letter, # type: str
+ number, # type: Union[str, bytes, SupportsInt]
+):
+ # type: (...) -> Optional[Tuple[str, int]]
-def _parse_letter_version(letter, number):
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
@@ -360,11 +452,14 @@ def _parse_letter_version(letter, number):
return letter, int(number)
+ return None
+
_local_version_separators = re.compile(r"[\._-]")
def _parse_local_version(local):
+ # type: (str) -> Optional[LocalType]
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
@@ -373,15 +468,25 @@ def _parse_local_version(local):
part.lower() if not part.isdigit() else int(part)
for part in _local_version_separators.split(local)
)
+ return None
+
+def _cmpkey(
+ epoch, # type: int
+ release, # type: Tuple[int, ...]
+ pre, # type: Optional[Tuple[str, int]]
+ post, # type: Optional[Tuple[str, int]]
+ dev, # type: Optional[Tuple[str, int]]
+ local, # type: Optional[Tuple[SubLocalType]]
+):
+ # type: (...) -> CmpKey
-def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all the now-leading
    # zeros until we come to something non-zero, then re-reverse the rest back into
    # the correct order, make it a tuple, and use that for our sorting key.
- release = tuple(
+ _release = tuple(
reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
)
@@ -390,23 +495,31 @@ def _cmpkey(epoch, release, pre, post, dev, local):
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
- pre = -Infinity
+ _pre = NegativeInfinity # type: PrePostDevType
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
- pre = Infinity
+ _pre = Infinity
+ else:
+ _pre = pre
# Versions without a post segment should sort before those with one.
if post is None:
- post = -Infinity
+ _post = NegativeInfinity # type: PrePostDevType
+
+ else:
+ _post = post
# Versions without a development segment should sort after those with one.
if dev is None:
- dev = Infinity
+ _dev = Infinity # type: PrePostDevType
+
+ else:
+ _dev = dev
if local is None:
# Versions without a local segment should sort before those with one.
- local = -Infinity
+ _local = NegativeInfinity # type: LocalType
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
@@ -415,6 +528,8 @@ def _cmpkey(epoch, release, pre, post, dev, local):
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
- local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local)
+ _local = tuple(
+ (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+ )
- return epoch, release, pre, post, dev, local
+ return epoch, _release, _pre, _post, _dev, _local
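[editor's note] The Infinity/NegativeInfinity sentinels above are what give dev releases, pre-releases, post-releases, and local versions their PEP 440 ordering relative to a plain release. The net effect, assuming packaging==20.4 installed from PyPI:

    from packaging.version import Version

    versions = ["1.0.post1", "1.0", "1.0+local", "1.0a1", "1.0.dev1"]
    assert sorted(versions, key=Version) == ["1.0.dev1", "1.0a1", "1.0", "1.0+local", "1.0.post1"]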
diff --git a/setuptools/_vendor/six.py b/setuptools/_vendor/six.py
deleted file mode 100644
index 190c0239..00000000
--- a/setuptools/_vendor/six.py
+++ /dev/null
@@ -1,868 +0,0 @@
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-# Copyright (c) 2010-2015 Benjamin Peterson
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-from __future__ import absolute_import
-
-import functools
-import itertools
-import operator
-import sys
-import types
-
-__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.10.0"
-
-
-# Useful for very coarse version differentiation.
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-PY34 = sys.version_info[0:2] >= (3, 4)
-
-if PY3:
- string_types = str,
- integer_types = int,
- class_types = type,
- text_type = str
- binary_type = bytes
-
- MAXSIZE = sys.maxsize
-else:
- string_types = basestring,
- integer_types = (int, long)
- class_types = (type, types.ClassType)
- text_type = unicode
- binary_type = str
-
- if sys.platform.startswith("java"):
- # Jython always uses 32 bits.
- MAXSIZE = int((1 << 31) - 1)
- else:
- # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
- class X(object):
-
- def __len__(self):
- return 1 << 31
- try:
- len(X())
- except OverflowError:
- # 32-bit
- MAXSIZE = int((1 << 31) - 1)
- else:
- # 64-bit
- MAXSIZE = int((1 << 63) - 1)
- del X
-
-
-def _add_doc(func, doc):
- """Add documentation to a function."""
- func.__doc__ = doc
-
-
-def _import_module(name):
- """Import module, returning the module after the last dot."""
- __import__(name)
- return sys.modules[name]
-
-
-class _LazyDescr(object):
-
- def __init__(self, name):
- self.name = name
-
- def __get__(self, obj, tp):
- result = self._resolve()
- setattr(obj, self.name, result) # Invokes __set__.
- try:
- # This is a bit ugly, but it avoids running this again by
- # removing this descriptor.
- delattr(obj.__class__, self.name)
- except AttributeError:
- pass
- return result
-
-
-class MovedModule(_LazyDescr):
-
- def __init__(self, name, old, new=None):
- super(MovedModule, self).__init__(name)
- if PY3:
- if new is None:
- new = name
- self.mod = new
- else:
- self.mod = old
-
- def _resolve(self):
- return _import_module(self.mod)
-
- def __getattr__(self, attr):
- _module = self._resolve()
- value = getattr(_module, attr)
- setattr(self, attr, value)
- return value
-
-
-class _LazyModule(types.ModuleType):
-
- def __init__(self, name):
- super(_LazyModule, self).__init__(name)
- self.__doc__ = self.__class__.__doc__
-
- def __dir__(self):
- attrs = ["__doc__", "__name__"]
- attrs += [attr.name for attr in self._moved_attributes]
- return attrs
-
- # Subclasses should override this
- _moved_attributes = []
-
-
-class MovedAttribute(_LazyDescr):
-
- def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
- super(MovedAttribute, self).__init__(name)
- if PY3:
- if new_mod is None:
- new_mod = name
- self.mod = new_mod
- if new_attr is None:
- if old_attr is None:
- new_attr = name
- else:
- new_attr = old_attr
- self.attr = new_attr
- else:
- self.mod = old_mod
- if old_attr is None:
- old_attr = name
- self.attr = old_attr
-
- def _resolve(self):
- module = _import_module(self.mod)
- return getattr(module, self.attr)
-
-
-class _SixMetaPathImporter(object):
-
- """
- A meta path importer to import six.moves and its submodules.
-
- This class implements a PEP302 finder and loader. It should be compatible
- with Python 2.5 and all existing versions of Python3
- """
-
- def __init__(self, six_module_name):
- self.name = six_module_name
- self.known_modules = {}
-
- def _add_module(self, mod, *fullnames):
- for fullname in fullnames:
- self.known_modules[self.name + "." + fullname] = mod
-
- def _get_module(self, fullname):
- return self.known_modules[self.name + "." + fullname]
-
- def find_module(self, fullname, path=None):
- if fullname in self.known_modules:
- return self
- return None
-
- def __get_module(self, fullname):
- try:
- return self.known_modules[fullname]
- except KeyError:
- raise ImportError("This loader does not know module " + fullname)
-
- def load_module(self, fullname):
- try:
- # in case of a reload
- return sys.modules[fullname]
- except KeyError:
- pass
- mod = self.__get_module(fullname)
- if isinstance(mod, MovedModule):
- mod = mod._resolve()
- else:
- mod.__loader__ = self
- sys.modules[fullname] = mod
- return mod
-
- def is_package(self, fullname):
- """
- Return true, if the named module is a package.
-
- We need this method to get correct spec objects with
- Python 3.4 (see PEP451)
- """
- return hasattr(self.__get_module(fullname), "__path__")
-
- def get_code(self, fullname):
- """Return None
-
- Required, if is_package is implemented"""
- self.__get_module(fullname) # eventually raises ImportError
- return None
- get_source = get_code # same as get_code
-
-_importer = _SixMetaPathImporter(__name__)
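[editor's note] six's _SixMetaPathImporter is a PEP 302-era finder/loader that serves six.moves.* aliases lazily. The sketch below is not six's code (which this commit deletes along with the rest of the vendored module); it restates the same aliasing idea with the modern importlib finder API, and the alias name queue_alias is made up for illustration:

    import importlib
    import importlib.abc
    import importlib.machinery
    import sys


    class AliasFinder(importlib.abc.MetaPathFinder, importlib.abc.Loader):
        """Let `import <alias>` resolve to an already-importable real module."""

        def __init__(self, aliases):
            self.aliases = aliases   # alias name -> real module name

        def find_spec(self, fullname, path=None, target=None):
            if fullname in self.aliases:
                return importlib.machinery.ModuleSpec(fullname, self)
            return None

        def create_module(self, spec):
            # Reuse the real module object; note that the import system will
            # re-point its __spec__ at the alias spec as a side effect.
            return importlib.import_module(self.aliases[spec.name])

        def exec_module(self, module):
            pass   # the real module is already initialized


    sys.meta_path.append(AliasFinder({"queue_alias": "queue"}))
    import queue_alias                      # resolves through the finder above
    assert queue_alias is sys.modules["queue"]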
-
-
-class _MovedItems(_LazyModule):
-
- """Lazy loading of moved objects"""
- __path__ = [] # mark as package
-
-
-_moved_attributes = [
- MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
- MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
- MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
- MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
- MovedAttribute("intern", "__builtin__", "sys"),
- MovedAttribute("map", "itertools", "builtins", "imap", "map"),
- MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
- MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
- MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
- MovedAttribute("reduce", "__builtin__", "functools"),
- MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
- MovedAttribute("StringIO", "StringIO", "io"),
- MovedAttribute("UserDict", "UserDict", "collections"),
- MovedAttribute("UserList", "UserList", "collections"),
- MovedAttribute("UserString", "UserString", "collections"),
- MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
- MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
- MovedModule("builtins", "__builtin__"),
- MovedModule("configparser", "ConfigParser"),
- MovedModule("copyreg", "copy_reg"),
- MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
- MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
- MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
- MovedModule("http_cookies", "Cookie", "http.cookies"),
- MovedModule("html_entities", "htmlentitydefs", "html.entities"),
- MovedModule("html_parser", "HTMLParser", "html.parser"),
- MovedModule("http_client", "httplib", "http.client"),
- MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
- MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
- MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
- MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
- MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
- MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
- MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
- MovedModule("cPickle", "cPickle", "pickle"),
- MovedModule("queue", "Queue"),
- MovedModule("reprlib", "repr"),
- MovedModule("socketserver", "SocketServer"),
- MovedModule("_thread", "thread", "_thread"),
- MovedModule("tkinter", "Tkinter"),
- MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
- MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
- MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
- MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
- MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
- MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
- MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
- MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
- MovedModule("tkinter_colorchooser", "tkColorChooser",
- "tkinter.colorchooser"),
- MovedModule("tkinter_commondialog", "tkCommonDialog",
- "tkinter.commondialog"),
- MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
- MovedModule("tkinter_font", "tkFont", "tkinter.font"),
- MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
- MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
- "tkinter.simpledialog"),
- MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
- MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
- MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
- MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
- MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
- MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
-]
-# Add windows specific modules.
-if sys.platform == "win32":
- _moved_attributes += [
- MovedModule("winreg", "_winreg"),
- ]
-
-for attr in _moved_attributes:
- setattr(_MovedItems, attr.name, attr)
- if isinstance(attr, MovedModule):
- _importer._add_module(attr, "moves." + attr.name)
-del attr
-
-_MovedItems._moved_attributes = _moved_attributes
-
-moves = _MovedItems(__name__ + ".moves")
-_importer._add_module(moves, "moves")
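[editor's note] What the machinery above enabled, for readers unfamiliar with six: dual-codebase imports that resolve to the right stdlib location on either Python major version. A usage sketch, assuming the six distribution is installed separately now that it is no longer vendored:

    from six.moves import configparser, range          # ConfigParser/xrange on Py2
    from six.moves.urllib.parse import urlencode       # urllib vs. urllib.parse split

    print(urlencode({"q": "setuptools"}))
    print(list(range(3)))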
-
-
-class Module_six_moves_urllib_parse(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_parse"""
-
-
-_urllib_parse_moved_attributes = [
- MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
- MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
- MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
- MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
- MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
- MovedAttribute("urljoin", "urlparse", "urllib.parse"),
- MovedAttribute("urlparse", "urlparse", "urllib.parse"),
- MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
- MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
- MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
- MovedAttribute("quote", "urllib", "urllib.parse"),
- MovedAttribute("quote_plus", "urllib", "urllib.parse"),
- MovedAttribute("unquote", "urllib", "urllib.parse"),
- MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
- MovedAttribute("urlencode", "urllib", "urllib.parse"),
- MovedAttribute("splitquery", "urllib", "urllib.parse"),
- MovedAttribute("splittag", "urllib", "urllib.parse"),
- MovedAttribute("splituser", "urllib", "urllib.parse"),
- MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
- MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
- MovedAttribute("uses_params", "urlparse", "urllib.parse"),
- MovedAttribute("uses_query", "urlparse", "urllib.parse"),
- MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
-]
-for attr in _urllib_parse_moved_attributes:
- setattr(Module_six_moves_urllib_parse, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
- "moves.urllib_parse", "moves.urllib.parse")
-
-
-class Module_six_moves_urllib_error(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_error"""
-
-
-_urllib_error_moved_attributes = [
- MovedAttribute("URLError", "urllib2", "urllib.error"),
- MovedAttribute("HTTPError", "urllib2", "urllib.error"),
- MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
-]
-for attr in _urllib_error_moved_attributes:
- setattr(Module_six_moves_urllib_error, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
- "moves.urllib_error", "moves.urllib.error")
-
-
-class Module_six_moves_urllib_request(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_request"""
-
-
-_urllib_request_moved_attributes = [
- MovedAttribute("urlopen", "urllib2", "urllib.request"),
- MovedAttribute("install_opener", "urllib2", "urllib.request"),
- MovedAttribute("build_opener", "urllib2", "urllib.request"),
- MovedAttribute("pathname2url", "urllib", "urllib.request"),
- MovedAttribute("url2pathname", "urllib", "urllib.request"),
- MovedAttribute("getproxies", "urllib", "urllib.request"),
- MovedAttribute("Request", "urllib2", "urllib.request"),
- MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
- MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
- MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
- MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
- MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
- MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
- MovedAttribute("FileHandler", "urllib2", "urllib.request"),
- MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
- MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
- MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
- MovedAttribute("urlretrieve", "urllib", "urllib.request"),
- MovedAttribute("urlcleanup", "urllib", "urllib.request"),
- MovedAttribute("URLopener", "urllib", "urllib.request"),
- MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
- MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
-]
-for attr in _urllib_request_moved_attributes:
- setattr(Module_six_moves_urllib_request, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
- "moves.urllib_request", "moves.urllib.request")
-
-
-class Module_six_moves_urllib_response(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_response"""
-
-
-_urllib_response_moved_attributes = [
- MovedAttribute("addbase", "urllib", "urllib.response"),
- MovedAttribute("addclosehook", "urllib", "urllib.response"),
- MovedAttribute("addinfo", "urllib", "urllib.response"),
- MovedAttribute("addinfourl", "urllib", "urllib.response"),
-]
-for attr in _urllib_response_moved_attributes:
- setattr(Module_six_moves_urllib_response, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
- "moves.urllib_response", "moves.urllib.response")
-
-
-class Module_six_moves_urllib_robotparser(_LazyModule):
-
- """Lazy loading of moved objects in six.moves.urllib_robotparser"""
-
-
-_urllib_robotparser_moved_attributes = [
- MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
-]
-for attr in _urllib_robotparser_moved_attributes:
- setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
- "moves.urllib_robotparser", "moves.urllib.robotparser")
-
-
-class Module_six_moves_urllib(types.ModuleType):
-
- """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
- __path__ = [] # mark as package
- parse = _importer._get_module("moves.urllib_parse")
- error = _importer._get_module("moves.urllib_error")
- request = _importer._get_module("moves.urllib_request")
- response = _importer._get_module("moves.urllib_response")
- robotparser = _importer._get_module("moves.urllib_robotparser")
-
- def __dir__(self):
- return ['parse', 'error', 'request', 'response', 'robotparser']
-
-_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
- "moves.urllib")
-
-
-def add_move(move):
- """Add an item to six.moves."""
- setattr(_MovedItems, move.name, move)
-
-
-def remove_move(name):
- """Remove item from six.moves."""
- try:
- delattr(_MovedItems, name)
- except AttributeError:
- try:
- del moves.__dict__[name]
- except KeyError:
- raise AttributeError("no such move, %r" % (name,))
-
-
-if PY3:
- _meth_func = "__func__"
- _meth_self = "__self__"
-
- _func_closure = "__closure__"
- _func_code = "__code__"
- _func_defaults = "__defaults__"
- _func_globals = "__globals__"
-else:
- _meth_func = "im_func"
- _meth_self = "im_self"
-
- _func_closure = "func_closure"
- _func_code = "func_code"
- _func_defaults = "func_defaults"
- _func_globals = "func_globals"
-
-
-try:
- advance_iterator = next
-except NameError:
- def advance_iterator(it):
- return it.next()
-next = advance_iterator
-
-
-try:
- callable = callable
-except NameError:
- def callable(obj):
- return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
-
-
-if PY3:
- def get_unbound_function(unbound):
- return unbound
-
- create_bound_method = types.MethodType
-
- def create_unbound_method(func, cls):
- return func
-
- Iterator = object
-else:
- def get_unbound_function(unbound):
- return unbound.im_func
-
- def create_bound_method(func, obj):
- return types.MethodType(func, obj, obj.__class__)
-
- def create_unbound_method(func, cls):
- return types.MethodType(func, None, cls)
-
- class Iterator(object):
-
- def next(self):
- return type(self).__next__(self)
-
- callable = callable
-_add_doc(get_unbound_function,
- """Get the function out of a possibly unbound function""")
-
-
-get_method_function = operator.attrgetter(_meth_func)
-get_method_self = operator.attrgetter(_meth_self)
-get_function_closure = operator.attrgetter(_func_closure)
-get_function_code = operator.attrgetter(_func_code)
-get_function_defaults = operator.attrgetter(_func_defaults)
-get_function_globals = operator.attrgetter(_func_globals)
-
-
-if PY3:
- def iterkeys(d, **kw):
- return iter(d.keys(**kw))
-
- def itervalues(d, **kw):
- return iter(d.values(**kw))
-
- def iteritems(d, **kw):
- return iter(d.items(**kw))
-
- def iterlists(d, **kw):
- return iter(d.lists(**kw))
-
- viewkeys = operator.methodcaller("keys")
-
- viewvalues = operator.methodcaller("values")
-
- viewitems = operator.methodcaller("items")
-else:
- def iterkeys(d, **kw):
- return d.iterkeys(**kw)
-
- def itervalues(d, **kw):
- return d.itervalues(**kw)
-
- def iteritems(d, **kw):
- return d.iteritems(**kw)
-
- def iterlists(d, **kw):
- return d.iterlists(**kw)
-
- viewkeys = operator.methodcaller("viewkeys")
-
- viewvalues = operator.methodcaller("viewvalues")
-
- viewitems = operator.methodcaller("viewitems")
-
-_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
-_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
-_add_doc(iteritems,
- "Return an iterator over the (key, value) pairs of a dictionary.")
-_add_doc(iterlists,
- "Return an iterator over the (key, [values]) pairs of a dictionary.")
-
-
-if PY3:
- def b(s):
- return s.encode("latin-1")
-
- def u(s):
- return s
- unichr = chr
- import struct
- int2byte = struct.Struct(">B").pack
- del struct
- byte2int = operator.itemgetter(0)
- indexbytes = operator.getitem
- iterbytes = iter
- import io
- StringIO = io.StringIO
- BytesIO = io.BytesIO
- _assertCountEqual = "assertCountEqual"
- if sys.version_info[1] <= 1:
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
- else:
- _assertRaisesRegex = "assertRaisesRegex"
- _assertRegex = "assertRegex"
-else:
- def b(s):
- return s
- # Workaround for standalone backslash
-
- def u(s):
- return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
- unichr = unichr
- int2byte = chr
-
- def byte2int(bs):
- return ord(bs[0])
-
- def indexbytes(buf, i):
- return ord(buf[i])
- iterbytes = functools.partial(itertools.imap, ord)
- import StringIO
- StringIO = BytesIO = StringIO.StringIO
- _assertCountEqual = "assertItemsEqual"
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
-_add_doc(b, """Byte literal""")
-_add_doc(u, """Text literal""")
-
-
-def assertCountEqual(self, *args, **kwargs):
- return getattr(self, _assertCountEqual)(*args, **kwargs)
-
-
-def assertRaisesRegex(self, *args, **kwargs):
- return getattr(self, _assertRaisesRegex)(*args, **kwargs)
-
-
-def assertRegex(self, *args, **kwargs):
- return getattr(self, _assertRegex)(*args, **kwargs)
-
-
-if PY3:
- exec_ = getattr(moves.builtins, "exec")
-
- def reraise(tp, value, tb=None):
- if value is None:
- value = tp()
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
-
-else:
- def exec_(_code_, _globs_=None, _locs_=None):
- """Execute code in a namespace."""
- if _globs_ is None:
- frame = sys._getframe(1)
- _globs_ = frame.f_globals
- if _locs_ is None:
- _locs_ = frame.f_locals
- del frame
- elif _locs_ is None:
- _locs_ = _globs_
- exec("""exec _code_ in _globs_, _locs_""")
-
- exec_("""def reraise(tp, value, tb=None):
- raise tp, value, tb
-""")
-
-
-if sys.version_info[:2] == (3, 2):
- exec_("""def raise_from(value, from_value):
- if from_value is None:
- raise value
- raise value from from_value
-""")
-elif sys.version_info[:2] > (3, 2):
- exec_("""def raise_from(value, from_value):
- raise value from from_value
-""")
-else:
- def raise_from(value, from_value):
- raise value
-
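[editor's note] reraise()/raise_from() papered over the Python 2/3 syntax differences for re-raising with a traceback and for exception chaining. A typical call pattern, assuming six is installed separately (setuptools itself no longer needs it after this change):

    import sys

    import six

    try:
        {}["missing"]
    except KeyError as exc:
        try:
            # Raise a different exception type but keep the original traceback.
            six.reraise(ValueError, ValueError(str(exc)), sys.exc_info()[2])
        except ValueError as converted:
            print("converted:", converted)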
-
-print_ = getattr(moves.builtins, "print", None)
-if print_ is None:
- def print_(*args, **kwargs):
- """The new-style print function for Python 2.4 and 2.5."""
- fp = kwargs.pop("file", sys.stdout)
- if fp is None:
- return
-
- def write(data):
- if not isinstance(data, basestring):
- data = str(data)
- # If the file has an encoding, encode unicode with it.
- if (isinstance(fp, file) and
- isinstance(data, unicode) and
- fp.encoding is not None):
- errors = getattr(fp, "errors", None)
- if errors is None:
- errors = "strict"
- data = data.encode(fp.encoding, errors)
- fp.write(data)
- want_unicode = False
- sep = kwargs.pop("sep", None)
- if sep is not None:
- if isinstance(sep, unicode):
- want_unicode = True
- elif not isinstance(sep, str):
- raise TypeError("sep must be None or a string")
- end = kwargs.pop("end", None)
- if end is not None:
- if isinstance(end, unicode):
- want_unicode = True
- elif not isinstance(end, str):
- raise TypeError("end must be None or a string")
- if kwargs:
- raise TypeError("invalid keyword arguments to print()")
- if not want_unicode:
- for arg in args:
- if isinstance(arg, unicode):
- want_unicode = True
- break
- if want_unicode:
- newline = unicode("\n")
- space = unicode(" ")
- else:
- newline = "\n"
- space = " "
- if sep is None:
- sep = space
- if end is None:
- end = newline
- for i, arg in enumerate(args):
- if i:
- write(sep)
- write(arg)
- write(end)
-if sys.version_info[:2] < (3, 3):
- _print = print_
-
- def print_(*args, **kwargs):
- fp = kwargs.get("file", sys.stdout)
- flush = kwargs.pop("flush", False)
- _print(*args, **kwargs)
- if flush and fp is not None:
- fp.flush()
-
-_add_doc(reraise, """Reraise an exception.""")
-
-if sys.version_info[0:2] < (3, 4):
- def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
- updated=functools.WRAPPER_UPDATES):
- def wrapper(f):
- f = functools.wraps(wrapped, assigned, updated)(f)
- f.__wrapped__ = wrapped
- return f
- return wrapper
-else:
- wraps = functools.wraps
-
-
-def with_metaclass(meta, *bases):
- """Create a base class with a metaclass."""
- # This requires a bit of explanation: the basic idea is to make a dummy
- # metaclass for one level of class instantiation that replaces itself with
- # the actual metaclass.
- class metaclass(meta):
-
- def __new__(cls, name, this_bases, d):
- return meta(name, bases, d)
- return type.__new__(metaclass, 'temporary_class', (), {})
-
-
-def add_metaclass(metaclass):
- """Class decorator for creating a class with a metaclass."""
- def wrapper(cls):
- orig_vars = cls.__dict__.copy()
- slots = orig_vars.get('__slots__')
- if slots is not None:
- if isinstance(slots, str):
- slots = [slots]
- for slots_var in slots:
- orig_vars.pop(slots_var)
- orig_vars.pop('__dict__', None)
- orig_vars.pop('__weakref__', None)
- return metaclass(cls.__name__, cls.__bases__, orig_vars)
- return wrapper
-
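[editor's note] with_metaclass() and add_metaclass() were the two portable ways to attach a metaclass under both Python 2 and 3 (the class-level `metaclass=` keyword only exists on Python 3). A usage sketch, assuming six is installed separately:

    import six


    class Meta(type):
        def __new__(mcls, name, bases, namespace):
            namespace.setdefault("tag", name.lower())
            return super(Meta, mcls).__new__(mcls, name, bases, namespace)


    class Base(six.with_metaclass(Meta, object)):   # via the temporary-class trick above
        pass


    @six.add_metaclass(Meta)                        # via the class decorator
    class Other(object):
        pass


    assert (Base.tag, Other.tag) == ("base", "other")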
-
-def python_2_unicode_compatible(klass):
- """
- A decorator that defines __unicode__ and __str__ methods under Python 2.
- Under Python 3 it does nothing.
-
- To support Python 2 and 3 with a single code base, define a __str__ method
- returning text and apply this decorator to the class.
- """
- if PY2:
- if '__str__' not in klass.__dict__:
- raise ValueError("@python_2_unicode_compatible cannot be applied "
- "to %s because it doesn't define __str__()." %
- klass.__name__)
- klass.__unicode__ = klass.__str__
- klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
- return klass
-
-
-# Complete the moves implementation.
-# This code is at the end of this module to speed up module loading.
-# Turn this module into a package.
-__path__ = [] # required for PEP 302 and PEP 451
-__package__ = __name__ # see PEP 366 @ReservedAssignment
-if globals().get("__spec__") is not None:
- __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
-# Remove other six meta path importers, since they cause problems. This can
-# happen if six is removed from sys.modules and then reloaded. (Setuptools does
-# this for some reason.)
-if sys.meta_path:
- for i, importer in enumerate(sys.meta_path):
- # Here's some real nastiness: Another "instance" of the six module might
- # be floating around. Therefore, we can't use isinstance() to check for
- # the six meta path importer, since the other six instance will have
- # inserted an importer with different class.
- if (type(importer).__name__ == "_SixMetaPathImporter" and
- importer.name == __name__):
- del sys.meta_path[i]
- break
- del i, importer
-# Finally, add the importer to the meta path import hook.
-sys.meta_path.append(_importer)
diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt
index 65183d9a..b1190436 100644
--- a/setuptools/_vendor/vendored.txt
+++ b/setuptools/_vendor/vendored.txt
@@ -1,4 +1,3 @@
-packaging==19.2
+packaging==20.4
pyparsing==2.2.1
-six==1.10.0
ordered-set==3.1.1