diff --git a/b5304f307f5bb0a9f410c86c9a82fe4756439313.patch b/b5304f307f5bb0a9f410c86c9a82fe4756439313.patch new file mode 100644 index 0000000000000000000000000000000000000000..b3b099528e7547e85dea3dce72ac95ef0b2d8b36 --- /dev/null +++ b/b5304f307f5bb0a9f410c86c9a82fe4756439313.patch @@ -0,0 +1,965 @@ +diff --git a/news/9011.bugfix.rst b/news/9011.bugfix.rst +new file mode 100644 +index 0000000000..4e299dd9b6 +--- /dev/null ++++ b/news/9011.bugfix.rst +@@ -0,0 +1 @@ ++New Resolver: Rework backtracking and state management, to avoid getting stuck in an infinite loop. +diff --git a/news/9077.feature.rst b/news/9077.feature.rst +new file mode 100644 +index 0000000000..f77dacd065 +--- /dev/null ++++ b/news/9077.feature.rst +@@ -0,0 +1 @@ ++Add support for :pep:`600`: Future 'manylinux' Platform Tags for Portable Linux Built Distributions. +diff --git a/news/9138.feature.rst b/news/9138.feature.rst +new file mode 100644 +index 0000000000..98009cdd1a +--- /dev/null ++++ b/news/9138.feature.rst +@@ -0,0 +1 @@ ++Add support for MacOS Big Sur compatibility tags. +diff --git a/news/packaging.vendor.rst b/news/packaging.vendor.rst +new file mode 100644 +index 0000000000..4e49a4639b +--- /dev/null ++++ b/news/packaging.vendor.rst +@@ -0,0 +1 @@ ++Upgrade packaging to 20.7 +diff --git a/news/resolvelib.vendor.rst b/news/resolvelib.vendor.rst +index 97e4f4a8e5..52d32af8be 100644 +--- a/news/resolvelib.vendor.rst ++++ b/news/resolvelib.vendor.rst +@@ -1 +1 @@ +-Upgrade resolvelib to 0.5.2 ++Upgrade resolvelib to 0.5.3 +diff --git a/noxfile.py b/noxfile.py +index c21abc2a38..29e3959e46 100644 +--- a/noxfile.py ++++ b/noxfile.py +@@ -168,6 +168,9 @@ def pinned_requirements(path): + + vendor_txt = Path("src/pip/_vendor/vendor.txt") + for name, old_version in pinned_requirements(vendor_txt): ++ if name == "setuptools": ++ continue ++ + # update requirements.txt + session.run("vendoring", "update", ".", name) + +diff --git a/src/pip/_vendor/packaging/__about__.py b/src/pip/_vendor/packaging/__about__.py +deleted file mode 100644 +index 4d998578d7..0000000000 +--- a/src/pip/_vendor/packaging/__about__.py ++++ /dev/null +@@ -1,27 +0,0 @@ +-# This file is dual licensed under the terms of the Apache License, Version +-# 2.0, and the BSD License. See the LICENSE file in the root of this repository +-# for complete details. +-from __future__ import absolute_import, division, print_function +- +-__all__ = [ +- "__title__", +- "__summary__", +- "__uri__", +- "__version__", +- "__author__", +- "__email__", +- "__license__", +- "__copyright__", +-] +- +-__title__ = "packaging" +-__summary__ = "Core utilities for Python packages" +-__uri__ = "https://github.com/pypa/packaging" +- +-__version__ = "20.4" +- +-__author__ = "Donald Stufft and individual contributors" +-__email__ = "donald@stufft.io" +- +-__license__ = "BSD-2-Clause or Apache-2.0" +-__copyright__ = "Copyright 2014-2019 %s" % __author__ +diff --git a/src/pip/_vendor/packaging/__init__.py b/src/pip/_vendor/packaging/__init__.py +index a0cf67df52..18fecb867a 100644 +--- a/src/pip/_vendor/packaging/__init__.py ++++ b/src/pip/_vendor/packaging/__init__.py +@@ -1,26 +1,5 @@ + # This file is dual licensed under the terms of the Apache License, Version + # 2.0, and the BSD License. See the LICENSE file in the root of this repository + # for complete details. 
+-from __future__ import absolute_import, division, print_function +- +-from .__about__ import ( +- __author__, +- __copyright__, +- __email__, +- __license__, +- __summary__, +- __title__, +- __uri__, +- __version__, +-) +- +-__all__ = [ +- "__title__", +- "__summary__", +- "__uri__", +- "__version__", +- "__author__", +- "__email__", +- "__license__", +- "__copyright__", +-] ++"""Core utilities for Python packages""" ++__version__ = "20.7" +diff --git a/src/pip/_vendor/packaging/requirements.py b/src/pip/_vendor/packaging/requirements.py +index 5e64101c43..f9d1c65991 100644 +--- a/src/pip/_vendor/packaging/requirements.py ++++ b/src/pip/_vendor/packaging/requirements.py +@@ -5,16 +5,22 @@ + + import string + import re ++import sys + + from pip._vendor.pyparsing import stringStart, stringEnd, originalTextFor, ParseException + from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine + from pip._vendor.pyparsing import Literal as L # noqa +-from pip._vendor.six.moves.urllib import parse as urlparse + + from ._typing import TYPE_CHECKING + from .markers import MARKER_EXPR, Marker + from .specifiers import LegacySpecifier, Specifier, SpecifierSet + ++if sys.version_info[0] >= 3: ++ from urllib import parse as urlparse # pragma: no cover ++else: # pragma: no cover ++ import urlparse ++ ++ + if TYPE_CHECKING: # pragma: no cover + from typing import List + +diff --git a/src/pip/_vendor/packaging/specifiers.py b/src/pip/_vendor/packaging/specifiers.py +index fe09bb1dbb..a42cbfef33 100644 +--- a/src/pip/_vendor/packaging/specifiers.py ++++ b/src/pip/_vendor/packaging/specifiers.py +@@ -7,6 +7,7 @@ + import functools + import itertools + import re ++import warnings + + from ._compat import string_types, with_metaclass + from ._typing import TYPE_CHECKING +@@ -14,17 +15,7 @@ + from .version import Version, LegacyVersion, parse + + if TYPE_CHECKING: # pragma: no cover +- from typing import ( +- List, +- Dict, +- Union, +- Iterable, +- Iterator, +- Optional, +- Callable, +- Tuple, +- FrozenSet, +- ) ++ from typing import List, Dict, Union, Iterable, Iterator, Optional, Callable, Tuple + + ParsedVersion = Union[Version, LegacyVersion] + UnparsedVersion = Union[Version, LegacyVersion, str] +@@ -285,6 +276,16 @@ class LegacySpecifier(_IndividualSpecifier): + ">": "greater_than", + } + ++ def __init__(self, spec="", prereleases=None): ++ # type: (str, Optional[bool]) -> None ++ super(LegacySpecifier, self).__init__(spec, prereleases) ++ ++ warnings.warn( ++ "Creating a LegacyVersion has been deprecated and will be " ++ "removed in the next major release", ++ DeprecationWarning, ++ ) ++ + def _coerce_version(self, version): + # type: (Union[ParsedVersion, str]) -> LegacyVersion + if not isinstance(version, LegacyVersion): +@@ -317,7 +318,7 @@ def _compare_greater_than(self, prospective, spec): + + + def _require_version_compare( +- fn # type: (Callable[[Specifier, ParsedVersion, str], bool]) ++ fn, # type: (Callable[[Specifier, ParsedVersion, str], bool]) + ): + # type: (...) 
-> Callable[[Specifier, ParsedVersion, str], bool] + @functools.wraps(fn) +@@ -750,7 +751,7 @@ def __len__(self): + return len(self._specs) + + def __iter__(self): +- # type: () -> Iterator[FrozenSet[_IndividualSpecifier]] ++ # type: () -> Iterator[_IndividualSpecifier] + return iter(self._specs) + + @property +diff --git a/src/pip/_vendor/packaging/tags.py b/src/pip/_vendor/packaging/tags.py +index 9064910b8b..842447d863 100644 +--- a/src/pip/_vendor/packaging/tags.py ++++ b/src/pip/_vendor/packaging/tags.py +@@ -13,6 +13,7 @@ + + EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] + del imp ++import collections + import logging + import os + import platform +@@ -57,6 +58,24 @@ + _32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 + + ++_LEGACY_MANYLINUX_MAP = { ++ # CentOS 7 w/ glibc 2.17 (PEP 599) ++ (2, 17): "manylinux2014", ++ # CentOS 6 w/ glibc 2.12 (PEP 571) ++ (2, 12): "manylinux2010", ++ # CentOS 5 w/ glibc 2.5 (PEP 513) ++ (2, 5): "manylinux1", ++} ++ ++# If glibc ever changes its major version, we need to know what the last ++# minor version was, so we can build the complete list of all versions. ++# For now, guess what the highest minor version might be, assume it will ++# be 50 for testing. Once this actually happens, update the dictionary ++# with the actual value. ++_LAST_GLIBC_MINOR = collections.defaultdict(lambda: 50) # type: Dict[int, int] ++glibcVersion = collections.namedtuple("Version", ["major", "minor"]) ++ ++ + class Tag(object): + """ + A representation of the tag triple for a wheel. +@@ -65,13 +84,19 @@ class Tag(object): + is also supported. + """ + +- __slots__ = ["_interpreter", "_abi", "_platform"] ++ __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] + + def __init__(self, interpreter, abi, platform): + # type: (str, str, str) -> None + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() ++ # The __hash__ of every single element in a Set[Tag] will be evaluated each time ++ # that a set calls its `.isdisjoint()` method, which may be called hundreds of ++ # times when scanning a page of links for packages with tags matching that ++ # Set[Tag]. Pre-computing the value here produces significant speedups for ++ # downstream consumers. ++ self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self): +@@ -101,7 +126,7 @@ def __eq__(self, other): + + def __hash__(self): + # type: () -> int +- return hash((self._interpreter, self._abi, self._platform)) ++ return self._hash + + def __str__(self): + # type: () -> str +@@ -382,7 +407,12 @@ def _mac_binary_formats(version, cpu_arch): + return [] + formats.extend(["fat32", "fat"]) + +- formats.append("universal") ++ if cpu_arch in {"arm64", "x86_64"}: ++ formats.append("universal2") ++ ++ if cpu_arch in {"x86_64", "i386", "ppc64", "ppc"}: ++ formats.append("universal") ++ + return formats + + +@@ -405,30 +435,73 @@ def mac_platforms(version=None, arch=None): + arch = _mac_arch(cpu_arch) + else: + arch = arch +- for minor_version in range(version[1], -1, -1): +- compat_version = version[0], minor_version +- binary_formats = _mac_binary_formats(compat_version, arch) +- for binary_format in binary_formats: +- yield "macosx_{major}_{minor}_{binary_format}".format( +- major=compat_version[0], +- minor=compat_version[1], +- binary_format=binary_format, +- ) + ++ if (10, 0) <= version and version < (11, 0): ++ # Prior to Mac OS 11, each yearly release of Mac OS bumped the ++ # "minor" version number. The major version was always 10.
++ for minor_version in range(version[1], -1, -1): ++ compat_version = 10, minor_version ++ binary_formats = _mac_binary_formats(compat_version, arch) ++ for binary_format in binary_formats: ++ yield "macosx_{major}_{minor}_{binary_format}".format( ++ major=10, minor=minor_version, binary_format=binary_format ++ ) ++ ++ if version >= (11, 0): ++ # Starting with Mac OS 11, each yearly release bumps the major version ++ # number. The minor versions are now the midyear updates. ++ for major_version in range(version[0], 10, -1): ++ compat_version = major_version, 0 ++ binary_formats = _mac_binary_formats(compat_version, arch) ++ for binary_format in binary_formats: ++ yield "macosx_{major}_{minor}_{binary_format}".format( ++ major=major_version, minor=0, binary_format=binary_format ++ ) + +-# From PEP 513. +-def _is_manylinux_compatible(name, glibc_version): +- # type: (str, GlibcVersion) -> bool ++ if version >= (11, 0) and arch == "x86_64": ++ # Mac OS 11 on x86_64 is compatible with binaries from previous releases. ++ # Arm64 support was introduced in 11.0, so no Arm binaries from previous ++ # releases exist. ++ for minor_version in range(16, 3, -1): ++ compat_version = 10, minor_version ++ binary_formats = _mac_binary_formats(compat_version, arch) ++ for binary_format in binary_formats: ++ yield "macosx_{major}_{minor}_{binary_format}".format( ++ major=compat_version[0], ++ minor=compat_version[1], ++ binary_format=binary_format, ++ ) ++ ++ ++# From PEP 513, PEP 600 ++def _is_manylinux_compatible(name, arch, glibc_version): ++ # type: (str, str, GlibcVersion) -> bool ++ sys_glibc = _get_glibc_version() ++ if sys_glibc < glibc_version: ++ return False + # Check for presence of _manylinux module. + try: + import _manylinux # noqa +- +- return bool(getattr(_manylinux, name + "_compatible")) +- except (ImportError, AttributeError): +- # Fall through to heuristic check below. ++ except ImportError: + pass +- +- return _have_compatible_glibc(*glibc_version) ++ else: ++ if hasattr(_manylinux, "manylinux_compatible"): ++ result = _manylinux.manylinux_compatible( ++ glibc_version[0], glibc_version[1], arch ++ ) ++ if result is not None: ++ return bool(result) ++ else: ++ if glibc_version == (2, 5): ++ if hasattr(_manylinux, "manylinux1_compatible"): ++ return bool(_manylinux.manylinux1_compatible) ++ if glibc_version == (2, 12): ++ if hasattr(_manylinux, "manylinux2010_compatible"): ++ return bool(_manylinux.manylinux2010_compatible) ++ if glibc_version == (2, 17): ++ if hasattr(_manylinux, "manylinux2014_compatible"): ++ return bool(_manylinux.manylinux2014_compatible) ++ return True + + + def _glibc_version_string(): +@@ -474,8 +547,20 @@ def _glibc_version_string_ctypes(): + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # +- # Note: typeshed is wrong here so we are ignoring this line. +- process_namespace = ctypes.CDLL(None) # type: ignore ++ # We must also handle the special case where the executable is not a ++ # dynamically linked executable. This can occur when using musl libc, ++ # for example. In this situation, dlopen() will error, leading to an ++ # OSError. Interestingly, at least in the case of musl, there is no ++ # errno set on the OSError. The single string argument used to construct ++ # OSError comes from libc itself and is therefore not portable to ++ # hard code here. In any case, failure to call dlopen() means we ++ # can proceed, so we bail on our attempt. 
++ try: ++ # Note: typeshed is wrong here so we are ignoring this line. ++ process_namespace = ctypes.CDLL(None) # type: ignore ++ except OSError: ++ return None ++ + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: +@@ -493,10 +578,9 @@ def _glibc_version_string_ctypes(): + return version_str + + +-# Separated out from have_compatible_glibc for easier unit testing. +-def _check_glibc_version(version_str, required_major, minimum_minor): +- # type: (str, int, int) -> bool +- # Parse string and check against requested version. ++def _parse_glibc_version(version_str): ++ # type: (str) -> Tuple[int, int] ++ # Parse glibc version. + # + # We use a regexp instead of str.split because we want to discard any + # random junk that might come after the minor version -- this might happen +@@ -509,19 +593,23 @@ def _check_glibc_version(version_str, required_major, minimum_minor): + " got: %s" % version_str, + RuntimeWarning, + ) +- return False +- return ( +- int(m.group("major")) == required_major +- and int(m.group("minor")) >= minimum_minor +- ) ++ return -1, -1 ++ return (int(m.group("major")), int(m.group("minor"))) + + +-def _have_compatible_glibc(required_major, minimum_minor): +- # type: (int, int) -> bool ++_glibc_version = [] # type: List[Tuple[int, int]] ++ ++ ++def _get_glibc_version(): ++ # type: () -> Tuple[int, int] ++ if _glibc_version: ++ return _glibc_version[0] + version_str = _glibc_version_string() + if version_str is None: +- return False +- return _check_glibc_version(version_str, required_major, minimum_minor) ++ _glibc_version.append((-1, -1)) ++ else: ++ _glibc_version.append(_parse_glibc_version(version_str)) ++ return _glibc_version[0] + + + # Python does not provide platform information at sufficient granularity to +@@ -639,7 +727,42 @@ def _have_compatible_manylinux_abi(arch): + return _is_linux_armhf() + if arch == "i686": + return _is_linux_i686() +- return True ++ return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} ++ ++ ++def _manylinux_tags(linux, arch): ++ # type: (str, str) -> Iterator[str] ++ # Oldest glibc to be supported regardless of architecture is (2, 17). ++ too_old_glibc2 = glibcVersion(2, 16) ++ if arch in {"x86_64", "i686"}: ++ # On x86/i686 also oldest glibc to be supported is (2, 5). ++ too_old_glibc2 = glibcVersion(2, 4) ++ current_glibc = glibcVersion(*_get_glibc_version()) ++ glibc_max_list = [current_glibc] ++ # We can assume compatibility across glibc major versions. ++ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 ++ # ++ # Build a list of maximum glibc versions so that we can ++ # output the canonical list of all glibc from current_glibc ++ # down to too_old_glibc2, including all intermediary versions. ++ for glibc_major in range(current_glibc.major - 1, 1, -1): ++ glibc_max_list.append(glibcVersion(glibc_major, _LAST_GLIBC_MINOR[glibc_major])) ++ for glibc_max in glibc_max_list: ++ if glibc_max.major == too_old_glibc2.major: ++ min_minor = too_old_glibc2.minor ++ else: ++ # For other glibc major versions oldest supported is (x, 0). ++ min_minor = -1 ++ for glibc_minor in range(glibc_max.minor, min_minor, -1): ++ glibc_version = (glibc_max.major, glibc_minor) ++ tag = "manylinux_{}_{}".format(*glibc_version) ++ if _is_manylinux_compatible(tag, arch, glibc_version): ++ yield linux.replace("linux", tag) ++ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. 
++ if glibc_version in _LEGACY_MANYLINUX_MAP: ++ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] ++ if _is_manylinux_compatible(legacy_tag, arch, glibc_version): ++ yield linux.replace("linux", legacy_tag) + + + def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): +@@ -650,28 +773,10 @@ def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv7l" +- manylinux_support = [] + _, arch = linux.split("_", 1) + if _have_compatible_manylinux_abi(arch): +- if arch in {"x86_64", "i686", "aarch64", "armv7l", "ppc64", "ppc64le", "s390x"}: +- manylinux_support.append( +- ("manylinux2014", (2, 17)) +- ) # CentOS 7 w/ glibc 2.17 (PEP 599) +- if arch in {"x86_64", "i686"}: +- manylinux_support.append( +- ("manylinux2010", (2, 12)) +- ) # CentOS 6 w/ glibc 2.12 (PEP 571) +- manylinux_support.append( +- ("manylinux1", (2, 5)) +- ) # CentOS 5 w/ glibc 2.5 (PEP 513) +- manylinux_support_iter = iter(manylinux_support) +- for name, glibc_version in manylinux_support_iter: +- if _is_manylinux_compatible(name, glibc_version): +- yield linux.replace("linux", name) +- break +- # Support for a later manylinux implies support for an earlier version. +- for name, _ in manylinux_support_iter: +- yield linux.replace("linux", name) ++ for tag in _manylinux_tags(linux, arch): ++ yield tag + yield linux + + +diff --git a/src/pip/_vendor/packaging/utils.py b/src/pip/_vendor/packaging/utils.py +index 19579c1a0f..92c7b00b77 100644 +--- a/src/pip/_vendor/packaging/utils.py ++++ b/src/pip/_vendor/packaging/utils.py +@@ -12,6 +12,8 @@ + from typing import NewType, Union + + NormalizedName = NewType("NormalizedName", str) ++else: ++ NormalizedName = str + + _canonicalize_regex = re.compile(r"[-_.]+") + +@@ -23,18 +25,18 @@ def canonicalize_name(name): + return cast("NormalizedName", value) + + +-def canonicalize_version(_version): +- # type: (str) -> Union[Version, str] ++def canonicalize_version(version): ++ # type: (Union[Version, str]) -> Union[Version, str] + """ + This is very similar to Version.__str__, but has one subtle difference + with the way it handles the release segment. + """ +- +- try: +- version = Version(_version) +- except InvalidVersion: +- # Legacy versions cannot be normalized +- return _version ++ if not isinstance(version, Version): ++ try: ++ version = Version(version) ++ except InvalidVersion: ++ # Legacy versions cannot be normalized ++ return version + + parts = [] + +diff --git a/src/pip/_vendor/packaging/version.py b/src/pip/_vendor/packaging/version.py +index 00371e86a8..517d91f248 100644 +--- a/src/pip/_vendor/packaging/version.py ++++ b/src/pip/_vendor/packaging/version.py +@@ -6,6 +6,7 @@ + import collections + import itertools + import re ++import warnings + + from ._structures import Infinity, NegativeInfinity + from ._typing import TYPE_CHECKING +@@ -71,36 +72,50 @@ def __hash__(self): + # type: () -> int + return hash(self._key) + ++ # Please keep the duplicated `isinstance` check ++ # in the six comparisons hereunder ++ # unless you find a way to avoid adding overhead function calls. 
+ def __lt__(self, other): + # type: (_BaseVersion) -> bool +- return self._compare(other, lambda s, o: s < o) ++ if not isinstance(other, _BaseVersion): ++ return NotImplemented ++ ++ return self._key < other._key + + def __le__(self, other): + # type: (_BaseVersion) -> bool +- return self._compare(other, lambda s, o: s <= o) ++ if not isinstance(other, _BaseVersion): ++ return NotImplemented ++ ++ return self._key <= other._key + + def __eq__(self, other): + # type: (object) -> bool +- return self._compare(other, lambda s, o: s == o) ++ if not isinstance(other, _BaseVersion): ++ return NotImplemented ++ ++ return self._key == other._key + + def __ge__(self, other): + # type: (_BaseVersion) -> bool +- return self._compare(other, lambda s, o: s >= o) ++ if not isinstance(other, _BaseVersion): ++ return NotImplemented ++ ++ return self._key >= other._key + + def __gt__(self, other): + # type: (_BaseVersion) -> bool +- return self._compare(other, lambda s, o: s > o) ++ if not isinstance(other, _BaseVersion): ++ return NotImplemented ++ ++ return self._key > other._key + + def __ne__(self, other): + # type: (object) -> bool +- return self._compare(other, lambda s, o: s != o) +- +- def _compare(self, other, method): +- # type: (object, VersionComparisonMethod) -> Union[bool, NotImplemented] + if not isinstance(other, _BaseVersion): + return NotImplemented + +- return method(self._key, other._key) ++ return self._key != other._key + + + class LegacyVersion(_BaseVersion): +@@ -109,6 +124,12 @@ def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + ++ warnings.warn( ++ "Creating a LegacyVersion has been deprecated and will be " ++ "removed in the next major release", ++ DeprecationWarning, ++ ) ++ + def __str__(self): + # type: () -> str + return self._version +diff --git a/src/pip/_vendor/resolvelib/__init__.py b/src/pip/_vendor/resolvelib/__init__.py +index 78ede4fd1a..5a400f23ed 100644 +--- a/src/pip/_vendor/resolvelib/__init__.py ++++ b/src/pip/_vendor/resolvelib/__init__.py +@@ -11,7 +11,7 @@ + "ResolutionTooDeep", + ] + +-__version__ = "0.5.2" ++__version__ = "0.5.3" + + + from .providers import AbstractProvider, AbstractResolver +diff --git a/src/pip/_vendor/resolvelib/resolvers.py b/src/pip/_vendor/resolvelib/resolvers.py +index 976608b177..acf0f8a6b4 100644 +--- a/src/pip/_vendor/resolvelib/resolvers.py ++++ b/src/pip/_vendor/resolvelib/resolvers.py +@@ -99,16 +99,15 @@ def merged_with(self, provider, requirement, parent): + raise RequirementsConflicted(criterion) + return criterion + +- def excluded_of(self, candidate): +- """Build a new instance from this, but excluding specified candidate. ++ def excluded_of(self, candidates): ++ """Build a new instance from this, but excluding specified candidates. + + Returns the new instance, or None if we still have no valid candidates. + """ +- cands = self.candidates.excluding(candidate) ++ cands = self.candidates.excluding(candidates) + if not cands: + return None +- incompats = list(self.incompatibilities) +- incompats.append(candidate) ++ incompats = self.incompatibilities + candidates + return type(self)(cands, list(self.information), incompats) + + +@@ -158,15 +157,11 @@ def _push_new_state(self): + This new state will be used to hold resolution results of the next + coming round. 
+ """ +- try: +- base = self._states[-1] +- except IndexError: +- state = State(mapping=collections.OrderedDict(), criteria={}) +- else: +- state = State( +- mapping=base.mapping.copy(), +- criteria=base.criteria.copy(), +- ) ++ base = self._states[-1] ++ state = State( ++ mapping=base.mapping.copy(), ++ criteria=base.criteria.copy(), ++ ) + self._states.append(state) + + def _merge_into_criterion(self, requirement, parent): +@@ -239,44 +234,77 @@ def _attempt_to_pin_criterion(self, name, criterion): + return causes + + def _backtrack(self): +- # Drop the current state, it's known not to work. +- del self._states[-1] +- +- # We need at least 2 states here: +- # (a) One to backtrack to. +- # (b) One to restore state (a) to its state prior to candidate-pinning, +- # so we can pin another one instead. ++ """Perform backtracking. ++ ++ When we enter here, the stack is like this:: ++ ++ [ state Z ] ++ [ state Y ] ++ [ state X ] ++ .... earlier states are irrelevant. ++ ++ 1. No pins worked for Z, so it does not have a pin. ++ 2. We want to reset state Y to unpinned, and pin another candidate. ++ 3. State X holds what state Y was before the pin, but does not ++ have the incompatibility information gathered in state Y. ++ ++ Each iteration of the loop will: ++ ++ 1. Discard Z. ++ 2. Discard Y but remember its incompatibility information gathered ++ previously, and the failure we're dealing with right now. ++ 3. Push a new state Y' based on X, and apply the incompatibility ++ information from Y to Y'. ++ 4a. If this causes Y' to conflict, we need to backtrack again. Make Y' ++ the new Z and go back to step 2. ++ 4b. If the incompatibilites apply cleanly, end backtracking. ++ """ ++ while len(self._states) >= 3: ++ # Remove the state that triggered backtracking. ++ del self._states[-1] ++ ++ # Retrieve the last candidate pin and known incompatibilities. ++ broken_state = self._states.pop() ++ name, candidate = broken_state.mapping.popitem() ++ incompatibilities_from_broken = [ ++ (k, v.incompatibilities) ++ for k, v in broken_state.criteria.items() ++ ] + +- while len(self._states) >= 2: +- # Retract the last candidate pin. +- prev_state = self._states.pop() +- try: +- name, candidate = prev_state.mapping.popitem() +- except KeyError: +- continue + self._r.backtracking(candidate) + +- # Create a new state to work on, with the newly known not-working +- # candidate excluded. ++ # Create a new state from the last known-to-work one, and apply ++ # the previously gathered incompatibility information. + self._push_new_state() ++ for k, incompatibilities in incompatibilities_from_broken: ++ try: ++ crit = self.state.criteria[k] ++ except KeyError: ++ continue ++ self.state.criteria[k] = crit.excluded_of(incompatibilities) + +- # Mark the retracted candidate as incompatible. +- criterion = self.state.criteria[name].excluded_of(candidate) +- if criterion is None: +- # This state still does not work. Try the still previous state. +- del self._states[-1] +- continue +- self.state.criteria[name] = criterion ++ # Mark the newly known incompatibility. ++ criterion = self.state.criteria[name].excluded_of([candidate]) + +- return True ++ # It works! Let's work on this new state. ++ if criterion: ++ self.state.criteria[name] = criterion ++ return True ++ ++ # State does not work after adding the new incompatibility ++ # information. Try the still previous state. + ++ # No way to backtrack anymore. 
+ return False + + def resolve(self, requirements, max_rounds): + if self._states: + raise RuntimeError("already resolved") + +- self._push_new_state() ++ self._r.starting() ++ ++ # Initialize the root state. ++ self._states = [State(mapping=collections.OrderedDict(), criteria={})] + for r in requirements: + try: + name, crit = self._merge_into_criterion(r, parent=None) +@@ -284,14 +312,14 @@ def resolve(self, requirements, max_rounds): + raise ResolutionImpossible(e.criterion.information) + self.state.criteria[name] = crit + +- self._r.starting() ++ # The root state is saved as a sentinel so the first ever pin can have ++ # something to backtrack to if it fails. The root state is basically ++ # pinning the virtual "root" package in the graph. ++ self._push_new_state() + + for round_index in range(max_rounds): + self._r.starting_round(round_index) + +- self._push_new_state() +- curr = self.state +- + unsatisfied_criterion_items = [ + item + for item in self.state.criteria.items() +@@ -300,8 +328,7 @@ def resolve(self, requirements, max_rounds): + + # All criteria are accounted for. Nothing more to pin, we are done! + if not unsatisfied_criterion_items: +- del self._states[-1] +- self._r.ending(curr) ++ self._r.ending(self.state) + return self.state + + # Choose the most preferred unpinned criterion to try. +@@ -311,16 +338,20 @@ def resolve(self, requirements, max_rounds): + ) + failure_causes = self._attempt_to_pin_criterion(name, criterion) + +- # Backtrack if pinning fails. + if failure_causes: +- result = self._backtrack() +- if not result: +- causes = [ +- i for crit in failure_causes for i in crit.information +- ] ++ # Backtrack if pinning fails. The backtrack process puts us in ++ # an unpinned state, so we can work on it in the next round. ++ success = self._backtrack() ++ ++ # Dead ends everywhere. Give up. ++ if not success: ++ causes = [i for c in failure_causes for i in c.information] + raise ResolutionImpossible(causes) ++ else: ++ # Pinning was successful. Push a new state to do another pin. 
++ self._push_new_state() + +- self._r.ending_round(round_index, curr) ++ self._r.ending_round(round_index, self.state) + + raise ResolutionTooDeep(max_rounds) + +diff --git a/src/pip/_vendor/resolvelib/structs.py b/src/pip/_vendor/resolvelib/structs.py +index 479aad5dc1..c4542f08a0 100644 +--- a/src/pip/_vendor/resolvelib/structs.py ++++ b/src/pip/_vendor/resolvelib/structs.py +@@ -79,6 +79,9 @@ class _FactoryIterableView(object): + def __init__(self, factory): + self._factory = factory + ++ def __repr__(self): ++ return "{}({})".format(type(self).__name__, list(self._factory())) ++ + def __bool__(self): + try: + next(self._factory()) +@@ -95,11 +98,11 @@ def for_preference(self): + """Provide an candidate iterable for `get_preference()`""" + return self._factory() + +- def excluding(self, candidate): +- """Create a new `Candidates` instance excluding `candidate`.""" ++ def excluding(self, candidates): ++ """Create a new instance excluding specified candidates.""" + + def factory(): +- return (c for c in self._factory() if c != candidate) ++ return (c for c in self._factory() if c not in candidates) + + return type(self)(factory) + +@@ -114,6 +117,9 @@ class _SequenceIterableView(object): + def __init__(self, sequence): + self._sequence = sequence + ++ def __repr__(self): ++ return "{}({})".format(type(self).__name__, self._sequence) ++ + def __bool__(self): + return bool(self._sequence) + +@@ -129,9 +135,9 @@ def for_preference(self): + """Provide an candidate iterable for `get_preference()`""" + return self._sequence + +- def excluding(self, candidate): +- """Create a new instance excluding `candidate`.""" +- return type(self)([c for c in self._sequence if c != candidate]) ++ def excluding(self, candidates): ++ """Create a new instance excluding specified candidates.""" ++ return type(self)([c for c in self._sequence if c not in candidates]) + + + def build_iter_view(matches): +diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt +index cc8157f169..c7bc37c16c 100644 +--- a/src/pip/_vendor/vendor.txt ++++ b/src/pip/_vendor/vendor.txt +@@ -7,7 +7,7 @@ distro==1.5.0 + html5lib==1.1 + ipaddress==1.0.23 # Only needed on 2.6 and 2.7 + msgpack==1.0.0 +-packaging==20.4 ++packaging==20.7 + pep517==0.9.1 + progress==1.5 + pyparsing==2.4.7 +@@ -16,7 +16,7 @@ requests==2.25.0 + chardet==3.0.4 + idna==2.10 + urllib3==1.26.2 +-resolvelib==0.5.2 ++resolvelib==0.5.3 + retrying==1.3.3 + setuptools==44.0.0 + six==1.15.0 +diff --git a/tests/unit/test_models_wheel.py b/tests/unit/test_models_wheel.py +index 05ee74262d..a4f954a2c7 100644 +--- a/tests/unit/test_models_wheel.py ++++ b/tests/unit/test_models_wheel.py +@@ -93,6 +93,14 @@ def test_not_supported_osx_version(self): + w = Wheel('simple-0.1-cp27-none-macosx_10_9_intel.whl') + assert not w.supported(tags=tags) + ++ @pytest.mark.xfail( ++ reason=( ++ "packaging.tags changed behaviour in this area, and @pradyunsg " ++ "decided as the release manager that this behaviour change is less " ++ "critical than Big Sur support for pip 20.3. See " ++ "https://github.com/pypa/packaging/pull/361 for further discussion." 
++ ) ++ ) + def test_supported_multiarch_darwin(self): + """ + Multi-arch wheels (intel) are supported on components (i386, x86_64) diff --git a/d91b7d9e88ea46d66d602c16fb7d8bedb1f8c656.patch b/d91b7d9e88ea46d66d602c16fb7d8bedb1f8c656.patch new file mode 100644 index 0000000000000000000000000000000000000000..d438a44f4ac5f1ea3823fd8b032082b27ee330bb --- /dev/null +++ b/d91b7d9e88ea46d66d602c16fb7d8bedb1f8c656.patch @@ -0,0 +1,22 @@ +diff --git a/docs/docs_feedback_sphinxext.py b/docs/docs_feedback_sphinxext.py +index 86eb3d61a7..15da417776 100644 +--- a/docs/docs_feedback_sphinxext.py ++++ b/docs/docs_feedback_sphinxext.py +@@ -111,7 +111,7 @@ def _modify_rst_document_source_on_read( + orphan_mark = ':orphan:' + is_orphan = orphan_mark in source[0] + if is_orphan: +- source[0].replace(orphan_mark, '') ++ source[0] = source[0].replace(orphan_mark, '') + else: + orphan_mark = '' + +diff --git a/news/9171.trivial.rst b/news/9171.trivial.rst +new file mode 100644 +index 0000000000..ca02166a2e +--- /dev/null ++++ b/news/9171.trivial.rst +@@ -0,0 +1,3 @@ ++Fixed moving the ``:orphan:`` to top of documents in the Sphinx ++extension for collecting the UX feedback from docs (initially ++introduced in PR #8848). diff --git a/python-pip.spec b/python-pip.spec index 3d035b59a33f24b5c58f6925acf4d9cb6a79f840..4381732fe7242a712573c47154e36a957eaac18c 100644 --- a/python-pip.spec +++ b/python-pip.spec @@ -6,7 +6,7 @@ pip is the package installer for Python. You can use pip to install packages fro %global bashcompdir %(b=$(pkg-config --variable=completionsdir bash-completion 2>/dev/null); echo ${b:-%{_sysconfdir}/bash_completion.d}) Name: python-%{srcname} Version: 20.2.2 -Release: 3 +Release: 4 Summary: A tool for installing and managing Python packages License: MIT and Python and ASL 2.0 and BSD and ISC and LGPLv2 and MPLv2.0 and (ASL 2.0 or BSD) URL: http://www.pip-installer.org @@ -16,6 +16,8 @@ Patch1: allow-stripping-given-prefix-from-wheel-RECORD-files.patch Patch2: emit-a-warning-when-running-with-root-privileges.patch Patch3: remove-existing-dist-only-if-path-conflicts.patch Patch6000: dummy-certifi.patch +Patch6001: d91b7d9e88ea46d66d602c16fb7d8bedb1f8c656.patch +Patch6002: b5304f307f5bb0a9f410c86c9a82fe4756439313.patch Source10: pip-allow-older-versions.patch %description %{_description} @@ -112,6 +114,9 @@ install -p dist/%{python_wheelname} -t %{buildroot}%{python_wheeldir} %{python_wheeldir}/%{python_wheelname} %changelog +* Mon Nov 30 2020 patch-tracking - 20.2.2-4 +- append patch files d91b7d9e88ea46d66d602c16fb7d8bedb1f8c656.patch and b5304f307f5bb0a9f410c86c9a82fe4756439313.patch from the upstream repository + * Wed Nov 4 2020 wangjie -20.2.2-3 - Type:NA - ID:NA @@ -161,4 +166,4 @@ - DESC: Synchronize a patch * Mon Sep 23 2019 openEuler Buildteam - 18.0-6 -- Package init +- Package init \ No newline at end of file
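
The user-visible features carried by b5304f307f5bb0a9f410c86c9a82fe4756439313.patch come from the packaging 20.7 upgrade: PEP 600 manylinux tags and macOS Big Sur support. A minimal sketch of how the new tag logic can be exercised once the patch is applied; mac_platforms is the public generator patched above, sys_tags is packaging's standard top-level helper, and the version/arch arguments are illustrative:

import pip._vendor.packaging.tags as tags  # a plain `packaging>=20.7` install behaves the same

# Big Sur on Intel: macosx_11_0_* tags come first, followed by the
# 10.x compatibility range the patch adds for x86_64 hosts.
for platform in tags.mac_platforms(version=(11, 0), arch="x86_64"):
    print(platform)

# Full interpreter/abi/platform triples for the running interpreter,
# including manylinux_X_Y (PEP 600) platforms on glibc-based Linux.
for tag in tags.sys_tags():
    print(tag)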
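
On Linux, the patched _manylinux_tags generator replaces the old fixed manylinux1/2010/2014 list: it counts down from the detected glibc version, yields a PEP 600 manylinux_X_Y tag for each glibc level, and emits the legacy alias immediately after the glibc level it maps to (2.17, 2.12, 2.5). A rough, self-contained illustration of that ordering for a hypothetical x86_64 host on glibc 2.31; the real generator also consults the _manylinux module and the runtime glibc before yielding each tag:

# Illustrative only: mirrors the tag ordering of the patched
# _manylinux_tags() for glibc major version 2 on x86_64, without the
# compatibility checks the real code performs.
LEGACY = {(2, 17): "manylinux2014", (2, 12): "manylinux2010", (2, 5): "manylinux1"}

def candidate_manylinux_tags(current_minor=31, oldest_minor=5):
    for minor in range(current_minor, oldest_minor - 1, -1):
        yield "manylinux_2_{}".format(minor)
        if (2, minor) in LEGACY:
            # Legacy aliases come right after their PEP 600 form.
            yield LEGACY[(2, minor)]

print(list(candidate_manylinux_tags()))
# ['manylinux_2_31', ..., 'manylinux_2_17', 'manylinux2014', ...,
#  'manylinux_2_12', 'manylinux2010', ..., 'manylinux_2_5', 'manylinux1']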
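
The resolvelib 0.5.3 rework is easiest to read as a stack discipline, per the docstring added to _backtrack: discard the failed state Z, pop the pinned state Y while keeping the incompatibilities it learned, and rebuild Y on top of X. A toy sketch of just that discipline; State, conflicts, and the sample candidates are hypothetical stand-ins, not resolvelib's real classes:

import collections

# Hypothetical stand-in: `mapping` is name -> pinned candidate,
# `incompatibilities` is name -> set of candidates known not to work.
State = collections.namedtuple("State", ["mapping", "incompatibilities"])

def backtrack(states, conflicts):
    """Mirror the loop in the patched _backtrack(); `conflicts(state)`
    stands in for excluded_of() returning None on an empty candidate set."""
    while len(states) >= 3:
        states.pop()                    # 1. Discard Z: no pin worked for it.
        broken = states.pop()           # 2. Discard Y, but keep what it learned.
        name, candidate = broken.mapping.popitem()
        learned = {k: set(v) for k, v in broken.incompatibilities.items()}
        learned.setdefault(name, set()).add(candidate)
        base = states[-1]               # 3. Rebuild from X plus Y's knowledge.
        states.append(State(dict(base.mapping), learned))
        if not conflicts(states[-1]):   # 4b. Applied cleanly: resume resolution.
            return True
        # 4a. The rebuilt state conflicts: treat it as the new Z and loop.
    return False                        # Nothing left to backtrack to.

# Usage: the root sentinel matches the patched resolve(), which keeps an
# initial state so the first ever pin has something to backtrack to.
root = State(collections.OrderedDict(), {})
pinned = State(collections.OrderedDict(a="a-1.0"), {})
failed = State(collections.OrderedDict(a="a-1.0"), {})
assert backtrack([root, pinned, failed], conflicts=lambda s: False)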