From 99c3c0988cdbb859a2d8cc6ef9c0d356bc1323c9 Mon Sep 17 00:00:00 2001 From: openeuler-ci-bot <80474298@qq.com> Date: Sat, 26 Dec 2020 18:59:00 +0800 Subject: [PATCH 01/10] [patch tracking] 20201226185849754648 - https://github.com/pypa/pip/commit/7a6b1a580529f737069342849255b165f69c4110 --- ...1a580529f737069342849255b165f69c4110.patch | 2566 +++++++++++++++++ 1 file changed, 2566 insertions(+) create mode 100644 7a6b1a580529f737069342849255b165f69c4110.patch diff --git a/7a6b1a580529f737069342849255b165f69c4110.patch b/7a6b1a580529f737069342849255b165f69c4110.patch new file mode 100644 index 0000000..d3cac03 --- /dev/null +++ b/7a6b1a580529f737069342849255b165f69c4110.patch @@ -0,0 +1,2566 @@ +diff --git a/news/54754cb1-2151-45c3-baa0-b87e50d7e56d.trivial.rst b/news/54754cb1-2151-45c3-baa0-b87e50d7e56d.trivial.rst +new file mode 100644 +index 0000000000..e69de29bb2 +diff --git a/src/pip/_internal/network/session.py b/src/pip/_internal/network/session.py +index 43ab1e18cd..6848516876 100644 +--- a/src/pip/_internal/network/session.py ++++ b/src/pip/_internal/network/session.py +@@ -6,6 +6,7 @@ + # mypy: disallow-untyped-defs=False + + import email.utils ++import ipaddress + import json + import logging + import mimetypes +@@ -27,7 +28,7 @@ + from pip._internal.network.cache import SafeFileCache + + # Import ssl from compat so the initial import occurs in only one place. +-from pip._internal.utils.compat import has_tls, ipaddress ++from pip._internal.utils.compat import has_tls + from pip._internal.utils.glibc import libc_ver + from pip._internal.utils.misc import ( + build_url_from_netloc, +diff --git a/src/pip/_internal/utils/compat.py b/src/pip/_internal/utils/compat.py +index 6eeb712ad6..dc351b804b 100644 +--- a/src/pip/_internal/utils/compat.py ++++ b/src/pip/_internal/utils/compat.py +@@ -15,21 +15,8 @@ + if MYPY_CHECK_RUNNING: + from typing import Optional, Union + +-try: +- import ipaddress +-except ImportError: +- try: +- from pip._vendor import ipaddress # type: ignore +- except ImportError: +- import ipaddr as ipaddress # type: ignore +- ipaddress.ip_address = ipaddress.IPAddress # type: ignore +- ipaddress.ip_network = ipaddress.IPNetwork # type: ignore +- + +-__all__ = [ +- "ipaddress", "console_to_str", +- "get_path_uid", "stdlib_pkgs", "WINDOWS", +-] ++__all__ = ["console_to_str", "get_path_uid", "stdlib_pkgs", "WINDOWS"] + + + logger = logging.getLogger(__name__) +diff --git a/src/pip/_vendor/ipaddress.LICENSE b/src/pip/_vendor/ipaddress.LICENSE +deleted file mode 100644 +index 41bd16ba6c..0000000000 +--- a/src/pip/_vendor/ipaddress.LICENSE ++++ /dev/null +@@ -1,50 +0,0 @@ +-This package is a modified version of cpython's ipaddress module. +-It is therefore distributed under the PSF license, as follows: +- +-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +--------------------------------------------- +- +-1. This LICENSE AGREEMENT is between the Python Software Foundation +-("PSF"), and the Individual or Organization ("Licensee") accessing and +-otherwise using this software ("Python") in source or binary form and +-its associated documentation. +- +-2. 
Subject to the terms and conditions of this License Agreement, PSF hereby +-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +-analyze, test, perform and/or display publicly, prepare derivative works, +-distribute, and otherwise use Python alone or in any derivative version, +-provided, however, that PSF's License Agreement and PSF's notice of copyright, +-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +-2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are +-retained in Python alone or in any derivative version prepared by Licensee. +- +-3. In the event Licensee prepares a derivative work that is based on +-or incorporates Python or any part thereof, and wants to make +-the derivative work available to others as provided herein, then +-Licensee hereby agrees to include in any such work a brief summary of +-the changes made to Python. +- +-4. PSF is making Python available to Licensee on an "AS IS" +-basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +-INFRINGE ANY THIRD PARTY RIGHTS. +- +-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. +- +-6. This License Agreement will automatically terminate upon a material +-breach of its terms and conditions. +- +-7. Nothing in this License Agreement shall be deemed to create any +-relationship of agency, partnership, or joint venture between PSF and +-Licensee. This License Agreement does not grant permission to use PSF +-trademarks or trade name in a trademark sense to endorse or promote +-products or services of Licensee, or any third party. +- +-8. By copying, installing or otherwise using Python, Licensee +-agrees to be bound by the terms and conditions of this License +-Agreement. +diff --git a/src/pip/_vendor/ipaddress.py b/src/pip/_vendor/ipaddress.py +deleted file mode 100644 +index 3e6f9e499c..0000000000 +--- a/src/pip/_vendor/ipaddress.py ++++ /dev/null +@@ -1,2420 +0,0 @@ +-# Copyright 2007 Google Inc. +-# Licensed to PSF under a Contributor Agreement. +- +-"""A fast, lightweight IPv4/IPv6 manipulation library in Python. +- +-This library is used to create/poke/manipulate IPv4 and IPv6 addresses +-and networks. 
+- +-""" +- +-from __future__ import unicode_literals +- +- +-import itertools +-import struct +- +-__version__ = '1.0.23' +- +-# Compatibility functions +-_compat_int_types = (int,) +-try: +- _compat_int_types = (int, long) +-except NameError: +- pass +-try: +- _compat_str = unicode +-except NameError: +- _compat_str = str +- assert bytes != str +-if b'\0'[0] == 0: # Python 3 semantics +- def _compat_bytes_to_byte_vals(byt): +- return byt +-else: +- def _compat_bytes_to_byte_vals(byt): +- return [struct.unpack(b'!B', b)[0] for b in byt] +-try: +- _compat_int_from_byte_vals = int.from_bytes +-except AttributeError: +- def _compat_int_from_byte_vals(bytvals, endianess): +- assert endianess == 'big' +- res = 0 +- for bv in bytvals: +- assert isinstance(bv, _compat_int_types) +- res = (res << 8) + bv +- return res +- +- +-def _compat_to_bytes(intval, length, endianess): +- assert isinstance(intval, _compat_int_types) +- assert endianess == 'big' +- if length == 4: +- if intval < 0 or intval >= 2 ** 32: +- raise struct.error("integer out of range for 'I' format code") +- return struct.pack(b'!I', intval) +- elif length == 16: +- if intval < 0 or intval >= 2 ** 128: +- raise struct.error("integer out of range for 'QQ' format code") +- return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff) +- else: +- raise NotImplementedError() +- +- +-if hasattr(int, 'bit_length'): +- # Not int.bit_length , since that won't work in 2.7 where long exists +- def _compat_bit_length(i): +- return i.bit_length() +-else: +- def _compat_bit_length(i): +- for res in itertools.count(): +- if i >> res == 0: +- return res +- +- +-def _compat_range(start, end, step=1): +- assert step > 0 +- i = start +- while i < end: +- yield i +- i += step +- +- +-class _TotalOrderingMixin(object): +- __slots__ = () +- +- # Helper that derives the other comparison operations from +- # __lt__ and __eq__ +- # We avoid functools.total_ordering because it doesn't handle +- # NotImplemented correctly yet (http://bugs.python.org/issue10042) +- def __eq__(self, other): +- raise NotImplementedError +- +- def __ne__(self, other): +- equal = self.__eq__(other) +- if equal is NotImplemented: +- return NotImplemented +- return not equal +- +- def __lt__(self, other): +- raise NotImplementedError +- +- def __le__(self, other): +- less = self.__lt__(other) +- if less is NotImplemented or not less: +- return self.__eq__(other) +- return less +- +- def __gt__(self, other): +- less = self.__lt__(other) +- if less is NotImplemented: +- return NotImplemented +- equal = self.__eq__(other) +- if equal is NotImplemented: +- return NotImplemented +- return not (less or equal) +- +- def __ge__(self, other): +- less = self.__lt__(other) +- if less is NotImplemented: +- return NotImplemented +- return not less +- +- +-IPV4LENGTH = 32 +-IPV6LENGTH = 128 +- +- +-class AddressValueError(ValueError): +- """A Value Error related to the address.""" +- +- +-class NetmaskValueError(ValueError): +- """A Value Error related to the netmask.""" +- +- +-def ip_address(address): +- """Take an IP string/int and return an object of the correct type. +- +- Args: +- address: A string or integer, the IP address. Either IPv4 or +- IPv6 addresses may be supplied; integers less than 2**32 will +- be considered to be IPv4 by default. +- +- Returns: +- An IPv4Address or IPv6Address object. 
+- +- Raises: +- ValueError: if the *address* passed isn't either a v4 or a v6 +- address +- +- """ +- try: +- return IPv4Address(address) +- except (AddressValueError, NetmaskValueError): +- pass +- +- try: +- return IPv6Address(address) +- except (AddressValueError, NetmaskValueError): +- pass +- +- if isinstance(address, bytes): +- raise AddressValueError( +- '%r does not appear to be an IPv4 or IPv6 address. ' +- 'Did you pass in a bytes (str in Python 2) instead of' +- ' a unicode object?' % address) +- +- raise ValueError('%r does not appear to be an IPv4 or IPv6 address' % +- address) +- +- +-def ip_network(address, strict=True): +- """Take an IP string/int and return an object of the correct type. +- +- Args: +- address: A string or integer, the IP network. Either IPv4 or +- IPv6 networks may be supplied; integers less than 2**32 will +- be considered to be IPv4 by default. +- +- Returns: +- An IPv4Network or IPv6Network object. +- +- Raises: +- ValueError: if the string passed isn't either a v4 or a v6 +- address. Or if the network has host bits set. +- +- """ +- try: +- return IPv4Network(address, strict) +- except (AddressValueError, NetmaskValueError): +- pass +- +- try: +- return IPv6Network(address, strict) +- except (AddressValueError, NetmaskValueError): +- pass +- +- if isinstance(address, bytes): +- raise AddressValueError( +- '%r does not appear to be an IPv4 or IPv6 network. ' +- 'Did you pass in a bytes (str in Python 2) instead of' +- ' a unicode object?' % address) +- +- raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % +- address) +- +- +-def ip_interface(address): +- """Take an IP string/int and return an object of the correct type. +- +- Args: +- address: A string or integer, the IP address. Either IPv4 or +- IPv6 addresses may be supplied; integers less than 2**32 will +- be considered to be IPv4 by default. +- +- Returns: +- An IPv4Interface or IPv6Interface object. +- +- Raises: +- ValueError: if the string passed isn't either a v4 or a v6 +- address. +- +- Notes: +- The IPv?Interface classes describe an Address on a particular +- Network, so they're basically a combination of both the Address +- and Network classes. +- +- """ +- try: +- return IPv4Interface(address) +- except (AddressValueError, NetmaskValueError): +- pass +- +- try: +- return IPv6Interface(address) +- except (AddressValueError, NetmaskValueError): +- pass +- +- raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' % +- address) +- +- +-def v4_int_to_packed(address): +- """Represent an address as 4 packed bytes in network (big-endian) order. +- +- Args: +- address: An integer representation of an IPv4 IP address. +- +- Returns: +- The integer address packed as 4 bytes in network (big-endian) order. +- +- Raises: +- ValueError: If the integer is negative or too large to be an +- IPv4 IP address. +- +- """ +- try: +- return _compat_to_bytes(address, 4, 'big') +- except (struct.error, OverflowError): +- raise ValueError("Address negative or too large for IPv4") +- +- +-def v6_int_to_packed(address): +- """Represent an address as 16 packed bytes in network (big-endian) order. +- +- Args: +- address: An integer representation of an IPv6 IP address. +- +- Returns: +- The integer address packed as 16 bytes in network (big-endian) order. 
+- +- """ +- try: +- return _compat_to_bytes(address, 16, 'big') +- except (struct.error, OverflowError): +- raise ValueError("Address negative or too large for IPv6") +- +- +-def _split_optional_netmask(address): +- """Helper to split the netmask and raise AddressValueError if needed""" +- addr = _compat_str(address).split('/') +- if len(addr) > 2: +- raise AddressValueError("Only one '/' permitted in %r" % address) +- return addr +- +- +-def _find_address_range(addresses): +- """Find a sequence of sorted deduplicated IPv#Address. +- +- Args: +- addresses: a list of IPv#Address objects. +- +- Yields: +- A tuple containing the first and last IP addresses in the sequence. +- +- """ +- it = iter(addresses) +- first = last = next(it) +- for ip in it: +- if ip._ip != last._ip + 1: +- yield first, last +- first = ip +- last = ip +- yield first, last +- +- +-def _count_righthand_zero_bits(number, bits): +- """Count the number of zero bits on the right hand side. +- +- Args: +- number: an integer. +- bits: maximum number of bits to count. +- +- Returns: +- The number of zero bits on the right hand side of the number. +- +- """ +- if number == 0: +- return bits +- return min(bits, _compat_bit_length(~number & (number - 1))) +- +- +-def summarize_address_range(first, last): +- """Summarize a network range given the first and last IP addresses. +- +- Example: +- >>> list(summarize_address_range(IPv4Address('192.0.2.0'), +- ... IPv4Address('192.0.2.130'))) +- ... #doctest: +NORMALIZE_WHITESPACE +- [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), +- IPv4Network('192.0.2.130/32')] +- +- Args: +- first: the first IPv4Address or IPv6Address in the range. +- last: the last IPv4Address or IPv6Address in the range. +- +- Returns: +- An iterator of the summarized IPv(4|6) network objects. +- +- Raise: +- TypeError: +- If the first and last objects are not IP addresses. +- If the first and last objects are not the same version. +- ValueError: +- If the last object is not greater than the first. +- If the version of the first address is not 4 or 6. +- +- """ +- if (not (isinstance(first, _BaseAddress) and +- isinstance(last, _BaseAddress))): +- raise TypeError('first and last must be IP addresses, not networks') +- if first.version != last.version: +- raise TypeError("%s and %s are not of the same version" % ( +- first, last)) +- if first > last: +- raise ValueError('last IP address must be greater than first') +- +- if first.version == 4: +- ip = IPv4Network +- elif first.version == 6: +- ip = IPv6Network +- else: +- raise ValueError('unknown IP version') +- +- ip_bits = first._max_prefixlen +- first_int = first._ip +- last_int = last._ip +- while first_int <= last_int: +- nbits = min(_count_righthand_zero_bits(first_int, ip_bits), +- _compat_bit_length(last_int - first_int + 1) - 1) +- net = ip((first_int, ip_bits - nbits)) +- yield net +- first_int += 1 << nbits +- if first_int - 1 == ip._ALL_ONES: +- break +- +- +-def _collapse_addresses_internal(addresses): +- """Loops through the addresses, collapsing concurrent netblocks. +- +- Example: +- +- ip1 = IPv4Network('192.0.2.0/26') +- ip2 = IPv4Network('192.0.2.64/26') +- ip3 = IPv4Network('192.0.2.128/26') +- ip4 = IPv4Network('192.0.2.192/26') +- +- _collapse_addresses_internal([ip1, ip2, ip3, ip4]) -> +- [IPv4Network('192.0.2.0/24')] +- +- This shouldn't be called directly; it is called via +- collapse_addresses([]). 
+- +- Args: +- addresses: A list of IPv4Network's or IPv6Network's +- +- Returns: +- A list of IPv4Network's or IPv6Network's depending on what we were +- passed. +- +- """ +- # First merge +- to_merge = list(addresses) +- subnets = {} +- while to_merge: +- net = to_merge.pop() +- supernet = net.supernet() +- existing = subnets.get(supernet) +- if existing is None: +- subnets[supernet] = net +- elif existing != net: +- # Merge consecutive subnets +- del subnets[supernet] +- to_merge.append(supernet) +- # Then iterate over resulting networks, skipping subsumed subnets +- last = None +- for net in sorted(subnets.values()): +- if last is not None: +- # Since they are sorted, +- # last.network_address <= net.network_address is a given. +- if last.broadcast_address >= net.broadcast_address: +- continue +- yield net +- last = net +- +- +-def collapse_addresses(addresses): +- """Collapse a list of IP objects. +- +- Example: +- collapse_addresses([IPv4Network('192.0.2.0/25'), +- IPv4Network('192.0.2.128/25')]) -> +- [IPv4Network('192.0.2.0/24')] +- +- Args: +- addresses: An iterator of IPv4Network or IPv6Network objects. +- +- Returns: +- An iterator of the collapsed IPv(4|6)Network objects. +- +- Raises: +- TypeError: If passed a list of mixed version objects. +- +- """ +- addrs = [] +- ips = [] +- nets = [] +- +- # split IP addresses and networks +- for ip in addresses: +- if isinstance(ip, _BaseAddress): +- if ips and ips[-1]._version != ip._version: +- raise TypeError("%s and %s are not of the same version" % ( +- ip, ips[-1])) +- ips.append(ip) +- elif ip._prefixlen == ip._max_prefixlen: +- if ips and ips[-1]._version != ip._version: +- raise TypeError("%s and %s are not of the same version" % ( +- ip, ips[-1])) +- try: +- ips.append(ip.ip) +- except AttributeError: +- ips.append(ip.network_address) +- else: +- if nets and nets[-1]._version != ip._version: +- raise TypeError("%s and %s are not of the same version" % ( +- ip, nets[-1])) +- nets.append(ip) +- +- # sort and dedup +- ips = sorted(set(ips)) +- +- # find consecutive address ranges in the sorted sequence and summarize them +- if ips: +- for first, last in _find_address_range(ips): +- addrs.extend(summarize_address_range(first, last)) +- +- return _collapse_addresses_internal(addrs + nets) +- +- +-def get_mixed_type_key(obj): +- """Return a key suitable for sorting between networks and addresses. +- +- Address and Network objects are not sortable by default; they're +- fundamentally different so the expression +- +- IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24') +- +- doesn't make any sense. There are some times however, where you may wish +- to have ipaddress sort these for you anyway. If you need to do this, you +- can use this function as the key= argument to sorted(). +- +- Args: +- obj: either a Network or Address object. +- Returns: +- appropriate key. 
+- +- """ +- if isinstance(obj, _BaseNetwork): +- return obj._get_networks_key() +- elif isinstance(obj, _BaseAddress): +- return obj._get_address_key() +- return NotImplemented +- +- +-class _IPAddressBase(_TotalOrderingMixin): +- +- """The mother class.""" +- +- __slots__ = () +- +- @property +- def exploded(self): +- """Return the longhand version of the IP address as a string.""" +- return self._explode_shorthand_ip_string() +- +- @property +- def compressed(self): +- """Return the shorthand version of the IP address as a string.""" +- return _compat_str(self) +- +- @property +- def reverse_pointer(self): +- """The name of the reverse DNS pointer for the IP address, e.g.: +- >>> ipaddress.ip_address("127.0.0.1").reverse_pointer +- '1.0.0.127.in-addr.arpa' +- >>> ipaddress.ip_address("2001:db8::1").reverse_pointer +- '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa' +- +- """ +- return self._reverse_pointer() +- +- @property +- def version(self): +- msg = '%200s has no version specified' % (type(self),) +- raise NotImplementedError(msg) +- +- def _check_int_address(self, address): +- if address < 0: +- msg = "%d (< 0) is not permitted as an IPv%d address" +- raise AddressValueError(msg % (address, self._version)) +- if address > self._ALL_ONES: +- msg = "%d (>= 2**%d) is not permitted as an IPv%d address" +- raise AddressValueError(msg % (address, self._max_prefixlen, +- self._version)) +- +- def _check_packed_address(self, address, expected_len): +- address_len = len(address) +- if address_len != expected_len: +- msg = ( +- '%r (len %d != %d) is not permitted as an IPv%d address. ' +- 'Did you pass in a bytes (str in Python 2) instead of' +- ' a unicode object?') +- raise AddressValueError(msg % (address, address_len, +- expected_len, self._version)) +- +- @classmethod +- def _ip_int_from_prefix(cls, prefixlen): +- """Turn the prefix length into a bitwise netmask +- +- Args: +- prefixlen: An integer, the prefix length. +- +- Returns: +- An integer. +- +- """ +- return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen) +- +- @classmethod +- def _prefix_from_ip_int(cls, ip_int): +- """Return prefix length from the bitwise netmask. +- +- Args: +- ip_int: An integer, the netmask in expanded bitwise format +- +- Returns: +- An integer, the prefix length. +- +- Raises: +- ValueError: If the input intermingles zeroes & ones +- """ +- trailing_zeroes = _count_righthand_zero_bits(ip_int, +- cls._max_prefixlen) +- prefixlen = cls._max_prefixlen - trailing_zeroes +- leading_ones = ip_int >> trailing_zeroes +- all_ones = (1 << prefixlen) - 1 +- if leading_ones != all_ones: +- byteslen = cls._max_prefixlen // 8 +- details = _compat_to_bytes(ip_int, byteslen, 'big') +- msg = 'Netmask pattern %r mixes zeroes & ones' +- raise ValueError(msg % details) +- return prefixlen +- +- @classmethod +- def _report_invalid_netmask(cls, netmask_str): +- msg = '%r is not a valid netmask' % netmask_str +- raise NetmaskValueError(msg) +- +- @classmethod +- def _prefix_from_prefix_string(cls, prefixlen_str): +- """Return prefix length from a numeric string +- +- Args: +- prefixlen_str: The string to be converted +- +- Returns: +- An integer, the prefix length. 
+- +- Raises: +- NetmaskValueError: If the input is not a valid netmask +- """ +- # int allows a leading +/- as well as surrounding whitespace, +- # so we ensure that isn't the case +- if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str): +- cls._report_invalid_netmask(prefixlen_str) +- try: +- prefixlen = int(prefixlen_str) +- except ValueError: +- cls._report_invalid_netmask(prefixlen_str) +- if not (0 <= prefixlen <= cls._max_prefixlen): +- cls._report_invalid_netmask(prefixlen_str) +- return prefixlen +- +- @classmethod +- def _prefix_from_ip_string(cls, ip_str): +- """Turn a netmask/hostmask string into a prefix length +- +- Args: +- ip_str: The netmask/hostmask to be converted +- +- Returns: +- An integer, the prefix length. +- +- Raises: +- NetmaskValueError: If the input is not a valid netmask/hostmask +- """ +- # Parse the netmask/hostmask like an IP address. +- try: +- ip_int = cls._ip_int_from_string(ip_str) +- except AddressValueError: +- cls._report_invalid_netmask(ip_str) +- +- # Try matching a netmask (this would be /1*0*/ as a bitwise regexp). +- # Note that the two ambiguous cases (all-ones and all-zeroes) are +- # treated as netmasks. +- try: +- return cls._prefix_from_ip_int(ip_int) +- except ValueError: +- pass +- +- # Invert the bits, and try matching a /0+1+/ hostmask instead. +- ip_int ^= cls._ALL_ONES +- try: +- return cls._prefix_from_ip_int(ip_int) +- except ValueError: +- cls._report_invalid_netmask(ip_str) +- +- def __reduce__(self): +- return self.__class__, (_compat_str(self),) +- +- +-class _BaseAddress(_IPAddressBase): +- +- """A generic IP object. +- +- This IP class contains the version independent methods which are +- used by single IP addresses. +- """ +- +- __slots__ = () +- +- def __int__(self): +- return self._ip +- +- def __eq__(self, other): +- try: +- return (self._ip == other._ip and +- self._version == other._version) +- except AttributeError: +- return NotImplemented +- +- def __lt__(self, other): +- if not isinstance(other, _IPAddressBase): +- return NotImplemented +- if not isinstance(other, _BaseAddress): +- raise TypeError('%s and %s are not of the same type' % ( +- self, other)) +- if self._version != other._version: +- raise TypeError('%s and %s are not of the same version' % ( +- self, other)) +- if self._ip != other._ip: +- return self._ip < other._ip +- return False +- +- # Shorthand for Integer addition and subtraction. This is not +- # meant to ever support addition/subtraction of addresses. +- def __add__(self, other): +- if not isinstance(other, _compat_int_types): +- return NotImplemented +- return self.__class__(int(self) + other) +- +- def __sub__(self, other): +- if not isinstance(other, _compat_int_types): +- return NotImplemented +- return self.__class__(int(self) - other) +- +- def __repr__(self): +- return '%s(%r)' % (self.__class__.__name__, _compat_str(self)) +- +- def __str__(self): +- return _compat_str(self._string_from_ip_int(self._ip)) +- +- def __hash__(self): +- return hash(hex(int(self._ip))) +- +- def _get_address_key(self): +- return (self._version, self) +- +- def __reduce__(self): +- return self.__class__, (self._ip,) +- +- +-class _BaseNetwork(_IPAddressBase): +- +- """A generic IP network object. +- +- This IP class contains the version independent methods which are +- used by networks. 
+- +- """ +- def __init__(self, address): +- self._cache = {} +- +- def __repr__(self): +- return '%s(%r)' % (self.__class__.__name__, _compat_str(self)) +- +- def __str__(self): +- return '%s/%d' % (self.network_address, self.prefixlen) +- +- def hosts(self): +- """Generate Iterator over usable hosts in a network. +- +- This is like __iter__ except it doesn't return the network +- or broadcast addresses. +- +- """ +- network = int(self.network_address) +- broadcast = int(self.broadcast_address) +- for x in _compat_range(network + 1, broadcast): +- yield self._address_class(x) +- +- def __iter__(self): +- network = int(self.network_address) +- broadcast = int(self.broadcast_address) +- for x in _compat_range(network, broadcast + 1): +- yield self._address_class(x) +- +- def __getitem__(self, n): +- network = int(self.network_address) +- broadcast = int(self.broadcast_address) +- if n >= 0: +- if network + n > broadcast: +- raise IndexError('address out of range') +- return self._address_class(network + n) +- else: +- n += 1 +- if broadcast + n < network: +- raise IndexError('address out of range') +- return self._address_class(broadcast + n) +- +- def __lt__(self, other): +- if not isinstance(other, _IPAddressBase): +- return NotImplemented +- if not isinstance(other, _BaseNetwork): +- raise TypeError('%s and %s are not of the same type' % ( +- self, other)) +- if self._version != other._version: +- raise TypeError('%s and %s are not of the same version' % ( +- self, other)) +- if self.network_address != other.network_address: +- return self.network_address < other.network_address +- if self.netmask != other.netmask: +- return self.netmask < other.netmask +- return False +- +- def __eq__(self, other): +- try: +- return (self._version == other._version and +- self.network_address == other.network_address and +- int(self.netmask) == int(other.netmask)) +- except AttributeError: +- return NotImplemented +- +- def __hash__(self): +- return hash(int(self.network_address) ^ int(self.netmask)) +- +- def __contains__(self, other): +- # always false if one is v4 and the other is v6. +- if self._version != other._version: +- return False +- # dealing with another network. 
+- if isinstance(other, _BaseNetwork): +- return False +- # dealing with another address +- else: +- # address +- return (int(self.network_address) <= int(other._ip) <= +- int(self.broadcast_address)) +- +- def overlaps(self, other): +- """Tell if self is partly contained in other.""" +- return self.network_address in other or ( +- self.broadcast_address in other or ( +- other.network_address in self or ( +- other.broadcast_address in self))) +- +- @property +- def broadcast_address(self): +- x = self._cache.get('broadcast_address') +- if x is None: +- x = self._address_class(int(self.network_address) | +- int(self.hostmask)) +- self._cache['broadcast_address'] = x +- return x +- +- @property +- def hostmask(self): +- x = self._cache.get('hostmask') +- if x is None: +- x = self._address_class(int(self.netmask) ^ self._ALL_ONES) +- self._cache['hostmask'] = x +- return x +- +- @property +- def with_prefixlen(self): +- return '%s/%d' % (self.network_address, self._prefixlen) +- +- @property +- def with_netmask(self): +- return '%s/%s' % (self.network_address, self.netmask) +- +- @property +- def with_hostmask(self): +- return '%s/%s' % (self.network_address, self.hostmask) +- +- @property +- def num_addresses(self): +- """Number of hosts in the current subnet.""" +- return int(self.broadcast_address) - int(self.network_address) + 1 +- +- @property +- def _address_class(self): +- # Returning bare address objects (rather than interfaces) allows for +- # more consistent behaviour across the network address, broadcast +- # address and individual host addresses. +- msg = '%200s has no associated address class' % (type(self),) +- raise NotImplementedError(msg) +- +- @property +- def prefixlen(self): +- return self._prefixlen +- +- def address_exclude(self, other): +- """Remove an address from a larger block. +- +- For example: +- +- addr1 = ip_network('192.0.2.0/28') +- addr2 = ip_network('192.0.2.1/32') +- list(addr1.address_exclude(addr2)) = +- [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'), +- IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')] +- +- or IPv6: +- +- addr1 = ip_network('2001:db8::1/32') +- addr2 = ip_network('2001:db8::1/128') +- list(addr1.address_exclude(addr2)) = +- [ip_network('2001:db8::1/128'), +- ip_network('2001:db8::2/127'), +- ip_network('2001:db8::4/126'), +- ip_network('2001:db8::8/125'), +- ... +- ip_network('2001:db8:8000::/33')] +- +- Args: +- other: An IPv4Network or IPv6Network object of the same type. +- +- Returns: +- An iterator of the IPv(4|6)Network objects which is self +- minus other. +- +- Raises: +- TypeError: If self and other are of differing address +- versions, or if other is not a network object. +- ValueError: If other is not completely contained by self. +- +- """ +- if not self._version == other._version: +- raise TypeError("%s and %s are not of the same version" % ( +- self, other)) +- +- if not isinstance(other, _BaseNetwork): +- raise TypeError("%s is not a network object" % other) +- +- if not other.subnet_of(self): +- raise ValueError('%s not contained in %s' % (other, self)) +- if other == self: +- return +- +- # Make sure we're comparing the network of other. +- other = other.__class__('%s/%s' % (other.network_address, +- other.prefixlen)) +- +- s1, s2 = self.subnets() +- while s1 != other and s2 != other: +- if other.subnet_of(s1): +- yield s2 +- s1, s2 = s1.subnets() +- elif other.subnet_of(s2): +- yield s1 +- s1, s2 = s2.subnets() +- else: +- # If we got here, there's a bug somewhere. 
+- raise AssertionError('Error performing exclusion: ' +- 's1: %s s2: %s other: %s' % +- (s1, s2, other)) +- if s1 == other: +- yield s2 +- elif s2 == other: +- yield s1 +- else: +- # If we got here, there's a bug somewhere. +- raise AssertionError('Error performing exclusion: ' +- 's1: %s s2: %s other: %s' % +- (s1, s2, other)) +- +- def compare_networks(self, other): +- """Compare two IP objects. +- +- This is only concerned about the comparison of the integer +- representation of the network addresses. This means that the +- host bits aren't considered at all in this method. If you want +- to compare host bits, you can easily enough do a +- 'HostA._ip < HostB._ip' +- +- Args: +- other: An IP object. +- +- Returns: +- If the IP versions of self and other are the same, returns: +- +- -1 if self < other: +- eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25') +- IPv6Network('2001:db8::1000/124') < +- IPv6Network('2001:db8::2000/124') +- 0 if self == other +- eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24') +- IPv6Network('2001:db8::1000/124') == +- IPv6Network('2001:db8::1000/124') +- 1 if self > other +- eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25') +- IPv6Network('2001:db8::2000/124') > +- IPv6Network('2001:db8::1000/124') +- +- Raises: +- TypeError if the IP versions are different. +- +- """ +- # does this need to raise a ValueError? +- if self._version != other._version: +- raise TypeError('%s and %s are not of the same type' % ( +- self, other)) +- # self._version == other._version below here: +- if self.network_address < other.network_address: +- return -1 +- if self.network_address > other.network_address: +- return 1 +- # self.network_address == other.network_address below here: +- if self.netmask < other.netmask: +- return -1 +- if self.netmask > other.netmask: +- return 1 +- return 0 +- +- def _get_networks_key(self): +- """Network-only key function. +- +- Returns an object that identifies this address' network and +- netmask. This function is a suitable "key" argument for sorted() +- and list.sort(). +- +- """ +- return (self._version, self.network_address, self.netmask) +- +- def subnets(self, prefixlen_diff=1, new_prefix=None): +- """The subnets which join to make the current subnet. +- +- In the case that self contains only one IP +- (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 +- for IPv6), yield an iterator with just ourself. +- +- Args: +- prefixlen_diff: An integer, the amount the prefix length +- should be increased by. This should not be set if +- new_prefix is also set. +- new_prefix: The desired new prefix length. This must be a +- larger number (smaller prefix) than the existing prefix. +- This should not be set if prefixlen_diff is also set. +- +- Returns: +- An iterator of IPv(4|6) objects. +- +- Raises: +- ValueError: The prefixlen_diff is too small or too large. 
+- OR +- prefixlen_diff and new_prefix are both set or new_prefix +- is a smaller number than the current prefix (smaller +- number means a larger network) +- +- """ +- if self._prefixlen == self._max_prefixlen: +- yield self +- return +- +- if new_prefix is not None: +- if new_prefix < self._prefixlen: +- raise ValueError('new prefix must be longer') +- if prefixlen_diff != 1: +- raise ValueError('cannot set prefixlen_diff and new_prefix') +- prefixlen_diff = new_prefix - self._prefixlen +- +- if prefixlen_diff < 0: +- raise ValueError('prefix length diff must be > 0') +- new_prefixlen = self._prefixlen + prefixlen_diff +- +- if new_prefixlen > self._max_prefixlen: +- raise ValueError( +- 'prefix length diff %d is invalid for netblock %s' % ( +- new_prefixlen, self)) +- +- start = int(self.network_address) +- end = int(self.broadcast_address) + 1 +- step = (int(self.hostmask) + 1) >> prefixlen_diff +- for new_addr in _compat_range(start, end, step): +- current = self.__class__((new_addr, new_prefixlen)) +- yield current +- +- def supernet(self, prefixlen_diff=1, new_prefix=None): +- """The supernet containing the current network. +- +- Args: +- prefixlen_diff: An integer, the amount the prefix length of +- the network should be decreased by. For example, given a +- /24 network and a prefixlen_diff of 3, a supernet with a +- /21 netmask is returned. +- +- Returns: +- An IPv4 network object. +- +- Raises: +- ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have +- a negative prefix length. +- OR +- If prefixlen_diff and new_prefix are both set or new_prefix is a +- larger number than the current prefix (larger number means a +- smaller network) +- +- """ +- if self._prefixlen == 0: +- return self +- +- if new_prefix is not None: +- if new_prefix > self._prefixlen: +- raise ValueError('new prefix must be shorter') +- if prefixlen_diff != 1: +- raise ValueError('cannot set prefixlen_diff and new_prefix') +- prefixlen_diff = self._prefixlen - new_prefix +- +- new_prefixlen = self.prefixlen - prefixlen_diff +- if new_prefixlen < 0: +- raise ValueError( +- 'current prefixlen is %d, cannot have a prefixlen_diff of %d' % +- (self.prefixlen, prefixlen_diff)) +- return self.__class__(( +- int(self.network_address) & (int(self.netmask) << prefixlen_diff), +- new_prefixlen)) +- +- @property +- def is_multicast(self): +- """Test if the address is reserved for multicast use. +- +- Returns: +- A boolean, True if the address is a multicast address. +- See RFC 2373 2.7 for details. +- +- """ +- return (self.network_address.is_multicast and +- self.broadcast_address.is_multicast) +- +- @staticmethod +- def _is_subnet_of(a, b): +- try: +- # Always false if one is v4 and the other is v6. +- if a._version != b._version: +- raise TypeError( +- "%s and %s are not of the same version" % (a, b)) +- return (b.network_address <= a.network_address and +- b.broadcast_address >= a.broadcast_address) +- except AttributeError: +- raise TypeError("Unable to test subnet containment " +- "between %s and %s" % (a, b)) +- +- def subnet_of(self, other): +- """Return True if this network is a subnet of other.""" +- return self._is_subnet_of(self, other) +- +- def supernet_of(self, other): +- """Return True if this network is a supernet of other.""" +- return self._is_subnet_of(other, self) +- +- @property +- def is_reserved(self): +- """Test if the address is otherwise IETF reserved. +- +- Returns: +- A boolean, True if the address is within one of the +- reserved IPv6 Network ranges. 
+- +- """ +- return (self.network_address.is_reserved and +- self.broadcast_address.is_reserved) +- +- @property +- def is_link_local(self): +- """Test if the address is reserved for link-local. +- +- Returns: +- A boolean, True if the address is reserved per RFC 4291. +- +- """ +- return (self.network_address.is_link_local and +- self.broadcast_address.is_link_local) +- +- @property +- def is_private(self): +- """Test if this address is allocated for private networks. +- +- Returns: +- A boolean, True if the address is reserved per +- iana-ipv4-special-registry or iana-ipv6-special-registry. +- +- """ +- return (self.network_address.is_private and +- self.broadcast_address.is_private) +- +- @property +- def is_global(self): +- """Test if this address is allocated for public networks. +- +- Returns: +- A boolean, True if the address is not reserved per +- iana-ipv4-special-registry or iana-ipv6-special-registry. +- +- """ +- return not self.is_private +- +- @property +- def is_unspecified(self): +- """Test if the address is unspecified. +- +- Returns: +- A boolean, True if this is the unspecified address as defined in +- RFC 2373 2.5.2. +- +- """ +- return (self.network_address.is_unspecified and +- self.broadcast_address.is_unspecified) +- +- @property +- def is_loopback(self): +- """Test if the address is a loopback address. +- +- Returns: +- A boolean, True if the address is a loopback address as defined in +- RFC 2373 2.5.3. +- +- """ +- return (self.network_address.is_loopback and +- self.broadcast_address.is_loopback) +- +- +-class _BaseV4(object): +- +- """Base IPv4 object. +- +- The following methods are used by IPv4 objects in both single IP +- addresses and networks. +- +- """ +- +- __slots__ = () +- _version = 4 +- # Equivalent to 255.255.255.255 or 32 bits of 1's. +- _ALL_ONES = (2 ** IPV4LENGTH) - 1 +- _DECIMAL_DIGITS = frozenset('0123456789') +- +- # the valid octets for host and netmasks. only useful for IPv4. +- _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0]) +- +- _max_prefixlen = IPV4LENGTH +- # There are only a handful of valid v4 netmasks, so we cache them all +- # when constructed (see _make_netmask()). +- _netmask_cache = {} +- +- def _explode_shorthand_ip_string(self): +- return _compat_str(self) +- +- @classmethod +- def _make_netmask(cls, arg): +- """Make a (netmask, prefix_len) tuple from the given argument. +- +- Argument can be: +- - an integer (the prefix length) +- - a string representing the prefix length (e.g. "24") +- - a string representing the prefix netmask (e.g. "255.255.255.0") +- """ +- if arg not in cls._netmask_cache: +- if isinstance(arg, _compat_int_types): +- prefixlen = arg +- else: +- try: +- # Check for a netmask in prefix length form +- prefixlen = cls._prefix_from_prefix_string(arg) +- except NetmaskValueError: +- # Check for a netmask or hostmask in dotted-quad form. +- # This may raise NetmaskValueError. +- prefixlen = cls._prefix_from_ip_string(arg) +- netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen)) +- cls._netmask_cache[arg] = netmask, prefixlen +- return cls._netmask_cache[arg] +- +- @classmethod +- def _ip_int_from_string(cls, ip_str): +- """Turn the given IP string into an integer for comparison. +- +- Args: +- ip_str: A string, the IP ip_str. +- +- Returns: +- The IP ip_str as an integer. +- +- Raises: +- AddressValueError: if ip_str isn't a valid IPv4 Address. 
+- +- """ +- if not ip_str: +- raise AddressValueError('Address cannot be empty') +- +- octets = ip_str.split('.') +- if len(octets) != 4: +- raise AddressValueError("Expected 4 octets in %r" % ip_str) +- +- try: +- return _compat_int_from_byte_vals( +- map(cls._parse_octet, octets), 'big') +- except ValueError as exc: +- raise AddressValueError("%s in %r" % (exc, ip_str)) +- +- @classmethod +- def _parse_octet(cls, octet_str): +- """Convert a decimal octet into an integer. +- +- Args: +- octet_str: A string, the number to parse. +- +- Returns: +- The octet as an integer. +- +- Raises: +- ValueError: if the octet isn't strictly a decimal from [0..255]. +- +- """ +- if not octet_str: +- raise ValueError("Empty octet not permitted") +- # Whitelist the characters, since int() allows a lot of bizarre stuff. +- if not cls._DECIMAL_DIGITS.issuperset(octet_str): +- msg = "Only decimal digits permitted in %r" +- raise ValueError(msg % octet_str) +- # We do the length check second, since the invalid character error +- # is likely to be more informative for the user +- if len(octet_str) > 3: +- msg = "At most 3 characters permitted in %r" +- raise ValueError(msg % octet_str) +- # Convert to integer (we know digits are legal) +- octet_int = int(octet_str, 10) +- # Any octets that look like they *might* be written in octal, +- # and which don't look exactly the same in both octal and +- # decimal are rejected as ambiguous +- if octet_int > 7 and octet_str[0] == '0': +- msg = "Ambiguous (octal/decimal) value in %r not permitted" +- raise ValueError(msg % octet_str) +- if octet_int > 255: +- raise ValueError("Octet %d (> 255) not permitted" % octet_int) +- return octet_int +- +- @classmethod +- def _string_from_ip_int(cls, ip_int): +- """Turns a 32-bit integer into dotted decimal notation. +- +- Args: +- ip_int: An integer, the IP address. +- +- Returns: +- The IP address as a string in dotted decimal notation. +- +- """ +- return '.'.join(_compat_str(struct.unpack(b'!B', b)[0] +- if isinstance(b, bytes) +- else b) +- for b in _compat_to_bytes(ip_int, 4, 'big')) +- +- def _is_hostmask(self, ip_str): +- """Test if the IP string is a hostmask (rather than a netmask). +- +- Args: +- ip_str: A string, the potential hostmask. +- +- Returns: +- A boolean, True if the IP string is a hostmask. +- +- """ +- bits = ip_str.split('.') +- try: +- parts = [x for x in map(int, bits) if x in self._valid_mask_octets] +- except ValueError: +- return False +- if len(parts) != len(bits): +- return False +- if parts[0] < parts[-1]: +- return True +- return False +- +- def _reverse_pointer(self): +- """Return the reverse DNS pointer name for the IPv4 address. +- +- This implements the method described in RFC1035 3.5. +- +- """ +- reverse_octets = _compat_str(self).split('.')[::-1] +- return '.'.join(reverse_octets) + '.in-addr.arpa' +- +- @property +- def max_prefixlen(self): +- return self._max_prefixlen +- +- @property +- def version(self): +- return self._version +- +- +-class IPv4Address(_BaseV4, _BaseAddress): +- +- """Represent and manipulate single IPv4 Addresses.""" +- +- __slots__ = ('_ip', '__weakref__') +- +- def __init__(self, address): +- +- """ +- Args: +- address: A string or integer representing the IP +- +- Additionally, an integer can be passed, so +- IPv4Address('192.0.2.1') == IPv4Address(3221225985). +- or, more generally +- IPv4Address(int(IPv4Address('192.0.2.1'))) == +- IPv4Address('192.0.2.1') +- +- Raises: +- AddressValueError: If ipaddress isn't a valid IPv4 address. 
+- +- """ +- # Efficient constructor from integer. +- if isinstance(address, _compat_int_types): +- self._check_int_address(address) +- self._ip = address +- return +- +- # Constructing from a packed address +- if isinstance(address, bytes): +- self._check_packed_address(address, 4) +- bvs = _compat_bytes_to_byte_vals(address) +- self._ip = _compat_int_from_byte_vals(bvs, 'big') +- return +- +- # Assume input argument to be string or any object representation +- # which converts into a formatted IP string. +- addr_str = _compat_str(address) +- if '/' in addr_str: +- raise AddressValueError("Unexpected '/' in %r" % address) +- self._ip = self._ip_int_from_string(addr_str) +- +- @property +- def packed(self): +- """The binary representation of this address.""" +- return v4_int_to_packed(self._ip) +- +- @property +- def is_reserved(self): +- """Test if the address is otherwise IETF reserved. +- +- Returns: +- A boolean, True if the address is within the +- reserved IPv4 Network range. +- +- """ +- return self in self._constants._reserved_network +- +- @property +- def is_private(self): +- """Test if this address is allocated for private networks. +- +- Returns: +- A boolean, True if the address is reserved per +- iana-ipv4-special-registry. +- +- """ +- return any(self in net for net in self._constants._private_networks) +- +- @property +- def is_global(self): +- return ( +- self not in self._constants._public_network and +- not self.is_private) +- +- @property +- def is_multicast(self): +- """Test if the address is reserved for multicast use. +- +- Returns: +- A boolean, True if the address is multicast. +- See RFC 3171 for details. +- +- """ +- return self in self._constants._multicast_network +- +- @property +- def is_unspecified(self): +- """Test if the address is unspecified. +- +- Returns: +- A boolean, True if this is the unspecified address as defined in +- RFC 5735 3. +- +- """ +- return self == self._constants._unspecified_address +- +- @property +- def is_loopback(self): +- """Test if the address is a loopback address. +- +- Returns: +- A boolean, True if the address is a loopback per RFC 3330. +- +- """ +- return self in self._constants._loopback_network +- +- @property +- def is_link_local(self): +- """Test if the address is reserved for link-local. +- +- Returns: +- A boolean, True if the address is link-local per RFC 3927. 
+- +- """ +- return self in self._constants._linklocal_network +- +- +-class IPv4Interface(IPv4Address): +- +- def __init__(self, address): +- if isinstance(address, (bytes, _compat_int_types)): +- IPv4Address.__init__(self, address) +- self.network = IPv4Network(self._ip) +- self._prefixlen = self._max_prefixlen +- return +- +- if isinstance(address, tuple): +- IPv4Address.__init__(self, address[0]) +- if len(address) > 1: +- self._prefixlen = int(address[1]) +- else: +- self._prefixlen = self._max_prefixlen +- +- self.network = IPv4Network(address, strict=False) +- self.netmask = self.network.netmask +- self.hostmask = self.network.hostmask +- return +- +- addr = _split_optional_netmask(address) +- IPv4Address.__init__(self, addr[0]) +- +- self.network = IPv4Network(address, strict=False) +- self._prefixlen = self.network._prefixlen +- +- self.netmask = self.network.netmask +- self.hostmask = self.network.hostmask +- +- def __str__(self): +- return '%s/%d' % (self._string_from_ip_int(self._ip), +- self.network.prefixlen) +- +- def __eq__(self, other): +- address_equal = IPv4Address.__eq__(self, other) +- if not address_equal or address_equal is NotImplemented: +- return address_equal +- try: +- return self.network == other.network +- except AttributeError: +- # An interface with an associated network is NOT the +- # same as an unassociated address. That's why the hash +- # takes the extra info into account. +- return False +- +- def __lt__(self, other): +- address_less = IPv4Address.__lt__(self, other) +- if address_less is NotImplemented: +- return NotImplemented +- try: +- return (self.network < other.network or +- self.network == other.network and address_less) +- except AttributeError: +- # We *do* allow addresses and interfaces to be sorted. The +- # unassociated address is considered less than all interfaces. +- return False +- +- def __hash__(self): +- return self._ip ^ self._prefixlen ^ int(self.network.network_address) +- +- __reduce__ = _IPAddressBase.__reduce__ +- +- @property +- def ip(self): +- return IPv4Address(self._ip) +- +- @property +- def with_prefixlen(self): +- return '%s/%s' % (self._string_from_ip_int(self._ip), +- self._prefixlen) +- +- @property +- def with_netmask(self): +- return '%s/%s' % (self._string_from_ip_int(self._ip), +- self.netmask) +- +- @property +- def with_hostmask(self): +- return '%s/%s' % (self._string_from_ip_int(self._ip), +- self.hostmask) +- +- +-class IPv4Network(_BaseV4, _BaseNetwork): +- +- """This class represents and manipulates 32-bit IPv4 network + addresses.. +- +- Attributes: [examples for IPv4Network('192.0.2.0/27')] +- .network_address: IPv4Address('192.0.2.0') +- .hostmask: IPv4Address('0.0.0.31') +- .broadcast_address: IPv4Address('192.0.2.32') +- .netmask: IPv4Address('255.255.255.224') +- .prefixlen: 27 +- +- """ +- # Class to use when creating address objects +- _address_class = IPv4Address +- +- def __init__(self, address, strict=True): +- +- """Instantiate a new IPv4 network object. +- +- Args: +- address: A string or integer representing the IP [& network]. +- '192.0.2.0/24' +- '192.0.2.0/255.255.255.0' +- '192.0.0.2/0.0.0.255' +- are all functionally the same in IPv4. Similarly, +- '192.0.2.1' +- '192.0.2.1/255.255.255.255' +- '192.0.2.1/32' +- are also functionally equivalent. That is to say, failing to +- provide a subnetmask will create an object with a mask of /32. 
+- +- If the mask (portion after the / in the argument) is given in +- dotted quad form, it is treated as a netmask if it starts with a +- non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it +- starts with a zero field (e.g. 0.255.255.255 == /8), with the +- single exception of an all-zero mask which is treated as a +- netmask == /0. If no mask is given, a default of /32 is used. +- +- Additionally, an integer can be passed, so +- IPv4Network('192.0.2.1') == IPv4Network(3221225985) +- or, more generally +- IPv4Interface(int(IPv4Interface('192.0.2.1'))) == +- IPv4Interface('192.0.2.1') +- +- Raises: +- AddressValueError: If ipaddress isn't a valid IPv4 address. +- NetmaskValueError: If the netmask isn't valid for +- an IPv4 address. +- ValueError: If strict is True and a network address is not +- supplied. +- +- """ +- _BaseNetwork.__init__(self, address) +- +- # Constructing from a packed address or integer +- if isinstance(address, (_compat_int_types, bytes)): +- self.network_address = IPv4Address(address) +- self.netmask, self._prefixlen = self._make_netmask( +- self._max_prefixlen) +- # fixme: address/network test here. +- return +- +- if isinstance(address, tuple): +- if len(address) > 1: +- arg = address[1] +- else: +- # We weren't given an address[1] +- arg = self._max_prefixlen +- self.network_address = IPv4Address(address[0]) +- self.netmask, self._prefixlen = self._make_netmask(arg) +- packed = int(self.network_address) +- if packed & int(self.netmask) != packed: +- if strict: +- raise ValueError('%s has host bits set' % self) +- else: +- self.network_address = IPv4Address(packed & +- int(self.netmask)) +- return +- +- # Assume input argument to be string or any object representation +- # which converts into a formatted IP prefix string. +- addr = _split_optional_netmask(address) +- self.network_address = IPv4Address(self._ip_int_from_string(addr[0])) +- +- if len(addr) == 2: +- arg = addr[1] +- else: +- arg = self._max_prefixlen +- self.netmask, self._prefixlen = self._make_netmask(arg) +- +- if strict: +- if (IPv4Address(int(self.network_address) & int(self.netmask)) != +- self.network_address): +- raise ValueError('%s has host bits set' % self) +- self.network_address = IPv4Address(int(self.network_address) & +- int(self.netmask)) +- +- if self._prefixlen == (self._max_prefixlen - 1): +- self.hosts = self.__iter__ +- +- @property +- def is_global(self): +- """Test if this address is allocated for public networks. +- +- Returns: +- A boolean, True if the address is not reserved per +- iana-ipv4-special-registry. 
+- +- """ +- return (not (self.network_address in IPv4Network('100.64.0.0/10') and +- self.broadcast_address in IPv4Network('100.64.0.0/10')) and +- not self.is_private) +- +- +-class _IPv4Constants(object): +- +- _linklocal_network = IPv4Network('169.254.0.0/16') +- +- _loopback_network = IPv4Network('127.0.0.0/8') +- +- _multicast_network = IPv4Network('224.0.0.0/4') +- +- _public_network = IPv4Network('100.64.0.0/10') +- +- _private_networks = [ +- IPv4Network('0.0.0.0/8'), +- IPv4Network('10.0.0.0/8'), +- IPv4Network('127.0.0.0/8'), +- IPv4Network('169.254.0.0/16'), +- IPv4Network('172.16.0.0/12'), +- IPv4Network('192.0.0.0/29'), +- IPv4Network('192.0.0.170/31'), +- IPv4Network('192.0.2.0/24'), +- IPv4Network('192.168.0.0/16'), +- IPv4Network('198.18.0.0/15'), +- IPv4Network('198.51.100.0/24'), +- IPv4Network('203.0.113.0/24'), +- IPv4Network('240.0.0.0/4'), +- IPv4Network('255.255.255.255/32'), +- ] +- +- _reserved_network = IPv4Network('240.0.0.0/4') +- +- _unspecified_address = IPv4Address('0.0.0.0') +- +- +-IPv4Address._constants = _IPv4Constants +- +- +-class _BaseV6(object): +- +- """Base IPv6 object. +- +- The following methods are used by IPv6 objects in both single IP +- addresses and networks. +- +- """ +- +- __slots__ = () +- _version = 6 +- _ALL_ONES = (2 ** IPV6LENGTH) - 1 +- _HEXTET_COUNT = 8 +- _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef') +- _max_prefixlen = IPV6LENGTH +- +- # There are only a bunch of valid v6 netmasks, so we cache them all +- # when constructed (see _make_netmask()). +- _netmask_cache = {} +- +- @classmethod +- def _make_netmask(cls, arg): +- """Make a (netmask, prefix_len) tuple from the given argument. +- +- Argument can be: +- - an integer (the prefix length) +- - a string representing the prefix length (e.g. "24") +- - a string representing the prefix netmask (e.g. "255.255.255.0") +- """ +- if arg not in cls._netmask_cache: +- if isinstance(arg, _compat_int_types): +- prefixlen = arg +- else: +- prefixlen = cls._prefix_from_prefix_string(arg) +- netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen)) +- cls._netmask_cache[arg] = netmask, prefixlen +- return cls._netmask_cache[arg] +- +- @classmethod +- def _ip_int_from_string(cls, ip_str): +- """Turn an IPv6 ip_str into an integer. +- +- Args: +- ip_str: A string, the IPv6 ip_str. +- +- Returns: +- An int, the IPv6 address +- +- Raises: +- AddressValueError: if ip_str isn't a valid IPv6 Address. +- +- """ +- if not ip_str: +- raise AddressValueError('Address cannot be empty') +- +- parts = ip_str.split(':') +- +- # An IPv6 address needs at least 2 colons (3 parts). +- _min_parts = 3 +- if len(parts) < _min_parts: +- msg = "At least %d parts expected in %r" % (_min_parts, ip_str) +- raise AddressValueError(msg) +- +- # If the address has an IPv4-style suffix, convert it to hexadecimal. +- if '.' in parts[-1]: +- try: +- ipv4_int = IPv4Address(parts.pop())._ip +- except AddressValueError as exc: +- raise AddressValueError("%s in %r" % (exc, ip_str)) +- parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF)) +- parts.append('%x' % (ipv4_int & 0xFFFF)) +- +- # An IPv6 address can't have more than 8 colons (9 parts). +- # The extra colon comes from using the "::" notation for a single +- # leading or trailing zero part. +- _max_parts = cls._HEXTET_COUNT + 1 +- if len(parts) > _max_parts: +- msg = "At most %d colons permitted in %r" % ( +- _max_parts - 1, ip_str) +- raise AddressValueError(msg) +- +- # Disregarding the endpoints, find '::' with nothing in between. 
+- # This indicates that a run of zeroes has been skipped. +- skip_index = None +- for i in _compat_range(1, len(parts) - 1): +- if not parts[i]: +- if skip_index is not None: +- # Can't have more than one '::' +- msg = "At most one '::' permitted in %r" % ip_str +- raise AddressValueError(msg) +- skip_index = i +- +- # parts_hi is the number of parts to copy from above/before the '::' +- # parts_lo is the number of parts to copy from below/after the '::' +- if skip_index is not None: +- # If we found a '::', then check if it also covers the endpoints. +- parts_hi = skip_index +- parts_lo = len(parts) - skip_index - 1 +- if not parts[0]: +- parts_hi -= 1 +- if parts_hi: +- msg = "Leading ':' only permitted as part of '::' in %r" +- raise AddressValueError(msg % ip_str) # ^: requires ^:: +- if not parts[-1]: +- parts_lo -= 1 +- if parts_lo: +- msg = "Trailing ':' only permitted as part of '::' in %r" +- raise AddressValueError(msg % ip_str) # :$ requires ::$ +- parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo) +- if parts_skipped < 1: +- msg = "Expected at most %d other parts with '::' in %r" +- raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str)) +- else: +- # Otherwise, allocate the entire address to parts_hi. The +- # endpoints could still be empty, but _parse_hextet() will check +- # for that. +- if len(parts) != cls._HEXTET_COUNT: +- msg = "Exactly %d parts expected without '::' in %r" +- raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str)) +- if not parts[0]: +- msg = "Leading ':' only permitted as part of '::' in %r" +- raise AddressValueError(msg % ip_str) # ^: requires ^:: +- if not parts[-1]: +- msg = "Trailing ':' only permitted as part of '::' in %r" +- raise AddressValueError(msg % ip_str) # :$ requires ::$ +- parts_hi = len(parts) +- parts_lo = 0 +- parts_skipped = 0 +- +- try: +- # Now, parse the hextets into a 128-bit integer. +- ip_int = 0 +- for i in range(parts_hi): +- ip_int <<= 16 +- ip_int |= cls._parse_hextet(parts[i]) +- ip_int <<= 16 * parts_skipped +- for i in range(-parts_lo, 0): +- ip_int <<= 16 +- ip_int |= cls._parse_hextet(parts[i]) +- return ip_int +- except ValueError as exc: +- raise AddressValueError("%s in %r" % (exc, ip_str)) +- +- @classmethod +- def _parse_hextet(cls, hextet_str): +- """Convert an IPv6 hextet string into an integer. +- +- Args: +- hextet_str: A string, the number to parse. +- +- Returns: +- The hextet as an integer. +- +- Raises: +- ValueError: if the input isn't strictly a hex number from +- [0..FFFF]. +- +- """ +- # Whitelist the characters, since int() allows a lot of bizarre stuff. +- if not cls._HEX_DIGITS.issuperset(hextet_str): +- raise ValueError("Only hex digits permitted in %r" % hextet_str) +- # We do the length check second, since the invalid character error +- # is likely to be more informative for the user +- if len(hextet_str) > 4: +- msg = "At most 4 characters permitted in %r" +- raise ValueError(msg % hextet_str) +- # Length check means we can skip checking the integer value +- return int(hextet_str, 16) +- +- @classmethod +- def _compress_hextets(cls, hextets): +- """Compresses a list of hextets. +- +- Compresses a list of strings, replacing the longest continuous +- sequence of "0" in the list with "" and adding empty strings at +- the beginning or at the end of the string such that subsequently +- calling ":".join(hextets) will produce the compressed version of +- the IPv6 address. +- +- Args: +- hextets: A list of strings, the hextets to compress. +- +- Returns: +- A list of strings. 
+- +- """ +- best_doublecolon_start = -1 +- best_doublecolon_len = 0 +- doublecolon_start = -1 +- doublecolon_len = 0 +- for index, hextet in enumerate(hextets): +- if hextet == '0': +- doublecolon_len += 1 +- if doublecolon_start == -1: +- # Start of a sequence of zeros. +- doublecolon_start = index +- if doublecolon_len > best_doublecolon_len: +- # This is the longest sequence of zeros so far. +- best_doublecolon_len = doublecolon_len +- best_doublecolon_start = doublecolon_start +- else: +- doublecolon_len = 0 +- doublecolon_start = -1 +- +- if best_doublecolon_len > 1: +- best_doublecolon_end = (best_doublecolon_start + +- best_doublecolon_len) +- # For zeros at the end of the address. +- if best_doublecolon_end == len(hextets): +- hextets += [''] +- hextets[best_doublecolon_start:best_doublecolon_end] = [''] +- # For zeros at the beginning of the address. +- if best_doublecolon_start == 0: +- hextets = [''] + hextets +- +- return hextets +- +- @classmethod +- def _string_from_ip_int(cls, ip_int=None): +- """Turns a 128-bit integer into hexadecimal notation. +- +- Args: +- ip_int: An integer, the IP address. +- +- Returns: +- A string, the hexadecimal representation of the address. +- +- Raises: +- ValueError: The address is bigger than 128 bits of all ones. +- +- """ +- if ip_int is None: +- ip_int = int(cls._ip) +- +- if ip_int > cls._ALL_ONES: +- raise ValueError('IPv6 address is too large') +- +- hex_str = '%032x' % ip_int +- hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)] +- +- hextets = cls._compress_hextets(hextets) +- return ':'.join(hextets) +- +- def _explode_shorthand_ip_string(self): +- """Expand a shortened IPv6 address. +- +- Args: +- ip_str: A string, the IPv6 address. +- +- Returns: +- A string, the expanded IPv6 address. +- +- """ +- if isinstance(self, IPv6Network): +- ip_str = _compat_str(self.network_address) +- elif isinstance(self, IPv6Interface): +- ip_str = _compat_str(self.ip) +- else: +- ip_str = _compat_str(self) +- +- ip_int = self._ip_int_from_string(ip_str) +- hex_str = '%032x' % ip_int +- parts = [hex_str[x:x + 4] for x in range(0, 32, 4)] +- if isinstance(self, (_BaseNetwork, IPv6Interface)): +- return '%s/%d' % (':'.join(parts), self._prefixlen) +- return ':'.join(parts) +- +- def _reverse_pointer(self): +- """Return the reverse DNS pointer name for the IPv6 address. +- +- This implements the method described in RFC3596 2.5. +- +- """ +- reverse_chars = self.exploded[::-1].replace(':', '') +- return '.'.join(reverse_chars) + '.ip6.arpa' +- +- @property +- def max_prefixlen(self): +- return self._max_prefixlen +- +- @property +- def version(self): +- return self._version +- +- +-class IPv6Address(_BaseV6, _BaseAddress): +- +- """Represent and manipulate single IPv6 Addresses.""" +- +- __slots__ = ('_ip', '__weakref__') +- +- def __init__(self, address): +- """Instantiate a new IPv6 address object. +- +- Args: +- address: A string or integer representing the IP +- +- Additionally, an integer can be passed, so +- IPv6Address('2001:db8::') == +- IPv6Address(42540766411282592856903984951653826560) +- or, more generally +- IPv6Address(int(IPv6Address('2001:db8::'))) == +- IPv6Address('2001:db8::') +- +- Raises: +- AddressValueError: If address isn't a valid IPv6 address. +- +- """ +- # Efficient constructor from integer. 
+- if isinstance(address, _compat_int_types): +- self._check_int_address(address) +- self._ip = address +- return +- +- # Constructing from a packed address +- if isinstance(address, bytes): +- self._check_packed_address(address, 16) +- bvs = _compat_bytes_to_byte_vals(address) +- self._ip = _compat_int_from_byte_vals(bvs, 'big') +- return +- +- # Assume input argument to be string or any object representation +- # which converts into a formatted IP string. +- addr_str = _compat_str(address) +- if '/' in addr_str: +- raise AddressValueError("Unexpected '/' in %r" % address) +- self._ip = self._ip_int_from_string(addr_str) +- +- @property +- def packed(self): +- """The binary representation of this address.""" +- return v6_int_to_packed(self._ip) +- +- @property +- def is_multicast(self): +- """Test if the address is reserved for multicast use. +- +- Returns: +- A boolean, True if the address is a multicast address. +- See RFC 2373 2.7 for details. +- +- """ +- return self in self._constants._multicast_network +- +- @property +- def is_reserved(self): +- """Test if the address is otherwise IETF reserved. +- +- Returns: +- A boolean, True if the address is within one of the +- reserved IPv6 Network ranges. +- +- """ +- return any(self in x for x in self._constants._reserved_networks) +- +- @property +- def is_link_local(self): +- """Test if the address is reserved for link-local. +- +- Returns: +- A boolean, True if the address is reserved per RFC 4291. +- +- """ +- return self in self._constants._linklocal_network +- +- @property +- def is_site_local(self): +- """Test if the address is reserved for site-local. +- +- Note that the site-local address space has been deprecated by RFC 3879. +- Use is_private to test if this address is in the space of unique local +- addresses as defined by RFC 4193. +- +- Returns: +- A boolean, True if the address is reserved per RFC 3513 2.5.6. +- +- """ +- return self in self._constants._sitelocal_network +- +- @property +- def is_private(self): +- """Test if this address is allocated for private networks. +- +- Returns: +- A boolean, True if the address is reserved per +- iana-ipv6-special-registry. +- +- """ +- return any(self in net for net in self._constants._private_networks) +- +- @property +- def is_global(self): +- """Test if this address is allocated for public networks. +- +- Returns: +- A boolean, true if the address is not reserved per +- iana-ipv6-special-registry. +- +- """ +- return not self.is_private +- +- @property +- def is_unspecified(self): +- """Test if the address is unspecified. +- +- Returns: +- A boolean, True if this is the unspecified address as defined in +- RFC 2373 2.5.2. +- +- """ +- return self._ip == 0 +- +- @property +- def is_loopback(self): +- """Test if the address is a loopback address. +- +- Returns: +- A boolean, True if the address is a loopback address as defined in +- RFC 2373 2.5.3. +- +- """ +- return self._ip == 1 +- +- @property +- def ipv4_mapped(self): +- """Return the IPv4 mapped address. +- +- Returns: +- If the IPv6 address is a v4 mapped address, return the +- IPv4 mapped address. Return None otherwise. +- +- """ +- if (self._ip >> 32) != 0xFFFF: +- return None +- return IPv4Address(self._ip & 0xFFFFFFFF) +- +- @property +- def teredo(self): +- """Tuple of embedded teredo IPs. 
+- +- Returns: +- Tuple of the (server, client) IPs or None if the address +- doesn't appear to be a teredo address (doesn't start with +- 2001::/32) +- +- """ +- if (self._ip >> 96) != 0x20010000: +- return None +- return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), +- IPv4Address(~self._ip & 0xFFFFFFFF)) +- +- @property +- def sixtofour(self): +- """Return the IPv4 6to4 embedded address. +- +- Returns: +- The IPv4 6to4-embedded address if present or None if the +- address doesn't appear to contain a 6to4 embedded address. +- +- """ +- if (self._ip >> 112) != 0x2002: +- return None +- return IPv4Address((self._ip >> 80) & 0xFFFFFFFF) +- +- +-class IPv6Interface(IPv6Address): +- +- def __init__(self, address): +- if isinstance(address, (bytes, _compat_int_types)): +- IPv6Address.__init__(self, address) +- self.network = IPv6Network(self._ip) +- self._prefixlen = self._max_prefixlen +- return +- if isinstance(address, tuple): +- IPv6Address.__init__(self, address[0]) +- if len(address) > 1: +- self._prefixlen = int(address[1]) +- else: +- self._prefixlen = self._max_prefixlen +- self.network = IPv6Network(address, strict=False) +- self.netmask = self.network.netmask +- self.hostmask = self.network.hostmask +- return +- +- addr = _split_optional_netmask(address) +- IPv6Address.__init__(self, addr[0]) +- self.network = IPv6Network(address, strict=False) +- self.netmask = self.network.netmask +- self._prefixlen = self.network._prefixlen +- self.hostmask = self.network.hostmask +- +- def __str__(self): +- return '%s/%d' % (self._string_from_ip_int(self._ip), +- self.network.prefixlen) +- +- def __eq__(self, other): +- address_equal = IPv6Address.__eq__(self, other) +- if not address_equal or address_equal is NotImplemented: +- return address_equal +- try: +- return self.network == other.network +- except AttributeError: +- # An interface with an associated network is NOT the +- # same as an unassociated address. That's why the hash +- # takes the extra info into account. +- return False +- +- def __lt__(self, other): +- address_less = IPv6Address.__lt__(self, other) +- if address_less is NotImplemented: +- return NotImplemented +- try: +- return (self.network < other.network or +- self.network == other.network and address_less) +- except AttributeError: +- # We *do* allow addresses and interfaces to be sorted. The +- # unassociated address is considered less than all interfaces. +- return False +- +- def __hash__(self): +- return self._ip ^ self._prefixlen ^ int(self.network.network_address) +- +- __reduce__ = _IPAddressBase.__reduce__ +- +- @property +- def ip(self): +- return IPv6Address(self._ip) +- +- @property +- def with_prefixlen(self): +- return '%s/%s' % (self._string_from_ip_int(self._ip), +- self._prefixlen) +- +- @property +- def with_netmask(self): +- return '%s/%s' % (self._string_from_ip_int(self._ip), +- self.netmask) +- +- @property +- def with_hostmask(self): +- return '%s/%s' % (self._string_from_ip_int(self._ip), +- self.hostmask) +- +- @property +- def is_unspecified(self): +- return self._ip == 0 and self.network.is_unspecified +- +- @property +- def is_loopback(self): +- return self._ip == 1 and self.network.is_loopback +- +- +-class IPv6Network(_BaseV6, _BaseNetwork): +- +- """This class represents and manipulates 128-bit IPv6 networks. 
+- +- Attributes: [examples for IPv6('2001:db8::1000/124')] +- .network_address: IPv6Address('2001:db8::1000') +- .hostmask: IPv6Address('::f') +- .broadcast_address: IPv6Address('2001:db8::100f') +- .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0') +- .prefixlen: 124 +- +- """ +- +- # Class to use when creating address objects +- _address_class = IPv6Address +- +- def __init__(self, address, strict=True): +- """Instantiate a new IPv6 Network object. +- +- Args: +- address: A string or integer representing the IPv6 network or the +- IP and prefix/netmask. +- '2001:db8::/128' +- '2001:db8:0000:0000:0000:0000:0000:0000/128' +- '2001:db8::' +- are all functionally the same in IPv6. That is to say, +- failing to provide a subnetmask will create an object with +- a mask of /128. +- +- Additionally, an integer can be passed, so +- IPv6Network('2001:db8::') == +- IPv6Network(42540766411282592856903984951653826560) +- or, more generally +- IPv6Network(int(IPv6Network('2001:db8::'))) == +- IPv6Network('2001:db8::') +- +- strict: A boolean. If true, ensure that we have been passed +- A true network address, eg, 2001:db8::1000/124 and not an +- IP address on a network, eg, 2001:db8::1/124. +- +- Raises: +- AddressValueError: If address isn't a valid IPv6 address. +- NetmaskValueError: If the netmask isn't valid for +- an IPv6 address. +- ValueError: If strict was True and a network address was not +- supplied. +- +- """ +- _BaseNetwork.__init__(self, address) +- +- # Efficient constructor from integer or packed address +- if isinstance(address, (bytes, _compat_int_types)): +- self.network_address = IPv6Address(address) +- self.netmask, self._prefixlen = self._make_netmask( +- self._max_prefixlen) +- return +- +- if isinstance(address, tuple): +- if len(address) > 1: +- arg = address[1] +- else: +- arg = self._max_prefixlen +- self.netmask, self._prefixlen = self._make_netmask(arg) +- self.network_address = IPv6Address(address[0]) +- packed = int(self.network_address) +- if packed & int(self.netmask) != packed: +- if strict: +- raise ValueError('%s has host bits set' % self) +- else: +- self.network_address = IPv6Address(packed & +- int(self.netmask)) +- return +- +- # Assume input argument to be string or any object representation +- # which converts into a formatted IP prefix string. +- addr = _split_optional_netmask(address) +- +- self.network_address = IPv6Address(self._ip_int_from_string(addr[0])) +- +- if len(addr) == 2: +- arg = addr[1] +- else: +- arg = self._max_prefixlen +- self.netmask, self._prefixlen = self._make_netmask(arg) +- +- if strict: +- if (IPv6Address(int(self.network_address) & int(self.netmask)) != +- self.network_address): +- raise ValueError('%s has host bits set' % self) +- self.network_address = IPv6Address(int(self.network_address) & +- int(self.netmask)) +- +- if self._prefixlen == (self._max_prefixlen - 1): +- self.hosts = self.__iter__ +- +- def hosts(self): +- """Generate Iterator over usable hosts in a network. +- +- This is like __iter__ except it doesn't return the +- Subnet-Router anycast address. +- +- """ +- network = int(self.network_address) +- broadcast = int(self.broadcast_address) +- for x in _compat_range(network + 1, broadcast + 1): +- yield self._address_class(x) +- +- @property +- def is_site_local(self): +- """Test if the address is reserved for site-local. +- +- Note that the site-local address space has been deprecated by RFC 3879. 
+- Use is_private to test if this address is in the space of unique local +- addresses as defined by RFC 4193. +- +- Returns: +- A boolean, True if the address is reserved per RFC 3513 2.5.6. +- +- """ +- return (self.network_address.is_site_local and +- self.broadcast_address.is_site_local) +- +- +-class _IPv6Constants(object): +- +- _linklocal_network = IPv6Network('fe80::/10') +- +- _multicast_network = IPv6Network('ff00::/8') +- +- _private_networks = [ +- IPv6Network('::1/128'), +- IPv6Network('::/128'), +- IPv6Network('::ffff:0:0/96'), +- IPv6Network('100::/64'), +- IPv6Network('2001::/23'), +- IPv6Network('2001:2::/48'), +- IPv6Network('2001:db8::/32'), +- IPv6Network('2001:10::/28'), +- IPv6Network('fc00::/7'), +- IPv6Network('fe80::/10'), +- ] +- +- _reserved_networks = [ +- IPv6Network('::/8'), IPv6Network('100::/8'), +- IPv6Network('200::/7'), IPv6Network('400::/6'), +- IPv6Network('800::/5'), IPv6Network('1000::/4'), +- IPv6Network('4000::/3'), IPv6Network('6000::/3'), +- IPv6Network('8000::/3'), IPv6Network('A000::/3'), +- IPv6Network('C000::/3'), IPv6Network('E000::/4'), +- IPv6Network('F000::/5'), IPv6Network('F800::/6'), +- IPv6Network('FE00::/9'), +- ] +- +- _sitelocal_network = IPv6Network('fec0::/10') +- +- +-IPv6Address._constants = _IPv6Constants +diff --git a/src/pip/_vendor/ipaddress.pyi b/src/pip/_vendor/ipaddress.pyi +deleted file mode 100644 +index eef994d945..0000000000 +--- a/src/pip/_vendor/ipaddress.pyi ++++ /dev/null +@@ -1 +0,0 @@ +-from ipaddress import * +\ No newline at end of file +diff --git a/src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py +index 5831c2e01d..689208d3c6 100644 +--- a/src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py ++++ b/src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py +@@ -11,7 +11,7 @@ + # python-3.5) otherwise only do DNS matching. This allows + # backports.ssl_match_hostname to continue to be used in Python 2.7. 
+ try: +- from pip._vendor import ipaddress ++ import ipaddress + except ImportError: + ipaddress = None + +diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt +index 15c000339a..4f7042cc12 100644 +--- a/src/pip/_vendor/vendor.txt ++++ b/src/pip/_vendor/vendor.txt +@@ -5,7 +5,6 @@ contextlib2==0.6.0.post1 + distlib==0.3.1 + distro==1.5.0 + html5lib==1.1 +-ipaddress==1.0.23 # Only needed on 2.6 and 2.7 + msgpack==1.0.0 + packaging==20.8 + pep517==0.9.1 -- Gitee From 1f376aaba6cca3f0da0d7b01a4f34081b1dfd183 Mon Sep 17 00:00:00 2001 From: openeuler-ci-bot <80474298@qq.com> Date: Sat, 26 Dec 2020 18:59:01 +0800 Subject: [PATCH 02/10] [patch tracking] 20201226185849754648 - https://github.com/pypa/pip/commit/49c898e4b64ff7ba4fc42d047b53bf23d57ee59e --- ...98e4b64ff7ba4fc42d047b53bf23d57ee59e.patch | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 49c898e4b64ff7ba4fc42d047b53bf23d57ee59e.patch diff --git a/49c898e4b64ff7ba4fc42d047b53bf23d57ee59e.patch b/49c898e4b64ff7ba4fc42d047b53bf23d57ee59e.patch new file mode 100644 index 0000000..1b2e181 --- /dev/null +++ b/49c898e4b64ff7ba4fc42d047b53bf23d57ee59e.patch @@ -0,0 +1,41 @@ +diff --git a/news/b034ad46-e6b0-48b1-8b26-1145d611d082.trivial.rst b/news/b034ad46-e6b0-48b1-8b26-1145d611d082.trivial.rst +new file mode 100644 +index 0000000000..e69de29bb2 +diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py +index 22969c726d..0ead887040 100644 +--- a/src/pip/_internal/vcs/bazaar.py ++++ b/src/pip/_internal/vcs/bazaar.py +@@ -3,7 +3,6 @@ + + import logging + import os +-from urllib import parse as urllib_parse + + from pip._internal.utils.misc import display_path, rmtree + from pip._internal.utils.subprocess import make_command +@@ -30,13 +29,6 @@ class Bazaar(VersionControl): + 'bzr+lp', + ) + +- def __init__(self, *args, **kwargs): +- super().__init__(*args, **kwargs) +- # This is only needed for python <2.7.5 +- # Register lp but do not expose as a scheme to support bzr+lp. 
+- if getattr(urllib_parse, 'uses_fragment', None): +- urllib_parse.uses_fragment.extend(['lp']) +- + @staticmethod + def get_base_rev_args(rev): + return ['-r', rev] +diff --git a/src/pip/_internal/vcs/versioncontrol.py b/src/pip/_internal/vcs/versioncontrol.py +index 35caf19157..3ef17172ab 100644 +--- a/src/pip/_internal/vcs/versioncontrol.py ++++ b/src/pip/_internal/vcs/versioncontrol.py +@@ -286,7 +286,6 @@ def __init__(self): + # Register more schemes with urlparse for various version control + # systems + urllib_parse.uses_netloc.extend(self.schemes) +- urllib_parse.uses_fragment.extend(self.schemes) + super().__init__() + + def __iter__(self): -- Gitee From 0ca7948d0808e15a16be1ecf8aa6a544866ace94 Mon Sep 17 00:00:00 2001 From: openeuler-ci-bot <80474298@qq.com> Date: Sat, 26 Dec 2020 18:59:01 +0800 Subject: [PATCH 03/10] [patch tracking] 20201226185849754648 - https://github.com/pypa/pip/commit/31eb524ff0d18d6190b30a0c7821c0c857a7523c --- ...524ff0d18d6190b30a0c7821c0c857a7523c.patch | 1504 +++++++++++++++++ 1 file changed, 1504 insertions(+) create mode 100644 31eb524ff0d18d6190b30a0c7821c0c857a7523c.patch diff --git a/31eb524ff0d18d6190b30a0c7821c0c857a7523c.patch b/31eb524ff0d18d6190b30a0c7821c0c857a7523c.patch new file mode 100644 index 0000000..a8f2de9 --- /dev/null +++ b/31eb524ff0d18d6190b30a0c7821c0c857a7523c.patch @@ -0,0 +1,1504 @@ +diff --git a/news/1170af15-1373-4226-a1ec-efe54b7ad480.trivial.rst b/news/1170af15-1373-4226-a1ec-efe54b7ad480.trivial.rst +new file mode 100644 +index 0000000000..e69de29bb2 +diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py +index a08e63cd05..a587d9f7c8 100644 +--- a/src/pip/_internal/build_env.py ++++ b/src/pip/_internal/build_env.py +@@ -46,7 +46,7 @@ def __init__(self, path): + self.lib_dirs = [purelib, platlib] + + +-class BuildEnvironment(object): ++class BuildEnvironment: + """Creates and manages an isolated environment to install build deps + """ + +diff --git a/src/pip/_internal/cache.py b/src/pip/_internal/cache.py +index b02c82a3c3..8724d90935 100644 +--- a/src/pip/_internal/cache.py ++++ b/src/pip/_internal/cache.py +@@ -33,7 +33,7 @@ def _hash_dict(d): + return hashlib.sha224(s.encode("ascii")).hexdigest() + + +-class Cache(object): ++class Cache: + """An abstract class - provides cache directories for data from links + + +@@ -263,7 +263,7 @@ def __init__(self, format_control): + super().__init__(self._temp_dir.path, format_control) + + +-class CacheEntry(object): ++class CacheEntry: + def __init__( + self, + link, # type: Link +diff --git a/src/pip/_internal/cli/command_context.py b/src/pip/_internal/cli/command_context.py +index 7ee2d24e32..ade14f2f67 100644 +--- a/src/pip/_internal/cli/command_context.py ++++ b/src/pip/_internal/cli/command_context.py +@@ -10,7 +10,7 @@ + _T = TypeVar('_T', covariant=True) + + +-class CommandContextMixIn(object): ++class CommandContextMixIn: + def __init__(self): + # type: () -> None + super().__init__() +diff --git a/src/pip/_internal/cli/progress_bars.py b/src/pip/_internal/cli/progress_bars.py +index e248a1a5fa..2c856a51fa 100644 +--- a/src/pip/_internal/cli/progress_bars.py ++++ b/src/pip/_internal/cli/progress_bars.py +@@ -52,7 +52,7 @@ def _select_progress_class(preferred, fallback): + _BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any + + +-class InterruptibleMixin(object): ++class InterruptibleMixin: + """ + Helper to ensure that self.finish() gets called on keyboard interrupt. 
+ +@@ -125,7 +125,7 @@ class BlueEmojiBar(IncrementalBar): + phases = ("\U0001F539", "\U0001F537", "\U0001F535") + + +-class DownloadProgressMixin(object): ++class DownloadProgressMixin: + + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None +@@ -164,7 +164,7 @@ def iter(self, it): # type: ignore + self.finish() + + +-class WindowsMixin(object): ++class WindowsMixin: + + def __init__(self, *args, **kwargs): + # type: (List[Any], Dict[Any, Any]) -> None +diff --git a/src/pip/_internal/cli/spinners.py b/src/pip/_internal/cli/spinners.py +index 171d3a02d2..05ec2dcc76 100644 +--- a/src/pip/_internal/cli/spinners.py ++++ b/src/pip/_internal/cli/spinners.py +@@ -16,7 +16,7 @@ + logger = logging.getLogger(__name__) + + +-class SpinnerInterface(object): ++class SpinnerInterface: + def spin(self): + # type: () -> None + raise NotImplementedError() +@@ -109,7 +109,7 @@ def finish(self, final_status): + self._finished = True + + +-class RateLimiter(object): ++class RateLimiter: + def __init__(self, min_update_interval_seconds): + # type: (float) -> None + self._min_update_interval_seconds = min_update_interval_seconds +diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py +index a55882b463..5ca07c8ee8 100644 +--- a/src/pip/_internal/configuration.py ++++ b/src/pip/_internal/configuration.py +@@ -94,7 +94,7 @@ def get_configuration_files(): + } + + +-class Configuration(object): ++class Configuration: + """Handles management of configuration. + + Provides an interface to accessing and managing configuration files. +diff --git a/src/pip/_internal/distributions/base.py b/src/pip/_internal/distributions/base.py +index 6c68a86a27..37db810b35 100644 +--- a/src/pip/_internal/distributions/base.py ++++ b/src/pip/_internal/distributions/base.py +@@ -11,7 +11,7 @@ + from pip._internal.req import InstallRequirement + + +-class AbstractDistribution(object, metaclass=abc.ABCMeta): ++class AbstractDistribution(metaclass=abc.ABCMeta): + """A base class for handling installable artifacts. + + The requirements for anything installable are as follows: +diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py +index 6ad72de5c5..a8c8ec0ce7 100644 +--- a/src/pip/_internal/index/collector.py ++++ b/src/pip/_internal/index/collector.py +@@ -284,7 +284,7 @@ def _create_link_from_element( + return link + + +-class CacheablePageContent(object): ++class CacheablePageContent: + def __init__(self, page): + # type: (HTMLPage) -> None + assert page.cache_link_parsing +@@ -350,7 +350,7 @@ def parse_links(page): + yield link + + +-class HTMLPage(object): ++class HTMLPage: + """Represents one page, along with its URL""" + + def __init__( +@@ -524,7 +524,7 @@ def sort_path(path): + return files, urls + + +-class CollectedLinks(object): ++class CollectedLinks: + + """ + Encapsulates the return value of a call to LinkCollector.collect_links(). 
+@@ -559,7 +559,7 @@ def __init__( + self.project_urls = project_urls + + +-class LinkCollector(object): ++class LinkCollector: + + """ + Responsible for collecting Link objects from all configured locations, +diff --git a/src/pip/_internal/index/package_finder.py b/src/pip/_internal/index/package_finder.py +index adb2459227..9d13fdda6b 100644 +--- a/src/pip/_internal/index/package_finder.py ++++ b/src/pip/_internal/index/package_finder.py +@@ -98,7 +98,7 @@ def _check_link_requires_python( + return True + + +-class LinkEvaluator(object): ++class LinkEvaluator: + + """ + Responsible for evaluating links for a particular project. +@@ -311,7 +311,7 @@ def filter_unallowed_hashes( + return filtered + + +-class CandidatePreferences(object): ++class CandidatePreferences: + + """ + Encapsulates some of the preferences for filtering and sorting +@@ -331,7 +331,7 @@ def __init__( + self.prefer_binary = prefer_binary + + +-class BestCandidateResult(object): ++class BestCandidateResult: + """A collection of candidates, returned by `PackageFinder.find_best_candidate`. + + This class is only intended to be instantiated by CandidateEvaluator's +@@ -376,7 +376,7 @@ def iter_applicable(self): + return iter(self._applicable_candidates) + + +-class CandidateEvaluator(object): ++class CandidateEvaluator: + + """ + Responsible for filtering and sorting candidates for installation based +@@ -572,7 +572,7 @@ def compute_best_candidate( + ) + + +-class PackageFinder(object): ++class PackageFinder: + """This finds packages. + + This is meant to match easy_install's technique for looking for +diff --git a/src/pip/_internal/models/direct_url.py b/src/pip/_internal/models/direct_url.py +index 8f544caf60..1d97b38eaa 100644 +--- a/src/pip/_internal/models/direct_url.py ++++ b/src/pip/_internal/models/direct_url.py +@@ -71,7 +71,7 @@ def _filter_none(**kwargs): + return {k: v for k, v in kwargs.items() if v is not None} + + +-class VcsInfo(object): ++class VcsInfo: + name = "vcs_info" + + def __init__( +@@ -112,7 +112,7 @@ def _to_dict(self): + ) + + +-class ArchiveInfo(object): ++class ArchiveInfo: + name = "archive_info" + + def __init__( +@@ -133,7 +133,7 @@ def _to_dict(self): + return _filter_none(hash=self.hash) + + +-class DirInfo(object): ++class DirInfo: + name = "dir_info" + + def __init__( +@@ -160,7 +160,7 @@ def _to_dict(self): + InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] + + +-class DirectUrl(object): ++class DirectUrl: + + def __init__( + self, +diff --git a/src/pip/_internal/models/format_control.py b/src/pip/_internal/models/format_control.py +index adcf61e285..fc2747c950 100644 +--- a/src/pip/_internal/models/format_control.py ++++ b/src/pip/_internal/models/format_control.py +@@ -7,7 +7,7 @@ + from typing import FrozenSet, Optional, Set + + +-class FormatControl(object): ++class FormatControl: + """Helper for managing formats from which a package can be installed. 
+ """ + +diff --git a/src/pip/_internal/models/index.py b/src/pip/_internal/models/index.py +index 7f3285692b..ec328190a2 100644 +--- a/src/pip/_internal/models/index.py ++++ b/src/pip/_internal/models/index.py +@@ -1,7 +1,7 @@ + from urllib import parse as urllib_parse + + +-class PackageIndex(object): ++class PackageIndex: + """Represents a Package Index and provides easier access to endpoints + """ + +diff --git a/src/pip/_internal/models/scheme.py b/src/pip/_internal/models/scheme.py +index 5040551eb0..697cd19b47 100644 +--- a/src/pip/_internal/models/scheme.py ++++ b/src/pip/_internal/models/scheme.py +@@ -9,7 +9,7 @@ + SCHEME_KEYS = ['platlib', 'purelib', 'headers', 'scripts', 'data'] + + +-class Scheme(object): ++class Scheme: + """A Scheme holds paths which are used as the base directories for + artifacts associated with a Python package. + """ +diff --git a/src/pip/_internal/models/search_scope.py b/src/pip/_internal/models/search_scope.py +index ab6f914869..abfb8bed41 100644 +--- a/src/pip/_internal/models/search_scope.py ++++ b/src/pip/_internal/models/search_scope.py +@@ -18,7 +18,7 @@ + logger = logging.getLogger(__name__) + + +-class SearchScope(object): ++class SearchScope: + + """ + Encapsulates the locations that pip is configured to search. +diff --git a/src/pip/_internal/models/selection_prefs.py b/src/pip/_internal/models/selection_prefs.py +index 83110dd8f9..4d5822268b 100644 +--- a/src/pip/_internal/models/selection_prefs.py ++++ b/src/pip/_internal/models/selection_prefs.py +@@ -6,7 +6,7 @@ + from pip._internal.models.format_control import FormatControl + + +-class SelectionPreferences(object): ++class SelectionPreferences: + """ + Encapsulates the candidate selection preferences for downloading + and installing files. +diff --git a/src/pip/_internal/models/target_python.py b/src/pip/_internal/models/target_python.py +index 4593dc854f..2f2a74242d 100644 +--- a/src/pip/_internal/models/target_python.py ++++ b/src/pip/_internal/models/target_python.py +@@ -10,7 +10,7 @@ + from pip._vendor.packaging.tags import Tag + + +-class TargetPython(object): ++class TargetPython: + + """ + Encapsulates the properties of a Python interpreter one is targeting +diff --git a/src/pip/_internal/models/wheel.py b/src/pip/_internal/models/wheel.py +index 4d4068f3b7..49aae14703 100644 +--- a/src/pip/_internal/models/wheel.py ++++ b/src/pip/_internal/models/wheel.py +@@ -12,7 +12,7 @@ + from typing import List + + +-class Wheel(object): ++class Wheel: + """A wheel file""" + + wheel_file_re = re.compile( +diff --git a/src/pip/_internal/network/download.py b/src/pip/_internal/network/download.py +index 76896e8997..32396573ca 100644 +--- a/src/pip/_internal/network/download.py ++++ b/src/pip/_internal/network/download.py +@@ -133,7 +133,7 @@ def _http_get_download(session, link): + return resp + + +-class Downloader(object): ++class Downloader: + def __init__( + self, + session, # type: PipSession +@@ -166,7 +166,7 @@ def __call__(self, link, location): + return filepath, content_type + + +-class BatchDownloader(object): ++class BatchDownloader: + + def __init__( + self, +diff --git a/src/pip/_internal/network/lazy_wheel.py b/src/pip/_internal/network/lazy_wheel.py +index 83704f6f19..c68a35b9e1 100644 +--- a/src/pip/_internal/network/lazy_wheel.py ++++ b/src/pip/_internal/network/lazy_wheel.py +@@ -44,7 +44,7 @@ def dist_from_wheel_url(name, url, session): + return pkg_resources_distribution_for_wheel(zip_file, name, wheel.name) + + +-class LazyZipOverHTTP(object): ++class LazyZipOverHTTP: + 
"""File-like object mapped to a ZIP file over HTTP. + + This uses HTTP range requests to lazily fetch the file's content, +diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py +index 3529c55edc..74a6cd3d9c 100644 +--- a/src/pip/_internal/operations/freeze.py ++++ b/src/pip/_internal/operations/freeze.py +@@ -236,7 +236,7 @@ def get_requirement_info(dist): + return (None, False, comments) + + +-class FrozenRequirement(object): ++class FrozenRequirement: + def __init__(self, name, req, editable, comments=()): + # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None + self.name = name +diff --git a/src/pip/_internal/operations/install/wheel.py b/src/pip/_internal/operations/install/wheel.py +index e822a7f8ad..49e93c510c 100644 +--- a/src/pip/_internal/operations/install/wheel.py ++++ b/src/pip/_internal/operations/install/wheel.py +@@ -391,7 +391,7 @@ def get_console_script_specs(console): + return scripts_to_generate + + +-class ZipBackedFile(object): ++class ZipBackedFile: + def __init__(self, src_record_path, dest_path, zip_file): + # type: (RecordPath, str, ZipFile) -> None + self.src_record_path = src_record_path +@@ -432,7 +432,7 @@ def save(self): + set_extracted_file_to_default_mode_plus_executable(self.dest_path) + + +-class ScriptFile(object): ++class ScriptFile: + def __init__(self, file): + # type: (File) -> None + self._file = file +diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py +index 1550a60d1d..853e2e7fbd 100644 +--- a/src/pip/_internal/operations/prepare.py ++++ b/src/pip/_internal/operations/prepare.py +@@ -85,7 +85,7 @@ def unpack_vcs_link(link, location): + vcs_backend.unpack(location, url=hide_url(link.url)) + + +-class File(object): ++class File: + + def __init__(self, path, content_type): + # type: (str, Optional[str]) -> None +@@ -279,7 +279,7 @@ def _check_download_dir(link, download_dir, hashes): + return download_path + + +-class RequirementPreparer(object): ++class RequirementPreparer: + """Prepares a Requirement + """ + +diff --git a/src/pip/_internal/req/__init__.py b/src/pip/_internal/req/__init__.py +index 8bdec4fc8c..9f9bc50122 100644 +--- a/src/pip/_internal/req/__init__.py ++++ b/src/pip/_internal/req/__init__.py +@@ -19,7 +19,7 @@ + logger = logging.getLogger(__name__) + + +-class InstallationResult(object): ++class InstallationResult: + def __init__(self, name): + # type: (str) -> None + self.name = name +diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py +index 2245cb826f..3564ab399e 100644 +--- a/src/pip/_internal/req/constructors.py ++++ b/src/pip/_internal/req/constructors.py +@@ -171,7 +171,7 @@ def deduce_helpful_msg(req): + return msg + + +-class RequirementParts(object): ++class RequirementParts: + def __init__( + self, + requirement, # type: Optional[Requirement] +diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py +index 26055b4d6c..bc5d6dfb2e 100644 +--- a/src/pip/_internal/req/req_file.py ++++ b/src/pip/_internal/req/req_file.py +@@ -77,7 +77,7 @@ + SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] + + +-class ParsedRequirement(object): ++class ParsedRequirement: + def __init__( + self, + requirement, # type:str +@@ -96,7 +96,7 @@ def __init__( + self.line_source = line_source + + +-class ParsedLine(object): ++class ParsedLine: + def __init__( + self, + filename, # type: str +@@ -320,7 +320,7 @@ def handle_line( + return None + + +-class 
RequirementsFileParser(object): ++class RequirementsFileParser: + def __init__( + self, + session, # type: PipSession +diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py +index 92a77f87bc..f391b47f6d 100644 +--- a/src/pip/_internal/req/req_install.py ++++ b/src/pip/_internal/req/req_install.py +@@ -92,7 +92,7 @@ def _get_dist(metadata_directory): + ) + + +-class InstallRequirement(object): ++class InstallRequirement: + """ + Represents something that may be installed later on, may have information + about where to fetch the relevant requirement and also contains logic for +diff --git a/src/pip/_internal/req/req_set.py b/src/pip/_internal/req/req_set.py +index 42c76820d2..96bb4013eb 100644 +--- a/src/pip/_internal/req/req_set.py ++++ b/src/pip/_internal/req/req_set.py +@@ -17,7 +17,7 @@ + logger = logging.getLogger(__name__) + + +-class RequirementSet(object): ++class RequirementSet: + + def __init__(self, check_supported_wheels=True): + # type: (bool) -> None +diff --git a/src/pip/_internal/req/req_tracker.py b/src/pip/_internal/req/req_tracker.py +index cfbfbb10f4..84edbbfae6 100644 +--- a/src/pip/_internal/req/req_tracker.py ++++ b/src/pip/_internal/req/req_tracker.py +@@ -62,7 +62,7 @@ def get_requirement_tracker(): + yield tracker + + +-class RequirementTracker(object): ++class RequirementTracker: + + def __init__(self, root): + # type: (str) -> None +diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py +index 5e62b83289..43d42c3b41 100644 +--- a/src/pip/_internal/req/req_uninstall.py ++++ b/src/pip/_internal/req/req_uninstall.py +@@ -214,7 +214,7 @@ def compress_for_output_listing(paths): + return will_remove, will_skip + + +-class StashedUninstallPathSet(object): ++class StashedUninstallPathSet: + """A set of file rename operations to stash files while + tentatively uninstalling them.""" + def __init__(self): +@@ -325,7 +325,7 @@ def can_rollback(self): + return bool(self._moves) + + +-class UninstallPathSet(object): ++class UninstallPathSet: + """A set of file paths to be removed in the uninstallation of a + requirement.""" + def __init__(self, dist): +@@ -590,7 +590,7 @@ def from_dist(cls, dist): + return paths_to_remove + + +-class UninstallPthEntries(object): ++class UninstallPthEntries: + def __init__(self, pth_file): + # type: (str) -> None + self.file = pth_file +diff --git a/src/pip/_internal/resolution/base.py b/src/pip/_internal/resolution/base.py +index 6d50555e53..f2816ab71c 100644 +--- a/src/pip/_internal/resolution/base.py ++++ b/src/pip/_internal/resolution/base.py +@@ -11,7 +11,7 @@ + ] + + +-class BaseResolver(object): ++class BaseResolver: + def resolve(self, root_reqs, check_supported_wheels): + # type: (List[InstallRequirement], bool) -> RequirementSet + raise NotImplementedError() +diff --git a/src/pip/_internal/resolution/resolvelib/base.py b/src/pip/_internal/resolution/resolvelib/base.py +index 7eb8a178eb..82c5ec7c72 100644 +--- a/src/pip/_internal/resolution/resolvelib/base.py ++++ b/src/pip/_internal/resolution/resolvelib/base.py +@@ -26,7 +26,7 @@ def format_name(project, extras): + return "{}[{}]".format(project, ",".join(canonical_extras)) + + +-class Constraint(object): ++class Constraint: + def __init__(self, specifier, hashes): + # type: (SpecifierSet, Hashes) -> None + self.specifier = specifier +@@ -66,7 +66,7 @@ def is_satisfied_by(self, candidate): + return self.specifier.contains(candidate.version, prereleases=True) + + +-class Requirement(object): ++class Requirement: + 
@property + def project_name(self): + # type: () -> str +@@ -101,7 +101,7 @@ def format_for_error(self): + raise NotImplementedError("Subclass should override") + + +-class Candidate(object): ++class Candidate: + @property + def project_name(self): + # type: () -> str +diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py +index b4c7bf1135..03d0faadee 100644 +--- a/src/pip/_internal/resolution/resolvelib/factory.py ++++ b/src/pip/_internal/resolution/resolvelib/factory.py +@@ -71,7 +71,7 @@ + logger = logging.getLogger(__name__) + + +-class Factory(object): ++class Factory: + def __init__( + self, + finder, # type: PackageFinder +diff --git a/src/pip/_internal/self_outdated_check.py b/src/pip/_internal/self_outdated_check.py +index 8159412007..01ed8787b5 100644 +--- a/src/pip/_internal/self_outdated_check.py ++++ b/src/pip/_internal/self_outdated_check.py +@@ -36,7 +36,7 @@ def _get_statefile_name(key): + return name + + +-class SelfCheckState(object): ++class SelfCheckState: + def __init__(self, cache_dir): + # type: (str) -> None + self.state = {} # type: Dict[str, Any] +diff --git a/src/pip/_internal/utils/hashes.py b/src/pip/_internal/utils/hashes.py +index 30a7f4a69f..35dae23582 100644 +--- a/src/pip/_internal/utils/hashes.py ++++ b/src/pip/_internal/utils/hashes.py +@@ -19,7 +19,7 @@ + STRONG_HASHES = ['sha256', 'sha384', 'sha512'] + + +-class Hashes(object): ++class Hashes: + """A wrapper that builds multiple hashes at once and checks them against + known-good values + +diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py +index c3d969eb22..72db88c7f4 100644 +--- a/src/pip/_internal/utils/misc.py ++++ b/src/pip/_internal/utils/misc.py +@@ -573,7 +573,7 @@ def write_output(msg, *args): + logger.info(msg, *args) + + +-class FakeFile(object): ++class FakeFile: + """Wrap a list of lines in an object with readline() to make + ConfigParser happy.""" + def __init__(self, lines): +@@ -805,7 +805,7 @@ def redact_auth_from_url(url): + return _transform_url(url, _redact_netloc)[0] + + +-class HiddenText(object): ++class HiddenText: + def __init__( + self, + secret, # type: str +diff --git a/src/pip/_internal/utils/models.py b/src/pip/_internal/utils/models.py +index d1c2f22679..e7db67a933 100644 +--- a/src/pip/_internal/utils/models.py ++++ b/src/pip/_internal/utils/models.py +@@ -6,7 +6,7 @@ + import operator + + +-class KeyBasedCompareMixin(object): ++class KeyBasedCompareMixin: + """Provides comparison capabilities that is based on a key + """ + +diff --git a/src/pip/_internal/utils/pkg_resources.py b/src/pip/_internal/utils/pkg_resources.py +index 0bc129acc6..0f42cc381a 100644 +--- a/src/pip/_internal/utils/pkg_resources.py ++++ b/src/pip/_internal/utils/pkg_resources.py +@@ -7,7 +7,7 @@ + from typing import Dict, Iterable, List + + +-class DictMetadata(object): ++class DictMetadata: + """IMetadataProvider that reads metadata files from a dictionary. 
+ """ + def __init__(self, metadata): +diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py +index c97edc76d6..f224a04188 100644 +--- a/src/pip/_internal/utils/temp_dir.py ++++ b/src/pip/_internal/utils/temp_dir.py +@@ -45,7 +45,7 @@ def global_tempdir_manager(): + _tempdir_manager = old_tempdir_manager + + +-class TempDirectoryTypeRegistry(object): ++class TempDirectoryTypeRegistry: + """Manages temp directory behavior + """ + +@@ -86,14 +86,14 @@ def tempdir_registry(): + _tempdir_registry = old_tempdir_registry + + +-class _Default(object): ++class _Default: + pass + + + _default = _Default() + + +-class TempDirectory(object): ++class TempDirectory: + """Helper class that owns and cleans up a temporary directory. + + This class can be used as a context manager or as an OO representation of a +diff --git a/src/pip/_internal/vcs/versioncontrol.py b/src/pip/_internal/vcs/versioncontrol.py +index 3ef17172ab..d97a41918b 100644 +--- a/src/pip/_internal/vcs/versioncontrol.py ++++ b/src/pip/_internal/vcs/versioncontrol.py +@@ -204,7 +204,7 @@ class RemoteNotFoundError(Exception): + pass + + +-class RevOptions(object): ++class RevOptions: + + """ + Encapsulates a VCS-specific revision to install, along with any VCS +@@ -277,7 +277,7 @@ def make_new(self, rev): + return self.vc_class.make_rev_options(rev, extra_args=self.extra_args) + + +-class VcsSupport(object): ++class VcsSupport: + _registry = {} # type: Dict[str, VersionControl] + schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] + +@@ -371,7 +371,7 @@ def get_backend(self, name): + vcs = VcsSupport() + + +-class VersionControl(object): ++class VersionControl: + name = '' + dirname = '' + repo_name = '' +diff --git a/tests/conftest.py b/tests/conftest.py +index 7e67a0b2aa..048258f96f 100644 +--- a/tests/conftest.py ++++ b/tests/conftest.py +@@ -457,13 +457,13 @@ def data(tmpdir): + return TestData.copy(tmpdir.joinpath("data")) + + +-class InMemoryPipResult(object): ++class InMemoryPipResult: + def __init__(self, returncode, stdout): + self.returncode = returncode + self.stdout = stdout + + +-class InMemoryPip(object): ++class InMemoryPip: + def pip(self, *args): + orig_stdout = sys.stdout + stdout = io.StringIO() +@@ -506,7 +506,7 @@ def factory(): + return factory + + +-class MockServer(object): ++class MockServer: + def __init__(self, server): + # type: (_MockServer) -> None + self._server = server +diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py +index 72b55fda92..90981395a7 100644 +--- a/tests/functional/test_download.py ++++ b/tests/functional/test_download.py +@@ -324,7 +324,7 @@ def test_download_specify_platform(script, data): + ) + + +-class TestDownloadPlatformManylinuxes(object): ++class TestDownloadPlatformManylinuxes: + """ + "pip download --platform" downloads a .whl archive supported for + manylinux platforms. 
+diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py +index c5985243b6..83fe9c9451 100644 +--- a/tests/functional/test_install_reqs.py ++++ b/tests/functional/test_install_reqs.py +@@ -16,7 +16,7 @@ + from tests.lib.path import Path + + +-class ArgRecordingSdist(object): ++class ArgRecordingSdist: + def __init__(self, sdist_path, args_path): + self.sdist_path = sdist_path + self._args_path = args_path +diff --git a/tests/functional/test_install_upgrade.py b/tests/functional/test_install_upgrade.py +index 923a594c62..0dd4f9f8b5 100644 +--- a/tests/functional/test_install_upgrade.py ++++ b/tests/functional/test_install_upgrade.py +@@ -396,7 +396,7 @@ def test_upgrade_vcs_req_with_dist_found(script): + assert "pypi.org" not in result.stdout, result.stdout + + +-class TestUpgradeDistributeToSetuptools(object): ++class TestUpgradeDistributeToSetuptools: + """ + From pip1.4 to pip6, pip supported a set of "hacks" (see Issue #1122) to + allow distribute to conflict with setuptools, so that the following would +diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py +index b730b3cbdf..4b40ca23bf 100644 +--- a/tests/functional/test_new_resolver.py ++++ b/tests/functional/test_new_resolver.py +@@ -858,7 +858,7 @@ def test_new_resolver_upgrade_strategy(script): + assert_installed(script, dep="2.0.0") + + +-class TestExtraMerge(object): ++class TestExtraMerge: + """ + Test installing a package that depends the same package with different + extras, one listed as required and the other as in extra. +diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py +index 954145b0a0..26cb4ff94f 100644 +--- a/tests/lib/__init__.py ++++ b/tests/lib/__init__.py +@@ -146,7 +146,7 @@ def make_test_finder( + ) + + +-class TestData(object): ++class TestData: + """ + Represents a bundle of pre-created test data. 
+ +@@ -230,7 +230,7 @@ class TestFailure(AssertionError): + pass + + +-class TestPipResult(object): ++class TestPipResult: + + def __init__(self, impl, verbose=False): + self._impl = impl +diff --git a/tests/lib/configuration_helpers.py b/tests/lib/configuration_helpers.py +index 3e3692696a..384a424e2d 100644 +--- a/tests/lib/configuration_helpers.py ++++ b/tests/lib/configuration_helpers.py +@@ -14,7 +14,7 @@ + kinds = pip._internal.configuration.kinds + + +-class ConfigurationMixin(object): ++class ConfigurationMixin: + + def setup(self): + self.configuration = pip._internal.configuration.Configuration( +diff --git a/tests/lib/options_helpers.py b/tests/lib/options_helpers.py +index 2354a818df..8cc5e306d5 100644 +--- a/tests/lib/options_helpers.py ++++ b/tests/lib/options_helpers.py +@@ -17,7 +17,7 @@ def main(self, args): + return self.parse_args(args) + + +-class AddFakeCommandMixin(object): ++class AddFakeCommandMixin: + + def setup(self): + commands_dict['fake'] = CommandInfo( +diff --git a/tests/lib/requests_mocks.py b/tests/lib/requests_mocks.py +index e8e3e9c886..b8ae2d232d 100644 +--- a/tests/lib/requests_mocks.py ++++ b/tests/lib/requests_mocks.py +@@ -4,7 +4,7 @@ + from io import BytesIO + + +-class FakeStream(object): ++class FakeStream: + + def __init__(self, contents): + self._io = BytesIO(contents) +@@ -19,7 +19,7 @@ def release_conn(self): + pass + + +-class MockResponse(object): ++class MockResponse: + + def __init__(self, contents): + self.raw = FakeStream(contents) +@@ -33,7 +33,7 @@ def __init__(self, contents): + self.history = [] + + +-class MockConnection(object): ++class MockConnection: + + def _send(self, req, **kwargs): + raise NotImplementedError("_send must be overridden for tests") +@@ -45,7 +45,7 @@ def send(self, req, **kwargs): + return resp + + +-class MockRequest(object): ++class MockRequest: + + def __init__(self, url): + self.url = url +diff --git a/tests/lib/venv.py b/tests/lib/venv.py +index c5652fecf4..e3ed345065 100644 +--- a/tests/lib/venv.py ++++ b/tests/lib/venv.py +@@ -9,7 +9,7 @@ + from .path import Path + + +-class VirtualEnvironment(object): ++class VirtualEnvironment: + """ + An abstraction around virtual environments, currently it only uses + virtualenv but in the future it could use pyvenv. +diff --git a/tests/lib/wheel.py b/tests/lib/wheel.py +index 2121a175ca..d460a126df 100644 +--- a/tests/lib/wheel.py ++++ b/tests/lib/wheel.py +@@ -261,7 +261,7 @@ def wheel_name(name, version, pythons, abis, platforms): + return "{}.whl".format(stem) + + +-class WheelBuilder(object): ++class WheelBuilder: + """A wheel that can be saved or converted to several formats. 
+ """ + +diff --git a/tests/unit/test_base_command.py b/tests/unit/test_base_command.py +index 6d60ca0942..857d4f4f30 100644 +--- a/tests/unit/test_base_command.py ++++ b/tests/unit/test_base_command.py +@@ -52,7 +52,7 @@ def run(self, options, args): + ) + + +-class TestCommand(object): ++class TestCommand: + + def call_main(self, capsys, args): + """ +@@ -159,7 +159,7 @@ def test_base_command_global_tempdir_cleanup(kind, exists): + assert temp_dir._tempdir_manager is None + assert temp_dir._tempdir_registry is None + +- class Holder(object): ++ class Holder: + value = None + + def create_temp_dirs(options, args): +diff --git a/tests/unit/test_check.py b/tests/unit/test_check.py +index 1d1921484e..c53830aa09 100644 +--- a/tests/unit/test_check.py ++++ b/tests/unit/test_check.py +@@ -6,7 +6,7 @@ + from pip._internal.operations import check + + +-class TestInstalledDistributionsCall(object): ++class TestInstalledDistributionsCall: + + def test_passes_correct_default_kwargs(self, monkeypatch): + my_mock = mock.MagicMock(return_value=[]) +diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py +index 294ea721a3..ac765c5602 100644 +--- a/tests/unit/test_collector.py ++++ b/tests/unit/test_collector.py +@@ -640,7 +640,7 @@ def check_links_include(links, names): + ) + + +-class TestLinkCollector(object): ++class TestLinkCollector: + + @patch('pip._internal.index.collector._get_html_response') + def test_fetch_page(self, mock_get_html_response): +diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py +index 55fdab3b88..d716e58253 100644 +--- a/tests/unit/test_finder.py ++++ b/tests/unit/test_finder.py +@@ -432,7 +432,7 @@ def test_finder_installs_pre_releases_with_version_spec(): + assert found.link.url == "https://foo/bar-2.0b1.tar.gz" + + +-class TestLinkEvaluator(object): ++class TestLinkEvaluator: + + def make_test_link_evaluator(self, formats): + target_python = TargetPython() +diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py +index 54bce7052a..b3da43cb85 100644 +--- a/tests/unit/test_logging.py ++++ b/tests/unit/test_logging.py +@@ -15,7 +15,7 @@ + logger = logging.getLogger(__name__) + + +-class TestIndentingFormatter(object): ++class TestIndentingFormatter: + """Test ``pip._internal.utils.logging.IndentingFormatter``.""" + + def make_record(self, msg, level_name): +@@ -110,7 +110,7 @@ def thread_function(): + assert results[0] == results[1] + + +-class TestColorizedStreamHandler(object): ++class TestColorizedStreamHandler: + + def _make_log_record(self): + attrs = { +diff --git a/tests/unit/test_models.py b/tests/unit/test_models.py +index f636336775..8e2975bd7e 100644 +--- a/tests/unit/test_models.py ++++ b/tests/unit/test_models.py +@@ -6,7 +6,7 @@ + from pip._internal.models import candidate, index + + +-class TestPackageIndex(object): ++class TestPackageIndex: + """Tests for pip._internal.models.index.PackageIndex + """ + +@@ -41,7 +41,7 @@ def test_TestPyPI_urls_are_correct(self): + assert pack_index.file_storage_domain == "test-files.pythonhosted.org" + + +-class TestInstallationCandidate(object): ++class TestInstallationCandidate: + + def test_sets_correct_variables(self): + obj = candidate.InstallationCandidate( +diff --git a/tests/unit/test_models_wheel.py b/tests/unit/test_models_wheel.py +index a4f954a2c7..e0d45f5c84 100644 +--- a/tests/unit/test_models_wheel.py ++++ b/tests/unit/test_models_wheel.py +@@ -6,7 +6,7 @@ + from pip._internal.utils import compatibility_tags + + +-class TestWheelFile(object): ++class TestWheelFile: + + 
def test_std_wheel_pattern(self): + w = Wheel('simple-1.1.1-py2-none-any.whl') +diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py +index 8116b627f7..44c739d864 100644 +--- a/tests/unit/test_network_auth.py ++++ b/tests/unit/test_network_auth.py +@@ -71,7 +71,7 @@ def test_get_index_url_credentials(): + assert get("http://example.com/path3/path2") == (None, None) + + +-class KeyringModuleV1(object): ++class KeyringModuleV1: + """Represents the supported API of keyring before get_credential + was added. + """ +@@ -209,10 +209,10 @@ def _send(sent_req, **kwargs): + assert keyring.saved_passwords == [] + + +-class KeyringModuleV2(object): ++class KeyringModuleV2: + """Represents the current supported API of keyring""" + +- class Credential(object): ++ class Credential: + def __init__(self, username, password): + self.username = username + self.password = password +@@ -244,7 +244,7 @@ def test_keyring_get_credential(monkeypatch, url, expect): + ) == expect + + +-class KeyringModuleBroken(object): ++class KeyringModuleBroken: + """Represents the current supported API of keyring, but broken""" + + def __init__(self): +diff --git a/tests/unit/test_network_session.py b/tests/unit/test_network_session.py +index a0d1463b2c..e9b575a96b 100644 +--- a/tests/unit/test_network_session.py ++++ b/tests/unit/test_network_session.py +@@ -199,7 +199,7 @@ def test_iter_secure_origins__trusted_hosts_empty(self): + ], + ) + def test_is_secure_origin(self, caplog, location, trusted, expected): +- class MockLogger(object): ++ class MockLogger: + def __init__(self): + self.called = False + +diff --git a/tests/unit/test_operations_prepare.py b/tests/unit/test_operations_prepare.py +index af3ce72a1e..9bdecc8e0f 100644 +--- a/tests/unit/test_operations_prepare.py ++++ b/tests/unit/test_operations_prepare.py +@@ -161,7 +161,7 @@ def test_copy_source_tree_with_unreadable_dir_fails(clean_project, tmpdir): + assert expected_files == copied_files + + +-class Test_unpack_url(object): ++class Test_unpack_url: + + def prep(self, tmpdir, data): + self.build_dir = tmpdir.joinpath('build') +diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py +index 533a4b8db3..b02658af10 100644 +--- a/tests/unit/test_options.py ++++ b/tests/unit/test_options.py +@@ -160,7 +160,7 @@ def test_cache_dir__PIP_NO_CACHE_DIR_invalid__with_no_cache_dir( + main(['--no-cache-dir', 'fake']) + + +-class TestUsePEP517Options(object): ++class TestUsePEP517Options: + + """ + Test options related to using --use-pep517. 
+@@ -431,7 +431,7 @@ def test_client_cert(self): + assert options1.client_cert == options2.client_cert == 'path' + + +-class TestOptionsConfigFiles(object): ++class TestOptionsConfigFiles: + + def test_venv_config_file_found(self, monkeypatch): + # strict limit on the global config files list +diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py +index e168a3cc16..0c0b1ce4a8 100644 +--- a/tests/unit/test_req.py ++++ b/tests/unit/test_req.py +@@ -58,7 +58,7 @@ def get_processed_req_from_line(line, fname='file', lineno=1): + return req + + +-class TestRequirementSet(object): ++class TestRequirementSet: + """RequirementSet tests""" + + def setup(self): +@@ -317,7 +317,7 @@ def test_hashed_deps_on_require_hashes(self): + )) + + +-class TestInstallRequirement(object): ++class TestInstallRequirement: + def setup(self): + self.tempdir = tempfile.mkdtemp() + +diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py +index 4812637ee5..0f188d7b0a 100644 +--- a/tests/unit/test_req_file.py ++++ b/tests/unit/test_req_file.py +@@ -67,7 +67,7 @@ def parse_reqfile( + ) + + +-class TestPreprocess(object): ++class TestPreprocess: + """tests for `preprocess`""" + + def test_comments_and_joins_case1(self): +@@ -97,7 +97,7 @@ def test_comments_and_joins_case3(self): + assert list(result) == [(1, 'req1'), (3, 'req2')] + + +-class TestIgnoreComments(object): ++class TestIgnoreComments: + """tests for `ignore_comment`""" + + def test_ignore_line(self): +@@ -116,7 +116,7 @@ def test_strip_comment(self): + assert list(result) == [(1, 'req1'), (2, 'req'), (3, 'req2')] + + +-class TestJoinLines(object): ++class TestJoinLines: + """tests for `join_lines`""" + + def test_join_lines(self): +@@ -183,7 +183,7 @@ def process_line( + return process_line + + +-class TestProcessLine(object): ++class TestProcessLine: + """tests for `process_line`""" + + def test_parser_error(self, line_processor): +@@ -513,7 +513,7 @@ def get_file_content(filename, *args, **kwargs): + assert not result[0].constraint + + +-class TestBreakOptionsArgs(object): ++class TestBreakOptionsArgs: + + def test_no_args(self): + assert ('', '--option') == break_args_options('--option') +@@ -530,7 +530,7 @@ def test_args_long_options(self): + assert ('arg arg', '--long') == result + + +-class TestOptionVariants(object): ++class TestOptionVariants: + + # this suite is really just testing optparse, but added it anyway + +@@ -555,7 +555,7 @@ def test_variant5(self, line_processor, finder): + assert finder.index_urls == ['url'] + + +-class TestParseRequirements(object): ++class TestParseRequirements: + """tests for `parse_reqfile`""" + + @pytest.mark.network +diff --git a/tests/unit/test_req_install.py b/tests/unit/test_req_install.py +index d0d8003529..d8eee8d13d 100644 +--- a/tests/unit/test_req_install.py ++++ b/tests/unit/test_req_install.py +@@ -12,7 +12,7 @@ + from pip._internal.req.req_install import InstallRequirement + + +-class TestInstallRequirementBuildDirectory(object): ++class TestInstallRequirementBuildDirectory: + # no need to test symlinks on Windows + @pytest.mark.skipif("sys.platform == 'win32'") + def test_tmp_build_directory(self): +@@ -51,7 +51,7 @@ def test_forward_slash_results_in_a_link(self, tmpdir): + assert requirement.link is not None + + +-class TestInstallRequirementFrom(object): ++class TestInstallRequirementFrom: + + def test_install_req_from_string_invalid_requirement(self): + """ +diff --git a/tests/unit/test_req_uninstall.py b/tests/unit/test_req_uninstall.py +index d4d707e604..90bf0d50fb 100644 +--- 
a/tests/unit/test_req_uninstall.py ++++ b/tests/unit/test_req_uninstall.py +@@ -24,7 +24,7 @@ def mock_is_local(path): + + + def test_uninstallation_paths(): +- class dist(object): ++ class dist: + def get_metadata_lines(self, record): + return ['file.py,,', + 'file.pyc,,', +@@ -116,7 +116,7 @@ def in_tmpdir(paths): + assert sorted(expected_rename) == sorted(compact(will_rename)) + + +-class TestUninstallPathSet(object): ++class TestUninstallPathSet: + def test_add(self, tmpdir, monkeypatch): + monkeypatch.setattr(pip._internal.req.req_uninstall, 'is_local', + mock_is_local) +@@ -215,7 +215,7 @@ def test_detect_symlink_dirs(self, monkeypatch, tmpdir): + assert ups.paths == {path1} + + +-class TestStashedUninstallPathSet(object): ++class TestStashedUninstallPathSet: + WALK_RESULT = [ + ("A", ["B", "C"], ["a.py"]), + ("A/B", ["D"], ["b.py"]), +diff --git a/tests/unit/test_resolution_legacy_resolver.py b/tests/unit/test_resolution_legacy_resolver.py +index c4ff649291..f56ecd96e7 100644 +--- a/tests/unit/test_resolution_legacy_resolver.py ++++ b/tests/unit/test_resolution_legacy_resolver.py +@@ -52,7 +52,7 @@ def make_fake_dist(requires_python=None, metadata_name=None): + return FakeDist(metadata, metadata_name=metadata_name) + + +-class TestCheckDistRequiresPython(object): ++class TestCheckDistRequiresPython: + + """ + Test _check_dist_requires_python(). +@@ -173,7 +173,7 @@ def test_empty_metadata_error(self, caplog, metadata_name): + ) + + +-class TestYankedWarning(object): ++class TestYankedWarning: + """ + Test _populate_link() emits warning if one or more candidates are yanked. + """ +diff --git a/tests/unit/test_self_check_outdated.py b/tests/unit/test_self_check_outdated.py +index c5e60d92fc..42c4c45272 100644 +--- a/tests/unit/test_self_check_outdated.py ++++ b/tests/unit/test_self_check_outdated.py +@@ -17,12 +17,12 @@ + from tests.lib.path import Path + + +-class MockBestCandidateResult(object): ++class MockBestCandidateResult: + def __init__(self, best): + self.best_candidate = best + + +-class MockPackageFinder(object): ++class MockPackageFinder: + + BASE_URL = 'https://pypi.org/simple/pip-{0}.tar.gz' + PIP_PROJECT_NAME = 'pip' +@@ -43,7 +43,7 @@ def find_best_candidate(self, project_name): + return MockBestCandidateResult(self.INSTALLATION_CANDIDATES[0]) + + +-class MockDistribution(object): ++class MockDistribution: + def __init__(self, installer): + self.installer = installer + +diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py +index 57434669e5..41501d38b0 100644 +--- a/tests/unit/test_utils.py ++++ b/tests/unit/test_utils.py +@@ -188,7 +188,7 @@ def test_noegglink_in_sitepkgs_venv_global(self): + @patch('pip._internal.utils.misc.dist_in_usersite') + @patch('pip._internal.utils.misc.dist_is_local') + @patch('pip._internal.utils.misc.dist_is_editable') +-class TestsGetDistributions(object): ++class TestsGetDistributions: + """Test get_installed_distributions() and get_distribution(). + """ + class MockWorkingSet(list): +@@ -443,7 +443,7 @@ def test_path_to_display(monkeypatch, path, fs_encoding, expected): + assert actual == expected, 'actual: {!r}'.format(actual) + + +-class Test_normalize_path(object): ++class Test_normalize_path: + # Technically, symlinks are possible on Windows, but you need a special + # permission bit to create them, and Python 2 doesn't support it anyway, so + # it's easiest just to skip this test on Windows altogether. 
+@@ -480,7 +480,7 @@ def test_resolve_symlinks(self, tmpdir): + os.chdir(orig_working_dir) + + +-class TestHashes(object): ++class TestHashes: + """Tests for pip._internal.utils.hashes""" + + @pytest.mark.parametrize('hash_name, hex_digest, expected', [ +@@ -550,7 +550,7 @@ def test_hash(self): + assert cache[Hashes({'sha256': ['ab', 'cd']})] == 42 + + +-class TestEncoding(object): ++class TestEncoding: + """Tests for pip._internal.utils.encoding""" + + def test_auto_decode_utf_16_le(self): +@@ -596,7 +596,7 @@ def raises(error): + raise error + + +-class TestGlibc(object): ++class TestGlibc: + @pytest.mark.skipif("sys.platform == 'win32'") + def test_glibc_version_string(self, monkeypatch): + monkeypatch.setattr( +@@ -641,7 +641,7 @@ def test_normalize_version_info(version_info, expected): + assert actual == expected + + +-class TestGetProg(object): ++class TestGetProg: + + @pytest.mark.parametrize( + ("argv", "executable", "expected"), +diff --git a/tests/unit/test_utils_compatibility_tags.py b/tests/unit/test_utils_compatibility_tags.py +index 64f59a2f98..735f024c12 100644 +--- a/tests/unit/test_utils_compatibility_tags.py ++++ b/tests/unit/test_utils_compatibility_tags.py +@@ -21,7 +21,7 @@ def test_version_info_to_nodot(version_info, expected): + assert actual == expected + + +-class Testcompatibility_tags(object): ++class Testcompatibility_tags: + + def mock_get_config_var(self, **kwd): + """ +@@ -52,7 +52,7 @@ def test_no_hyphen_tag(self): + assert '-' not in tag.platform + + +-class TestManylinux2010Tags(object): ++class TestManylinux2010Tags: + + @pytest.mark.parametrize("manylinux2010,manylinux1", [ + ("manylinux2010_x86_64", "manylinux1_x86_64"), +@@ -75,7 +75,7 @@ def test_manylinux2010_implies_manylinux1(self, manylinux2010, manylinux1): + assert arches[:2] == [manylinux2010, manylinux1] + + +-class TestManylinux2014Tags(object): ++class TestManylinux2014Tags: + + @pytest.mark.parametrize("manylinuxA,manylinuxB", [ + ("manylinux2014_x86_64", ["manylinux2010_x86_64", +diff --git a/tests/unit/test_utils_subprocess.py b/tests/unit/test_utils_subprocess.py +index fd73878c1a..8a67d7d8da 100644 +--- a/tests/unit/test_utils_subprocess.py ++++ b/tests/unit/test_utils_subprocess.py +@@ -166,7 +166,7 @@ def finish(self, final_status): + self.final_status = final_status + + +-class TestCallSubprocess(object): ++class TestCallSubprocess: + + """ + Test call_subprocess(). 
+diff --git a/tests/unit/test_utils_unpacking.py b/tests/unit/test_utils_unpacking.py +index 5c2be24d42..94121acff1 100644 +--- a/tests/unit/test_utils_unpacking.py ++++ b/tests/unit/test_utils_unpacking.py +@@ -13,7 +13,7 @@ + from pip._internal.utils.unpacking import is_within_directory, untar_file, unzip_file + + +-class TestUnpackArchives(object): ++class TestUnpackArchives: + """ + test_tar.tgz/test_tar.zip have content as follows engineered to confirm 3 + things: +diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py +index 0f7a3c0747..52a5fe0436 100644 +--- a/tests/unit/test_wheel.py ++++ b/tests/unit/test_wheel.py +@@ -223,7 +223,7 @@ def test_wheel_root_is_purelib(text, expected): + assert wheel.wheel_root_is_purelib(message_from_string(text)) == expected + + +-class TestWheelFile(object): ++class TestWheelFile: + + def test_unpack_wheel_no_flatten(self, tmpdir): + filepath = os.path.join(DATA_DIR, 'packages', +@@ -232,7 +232,7 @@ def test_unpack_wheel_no_flatten(self, tmpdir): + assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info')) + + +-class TestInstallUnpackedWheel(object): ++class TestInstallUnpackedWheel: + """ + Tests for moving files from wheel src to scheme paths + """ +@@ -487,7 +487,7 @@ def test_invalid_entrypoints_fail( + assert entrypoint in exc_text + + +-class TestMessageAboutScriptsNotOnPATH(object): ++class TestMessageAboutScriptsNotOnPATH: + + tilde_warning_msg = ( + "NOTE: The current PATH contains path(s) starting with `~`, " +@@ -644,7 +644,7 @@ def test_multi_script_all_tilde_not_at_start__multi_dir_not_on_PATH(self): + assert self.tilde_warning_msg not in retval + + +-class TestWheelHashCalculators(object): ++class TestWheelHashCalculators: + + def prep(self, tmpdir): + self.test_file = tmpdir.joinpath("hash.file") -- Gitee From 4e4adb8767a2b9598624addeccc286896c85e7ac Mon Sep 17 00:00:00 2001 From: openeuler-ci-bot <80474298@qq.com> Date: Sat, 26 Dec 2020 18:59:02 +0800 Subject: [PATCH 04/10] [patch tracking] 20201226185849754648 - https://github.com/pypa/pip/commit/7407bc1e8a7e3482241f0038acbfa3b76e4a0d83 --- ...bc1e8a7e3482241f0038acbfa3b76e4a0d83.patch | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 7407bc1e8a7e3482241f0038acbfa3b76e4a0d83.patch diff --git a/7407bc1e8a7e3482241f0038acbfa3b76e4a0d83.patch b/7407bc1e8a7e3482241f0038acbfa3b76e4a0d83.patch new file mode 100644 index 0000000..9848385 --- /dev/null +++ b/7407bc1e8a7e3482241f0038acbfa3b76e4a0d83.patch @@ -0,0 +1,33 @@ +diff --git a/docs/docs_feedback_sphinxext.py b/docs/docs_feedback_sphinxext.py +index 15da417776..a8ab94e5cb 100644 +--- a/docs/docs_feedback_sphinxext.py ++++ b/docs/docs_feedback_sphinxext.py +@@ -141,7 +141,7 @@ def setup(app: Sphinx) -> Dict[str, Union[bool, str]]: + ) + app.add_config_value( + 'docs_feedback_email', +- default='Docs UX Team ', ++ default='Docs UX Team ', + rebuild=rebuild_trigger, + ) + app.add_config_value( +diff --git a/docs/html/conf.py b/docs/html/conf.py +index 9e65cbe7a1..fd124ebb69 100644 +--- a/docs/html/conf.py ++++ b/docs/html/conf.py +@@ -305,7 +305,7 @@ def to_document_name(path, base_dir): + # NOTE: 'important', 'note', 'tip', 'warning' or 'admonition'. 
+ docs_feedback_admonition_type = 'important' + docs_feedback_big_doc_lines = 50 # bigger docs will have a banner on top +-docs_feedback_email = 'Docs UX Team ' ++docs_feedback_email = 'Docs UX Team ' + docs_feedback_excluded_documents = { # these won't have any banners + 'news', 'reference/index', + } +diff --git a/news/9343.doc.rst b/news/9343.doc.rst +new file mode 100644 +index 0000000000..1e4f91aec4 +--- /dev/null ++++ b/news/9343.doc.rst +@@ -0,0 +1 @@ ++Fix broken email link in docs feedback banners. -- Gitee From d9a133efab7b3cf72d59ebc71e1e85ee0cf42ef5 Mon Sep 17 00:00:00 2001 From: openeuler-ci-bot <80474298@qq.com> Date: Sat, 26 Dec 2020 18:59:02 +0800 Subject: [PATCH 05/10] [patch tracking] 20201226185849754648 - https://github.com/pypa/pip/commit/f2d2d10e4f76fca69efe593e7aecd2717b3de06c --- f2d2d10e4f76fca69efe593e7aecd2717b3de06c.patch | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 f2d2d10e4f76fca69efe593e7aecd2717b3de06c.patch diff --git a/f2d2d10e4f76fca69efe593e7aecd2717b3de06c.patch b/f2d2d10e4f76fca69efe593e7aecd2717b3de06c.patch new file mode 100644 index 0000000..358aee2 --- /dev/null +++ b/f2d2d10e4f76fca69efe593e7aecd2717b3de06c.patch @@ -0,0 +1,13 @@ +diff --git a/setup.cfg b/setup.cfg +index c28a167bd5..d2bf9fb69d 100644 +--- a/setup.cfg ++++ b/setup.cfg +@@ -102,8 +102,5 @@ exclude_lines = + # Can be set to exclude e.g. `if PY2:` on Python 3 + ${PIP_CI_COVERAGE_EXCLUDES} + +-[bdist_wheel] +-universal = 1 +- + [metadata] + license_file = LICENSE.txt -- Gitee From a429f5897b3e6ce0d7f03f3a1f6b60174e0074a5 Mon Sep 17 00:00:00 2001 From: openeuler-ci-bot <80474298@qq.com> Date: Sat, 26 Dec 2020 18:59:03 +0800 Subject: [PATCH 06/10] [patch tracking] 20201226185849754648 - https://github.com/pypa/pip/commit/fecfa11f5a859aac362cd83978d5a09a5ee4cd11 --- ...a11f5a859aac362cd83978d5a09a5ee4cd11.patch | 174 ++++++++++++++++++ 1 file changed, 174 insertions(+) create mode 100644 fecfa11f5a859aac362cd83978d5a09a5ee4cd11.patch diff --git a/fecfa11f5a859aac362cd83978d5a09a5ee4cd11.patch b/fecfa11f5a859aac362cd83978d5a09a5ee4cd11.patch new file mode 100644 index 0000000..04577f5 --- /dev/null +++ b/fecfa11f5a859aac362cd83978d5a09a5ee4cd11.patch @@ -0,0 +1,174 @@ +diff --git a/news/738a71b0-98f9-4e1f-a541-af95fb990af9.trivial.rst b/news/738a71b0-98f9-4e1f-a541-af95fb990af9.trivial.rst +new file mode 100644 +index 0000000000..e69de29bb2 +diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py +index 74a6cd3d9c..e9eec32c91 100644 +--- a/src/pip/_internal/operations/freeze.py ++++ b/src/pip/_internal/operations/freeze.py +@@ -225,8 +225,7 @@ def get_requirement_info(dist): + "falling back to uneditable format", exc + ) + else: +- if req is not None: +- return (req, True, []) ++ return (req, True, []) + + logger.warning( + 'Could not determine repository location of %s', location +diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py +index 0ead887040..ee78b5d27c 100644 +--- a/src/pip/_internal/vcs/bazaar.py ++++ b/src/pip/_internal/vcs/bazaar.py +@@ -8,7 +8,7 @@ + from pip._internal.utils.subprocess import make_command + from pip._internal.utils.typing import MYPY_CHECK_RUNNING + from pip._internal.utils.urls import path_to_url +-from pip._internal.vcs.versioncontrol import VersionControl, vcs ++from pip._internal.vcs.versioncontrol import RemoteNotFoundError, VersionControl, vcs + + if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple +@@ -81,6 +81,7 @@ def get_url_rev_and_auth(cls, url): + + 
@classmethod + def get_remote_url(cls, location): ++ # type: (str) -> str + urls = cls.run_command(['info'], cwd=location) + for line in urls.splitlines(): + line = line.strip() +@@ -91,7 +92,7 @@ def get_remote_url(cls, location): + if cls._is_local_repository(repo): + return path_to_url(repo) + return repo +- return None ++ raise RemoteNotFoundError + + @classmethod + def get_revision(cls, location): +diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py +index 46f15fc8b4..688f132a49 100644 +--- a/src/pip/_internal/vcs/git.py ++++ b/src/pip/_internal/vcs/git.py +@@ -303,6 +303,7 @@ def update(self, dest, url, rev_options): + + @classmethod + def get_remote_url(cls, location): ++ # type: (str) -> str + """ + Return URL of the first remote encountered. + +diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py +index 1c84266742..e7988d1ac2 100644 +--- a/src/pip/_internal/vcs/mercurial.py ++++ b/src/pip/_internal/vcs/mercurial.py +@@ -87,6 +87,7 @@ def update(self, dest, url, rev_options): + + @classmethod + def get_remote_url(cls, location): ++ # type: (str) -> str + url = cls.run_command( + ['showconfig', 'paths.default'], + cwd=location).strip() +diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py +index 3bb7ea0f85..85ce2aa916 100644 +--- a/src/pip/_internal/vcs/subversion.py ++++ b/src/pip/_internal/vcs/subversion.py +@@ -14,7 +14,7 @@ + ) + from pip._internal.utils.subprocess import make_command + from pip._internal.utils.typing import MYPY_CHECK_RUNNING +-from pip._internal.vcs.versioncontrol import VersionControl, vcs ++from pip._internal.vcs.versioncontrol import RemoteNotFoundError, VersionControl, vcs + + _svn_xml_url_re = re.compile('url="([^"]+)"') + _svn_rev_re = re.compile(r'committed-rev="(\d+)"') +@@ -110,6 +110,7 @@ def make_rev_args(username, password): + + @classmethod + def get_remote_url(cls, location): ++ # type: (str) -> str + # In cases where the source is in a subdirectory, not alongside + # setup.py we have to look up in the location until we find a real + # setup.py +@@ -125,7 +126,7 @@ def get_remote_url(cls, location): + "parent directories)", + orig_location, + ) +- return None ++ raise RemoteNotFoundError + + return cls._get_svn_url_rev(location)[0] + +diff --git a/src/pip/_internal/vcs/versioncontrol.py b/src/pip/_internal/vcs/versioncontrol.py +index d97a41918b..5cfd5d6fbb 100644 +--- a/src/pip/_internal/vcs/versioncontrol.py ++++ b/src/pip/_internal/vcs/versioncontrol.py +@@ -409,7 +409,7 @@ def get_requirement_revision(cls, repo_dir): + + @classmethod + def get_src_requirement(cls, repo_dir, project_name): +- # type: (str, str) -> Optional[str] ++ # type: (str, str) -> str + """ + Return the requirement string to use to redownload the files + currently at the given repository directory. 
+@@ -422,8 +422,6 @@ def get_src_requirement(cls, repo_dir, project_name): + {repository_url}@{revision}#egg={project_name} + """ + repo_url = cls.get_remote_url(repo_dir) +- if repo_url is None: +- return None + + if cls.should_add_vcs_url_prefix(repo_url): + repo_url = '{}+{}'.format(cls.name, repo_url) +diff --git a/tests/functional/test_vcs_bazaar.py b/tests/functional/test_vcs_bazaar.py +index d928da8b36..ad24d73d5b 100644 +--- a/tests/functional/test_vcs_bazaar.py ++++ b/tests/functional/test_vcs_bazaar.py +@@ -8,6 +8,7 @@ + + from pip._internal.utils.misc import hide_url + from pip._internal.vcs.bazaar import Bazaar ++from pip._internal.vcs.versioncontrol import RemoteNotFoundError + from tests.lib import ( + _test_path_to_file_url, + _vcs_add, +@@ -65,3 +66,15 @@ def test_export_rev(script, tmpdir): + + with open(export_dir / 'test_file', 'r') as f: + assert f.read() == 'something initial' ++ ++ ++@need_bzr ++def test_get_remote_url__no_remote(script, tmpdir): ++ repo_dir = tmpdir / 'temp-repo' ++ repo_dir.mkdir() ++ repo_dir = str(repo_dir) ++ ++ script.run('bzr', 'init', repo_dir) ++ ++ with pytest.raises(RemoteNotFoundError): ++ Bazaar().get_remote_url(repo_dir) +diff --git a/tests/functional/test_vcs_subversion.py b/tests/functional/test_vcs_subversion.py +new file mode 100644 +index 0000000000..c71c793f89 +--- /dev/null ++++ b/tests/functional/test_vcs_subversion.py +@@ -0,0 +1,17 @@ ++import pytest ++ ++from pip._internal.vcs.subversion import Subversion ++from pip._internal.vcs.versioncontrol import RemoteNotFoundError ++from tests.lib import _create_svn_repo, need_svn ++ ++ ++@need_svn ++def test_get_remote_url__no_remote(script, tmpdir): ++ repo_dir = tmpdir / 'temp-repo' ++ repo_dir.mkdir() ++ repo_dir = str(repo_dir) ++ ++ _create_svn_repo(script, repo_dir) ++ ++ with pytest.raises(RemoteNotFoundError): ++ Subversion().get_remote_url(repo_dir) -- Gitee From d3ce07697378373df3ad33fc939b1bfbc1e2c807 Mon Sep 17 00:00:00 2001 From: openeuler-ci-bot <80474298@qq.com> Date: Sat, 26 Dec 2020 18:59:03 +0800 Subject: [PATCH 07/10] [patch tracking] 20201226185849754648 - https://github.com/pypa/pip/commit/86afa8904382f8939612750315ad2e5389d31ec5 --- ...a8904382f8939612750315ad2e5389d31ec5.patch | 2437 +++++++++++++++++ 1 file changed, 2437 insertions(+) create mode 100644 86afa8904382f8939612750315ad2e5389d31ec5.patch diff --git a/86afa8904382f8939612750315ad2e5389d31ec5.patch b/86afa8904382f8939612750315ad2e5389d31ec5.patch new file mode 100644 index 0000000..b0c499b --- /dev/null +++ b/86afa8904382f8939612750315ad2e5389d31ec5.patch @@ -0,0 +1,2437 @@ +diff --git a/docs/pip_sphinxext.py b/docs/pip_sphinxext.py +index 2486d5c33b..df4390d810 100644 +--- a/docs/pip_sphinxext.py ++++ b/docs/pip_sphinxext.py +@@ -25,7 +25,7 @@ def run(self): + cmd_prefix = cmd_prefix.strip('"') + cmd_prefix = cmd_prefix.strip("'") + usage = dedent( +- cmd.usage.replace('%prog', '{} {}'.format(cmd_prefix, cmd.name)) ++ cmd.usage.replace('%prog', f'{cmd_prefix} {cmd.name}') + ).strip() + node = nodes.literal_block(usage, usage) + return [node] +@@ -63,7 +63,7 @@ def _format_option(self, option, cmd_name=None): + line += option._long_opts[0] + if option.takes_value(): + metavar = option.metavar or option.dest.lower() +- line += " <{}>".format(metavar.lower()) ++ line += f" <{metavar.lower()}>" + # fix defaults + opt_help = option.help.replace('%default', str(option.default)) + # fix paths with sys.prefix +@@ -123,7 +123,7 @@ def determine_opt_prefix(self, opt_name): + if 
cmd.cmd_opts.has_option(opt_name): + return command + +- raise KeyError('Could not identify prefix of opt {}'.format(opt_name)) ++ raise KeyError(f'Could not identify prefix of opt {opt_name}') + + def process_options(self): + for option in SUPPORTED_OPTIONS: +diff --git a/news/ea24fc60-675c-4104-9825-39d1ee0a20b7.trivial.rst b/news/ea24fc60-675c-4104-9825-39d1ee0a20b7.trivial.rst +new file mode 100644 +index 0000000000..e69de29bb2 +diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py +index 6a3f383826..a94a7a47ee 100644 +--- a/src/pip/_internal/cli/base_command.py ++++ b/src/pip/_internal/cli/base_command.py +@@ -55,7 +55,7 @@ def __init__(self, name, summary, isolated=False): + super().__init__() + parser_kw = { + 'usage': self.usage, +- 'prog': '{} {}'.format(get_prog(), name), ++ 'prog': f'{get_prog()} {name}', + 'formatter': UpdatingDefaultsHelpFormatter(), + 'add_help_option': False, + 'name': name, +@@ -70,7 +70,7 @@ def __init__(self, name, summary, isolated=False): + self.tempdir_registry = None # type: Optional[TempDirRegistry] + + # Commands should add options to this option group +- optgroup_name = '{} Options'.format(self.name.capitalize()) ++ optgroup_name = f'{self.name.capitalize()} Options' + self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) + + # Add the general options +diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py +index 8f427d6b58..e16f42de61 100644 +--- a/src/pip/_internal/cli/cmdoptions.py ++++ b/src/pip/_internal/cli/cmdoptions.py +@@ -46,7 +46,7 @@ def raise_option_error(parser, option, msg): + option: an Option instance. + msg: the error text. + """ +- msg = '{} error: {}'.format(option, msg) ++ msg = f'{option} error: {msg}' + msg = textwrap.fill(' '.join(msg.split())) + parser.error(msg) + +diff --git a/src/pip/_internal/cli/main.py b/src/pip/_internal/cli/main.py +index 9a5fbb1f1e..ed59073072 100644 +--- a/src/pip/_internal/cli/main.py ++++ b/src/pip/_internal/cli/main.py +@@ -57,7 +57,7 @@ def main(args=None): + try: + cmd_name, cmd_args = parse_command(args) + except PipError as exc: +- sys.stderr.write("ERROR: {}".format(exc)) ++ sys.stderr.write(f"ERROR: {exc}") + sys.stderr.write(os.linesep) + sys.exit(1) + +diff --git a/src/pip/_internal/cli/main_parser.py b/src/pip/_internal/cli/main_parser.py +index 6d69e82f0c..fcee6a2c23 100644 +--- a/src/pip/_internal/cli/main_parser.py ++++ b/src/pip/_internal/cli/main_parser.py +@@ -83,9 +83,9 @@ def parse_command(args): + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + +- msg = ['unknown command "{}"'.format(cmd_name)] ++ msg = [f'unknown command "{cmd_name}"'] + if guess: +- msg.append('maybe you meant "{}"'.format(guess)) ++ msg.append(f'maybe you meant "{guess}"') + + raise CommandError(' - '.join(msg)) + +diff --git a/src/pip/_internal/cli/parser.py b/src/pip/_internal/cli/parser.py +index ba647f3a16..3bc86d9a38 100644 +--- a/src/pip/_internal/cli/parser.py ++++ b/src/pip/_internal/cli/parser.py +@@ -82,7 +82,7 @@ def format_description(self, description): + description = description.rstrip() + # dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), " ") +- description = '{}:\n{}\n'.format(label, description) ++ description = f'{label}:\n{description}\n' + return description + else: + return '' +@@ -168,7 +168,7 @@ def check_default(self, option, key, val): + try: + return option.check_value(key, val) + except optparse.OptionValueError as exc: +- print("An 
error occurred during configuration: {}".format(exc)) ++ print(f"An error occurred during configuration: {exc}") + sys.exit(3) + + def _get_ordered_configuration_items(self): +@@ -279,4 +279,4 @@ def get_default_values(self): + + def error(self, msg): + self.print_usage(sys.stderr) +- self.exit(UNKNOWN_ERROR, "{}\n".format(msg)) ++ self.exit(UNKNOWN_ERROR, f"{msg}\n") +diff --git a/src/pip/_internal/cli/progress_bars.py b/src/pip/_internal/cli/progress_bars.py +index 2c856a51fa..59b01a6d0f 100644 +--- a/src/pip/_internal/cli/progress_bars.py ++++ b/src/pip/_internal/cli/progress_bars.py +@@ -152,7 +152,7 @@ def download_speed(self): + def pretty_eta(self): + # type: () -> str + if self.eta: # type: ignore +- return "eta {}".format(self.eta_td) # type: ignore ++ return f"eta {self.eta_td}" # type: ignore + return "" + + def iter(self, it): # type: ignore +diff --git a/src/pip/_internal/commands/cache.py b/src/pip/_internal/commands/cache.py +index 80e668faea..d5ac45ad73 100644 +--- a/src/pip/_internal/commands/cache.py ++++ b/src/pip/_internal/commands/cache.py +@@ -154,7 +154,7 @@ def format_for_human(self, files): + for filename in files: + wheel = os.path.basename(filename) + size = filesystem.format_file_size(filename) +- results.append(' - {} ({})'.format(wheel, size)) ++ results.append(f' - {wheel} ({size})') + logger.info('Cache contents:\n') + logger.info('\n'.join(sorted(results))) + +diff --git a/src/pip/_internal/commands/configuration.py b/src/pip/_internal/commands/configuration.py +index 1ab90b47b4..a440a2b177 100644 +--- a/src/pip/_internal/commands/configuration.py ++++ b/src/pip/_internal/commands/configuration.py +@@ -221,7 +221,7 @@ def print_env_var_values(self): + write_output("%s:", 'env_var') + with indent_log(): + for key, value in sorted(self.configuration.get_environ_vars()): +- env_var = 'PIP_{}'.format(key.upper()) ++ env_var = f'PIP_{key.upper()}' + write_output("%s=%r", env_var, value) + + def open_in_editor(self, options, args): +diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py +index 747a1c1758..61df18e20c 100644 +--- a/src/pip/_internal/commands/debug.py ++++ b/src/pip/_internal/commands/debug.py +@@ -66,7 +66,7 @@ def get_module_from_module_name(module_name): + module_name = 'pkg_resources' + + __import__( +- 'pip._vendor.{}'.format(module_name), ++ f'pip._vendor.{module_name}', + globals(), + locals(), + level=0 +@@ -126,7 +126,7 @@ def show_tags(options): + formatted_target = target_python.format_given() + suffix = '' + if formatted_target: +- suffix = ' (target: {})'.format(formatted_target) ++ suffix = f' (target: {formatted_target})' + + msg = 'Compatible tags: {}{}'.format(len(tags), suffix) + logger.info(msg) +diff --git a/src/pip/_internal/commands/help.py b/src/pip/_internal/commands/help.py +index a6c25478e0..8372ac615d 100644 +--- a/src/pip/_internal/commands/help.py ++++ b/src/pip/_internal/commands/help.py +@@ -32,9 +32,9 @@ def run(self, options, args): + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + +- msg = ['unknown command "{}"'.format(cmd_name)] ++ msg = [f'unknown command "{cmd_name}"'] + if guess: +- msg.append('maybe you meant "{}"'.format(guess)) ++ msg.append(f'maybe you meant "{guess}"') + + raise CommandError(' - '.join(msg)) + +diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py +index 5ca07c8ee8..559c198c77 100644 +--- a/src/pip/_internal/configuration.py ++++ b/src/pip/_internal/configuration.py +@@ -164,7 +164,7 @@ def 
get_value(self, key): + try: + return self._dictionary[key] + except KeyError: +- raise ConfigurationError("No such key - {}".format(key)) ++ raise ConfigurationError(f"No such key - {key}") + + def set_value(self, key, value): + # type: (str, Any) -> None +@@ -193,7 +193,7 @@ def unset_value(self, key): + + assert self.load_only + if key not in self._config[self.load_only]: +- raise ConfigurationError("No such key - {}".format(key)) ++ raise ConfigurationError(f"No such key - {key}") + + fname, parser = self._get_parser_to_modify() + +@@ -403,4 +403,4 @@ def _mark_as_modified(self, fname, parser): + + def __repr__(self): + # type: () -> str +- return "{}({!r})".format(self.__class__.__name__, self._dictionary) ++ return f"{self.__class__.__name__}({self._dictionary!r})" +diff --git a/src/pip/_internal/distributions/sdist.py b/src/pip/_internal/distributions/sdist.py +index 06b9df09cb..9b708fdd83 100644 +--- a/src/pip/_internal/distributions/sdist.py ++++ b/src/pip/_internal/distributions/sdist.py +@@ -52,7 +52,7 @@ def _raise_conflicts(conflicting_with, conflicting_reqs): + requirement=self.req, + conflicting_with=conflicting_with, + description=', '.join( +- '{} is incompatible with {}'.format(installed, wanted) ++ f'{installed} is incompatible with {wanted}' + for installed, wanted in sorted(conflicting) + ) + ) +diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py +index 7eef1414c4..b4e7e68ed9 100644 +--- a/src/pip/_internal/exceptions.py ++++ b/src/pip/_internal/exceptions.py +@@ -203,11 +203,11 @@ def body(self): + its link already populated by the resolver's _populate_link(). + + """ +- return ' {}'.format(self._requirement_name()) ++ return f' {self._requirement_name()}' + + def __str__(self): + # type: () -> str +- return '{}\n{}'.format(self.head, self.body()) ++ return f'{self.head}\n{self.body()}' + + def _requirement_name(self): + # type: () -> str +@@ -364,8 +364,8 @@ def __init__(self, reason="could not be loaded", fname=None, error=None): + def __str__(self): + # type: () -> str + if self.fname is not None: +- message_part = " in {}.".format(self.fname) ++ message_part = f" in {self.fname}." 
+ else: + assert self.error is not None +- message_part = ".\n{}\n".format(self.error) +- return "Configuration file {}{}".format(self.reason, message_part) ++ message_part = f".\n{self.error}\n" ++ return f"Configuration file {self.reason}{message_part}" +diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py +index a8c8ec0ce7..53b97c6b55 100644 +--- a/src/pip/_internal/index/collector.py ++++ b/src/pip/_internal/index/collector.py +@@ -447,7 +447,7 @@ def _get_html_page(link, session=None): + reason += str(exc) + _handle_get_page_fail(link, reason, meth=logger.info) + except requests.ConnectionError as exc: +- _handle_get_page_fail(link, "connection error: {}".format(exc)) ++ _handle_get_page_fail(link, f"connection error: {exc}") + except requests.Timeout: + _handle_get_page_fail(link, "timed out") + else: +@@ -656,7 +656,7 @@ def collect_links(self, project_name): + ), + ] + for link in url_locations: +- lines.append('* {}'.format(link)) ++ lines.append(f'* {link}') + logger.debug('\n'.join(lines)) + + return CollectedLinks( +diff --git a/src/pip/_internal/index/package_finder.py b/src/pip/_internal/index/package_finder.py +index 9d13fdda6b..731188926f 100644 +--- a/src/pip/_internal/index/package_finder.py ++++ b/src/pip/_internal/index/package_finder.py +@@ -161,7 +161,7 @@ def evaluate_link(self, link): + version = None + if link.is_yanked and not self._allow_yanked: + reason = link.yanked_reason or '' +- return (False, 'yanked for reason: {}'.format(reason)) ++ return (False, f'yanked for reason: {reason}') + + if link.egg_fragment: + egg_info = link.egg_fragment +@@ -171,7 +171,7 @@ def evaluate_link(self, link): + if not ext: + return (False, 'not a file') + if ext not in SUPPORTED_EXTENSIONS: +- return (False, 'unsupported archive format: {}'.format(ext)) ++ return (False, f'unsupported archive format: {ext}') + if "binary" not in self._formats and ext == WHEEL_EXTENSION: + reason = 'No binaries permitted for {}'.format( + self.project_name) +@@ -204,7 +204,7 @@ def evaluate_link(self, link): + + # This should be up by the self.ok_binary check, but see issue 2700. + if "source" not in self._formats and ext != WHEEL_EXTENSION: +- reason = 'No sources permitted for {}'.format(self.project_name) ++ reason = f'No sources permitted for {self.project_name}' + return (False, reason) + + if not version: +@@ -212,7 +212,7 @@ def evaluate_link(self, link): + egg_info, self._canonical_name, + ) + if not version: +- reason = 'Missing project version for {}'.format(self.project_name) ++ reason = f'Missing project version for {self.project_name}' + return (False, reason) + + match = self._py_version_re.search(version) +@@ -983,7 +983,7 @@ def _find_name_version_sep(fragment, canonical_name): + continue + if canonicalize_name(fragment[:i]) == canonical_name: + return i +- raise ValueError("{} does not match {}".format(fragment, canonical_name)) ++ raise ValueError(f"{fragment} does not match {canonical_name}") + + + def _extract_version_from_fragment(fragment, canonical_name): +diff --git a/src/pip/_internal/locations.py b/src/pip/_internal/locations.py +index 7d549dcef1..dc5d2e0b2d 100644 +--- a/src/pip/_internal/locations.py ++++ b/src/pip/_internal/locations.py +@@ -111,8 +111,8 @@ def distutils_scheme( + # NOTE: setting user or home has the side-effect of creating the home dir + # or user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. 
+- assert not (user and prefix), "user={} prefix={}".format(user, prefix) +- assert not (home and prefix), "home={} prefix={}".format(home, prefix) ++ assert not (user and prefix), f"user={user} prefix={prefix}" ++ assert not (home and prefix), f"home={home} prefix={prefix}" + i.user = user or i.user + if user or home: + i.prefix = "" +@@ -138,7 +138,7 @@ def distutils_scheme( + i.prefix, + 'include', + 'site', +- 'python{}'.format(get_major_minor_version()), ++ f'python{get_major_minor_version()}', + dist_name, + ) + +diff --git a/src/pip/_internal/models/direct_url.py b/src/pip/_internal/models/direct_url.py +index 1d97b38eaa..39112ea4e3 100644 +--- a/src/pip/_internal/models/direct_url.py ++++ b/src/pip/_internal/models/direct_url.py +@@ -46,7 +46,7 @@ def _get_required(d, expected_type, key, default=None): + # type: (Dict[str, Any], Type[T], str, Optional[T]) -> T + value = _get(d, expected_type, key, default) + if value is None: +- raise DirectUrlValidationError("{} must have a value".format(key)) ++ raise DirectUrlValidationError(f"{key} must have a value") + return value + + +diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py +index 6a1920f84b..07f9c565c1 100644 +--- a/src/pip/_internal/models/link.py ++++ b/src/pip/_internal/models/link.py +@@ -83,7 +83,7 @@ def __init__( + def __str__(self): + # type: () -> str + if self.requires_python: +- rp = ' (requires-python:{})'.format(self.requires_python) ++ rp = f' (requires-python:{self.requires_python})' + else: + rp = '' + if self.comes_from: +@@ -94,7 +94,7 @@ def __str__(self): + + def __repr__(self): + # type: () -> str +- return ''.format(self) ++ return f'' + + @property + def url(self): +diff --git a/src/pip/_internal/models/target_python.py b/src/pip/_internal/models/target_python.py +index 2f2a74242d..6e6e8b52ee 100644 +--- a/src/pip/_internal/models/target_python.py ++++ b/src/pip/_internal/models/target_python.py +@@ -86,7 +86,7 @@ def format_given(self): + ('implementation', self.implementation), + ] + return ' '.join( +- '{}={!r}'.format(key, value) for key, value in key_values ++ f'{key}={value!r}' for key, value in key_values + if value is not None + ) + +diff --git a/src/pip/_internal/models/wheel.py b/src/pip/_internal/models/wheel.py +index 49aae14703..5e03f9ff83 100644 +--- a/src/pip/_internal/models/wheel.py ++++ b/src/pip/_internal/models/wheel.py +@@ -30,7 +30,7 @@ def __init__(self, filename): + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename( +- "{} is not a valid wheel filename.".format(filename) ++ f"{filename} is not a valid wheel filename." 
+ ) + self.filename = filename + self.name = wheel_info.group('name').replace('_', '-') +diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py +index 3de21518e9..1a61385406 100644 +--- a/src/pip/_internal/network/auth.py ++++ b/src/pip/_internal/network/auth.py +@@ -199,7 +199,7 @@ def _get_url_and_credentials(self, original_url): + (username is not None and password is not None) or + # Credentials were not found + (username is None and password is None) +- ), "Could not load credentials from url: {}".format(original_url) ++ ), f"Could not load credentials from url: {original_url}" + + return url, username, password + +@@ -223,7 +223,7 @@ def __call__(self, req): + # Factored out to allow for easy patching in tests + def _prompt_for_password(self, netloc): + # type: (str) -> Tuple[Optional[str], Optional[str], bool] +- username = ask_input("User for {}: ".format(netloc)) ++ username = ask_input(f"User for {netloc}: ") + if not username: + return None, None, False + auth = get_keyring_auth(netloc, username) +diff --git a/src/pip/_internal/network/lazy_wheel.py b/src/pip/_internal/network/lazy_wheel.py +index c68a35b9e1..c5176a4bb2 100644 +--- a/src/pip/_internal/network/lazy_wheel.py ++++ b/src/pip/_internal/network/lazy_wheel.py +@@ -190,7 +190,7 @@ def _stream_response(self, start, end, base_headers=HEADERS): + # type: (int, int, Dict[str, str]) -> Response + """Return HTTP response to a range request from start to end.""" + headers = base_headers.copy() +- headers['Range'] = 'bytes={}-{}'.format(start, end) ++ headers['Range'] = f'bytes={start}-{end}' + # TODO: Get range requests to be correctly cached + headers['Cache-Control'] = 'no-cache' + return self._session.get(self._url, headers=headers, stream=True) +diff --git a/src/pip/_internal/network/session.py b/src/pip/_internal/network/session.py +index 6848516876..c4ac889e28 100644 +--- a/src/pip/_internal/network/session.py ++++ b/src/pip/_internal/network/session.py +@@ -318,9 +318,9 @@ def add_trusted_host(self, host, source=None, suppress_logging=False): + string came from. 
+ """ + if not suppress_logging: +- msg = 'adding trusted host: {!r}'.format(host) ++ msg = f'adding trusted host: {host!r}' + if source is not None: +- msg += ' (from {})'.format(source) ++ msg += f' (from {source})' + logger.info(msg) + + host_port = parse_netloc(host) +diff --git a/src/pip/_internal/operations/build/metadata_legacy.py b/src/pip/_internal/operations/build/metadata_legacy.py +index 14762aef3c..d44589666f 100644 +--- a/src/pip/_internal/operations/build/metadata_legacy.py ++++ b/src/pip/_internal/operations/build/metadata_legacy.py +@@ -26,7 +26,7 @@ def _find_egg_info(directory): + + if not filenames: + raise InstallationError( +- "No .egg-info directory found in {}".format(directory) ++ f"No .egg-info directory found in {directory}" + ) + + if len(filenames) > 1: +diff --git a/src/pip/_internal/operations/build/wheel.py b/src/pip/_internal/operations/build/wheel.py +index d16ee0966e..d25f9c42f6 100644 +--- a/src/pip/_internal/operations/build/wheel.py ++++ b/src/pip/_internal/operations/build/wheel.py +@@ -34,7 +34,7 @@ def build_wheel_pep517( + logger.debug('Destination directory: %s', tempd) + + runner = runner_with_spinner_message( +- 'Building wheel for {} (PEP 517)'.format(name) ++ f'Building wheel for {name} (PEP 517)' + ) + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( +diff --git a/src/pip/_internal/operations/build/wheel_legacy.py b/src/pip/_internal/operations/build/wheel_legacy.py +index 73401cd78c..82fa44406e 100644 +--- a/src/pip/_internal/operations/build/wheel_legacy.py ++++ b/src/pip/_internal/operations/build/wheel_legacy.py +@@ -23,7 +23,7 @@ def format_command_result( + # type: (...) -> str + """Format command information for logging.""" + command_desc = format_command_args(command_args) +- text = 'Command arguments: {}\n'.format(command_desc) ++ text = f'Command arguments: {command_desc}\n' + + if not command_output: + text += 'Command output: None' +@@ -32,7 +32,7 @@ def format_command_result( + else: + if not command_output.endswith('\n'): + command_output += '\n' +- text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER) ++ text += f'Command output:\n{command_output}{LOG_DIVIDER}' + + return text + +@@ -87,7 +87,7 @@ def build_wheel_legacy( + destination_dir=tempd, + ) + +- spin_message = 'Building wheel for {} (setup.py)'.format(name) ++ spin_message = f'Building wheel for {name} (setup.py)' + with open_spinner(spin_message) as spinner: + logger.debug('Destination directory: %s', tempd) + +diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py +index e9eec32c91..47f5ab5070 100644 +--- a/src/pip/_internal/operations/freeze.py ++++ b/src/pip/_internal/operations/freeze.py +@@ -56,7 +56,7 @@ def freeze( + find_links = find_links or [] + + for link in find_links: +- yield '-f {}'.format(link) ++ yield f'-f {link}' + installations = {} # type: Dict[str, FrozenRequirement] + + for dist in get_installed_distributions( +@@ -195,7 +195,7 @@ def get_requirement_info(dist): + location, + ) + comments = [ +- '# Editable install with no version control ({})'.format(req) ++ f'# Editable install with no version control ({req})' + ] + return (location, True, comments) + +@@ -269,5 +269,5 @@ def __str__(self): + # type: () -> str + req = self.req + if self.editable: +- req = '-e {}'.format(req) ++ req = f'-e {req}' + return '\n'.join(list(self.comments) + [str(req)]) + '\n' +diff --git a/src/pip/_internal/operations/install/legacy.py b/src/pip/_internal/operations/install/legacy.py 
+index 87227d5fed..63a693a91e 100644 +--- a/src/pip/_internal/operations/install/legacy.py ++++ b/src/pip/_internal/operations/install/legacy.py +@@ -68,7 +68,7 @@ def install( + ) + + runner = runner_with_spinner_message( +- "Running setup.py install for {}".format(req_name) ++ f"Running setup.py install for {req_name}" + ) + with indent_log(), build_env: + runner( +diff --git a/src/pip/_internal/operations/install/wheel.py b/src/pip/_internal/operations/install/wheel.py +index 49e93c510c..b6da06f55a 100644 +--- a/src/pip/_internal/operations/install/wheel.py ++++ b/src/pip/_internal/operations/install/wheel.py +@@ -360,7 +360,7 @@ def get_console_script_specs(console): + ) + + scripts_to_generate.append( +- 'pip{} = {}'.format(get_major_minor_version(), pip_script) ++ f'pip{get_major_minor_version()} = {pip_script}' + ) + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] +diff --git a/src/pip/_internal/req/__init__.py b/src/pip/_internal/req/__init__.py +index 9f9bc50122..352d8923f1 100644 +--- a/src/pip/_internal/req/__init__.py ++++ b/src/pip/_internal/req/__init__.py +@@ -26,7 +26,7 @@ def __init__(self, name): + + def __repr__(self): + # type: () -> str +- return "InstallationResult(name={!r})".format(self.name) ++ return f"InstallationResult(name={self.name!r})" + + + def _validate_requirements( +@@ -34,7 +34,7 @@ def _validate_requirements( + ): + # type: (...) -> Iterator[Tuple[str, InstallRequirement]] + for req in requirements: +- assert req.name, "invalid to-be-installed requirement: {}".format(req) ++ assert req.name, f"invalid to-be-installed requirement: {req}" + yield req.name, req + + +diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py +index 3564ab399e..d02dc636b0 100644 +--- a/src/pip/_internal/req/constructors.py ++++ b/src/pip/_internal/req/constructors.py +@@ -111,8 +111,8 @@ def parse_editable(editable_req): + return package_name, url_no_extras, set() + + for version_control in vcs: +- if url.lower().startswith('{}:'.format(version_control)): +- url = '{}+{}'.format(version_control, url) ++ if url.lower().startswith(f'{version_control}:'): ++ url = f'{version_control}+{url}' + break + + if '+' not in url: +@@ -167,7 +167,7 @@ def deduce_helpful_msg(req): + "Cannot parse '%s' as requirements file", req, exc_info=True + ) + else: +- msg += " File '{}' does not exist.".format(req) ++ msg += f" File '{req}' does not exist." 
+ return msg + + +@@ -193,7 +193,7 @@ def parse_req_from_editable(editable_req): + try: + req = Requirement(name) + except InvalidRequirement: +- raise InstallationError("Invalid requirement: '{}'".format(name)) ++ raise InstallationError(f"Invalid requirement: '{name}'") + else: + req = None + +@@ -342,7 +342,7 @@ def with_source(text): + # type: (str) -> str + if not line_source: + return text +- return '{} (from {})'.format(text, line_source) ++ return f'{text} (from {line_source})' + + if req_as_string is not None: + try: +@@ -357,10 +357,10 @@ def with_source(text): + else: + add_msg = '' + msg = with_source( +- 'Invalid requirement: {!r}'.format(req_as_string) ++ f'Invalid requirement: {req_as_string!r}' + ) + if add_msg: +- msg += '\nHint: {}'.format(add_msg) ++ msg += f'\nHint: {add_msg}' + raise InstallationError(msg) + else: + # Deprecate extras after specifiers: "name>=1.0[extras]" +@@ -370,7 +370,7 @@ def with_source(text): + for spec in req.specifier: + spec_str = str(spec) + if spec_str.endswith(']'): +- msg = "Extras after version '{}'.".format(spec_str) ++ msg = f"Extras after version '{spec_str}'." + replace = "moving the extras before version specifiers" + deprecated(msg, replacement=replace, gone_in="21.0") + else: +@@ -421,7 +421,7 @@ def install_req_from_req_string( + try: + req = Requirement(req_string) + except InvalidRequirement: +- raise InstallationError("Invalid requirement: '{}'".format(req_string)) ++ raise InstallationError(f"Invalid requirement: '{req_string}'") + + domains_not_allowed = [ + PyPI.file_storage_domain, +diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py +index bc5d6dfb2e..087129613d 100644 +--- a/src/pip/_internal/req/req_file.py ++++ b/src/pip/_internal/req/req_file.py +@@ -201,7 +201,7 @@ def handle_requirement_line( + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + +- line_source = 'line {} of {}'.format(line.lineno, line.filename) ++ line_source = f'line {line.lineno} of {line.filename}' + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, +@@ -271,7 +271,7 @@ def handle_option_line( + + if session: + for host in opts.trusted_hosts or []: +- source = 'line {} of {}'.format(lineno, filename) ++ source = f'line {lineno} of {filename}' + session.add_trusted_host(host, source=source) + + +@@ -381,7 +381,7 @@ def _parse_file(self, filename, constraint): + args_str, opts = self._line_parser(line) + except OptionParsingError as e: + # add offending line +- msg = 'Invalid requirement: {}\n{}'.format(line, e.msg) ++ msg = f'Invalid requirement: {line}\n{e.msg}' + raise RequirementsFileParseError(msg) + + yield ParsedLine( +@@ -559,6 +559,6 @@ def get_file_content(url, session): + content = auto_decode(f.read()) + except IOError as exc: + raise InstallationError( +- 'Could not open requirements file: {}'.format(exc) ++ f'Could not open requirements file: {exc}' + ) + return url, content +diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py +index f391b47f6d..0bf0013203 100644 +--- a/src/pip/_internal/req/req_install.py ++++ b/src/pip/_internal/req/req_install.py +@@ -226,7 +226,7 @@ def __str__(self): + else: + comes_from = self.comes_from.from_path() + if comes_from: +- s += ' (from {})'.format(comes_from) ++ s += f' (from {comes_from})' + return s + + def __repr__(self): +@@ -364,7 +364,7 @@ def ensure_build_location(self, build_dir, autodelete, parallel_builds): + # name so multiple 
builds do not interfere with each other. + dir_name = canonicalize_name(self.name) + if parallel_builds: +- dir_name = "{}_{}".format(dir_name, uuid.uuid4().hex) ++ dir_name = f"{dir_name}_{uuid.uuid4().hex}" + + # FIXME: Is there a better place to create the build_dir? (hg and bzr + # need this) +@@ -475,7 +475,7 @@ def unpacked_source_directory(self): + @property + def setup_py_path(self): + # type: () -> str +- assert self.source_dir, "No source dir for {}".format(self) ++ assert self.source_dir, f"No source dir for {self}" + setup_py = os.path.join(self.unpacked_source_directory, 'setup.py') + + return setup_py +@@ -483,7 +483,7 @@ def setup_py_path(self): + @property + def pyproject_toml_path(self): + # type: () -> str +- assert self.source_dir, "No source dir for {}".format(self) ++ assert self.source_dir, f"No source dir for {self}" + return make_pyproject_path(self.unpacked_source_directory) + + def load_pyproject_toml(self): +@@ -526,7 +526,7 @@ def _generate_metadata(self): + setup_py_path=self.setup_py_path, + source_dir=self.unpacked_source_directory, + isolated=self.isolated, +- details=self.name or "from {}".format(self.link) ++ details=self.name or f"from {self.link}" + ) + + assert self.pep517_backend is not None +diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py +index 275cb5105a..39358711ac 100644 +--- a/src/pip/_internal/resolution/resolvelib/candidates.py ++++ b/src/pip/_internal/resolution/resolvelib/candidates.py +@@ -88,9 +88,9 @@ def make_install_req_from_dist(dist, template): + if template.req: + line = str(template.req) + elif template.link: +- line = "{} @ {}".format(project_name, template.link.url) ++ line = f"{project_name} @ {template.link.url}" + else: +- line = "{}=={}".format(project_name, dist.parsed_version) ++ line = f"{project_name}=={dist.parsed_version}" + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, +@@ -145,7 +145,7 @@ def __init__( + + def __str__(self): + # type: () -> str +- return "{} {}".format(self.name, self.version) ++ return f"{self.name} {self.version}" + + def __repr__(self): + # type: () -> str +@@ -288,7 +288,7 @@ def __init__( + wheel = Wheel(ireq.link.filename) + wheel_name = canonicalize_name(wheel.name) + assert name == wheel_name, ( +- "{!r} != {!r} for wheel".format(name, wheel_name) ++ f"{name!r} != {wheel_name!r} for wheel" + ) + # Version may not be present for PEP 508 direct URLs + if version is not None: +@@ -416,7 +416,7 @@ def is_editable(self): + + def format_for_error(self): + # type: () -> str +- return "{} {} (Installed)".format(self.name, self.version) ++ return f"{self.name} {self.version} (Installed)" + + def iter_dependencies(self, with_requires): + # type: (bool) -> Iterable[Optional[Requirement]] +@@ -584,7 +584,7 @@ def __init__(self, py_version_info): + + def __str__(self): + # type: () -> str +- return "Python {}".format(self._version) ++ return f"Python {self._version}" + + @property + def project_name(self): +@@ -604,7 +604,7 @@ def version(self): + + def format_for_error(self): + # type: () -> str +- return "Python {}".format(self.version) ++ return f"Python {self.version}" + + def iter_dependencies(self, with_requires): + # type: (bool) -> Iterable[Optional[Requirement]] +diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py +index 03d0faadee..70484f4701 100644 +--- a/src/pip/_internal/resolution/resolvelib/factory.py ++++ 
b/src/pip/_internal/resolution/resolvelib/factory.py +@@ -391,13 +391,13 @@ def get_installation_error(self, e): + if parent is None: + req_disp = str(req) + else: +- req_disp = '{} (from {})'.format(req, parent.name) ++ req_disp = f'{req} (from {parent.name})' + logger.critical( + "Could not find a version that satisfies the requirement %s", + req_disp, + ) + return DistributionNotFound( +- 'No matching distribution found for {}'.format(req) ++ f'No matching distribution found for {req}' + ) + + # OK, we now have a list of requirements that can't all be +@@ -415,7 +415,7 @@ def describe_trigger(parent): + # type: (Candidate) -> str + ireq = parent.get_install_requirement() + if not ireq or not ireq.comes_from: +- return "{}=={}".format(parent.name, parent.version) ++ return f"{parent.name}=={parent.version}" + if isinstance(ireq.comes_from, InstallRequirement): + return str(ireq.comes_from.name) + return str(ireq.comes_from) +diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py +index d926d0a065..85343d5980 100644 +--- a/src/pip/_internal/resolution/resolvelib/requirements.py ++++ b/src/pip/_internal/resolution/resolvelib/requirements.py +@@ -122,7 +122,7 @@ def __init__(self, specifier, match): + + def __str__(self): + # type: () -> str +- return "Python {}".format(self.specifier) ++ return f"Python {self.specifier}" + + def __repr__(self): + # type: () -> str +diff --git a/src/pip/_internal/self_outdated_check.py b/src/pip/_internal/self_outdated_check.py +index 01ed8787b5..56d03a8668 100644 +--- a/src/pip/_internal/self_outdated_check.py ++++ b/src/pip/_internal/self_outdated_check.py +@@ -181,7 +181,7 @@ def pip_self_version_check(session, options): + # command context, so be pragmatic here and suggest the command + # that's always available. This does not accommodate spaces in + # `sys.executable`. 
+- pip_cmd = "{} -m pip".format(sys.executable) ++ pip_cmd = f"{sys.executable} -m pip" + logger.warning( + "You are using pip version %s; however, version %s is " + "available.\nYou should consider upgrading via the " +diff --git a/src/pip/_internal/utils/compatibility_tags.py b/src/pip/_internal/utils/compatibility_tags.py +index 4bf5aaa936..ac37c3a17b 100644 +--- a/src/pip/_internal/utils/compatibility_tags.py ++++ b/src/pip/_internal/utils/compatibility_tags.py +@@ -116,7 +116,7 @@ def _get_custom_interpreter(implementation=None, version=None): + implementation = interpreter_name() + if version is None: + version = interpreter_version() +- return "{}{}".format(implementation, version) ++ return f"{implementation}{version}" + + + def get_supported( +diff --git a/src/pip/_internal/utils/direct_url_helpers.py b/src/pip/_internal/utils/direct_url_helpers.py +index c6cd976cae..9598137aa0 100644 +--- a/src/pip/_internal/utils/direct_url_helpers.py ++++ b/src/pip/_internal/utils/direct_url_helpers.py +@@ -88,7 +88,7 @@ def direct_url_from_link(link, source_dir=None, link_is_in_wheel_cache=False): + hash = None + hash_name = link.hash_name + if hash_name: +- hash = "{}={}".format(hash_name, link.hash) ++ hash = f"{hash_name}={link.hash}" + return DirectUrl( + url=link.url_without_fragment, + info=ArchiveInfo(hash=hash), +diff --git a/src/pip/_internal/utils/hashes.py b/src/pip/_internal/utils/hashes.py +index 35dae23582..d5ff90063c 100644 +--- a/src/pip/_internal/utils/hashes.py ++++ b/src/pip/_internal/utils/hashes.py +@@ -85,7 +85,7 @@ def check_against_chunks(self, chunks): + gots[hash_name] = hashlib.new(hash_name) + except (ValueError, TypeError): + raise InstallationError( +- 'Unknown hash name: {}'.format(hash_name) ++ f'Unknown hash name: {hash_name}' + ) + + for chunk in chunks: +diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py +index 72db88c7f4..e83259eccb 100644 +--- a/src/pip/_internal/utils/misc.py ++++ b/src/pip/_internal/utils/misc.py +@@ -116,7 +116,7 @@ def get_prog(): + try: + prog = os.path.basename(sys.argv[0]) + if prog in ('__main__.py', '-c'): +- return "{} -m pip".format(sys.executable) ++ return f"{sys.executable} -m pip" + else: + return prog + except (AttributeError, TypeError, IndexError): +@@ -676,8 +676,8 @@ def build_netloc(host, port): + return host + if ':' in host: + # Only wrap host with square brackets when it is IPv6 +- host = '[{}]'.format(host) +- return '{}:{}'.format(host, port) ++ host = f'[{host}]' ++ return f'{host}:{port}' + + + def build_url_from_netloc(netloc, scheme='https'): +@@ -687,8 +687,8 @@ def build_url_from_netloc(netloc, scheme='https'): + """ + if netloc.count(':') >= 2 and '@' not in netloc and '[' not in netloc: + # It must be a bare IPv6 address, so wrap it with brackets. +- netloc = '[{}]'.format(netloc) +- return '{}://{}'.format(scheme, netloc) ++ netloc = f'[{netloc}]' ++ return f'{scheme}://{netloc}' + + + def parse_netloc(netloc): +diff --git a/src/pip/_internal/utils/pkg_resources.py b/src/pip/_internal/utils/pkg_resources.py +index 0f42cc381a..d5b26f5389 100644 +--- a/src/pip/_internal/utils/pkg_resources.py ++++ b/src/pip/_internal/utils/pkg_resources.py +@@ -24,7 +24,7 @@ def get_metadata(self, name): + return ensure_str(self._metadata[name]) + except UnicodeDecodeError as e: + # Mirrors handling done in pkg_resources.NullProvider. 
+- e.reason += " in {} file".format(name) ++ e.reason += f" in {name} file" + raise + + def get_metadata_lines(self, name): +diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py +index f224a04188..91b277df6e 100644 +--- a/src/pip/_internal/utils/temp_dir.py ++++ b/src/pip/_internal/utils/temp_dir.py +@@ -151,13 +151,13 @@ def __init__( + def path(self): + # type: () -> str + assert not self._deleted, ( +- "Attempted to access deleted path: {}".format(self._path) ++ f"Attempted to access deleted path: {self._path}" + ) + return self._path + + def __repr__(self): + # type: () -> str +- return "<{} {!r}>".format(self.__class__.__name__, self.path) ++ return f"<{self.__class__.__name__} {self.path!r}>" + + def __enter__(self): + # type: (_T) -> _T +@@ -184,7 +184,7 @@ def _create(self, kind): + # scripts, so we canonicalize the path by traversing potential + # symlinks here. + path = os.path.realpath( +- tempfile.mkdtemp(prefix="pip-{}-".format(kind)) ++ tempfile.mkdtemp(prefix=f"pip-{kind}-") + ) + logger.debug("Created temporary directory: %s", path) + return path +@@ -275,7 +275,7 @@ def _create(self, kind): + else: + # Final fallback on the default behavior. + path = os.path.realpath( +- tempfile.mkdtemp(prefix="pip-{}-".format(kind)) ++ tempfile.mkdtemp(prefix=f"pip-{kind}-") + ) + + logger.debug("Created temporary directory: %s", path) +diff --git a/src/pip/_internal/utils/unpacking.py b/src/pip/_internal/utils/unpacking.py +index 5cfba87f81..a24d7e5573 100644 +--- a/src/pip/_internal/utils/unpacking.py ++++ b/src/pip/_internal/utils/unpacking.py +@@ -273,5 +273,5 @@ def unpack_file( + filename, location, content_type, + ) + raise InstallationError( +- 'Cannot determine archive format of {}'.format(location) ++ f'Cannot determine archive format of {location}' + ) +diff --git a/src/pip/_internal/utils/wheel.py b/src/pip/_internal/utils/wheel.py +index c6dc4ccb0d..2c01cf9927 100644 +--- a/src/pip/_internal/utils/wheel.py ++++ b/src/pip/_internal/utils/wheel.py +@@ -57,7 +57,7 @@ def pkg_resources_distribution_for_wheel(wheel_zip, name, location): + info_dir, _ = parse_wheel(wheel_zip, name) + + metadata_files = [ +- p for p in wheel_zip.namelist() if p.startswith("{}/".format(info_dir)) ++ p for p in wheel_zip.namelist() if p.startswith(f"{info_dir}/") + ] + + metadata_text = {} # type: Dict[str, bytes] +@@ -152,7 +152,7 @@ def read_wheel_metadata_file(source, path): + # and RuntimeError for password-protected files + except (BadZipFile, KeyError, RuntimeError) as e: + raise UnsupportedWheel( +- "could not read {!r} file: {!r}".format(path, e) ++ f"could not read {path!r} file: {e!r}" + ) + + +@@ -161,14 +161,14 @@ def wheel_metadata(source, dist_info_dir): + """Return the WHEEL metadata of an extracted wheel, if possible. + Otherwise, raise UnsupportedWheel. + """ +- path = "{}/WHEEL".format(dist_info_dir) ++ path = f"{dist_info_dir}/WHEEL" + # Zip file path separators must be / + wheel_contents = read_wheel_metadata_file(source, path) + + try: + wheel_text = ensure_str(wheel_contents) + except UnicodeDecodeError as e: +- raise UnsupportedWheel("error decoding {!r}: {!r}".format(path, e)) ++ raise UnsupportedWheel(f"error decoding {path!r}: {e!r}") + + # FeedParser (used by Parser) does not raise any exceptions. 
The returned + # message may have .defects populated, but for backwards-compatibility we +@@ -190,7 +190,7 @@ def wheel_version(wheel_data): + try: + return tuple(map(int, version.split('.'))) + except ValueError: +- raise UnsupportedWheel("invalid Wheel-Version: {!r}".format(version)) ++ raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}") + + + def check_compatibility(version, name): +diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py +index 688f132a49..71cb533122 100644 +--- a/src/pip/_internal/vcs/git.py ++++ b/src/pip/_internal/vcs/git.py +@@ -147,12 +147,12 @@ def get_revision_sha(cls, dest, rev): + except ValueError: + # Include the offending line to simplify troubleshooting if + # this error ever occurs. +- raise ValueError('unexpected show-ref line: {!r}'.format(line)) ++ raise ValueError(f'unexpected show-ref line: {line!r}') + + refs[ref] = sha + +- branch_ref = 'refs/remotes/origin/{}'.format(rev) +- tag_ref = 'refs/tags/{}'.format(rev) ++ branch_ref = f'refs/remotes/origin/{rev}' ++ tag_ref = f'refs/tags/{rev}' + + sha = refs.get(branch_ref) + if sha is not None: +@@ -266,7 +266,7 @@ def fetch_new(self, dest, url, rev_options): + elif self.get_current_branch(dest) != branch_name: + # Then a specific branch was requested, and that branch + # is not yet checked out. +- track_branch = 'origin/{}'.format(branch_name) ++ track_branch = f'origin/{branch_name}' + cmd_args = [ + 'checkout', '-b', branch_name, '--track', track_branch, + ] +diff --git a/src/pip/_internal/vcs/versioncontrol.py b/src/pip/_internal/vcs/versioncontrol.py +index 5cfd5d6fbb..a175e6d846 100644 +--- a/src/pip/_internal/vcs/versioncontrol.py ++++ b/src/pip/_internal/vcs/versioncontrol.py +@@ -77,9 +77,9 @@ def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): + project_name: the (unescaped) project name. + """ + egg_project_name = pkg_resources.to_filename(project_name) +- req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name) ++ req = f'{repo_url}@{rev}#egg={egg_project_name}' + if subdir: +- req += '&subdirectory={}'.format(subdir) ++ req += f'&subdirectory={subdir}' + + return req + +@@ -236,7 +236,7 @@ def __init__( + + def __repr__(self): + # type: () -> str +- return ''.format(self.vc_class.name, self.rev) ++ return f'' + + @property + def arg_rev(self): +@@ -264,7 +264,7 @@ def to_display(self): + if not self.rev: + return '' + +- return ' (to revision {})'.format(self.rev) ++ return f' (to revision {self.rev})' + + def make_new(self, rev): + # type: (str) -> RevOptions +@@ -388,7 +388,7 @@ def should_add_vcs_url_prefix(cls, remote_url): + Return whether the vcs prefix (e.g. "git+") should be added to a + repository's remote url when used in a requirement. 
+ """ +- return not remote_url.lower().startswith('{}:'.format(cls.name)) ++ return not remote_url.lower().startswith(f'{cls.name}:') + + @classmethod + def get_subdirectory(cls, location): +@@ -424,7 +424,7 @@ def get_src_requirement(cls, repo_dir, project_name): + repo_url = cls.get_remote_url(repo_dir) + + if cls.should_add_vcs_url_prefix(repo_url): +- repo_url = '{}+{}'.format(cls.name, repo_url) ++ repo_url = f'{cls.name}+{repo_url}' + + revision = cls.get_requirement_revision(repo_dir) + subdir = cls.get_subdirectory(repo_dir) +diff --git a/tests/conftest.py b/tests/conftest.py +index 048258f96f..0bb69dae6d 100644 +--- a/tests/conftest.py ++++ b/tests/conftest.py +@@ -95,7 +95,7 @@ def pytest_collection_modifyitems(config, items): + item.add_marker(pytest.mark.unit) + else: + raise RuntimeError( +- "Unknown test type (filename = {})".format(module_path) ++ f"Unknown test type (filename = {module_path})" + ) + + +diff --git a/tests/functional/test_cache.py b/tests/functional/test_cache.py +index 872f55982b..0dc7910818 100644 +--- a/tests/functional/test_cache.py ++++ b/tests/functional/test_cache.py +@@ -103,7 +103,7 @@ def list_matches_wheel(wheel_name, result): + E.g., If wheel_name is `foo-1.2.3` it searches for a line starting with + `- foo-1.2.3-py3-none-any.whl `.""" + lines = result.stdout.splitlines() +- expected = ' - {}-py3-none-any.whl '.format(wheel_name) ++ expected = f' - {wheel_name}-py3-none-any.whl ' + return any(map(lambda l: l.startswith(expected), lines)) + + +@@ -115,7 +115,7 @@ def list_matches_wheel_abspath(wheel_name, result): + E.g., If wheel_name is `foo-1.2.3` it searches for a line starting with + `foo-1.2.3-py3-none-any.whl`.""" + lines = result.stdout.splitlines() +- expected = '{}-py3-none-any.whl'.format(wheel_name) ++ expected = f'{wheel_name}-py3-none-any.whl' + return any(map(lambda l: os.path.basename(l).startswith(expected) + and os.path.exists(l), lines)) + +@@ -137,7 +137,7 @@ def _remove_matches_http(http_filename, result): + path = os.path.join( + http_cache_dir, 'arbitrary', 'pathname', http_filename, + ) +- expected = 'Removed {}'.format(path) ++ expected = f'Removed {path}' + return expected in lines + + return _remove_matches_http +@@ -155,14 +155,14 @@ def remove_matches_wheel(wheel_cache_dir): + def _remove_matches_wheel(wheel_name, result): + lines = result.stdout.splitlines() + +- wheel_filename = '{}-py3-none-any.whl'.format(wheel_name) ++ wheel_filename = f'{wheel_name}-py3-none-any.whl' + + # The "/arbitrary/pathname/" bit is an implementation detail of how + # the `populate_wheel_cache` fixture is implemented. 
+ path = os.path.join( + wheel_cache_dir, 'arbitrary', 'pathname', wheel_filename, + ) +- expected = 'Removed {}'.format(path) ++ expected = f'Removed {path}' + return expected in lines + + return _remove_matches_wheel +@@ -191,12 +191,12 @@ def test_cache_info( + result = script.pip('cache', 'info') + + assert ( +- 'Package index page cache location: {}'.format(http_cache_dir) ++ f'Package index page cache location: {http_cache_dir}' + in result.stdout + ) +- assert 'Wheels location: {}'.format(wheel_cache_dir) in result.stdout ++ assert f'Wheels location: {wheel_cache_dir}' in result.stdout + num_wheels = len(wheel_cache_files) +- assert 'Number of wheels: {}'.format(num_wheels) in result.stdout ++ assert f'Number of wheels: {num_wheels}' in result.stdout + + + @pytest.mark.usefixtures("populate_wheel_cache") +diff --git a/tests/functional/test_configuration.py b/tests/functional/test_configuration.py +index f820bdc19a..72c09bd363 100644 +--- a/tests/functional/test_configuration.py ++++ b/tests/functional/test_configuration.py +@@ -98,7 +98,7 @@ def test_env_values(self, script): + """)) + + result = script.pip("config", "debug") +- assert "{}, exists: True".format(config_file) in result.stdout ++ assert f"{config_file}, exists: True" in result.stdout + assert "global.timeout: 60" in result.stdout + assert "freeze.timeout: 10" in result.stdout + assert re.search(r"env:\n( .+\n)+", result.stdout) +@@ -117,7 +117,7 @@ def test_user_values(self, script,): + script.pip("config", "--user", "set", "freeze.timeout", "10") + + result = script.pip("config", "debug") +- assert "{}, exists: True".format(new_config_file) in result.stdout ++ assert f"{new_config_file}, exists: True" in result.stdout + assert "global.timeout: 60" in result.stdout + assert "freeze.timeout: 10" in result.stdout + assert re.search(r"user:\n( .+\n)+", result.stdout) +@@ -134,7 +134,7 @@ def test_site_values(self, script, virtualenv): + script.pip("config", "--site", "set", "freeze.timeout", "10") + + result = script.pip("config", "debug") +- assert "{}, exists: True".format(site_config_file) in result.stdout ++ assert f"{site_config_file}, exists: True" in result.stdout + assert "global.timeout: 60" in result.stdout + assert "freeze.timeout: 10" in result.stdout + assert re.search(r"site:\n( .+\n)+", result.stdout) +@@ -149,4 +149,4 @@ def test_global_config_file(self, script): + # So we just check if the file can be identified + global_config_file = get_configuration_files()[kinds.GLOBAL][0] + result = script.pip("config", "debug") +- assert "{}, exists:".format(global_config_file) in result.stdout ++ assert f"{global_config_file}, exists:" in result.stdout +diff --git a/tests/functional/test_debug.py b/tests/functional/test_debug.py +index f309604df5..0e2261e1ae 100644 +--- a/tests/functional/test_debug.py ++++ b/tests/functional/test_debug.py +@@ -40,7 +40,7 @@ def test_debug__library_versions(script): + + vendored_versions = create_vendor_txt_map() + for name, value in vendored_versions.items(): +- assert '{}=={}'.format(name, value) in result.stdout ++ assert f'{name}=={value}' in result.stdout + + + @pytest.mark.parametrize( +diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py +index 90981395a7..95f1b63cf8 100644 +--- a/tests/functional/test_download.py ++++ b/tests/functional/test_download.py +@@ -364,7 +364,7 @@ def test_download_compatible_manylinuxes( + """ + Earlier manylinuxes are compatible with later manylinuxes. 
+ """ +- wheel = 'fake-1.0-py2.py3-none-{}.whl'.format(wheel_abi) ++ wheel = f'fake-1.0-py2.py3-none-{wheel_abi}.whl' + fake_wheel(data, wheel) + result = script.pip( + 'download', '--no-index', '--find-links', data.find_links, +@@ -491,7 +491,7 @@ def make_wheel_with_python_requires(script, package_name, python_requires): + 'python', 'setup.py', 'bdist_wheel', '--universal', cwd=package_dir, + ) + +- file_name = '{}-1.0-py2.py3-none-any.whl'.format(package_name) ++ file_name = f'{package_name}-1.0-py2.py3-none-any.whl' + return package_dir / 'dist' / file_name + + +@@ -521,7 +521,7 @@ def make_args(python_version): + "ERROR: Package 'mypackage' requires a different Python: " + "3.3.0 not in '==3.2'" + ) +- assert expected_err in result.stderr, 'stderr: {}'.format(result.stderr) ++ assert expected_err in result.stderr, f'stderr: {result.stderr}' + + # Now try with a --python-version that satisfies the Requires-Python. + args = make_args('32') +@@ -863,8 +863,8 @@ def test_download_http_url_bad_hash( + file_response(simple_pkg) + ]) + mock_server.start() +- base_address = 'http://{}:{}'.format(mock_server.host, mock_server.port) +- url = "{}/simple-1.0.tar.gz#sha256={}".format(base_address, digest) ++ base_address = f'http://{mock_server.host}:{mock_server.port}' ++ url = f"{base_address}/simple-1.0.tar.gz#sha256={digest}" + + shared_script.pip('download', '-d', str(download_dir), url) + +diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py +index f0a2265f3a..5d3d496861 100644 +--- a/tests/functional/test_freeze.py ++++ b/tests/functional/test_freeze.py +@@ -495,7 +495,7 @@ def test_freeze_bazaar_clone(script, tmpdir): + try: + checkout_path = _create_test_package(script, vcs='bazaar') + except OSError as e: +- pytest.fail('Invoking `bzr` failed: {e}'.format(e=e)) ++ pytest.fail(f'Invoking `bzr` failed: {e}') + + result = script.run( + 'bzr', 'checkout', checkout_path, 'bzr-package' +@@ -552,7 +552,7 @@ def test_freeze_nested_vcs(script, outer_vcs, inner_vcs): + result = script.pip("freeze", expect_stderr=True) + _check_output( + result.stdout, +- "...-e {}+...#egg=version_pkg\n...".format(inner_vcs), ++ f"...-e {inner_vcs}+...#egg=version_pkg\n...", + ) + + +diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py +index aedd691a4e..9c36fef0ec 100644 +--- a/tests/functional/test_install.py ++++ b/tests/functional/test_install.py +@@ -530,7 +530,7 @@ def test_hashed_install_failure(script, tmpdir): + + def assert_re_match(pattern, text): + assert re.search(pattern, text), ( +- "Could not find {!r} in {!r}".format(pattern, text) ++ f"Could not find {pattern!r} in {text!r}" + ) + + +@@ -1023,7 +1023,7 @@ def test_install_package_with_prefix(script, data): + install_path = ( + distutils.sysconfig.get_python_lib(prefix=rel_prefix_path) / + # we still test for egg-info because no-binary implies setup.py install +- 'simple-1.0-py{}.egg-info'.format(pyversion) ++ f'simple-1.0-py{pyversion}.egg-info' + ) + result.did_create(install_path) + +@@ -1040,7 +1040,7 @@ def test_install_editable_with_prefix(script): + + if hasattr(sys, "pypy_version_info"): + site_packages = os.path.join( +- 'prefix', 'lib', 'python{}'.format(pyversion), 'site-packages') ++ 'prefix', 'lib', f'python{pyversion}', 'site-packages') + else: + site_packages = distutils.sysconfig.get_python_lib(prefix='prefix') + +@@ -1086,7 +1086,7 @@ def test_install_package_that_emits_unicode(script, data): + ) + assert ( + 'FakeError: this package designed to fail on install' in result.stderr 
+- ), 'stderr: {}'.format(result.stderr) ++ ), f'stderr: {result.stderr}' + assert 'UnicodeDecodeError' not in result.stderr + assert 'UnicodeDecodeError' not in result.stdout + +@@ -1838,7 +1838,7 @@ def test_install_sends_client_cert(install_args, script, cert_factory, data): + file_response(str(data.packages / "simple-3.0.tar.gz")), + ] + +- url = "https://{}:{}/simple".format(server.host, server.port) ++ url = f"https://{server.host}:{server.port}/simple" + + args = ["install", "-vvv", "--cert", cert_path, "--client-cert", cert_path] + args.extend(["--index-url", url]) +diff --git a/tests/functional/test_install_cleanup.py b/tests/functional/test_install_cleanup.py +index 10e4912496..7b64ed4b5e 100644 +--- a/tests/functional/test_install_cleanup.py ++++ b/tests/functional/test_install_cleanup.py +@@ -14,7 +14,7 @@ def test_no_clean_option_blocks_cleaning_after_install(script, data): + build = script.base_path / 'pip-build' + script.pip( + 'install', '--no-clean', '--no-index', '--build', build, +- '--find-links={}'.format(data.find_links), 'simple', ++ f'--find-links={data.find_links}', 'simple', + expect_temp=True, + # TODO: allow_stderr_warning is used for the --build deprecation, + # remove it when removing support for --build +diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py +index dcc9c66d5a..41be6fbbbb 100644 +--- a/tests/functional/test_install_config.py ++++ b/tests/functional/test_install_config.py +@@ -100,9 +100,9 @@ def test_command_line_append_flags(script, virtualenv, data): + in result.stdout + ) + assert ( +- 'Skipping link: not a file: {}'.format(data.find_links) in ++ f'Skipping link: not a file: {data.find_links}' in + result.stdout +- ), 'stdout: {}'.format(result.stdout) ++ ), f'stdout: {result.stdout}' + + + @pytest.mark.network +@@ -124,9 +124,9 @@ def test_command_line_appends_correctly(script, data): + in result.stdout + ), result.stdout + assert ( +- 'Skipping link: not a file: {}'.format(data.find_links) in ++ f'Skipping link: not a file: {data.find_links}' in + result.stdout +- ), 'stdout: {}'.format(result.stdout) ++ ), f'stdout: {result.stdout}' + + + def test_config_file_override_stack( +@@ -143,7 +143,7 @@ def test_config_file_override_stack( + file_response(shared_data.packages.joinpath("INITools-0.2.tar.gz")), + ]) + mock_server.start() +- base_address = "http://{}:{}".format(mock_server.host, mock_server.port) ++ base_address = f"http://{mock_server.host}:{mock_server.port}" + + config_file = script.scratch_path / "test-pip.cfg" + +@@ -166,7 +166,7 @@ def test_config_file_override_stack( + ) + script.pip('install', '-vvv', 'INITools', expect_error=True) + script.pip( +- 'install', '-vvv', '--index-url', "{}/simple3".format(base_address), ++ 'install', '-vvv', '--index-url', f"{base_address}/simple3", + 'INITools', + ) + +@@ -236,14 +236,14 @@ def test_prompt_for_authentication(script, data, cert_factory): + authorization_response(str(data.packages / "simple-3.0.tar.gz")), + ] + +- url = "https://{}:{}/simple".format(server.host, server.port) ++ url = f"https://{server.host}:{server.port}/simple" + + with server_running(server): + result = script.pip('install', "--index-url", url, + "--cert", cert_path, "--client-cert", cert_path, + 'simple', expect_error=True) + +- assert 'User for {}:{}'.format(server.host, server.port) in \ ++ assert f'User for {server.host}:{server.port}' in \ + result.stdout, str(result) + + +@@ -266,7 +266,7 @@ def test_do_not_prompt_for_authentication(script, data, cert_factory): + 
authorization_response(str(data.packages / "simple-3.0.tar.gz")), + ] + +- url = "https://{}:{}/simple".format(server.host, server.port) ++ url = f"https://{server.host}:{server.port}/simple" + + with server_running(server): + result = script.pip('install', "--index-url", url, +diff --git a/tests/functional/test_install_direct_url.py b/tests/functional/test_install_direct_url.py +index ec1e927ebf..23273774d1 100644 +--- a/tests/functional/test_install_direct_url.py ++++ b/tests/functional/test_install_direct_url.py +@@ -33,7 +33,7 @@ def test_install_vcs_editable_no_direct_url(script, with_wheel): + def test_install_vcs_non_editable_direct_url(script, with_wheel): + pkg_path = _create_test_package(script, name="testpkg") + url = path_to_url(pkg_path) +- args = ["install", "git+{}#egg=testpkg".format(url)] ++ args = ["install", f"git+{url}#egg=testpkg"] + result = script.pip(*args) + direct_url = _get_created_direct_url(result, "testpkg") + assert direct_url +diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py +index d70067b6bc..0ec4294063 100644 +--- a/tests/functional/test_install_extras.py ++++ b/tests/functional/test_install_extras.py +@@ -171,5 +171,4 @@ def test_install_extra_merging(script, data, extra_to_install, simple_version): + '{pkga_path}{extra_to_install}'.format(**locals()), + ) + +- assert ('Successfully installed pkga-0.1 simple-{}'.format(simple_version) +- ) in result.stdout ++ assert f'Successfully installed pkga-0.1 simple-{simple_version}' in result.stdout +diff --git a/tests/functional/test_install_force_reinstall.py b/tests/functional/test_install_force_reinstall.py +index 0fbdeb276c..265c52b20d 100644 +--- a/tests/functional/test_install_force_reinstall.py ++++ b/tests/functional/test_install_force_reinstall.py +@@ -11,7 +11,7 @@ def check_installed_version(script, package, expected): + if line.startswith('Version: '): + version = line.split()[-1] + break +- assert version == expected, 'version {} != {}'.format(version, expected) ++ assert version == expected, f'version {version} != {expected}' + + + def check_force_reinstall(script, specifier, expected): +diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py +index 83fe9c9451..9c35aee832 100644 +--- a/tests/functional/test_install_reqs.py ++++ b/tests/functional/test_install_reqs.py +@@ -55,7 +55,7 @@ def _arg_recording_sdist_maker(name): + sdist_path = create_basic_sdist_for_package( + script, name, "0.1.0", extra_files + ) +- args_path = output_dir / "{}.json".format(name) ++ args_path = output_dir / f"{name}.json" + return ArgRecordingSdist(sdist_path, args_path) + + return _arg_recording_sdist_maker +diff --git a/tests/functional/test_install_upgrade.py b/tests/functional/test_install_upgrade.py +index 0dd4f9f8b5..46aac8f9d2 100644 +--- a/tests/functional/test_install_upgrade.py ++++ b/tests/functional/test_install_upgrade.py +@@ -470,5 +470,5 @@ def test_install_find_existing_package_canonicalize(script, req1, req2): + result = script.pip( + "install", "--no-index", "--find-links", pkg_container, "pkg", + ) +- satisfied_message = "Requirement already satisfied: {}".format(req2) ++ satisfied_message = f"Requirement already satisfied: {req2}" + assert satisfied_message in result.stdout, str(result) +diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py +index 59393d3474..4c26d8e88a 100644 +--- a/tests/functional/test_install_vcs_git.py ++++ b/tests/functional/test_install_vcs_git.py +@@ 
-38,7 +38,7 @@ def _get_branch_remote(script, package_name, branch): + """ + repo_dir = _get_editable_repo_dir(script, package_name) + result = script.run( +- 'git', 'config', 'branch.{}.remote'.format(branch), cwd=repo_dir ++ 'git', 'config', f'branch.{branch}.remote', cwd=repo_dir + ) + return result.stdout.strip() + +@@ -57,12 +57,12 @@ def _github_checkout(url_path, temp_dir, rev=None, egg=None, scheme=None): + """ + if scheme is None: + scheme = 'https' +- url = 'git+{}://github.com/{}'.format(scheme, url_path) ++ url = f'git+{scheme}://github.com/{url_path}' + local_url = local_checkout(url, temp_dir) + if rev is not None: +- local_url += '@{}'.format(rev) ++ local_url += f'@{rev}' + if egg is not None: +- local_url += '#egg={}'.format(egg) ++ local_url += f'#egg={egg}' + + return local_url + +@@ -77,8 +77,8 @@ def _make_version_pkg_url(path, rev=None, name="version_pkg"): + rev: an optional revision to install like a branch name, tag, or SHA. + """ + file_url = _test_path_to_file_url(path) +- url_rev = '' if rev is None else '@{}'.format(rev) +- url = 'git+{}{}#egg={}'.format(file_url, url_rev, name) ++ url_rev = '' if rev is None else f'@{rev}' ++ url = f'git+{file_url}{url_rev}#egg={name}' + + return url + +@@ -278,11 +278,11 @@ def test_git_with_tag_name_and_update(script, tmpdir): + url_path = 'pypa/pip-test-package.git' + base_local_url = _github_checkout(url_path, tmpdir) + +- local_url = '{}#egg=pip-test-package'.format(base_local_url) ++ local_url = f'{base_local_url}#egg=pip-test-package' + result = script.pip('install', '-e', local_url) + result.assert_installed('pip-test-package', with_files=['.git']) + +- new_local_url = '{}@0.1.2#egg=pip-test-package'.format(base_local_url) ++ new_local_url = f'{base_local_url}@0.1.2#egg=pip-test-package' + result = script.pip( + 'install', '--global-option=--version', '-e', new_local_url, + ) +@@ -484,12 +484,12 @@ def test_install_git_branch_not_cached(script, with_wheel): + repo_dir = _create_test_package(script, name=PKG) + url = _make_version_pkg_url(repo_dir, rev="master", name=PKG) + result = script.pip("install", url, "--only-binary=:all:") +- assert "Successfully built {}".format(PKG) in result.stdout, result.stdout ++ assert f"Successfully built {PKG}" in result.stdout, result.stdout + script.pip("uninstall", "-y", PKG) + # build occurs on the second install too because it is not cached + result = script.pip("install", url) + assert ( +- "Successfully built {}".format(PKG) in result.stdout ++ f"Successfully built {PKG}" in result.stdout + ), result.stdout + + +@@ -504,10 +504,10 @@ def test_install_git_sha_cached(script, with_wheel): + ).stdout.strip() + url = _make_version_pkg_url(repo_dir, rev=commit, name=PKG) + result = script.pip("install", url) +- assert "Successfully built {}".format(PKG) in result.stdout, result.stdout ++ assert f"Successfully built {PKG}" in result.stdout, result.stdout + script.pip("uninstall", "-y", PKG) + # build does not occur on the second install because it is cached + result = script.pip("install", url) + assert ( +- "Successfully built {}".format(PKG) not in result.stdout ++ f"Successfully built {PKG}" not in result.stdout + ), result.stdout +diff --git a/tests/functional/test_install_wheel.py b/tests/functional/test_install_wheel.py +index 9bf965625f..8df208bb7d 100644 +--- a/tests/functional/test_install_wheel.py ++++ b/tests/functional/test_install_wheel.py +@@ -14,7 +14,7 @@ + # assert_installed expects a package subdirectory, so give it to them + def make_wheel_with_file(name, version, 
**kwargs): + extra_files = kwargs.setdefault("extra_files", {}) +- extra_files["{}/__init__.py".format(name)] = "# example" ++ extra_files[f"{name}/__init__.py"] = "# example" + return make_wheel(name=name, version=version, **kwargs) + + +@@ -691,7 +691,7 @@ def test_wheel_with_file_in_data_dir_has_reasonable_error( + result = script.pip( + "install", "--no-index", str(wheel_path), expect_error=True + ) +- assert "simple-0.1.0.data/{}".format(name) in result.stderr ++ assert f"simple-0.1.0.data/{name}" in result.stderr + + + def test_wheel_with_unknown_subdir_in_data_dir_has_reasonable_error( +diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py +index 4b40ca23bf..efe9b1ec70 100644 +--- a/tests/functional/test_new_resolver.py ++++ b/tests/functional/test_new_resolver.py +@@ -21,8 +21,7 @@ def assert_installed(script, **kwargs): + for val in json.loads(ret.stdout) + ) + expected = set((canonicalize_name(k), v) for k, v in kwargs.items()) +- assert expected <= installed, \ +- "{!r} not all in {!r}".format(expected, installed) ++ assert expected <= installed, f"{expected!r} not all in {installed!r}" + + + def assert_not_installed(script, *args): +@@ -34,17 +33,16 @@ def assert_not_installed(script, *args): + # None of the given names should be listed as installed, i.e. their + # intersection should be empty. + expected = set(canonicalize_name(k) for k in args) +- assert not (expected & installed), \ +- "{!r} contained in {!r}".format(expected, installed) ++ assert not (expected & installed), f"{expected!r} contained in {installed!r}" + + + def assert_editable(script, *args): + # This simply checks whether all of the listed packages have a + # corresponding .egg-link file installed. + # TODO: Implement a more rigorous way to test for editable installations. +- egg_links = set("{}.egg-link".format(arg) for arg in args) ++ egg_links = set(f"{arg}.egg-link" for arg in args) + assert egg_links <= set(os.listdir(script.site_packages_path)), \ +- "{!r} not all found in {!r}".format(args, script.site_packages_path) ++ f"{args!r} not all found in {script.site_packages_path!r}" + + + def test_new_resolver_can_install(script): +@@ -732,7 +730,7 @@ def test_new_resolver_constraint_on_path_empty( + setup_py.write_text(text) + + constraints_txt = script.scratch_path / "constraints.txt" +- constraints_txt.write_text("foo=={}".format(constraint_version)) ++ constraints_txt.write_text(f"foo=={constraint_version}") + + result = script.pip( + "install", +@@ -1067,8 +1065,8 @@ def test_new_resolver_prefers_installed_in_upgrade_if_latest(script): + def test_new_resolver_presents_messages_when_backtracking_a_lot(script, N): + # Generate a set of wheels that will definitely cause backtracking. 
+ for index in range(1, N+1): +- A_version = "{index}.0.0".format(index=index) +- B_version = "{index}.0.0".format(index=index) ++ A_version = f"{index}.0.0" ++ B_version = f"{index}.0.0" + C_version = "{index_minus_one}.0.0".format(index_minus_one=index - 1) + + depends = ["B == " + B_version] +@@ -1079,15 +1077,15 @@ def test_new_resolver_presents_messages_when_backtracking_a_lot(script, N): + create_basic_wheel_for_package(script, "A", A_version, depends=depends) + + for index in range(1, N+1): +- B_version = "{index}.0.0".format(index=index) +- C_version = "{index}.0.0".format(index=index) ++ B_version = f"{index}.0.0" ++ C_version = f"{index}.0.0" + depends = ["C == " + C_version] + + print("B", B_version, "C", C_version) + create_basic_wheel_for_package(script, "B", B_version, depends=depends) + + for index in range(1, N+1): +- C_version = "{index}.0.0".format(index=index) ++ C_version = f"{index}.0.0" + print("C", C_version) + create_basic_wheel_for_package(script, "C", C_version) + +@@ -1138,7 +1136,7 @@ def test_new_resolver_check_wheel_version_normalized( + metadata_version, + filename_version, + ): +- filename = "simple-{}-py2.py3-none-any.whl".format(filename_version) ++ filename = f"simple-{filename_version}-py2.py3-none-any.whl" + + wheel_builder = make_wheel(name="simple", version=metadata_version) + wheel_builder.save_to(script.scratch_path / filename) +diff --git a/tests/functional/test_new_resolver_target.py b/tests/functional/test_new_resolver_target.py +index 037244a2c4..f5ec6ac7a0 100644 +--- a/tests/functional/test_new_resolver_target.py ++++ b/tests/functional/test_new_resolver_target.py +@@ -16,7 +16,7 @@ def _make_fake_wheel(wheel_tag): + version="1.0", + wheel_metadata_updates={"Tag": []}, + ) +- wheel_path = wheel_house.joinpath("fake-1.0-{}.whl".format(wheel_tag)) ++ wheel_path = wheel_house.joinpath(f"fake-1.0-{wheel_tag}.whl") + wheel_builder.save_to(wheel_path) + return wheel_path + +diff --git a/tests/functional/test_show.py b/tests/functional/test_show.py +index c19228b566..7047aa63aa 100644 +--- a/tests/functional/test_show.py ++++ b/tests/functional/test_show.py +@@ -16,7 +16,7 @@ def test_basic_show(script): + lines = result.stdout.splitlines() + assert len(lines) == 10 + assert 'Name: pip' in lines +- assert 'Version: {}'.format(__version__) in lines ++ assert f'Version: {__version__}' in lines + assert any(line.startswith('Location: ') for line in lines) + assert 'Requires: ' in lines + +diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py +index b2ea414ac9..3c1a3299c3 100644 +--- a/tests/functional/test_wheel.py ++++ b/tests/functional/test_wheel.py +@@ -227,7 +227,7 @@ def test_no_clean_option_blocks_cleaning_after_wheel( + + if resolver_variant == "legacy": + build = build / 'simple' +- message = "build/simple should still exist {}".format(result) ++ message = f"build/simple should still exist {result}" + assert exists(build), message + + +diff --git a/tests/functional/test_yaml.py b/tests/functional/test_yaml.py +index 4b5f38f97e..ba7b17531e 100644 +--- a/tests/functional/test_yaml.py ++++ b/tests/functional/test_yaml.py +@@ -77,7 +77,7 @@ def stripping_split(my_str, splitwith, count=None): + + for part in parts[1:]: + verb, args_str = stripping_split(part, " ", 1) +- assert verb in ["depends"], "Unknown verb {!r}".format(verb) ++ assert verb in ["depends"], f"Unknown verb {verb!r}" + + retval[verb] = stripping_split(args_str, ",") + +@@ -94,14 +94,14 @@ def handle_request(script, action, requirement, options, 
resolver_variant): + elif action == 'uninstall': + args = ['uninstall', '--yes'] + else: +- raise "Did not excpet action: {!r}".format(action) ++ raise f"Did not excpet action: {action!r}" + + if isinstance(requirement, str): + args.append(requirement) + elif isinstance(requirement, list): + args.extend(requirement) + else: +- raise "requirement neither str nor list {!r}".format(requirement) ++ raise f"requirement neither str nor list {requirement!r}" + + args.extend(options) + args.append("--verbose") +@@ -177,7 +177,7 @@ def test_yaml_based(script, case): + if action in request: + break + else: +- raise "Unsupported request {!r}".format(request) ++ raise f"Unsupported request {request!r}" + + # Perform the requested action + effect = handle_request(script, action, +diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py +index 26cb4ff94f..6a98d4acf7 100644 +--- a/tests/lib/__init__.py ++++ b/tests/lib/__init__.py +@@ -373,7 +373,7 @@ def _one_or_both(a, b): + if not a: + return str(b) + +- return "{a}\n{b}".format(a=a, b=b) ++ return f"{a}\n{b}" + + + def make_check_stderr_message(stderr, line, reason): +@@ -748,7 +748,7 @@ def _create_main_file(dir_path, name=None, output=None): + def main(): + print({!r}) + """.format(output)) +- filename = '{}.py'.format(name) ++ filename = f'{name}.py' + dir_path.joinpath(filename).write_text(text) + + +@@ -983,14 +983,14 @@ def add_file(path, text): + z.writestr(path, contents) + records.append((path, digest(contents), str(len(contents)))) + +- dist_info = "{}-{}.dist-info".format(name, version) +- record_path = "{}/RECORD".format(dist_info) ++ dist_info = f"{name}-{version}.dist-info" ++ record_path = f"{dist_info}/RECORD" + records = [(record_path, "", "")] + buf = BytesIO() + with ZipFile(buf, "w") as z: +- add_file("{}/WHEEL".format(dist_info), "Wheel-Version: 1.0") ++ add_file(f"{dist_info}/WHEEL", "Wheel-Version: 1.0") + add_file( +- "{}/METADATA".format(dist_info), ++ f"{dist_info}/METADATA", + dedent( + """\ + Metadata-Version: 2.1 +@@ -1023,10 +1023,10 @@ def create_basic_wheel_for_package( + # Fix wheel distribution name by replacing runs of non-alphanumeric + # characters with an underscore _ as per PEP 491 + name = re.sub(r"[^\w\d.]+", "_", name, re.UNICODE) +- archive_name = "{}-{}-py2.py3-none-any.whl".format(name, version) ++ archive_name = f"{name}-{version}-py2.py3-none-any.whl" + archive_path = script.scratch_path / archive_name + +- package_init_py = "{name}/__init__.py".format(name=name) ++ package_init_py = f"{name}/__init__.py" + assert package_init_py not in extra_files + extra_files[package_init_py] = textwrap.dedent( + """ +@@ -1037,7 +1037,7 @@ def hello(): + ).format(version=version, name=name) + + requires_dist = depends + [ +- '{package}; extra == "{extra}"'.format(package=package, extra=extra) ++ f'{package}; extra == "{extra}"' + for extra, packages in extras.items() + for package in packages + ] +@@ -1118,7 +1118,7 @@ def wrapper(fn): + subprocess.check_output(check_cmd) + except (OSError, subprocess.CalledProcessError): + return pytest.mark.skip( +- reason='{name} is not available'.format(name=name))(fn) ++ reason=f'{name} is not available')(fn) + return fn + return wrapper + +diff --git a/tests/lib/index.py b/tests/lib/index.py +index 0f507a0e7f..e6dc2a58be 100644 +--- a/tests/lib/index.py ++++ b/tests/lib/index.py +@@ -3,10 +3,10 @@ + + + def make_mock_candidate(version, yanked_reason=None, hex_digest=None): +- url = 'https://example.com/pkg-{}.tar.gz'.format(version) ++ url = 
f'https://example.com/pkg-{version}.tar.gz' + if hex_digest is not None: + assert len(hex_digest) == 64 +- url += '#sha256={}'.format(hex_digest) ++ url += f'#sha256={hex_digest}' + + link = Link(url, yanked_reason=yanked_reason) + candidate = InstallationCandidate('mypackage', version, link) +diff --git a/tests/lib/test_lib.py b/tests/lib/test_lib.py +index 655e0bdeea..47b97724f2 100644 +--- a/tests/lib/test_lib.py ++++ b/tests/lib/test_lib.py +@@ -19,7 +19,7 @@ def assert_error_startswith(exc_type, expected_start): + yield + + assert str(err.value).startswith(expected_start), ( +- 'full message: {}'.format(err.value) ++ f'full message: {err.value}' + ) + + +@@ -82,8 +82,8 @@ def run_stderr_with_prefix(self, script, prefix, **kwargs): + """ + Call run() that prints stderr with the given prefix. + """ +- text = '{}: hello, world\\n'.format(prefix) +- command = 'import sys; sys.stderr.write("{}")'.format(text) ++ text = f'{prefix}: hello, world\\n' ++ command = f'import sys; sys.stderr.write("{text}")' + args = [sys.executable, '-c', command] + script.run(*args, **kwargs) + +diff --git a/tests/lib/venv.py b/tests/lib/venv.py +index e3ed345065..6dbdb4dc75 100644 +--- a/tests/lib/venv.py ++++ b/tests/lib/venv.py +@@ -38,7 +38,7 @@ def _update_paths(self): + self.lib = Path(lib) + + def __repr__(self): +- return "".format(self.location) ++ return f"" + + def _create(self, clear=False): + if clear: +diff --git a/tests/lib/wheel.py b/tests/lib/wheel.py +index d460a126df..f96f5d06eb 100644 +--- a/tests/lib/wheel.py ++++ b/tests/lib/wheel.py +@@ -79,7 +79,7 @@ def message_from_dict(headers): + + def dist_info_path(name, version, path): + # type: (str, str, str) -> str +- return "{}-{}.dist-info/{}".format(name, version, path) ++ return f"{name}-{version}.dist-info/{path}" + + + def make_metadata_file( +@@ -162,7 +162,7 @@ def make_entry_points_file( + + lines = [] + for section, values in entry_points_data.items(): +- lines.append("[{}]".format(section)) ++ lines.append(f"[{section}]") + lines.extend(values) + + return File( +@@ -190,9 +190,9 @@ def make_metadata_files(name, version, files): + + def make_data_files(name, version, files): + # type: (str, str, Dict[str, AnyStr]) -> List[File] +- data_dir = "{}-{}.data".format(name, version) ++ data_dir = f"{name}-{version}.data" + return [ +- File("{}/{}".format(data_dir, name), ensure_binary(contents)) ++ File(f"{data_dir}/{name}", ensure_binary(contents)) + for name, contents in files.items() + ] + +@@ -258,7 +258,7 @@ def wheel_name(name, version, pythons, abis, platforms): + ".".join(abis), + ".".join(platforms), + ]) +- return "{}.whl".format(stem) ++ return f"{stem}.whl" + + + class WheelBuilder: +diff --git a/tests/unit/test_cmdoptions.py b/tests/unit/test_cmdoptions.py +index 150570e716..bac33ce77b 100644 +--- a/tests/unit/test_cmdoptions.py ++++ b/tests/unit/test_cmdoptions.py +@@ -21,4 +21,4 @@ + ]) + def test_convert_python_version(value, expected): + actual = _convert_python_version(value) +- assert actual == expected, 'actual: {!r}'.format(actual) ++ assert actual == expected, f'actual: {actual!r}' +diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py +index ac765c5602..22a85b6f27 100644 +--- a/tests/unit/test_collector.py ++++ b/tests/unit/test_collector.py +@@ -398,7 +398,7 @@ def test_parse_links__yanked_reason(anchor_html, expected): + encoding=None, + # parse_links() is cached by url, so we inject a random uuid to ensure + # the page content isn't cached. 
+- url='https://example.com/simple-{}/'.format(uuid.uuid4()), ++ url=f'https://example.com/simple-{uuid.uuid4()}/', + ) + links = list(parse_links(page)) + link, = links +@@ -580,7 +580,7 @@ def test_get_html_page_directory_append_index(tmpdir): + actual = _get_html_page(Link(dir_url), session=session) + assert mock_func.mock_calls == [ + mock.call(expected_url, session=session), +- ], 'actual calls: {}'.format(mock_func.mock_calls) ++ ], f'actual calls: {mock_func.mock_calls}' + + assert actual.content == fake_response.content + assert actual.encoding is None +@@ -636,7 +636,7 @@ def check_links_include(links, names): + """ + for name in names: + assert any(link.url.endswith(name) for link in links), ( +- 'name {!r} not among links: {}'.format(name, links) ++ f'name {name!r} not among links: {links}' + ) + + +diff --git a/tests/unit/test_commands.py b/tests/unit/test_commands.py +index 7fae427c69..59cbf930ff 100644 +--- a/tests/unit/test_commands.py ++++ b/tests/unit/test_commands.py +@@ -19,7 +19,7 @@ def check_commands(pred, expected): + """ + commands = [create_command(name) for name in sorted(commands_dict)] + actual = [command.name for command in commands if pred(command)] +- assert actual == expected, 'actual: {}'.format(actual) ++ assert actual == expected, f'actual: {actual}' + + + def test_commands_dict__order(): +diff --git a/tests/unit/test_compat.py b/tests/unit/test_compat.py +index cc024b570a..c1dee67d29 100644 +--- a/tests/unit/test_compat.py ++++ b/tests/unit/test_compat.py +@@ -60,7 +60,7 @@ def test_str_to_display(data, expected): + actual = str_to_display(data) + assert actual == expected, ( + # Show the encoding for easier troubleshooting. +- 'encoding: {!r}'.format(locale.getpreferredencoding()) ++ f'encoding: {locale.getpreferredencoding()!r}' + ) + + +@@ -79,7 +79,7 @@ def test_str_to_display__encoding(monkeypatch, data, encoding, expected): + actual = str_to_display(data) + assert actual == expected, ( + # Show the encoding for easier troubleshooting. +- 'encoding: {!r}'.format(locale.getpreferredencoding()) ++ f'encoding: {locale.getpreferredencoding()!r}' + ) + + +@@ -96,7 +96,7 @@ def test_str_to_display__decode_error(monkeypatch, caplog): + + assert actual == expected, ( + # Show the encoding for easier troubleshooting. 
+- 'encoding: {!r}'.format(locale.getpreferredencoding()) ++ f'encoding: {locale.getpreferredencoding()!r}' + ) + assert len(caplog.records) == 1 + record = caplog.records[0] +diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py +index d716e58253..69ebd5d4b7 100644 +--- a/tests/unit/test_finder.py ++++ b/tests/unit/test_finder.py +@@ -474,7 +474,7 @@ def test_evaluate_link__substring_fails(self, url, expected_msg): + def test_process_project_url(data): + project_name = 'simple' + index_url = data.index_url('simple') +- project_url = Link('{}/{}'.format(index_url, project_name)) ++ project_url = Link(f'{index_url}/{project_name}') + finder = make_test_finder(index_urls=[index_url]) + link_evaluator = finder.make_link_evaluator(project_name) + actual = finder.process_project_url( +diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py +index e719707ab7..b6f7f632cc 100644 +--- a/tests/unit/test_index.py ++++ b/tests/unit/test_index.py +@@ -763,7 +763,7 @@ def test_find_name_version_sep(fragment, canonical_name, expected): + def test_find_name_version_sep_failure(fragment, canonical_name): + with pytest.raises(ValueError) as ctx: + _find_name_version_sep(fragment, canonical_name) +- message = "{} does not match {}".format(fragment, canonical_name) ++ message = f"{fragment} does not match {canonical_name}" + assert str(ctx.value) == message + + +diff --git a/tests/unit/test_network_lazy_wheel.py b/tests/unit/test_network_lazy_wheel.py +index cf0e6213d3..e6747a18e6 100644 +--- a/tests/unit/test_network_lazy_wheel.py ++++ b/tests/unit/test_network_lazy_wheel.py +@@ -33,7 +33,7 @@ def mypy_whl_no_range(mock_server, shared_data): + mypy_whl = shared_data.packages / 'mypy-0.782-py3-none-any.whl' + mock_server.set_responses([file_response(mypy_whl)]) + mock_server.start() +- base_address = 'http://{}:{}'.format(mock_server.host, mock_server.port) ++ base_address = f'http://{mock_server.host}:{mock_server.port}' + yield "{}/{}".format(base_address, 'mypy-0.782-py3-none-any.whl') + mock_server.stop() + +diff --git a/tests/unit/test_network_session.py b/tests/unit/test_network_session.py +index e9b575a96b..044d1fb923 100644 +--- a/tests/unit/test_network_session.py ++++ b/tests/unit/test_network_session.py +@@ -13,7 +13,7 @@ def get_user_agent(): + def test_user_agent(): + user_agent = get_user_agent() + +- assert user_agent.startswith("pip/{}".format(__version__)) ++ assert user_agent.startswith(f"pip/{__version__}") + + + @pytest.mark.parametrize('name, expected_like_ci', [ +@@ -115,7 +115,7 @@ def test_add_trusted_host(self): + session.add_trusted_host('host3') + assert session.pip_trusted_origins == [ + ('host1', None), ('host3', None), ('host2', None) +- ], 'actual: {}'.format(session.pip_trusted_origins) ++ ], f'actual: {session.pip_trusted_origins}' + + session.add_trusted_host('host4:8080') + prefix4 = 'https://host4:8080/' +diff --git a/tests/unit/test_operations_prepare.py b/tests/unit/test_operations_prepare.py +index 9bdecc8e0f..17fc94929c 100644 +--- a/tests/unit/test_operations_prepare.py ++++ b/tests/unit/test_operations_prepare.py +@@ -189,7 +189,7 @@ def test_unpack_url_bad_hash(self, tmpdir, data, + Test when the file url hash fragment is wrong + """ + self.prep(tmpdir, data) +- url = '{}#md5=bogus'.format(self.dist_url.url) ++ url = f'{self.dist_url.url}#md5=bogus' + dist_url = Link(url) + with pytest.raises(HashMismatch): + unpack_url(dist_url, +diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py +index b02658af10..6cde2e212f 100644 +--- 
a/tests/unit/test_options.py ++++ b/tests/unit/test_options.py +@@ -262,7 +262,7 @@ def test_subcommand_option_before_subcommand_fails(self): + @contextmanager + def tmpconfig(option, value, section='global'): + with NamedTemporaryFile(mode='w', delete=False) as f: +- f.write('[{}]\n{}={}\n'.format(section, option, value)) ++ f.write(f'[{section}]\n{option}={value}\n') + name = f.name + try: + yield name +@@ -275,7 +275,7 @@ class TestCountOptions(AddFakeCommandMixin): + @pytest.mark.parametrize('option', ('verbose', 'quiet')) + @pytest.mark.parametrize('value', range(4)) + def test_cli_long(self, option, value): +- flags = ['--{}'.format(option)] * value ++ flags = [f'--{option}'] * value + opt1, args1 = main(flags+['fake']) + opt2, args2 = main(['fake']+flags) + assert getattr(opt1, option) == getattr(opt2, option) == value +diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py +index 0c0b1ce4a8..c7be5fe1ba 100644 +--- a/tests/unit/test_req.py ++++ b/tests/unit/test_req.py +@@ -454,14 +454,14 @@ def test_markers_semicolon(self): + def test_markers_url(self): + # test "URL; markers" syntax + url = 'http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz' +- line = '{}; python_version >= "3"'.format(url) ++ line = f'{url}; python_version >= "3"' + req = install_req_from_line(line) + assert req.link.url == url, req.url + assert str(req.markers) == 'python_version >= "3"' + + # without space, markers are part of the URL + url = 'http://foo.com/?p=bar.git;a=snapshot;h=v0.1;sf=tgz' +- line = '{};python_version >= "3"'.format(url) ++ line = f'{url};python_version >= "3"' + req = install_req_from_line(line) + assert req.link.url == line, req.url + assert req.markers is None +@@ -560,7 +560,7 @@ def test_unidentifiable_name(self): + with pytest.raises(InstallationError) as e: + install_req_from_line(test_name) + err_msg = e.value.args[0] +- assert "Invalid requirement: '{}'".format(test_name) == err_msg ++ assert f"Invalid requirement: '{test_name}'" == err_msg + + def test_requirement_file(self): + req_file_path = os.path.join(self.tempdir, 'test.txt') +diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py +index 0f188d7b0a..86f2731e9e 100644 +--- a/tests/unit/test_req_file.py ++++ b/tests/unit/test_req_file.py +@@ -270,7 +270,7 @@ def test_yield_editable_requirement(self, line_processor): + + def test_yield_editable_constraint(self, line_processor): + url = 'git+https://url#egg=SomeProject' +- line = '-e {}'.format(url) ++ line = f'-e {url}' + filename = 'filename' + comes_from = '-c {} (line {})'.format(filename, 1) + req = install_req_from_editable( +@@ -432,7 +432,7 @@ def get_file_content(filename, *args, **kwargs): + return None, '-r reqs.txt' + elif filename == 'http://me.com/me/reqs.txt': + return None, req_name +- assert False, 'Unexpected file requested {}'.format(filename) ++ assert False, f'Unexpected file requested {filename}' + + monkeypatch.setattr( + pip._internal.req.req_file, 'get_file_content', get_file_content +@@ -478,7 +478,7 @@ def test_absolute_local_nested_req_files( + # POSIX-ify the path, since Windows backslashes aren't supported. 
+ other_req_file_str = str(other_req_file).replace('\\', '/') + +- req_file.write_text('-r {}'.format(other_req_file_str)) ++ req_file.write_text(f'-r {other_req_file_str}') + other_req_file.write_text(req_name) + + reqs = list(parse_reqfile(str(req_file), session=session)) +@@ -498,10 +498,10 @@ def test_absolute_http_nested_req_file_in_local( + + def get_file_content(filename, *args, **kwargs): + if filename == str(req_file): +- return None, '-r {}'.format(nested_req_file) ++ return None, f'-r {nested_req_file}' + elif filename == nested_req_file: + return None, req_name +- assert False, 'Unexpected file requested {}'.format(filename) ++ assert False, f'Unexpected file requested {filename}' + + monkeypatch.setattr( + pip._internal.req.req_file, 'get_file_content', get_file_content +diff --git a/tests/unit/test_resolution_legacy_resolver.py b/tests/unit/test_resolution_legacy_resolver.py +index f56ecd96e7..90e98a691d 100644 +--- a/tests/unit/test_resolution_legacy_resolver.py ++++ b/tests/unit/test_resolution_legacy_resolver.py +@@ -34,7 +34,7 @@ def __init__(self, metadata, metadata_name=None): + self.metadata = metadata + + def __str__(self): +- return ''.format(self.project_name) ++ return f'' + + def has_metadata(self, name): + return (name == self.metadata_name) +@@ -47,7 +47,7 @@ def get_metadata(self, name): + def make_fake_dist(requires_python=None, metadata_name=None): + metadata = 'Name: test\n' + if requires_python is not None: +- metadata += 'Requires-Python:{}'.format(requires_python) ++ metadata += f'Requires-Python:{requires_python}' + + return FakeDist(metadata, metadata_name=metadata_name) + +diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py +index 41501d38b0..7b24c8983b 100644 +--- a/tests/unit/test_utils.py ++++ b/tests/unit/test_utils.py +@@ -63,11 +63,11 @@ def setup(self): + self.user_site = 'USER_SITE' + self.user_site_egglink = os.path.join( + self.user_site, +- '{}.egg-link'.format(project) ++ f'{project}.egg-link' + ) + self.site_packages_egglink = os.path.join( + self.site_packages, +- '{}.egg-link'.format(project), ++ f'{project}.egg-link', + ) + + # patches +@@ -440,7 +440,7 @@ def test_rmtree_retries_for_3sec(tmpdir, monkeypatch): + def test_path_to_display(monkeypatch, path, fs_encoding, expected): + monkeypatch.setattr(sys, 'getfilesystemencoding', lambda: fs_encoding) + actual = path_to_display(path) +- assert actual == expected, 'actual: {!r}'.format(actual) ++ assert actual == expected, f'actual: {actual!r}' + + + class Test_normalize_path: +diff --git a/tests/unit/test_utils_distutils_args.py b/tests/unit/test_utils_distutils_args.py +index 5bca65018e..96cdb18042 100644 +--- a/tests/unit/test_utils_distutils_args.py ++++ b/tests/unit/test_utils_distutils_args.py +@@ -50,7 +50,7 @@ def test_multiple_invocations_do_not_keep_options(): + ("root", "11"), + ]) + def test_all_value_options_work(name, value): +- result = parse_distutils_args(["--{}={}".format(name, value)]) ++ result = parse_distutils_args([f"--{name}={value}"]) + key_name = name.replace("-", "_") + assert result[key_name] == value + +diff --git a/tests/unit/test_utils_parallel.py b/tests/unit/test_utils_parallel.py +index d5449988e3..5a23f7d655 100644 +--- a/tests/unit/test_utils_parallel.py ++++ b/tests/unit/test_utils_parallel.py +@@ -62,7 +62,7 @@ def test_have_sem_open(name, monkeypatch): + """Test fallback when sem_open is available.""" + monkeypatch.setattr(DUNDER_IMPORT, have_sem_open) + with tmp_import_parallel() as parallel: +- assert getattr(parallel, name) is 
getattr(parallel, '_{}'.format(name)) ++ assert getattr(parallel, name) is getattr(parallel, f'_{name}') + + + @mark.parametrize('name', MAPS) +diff --git a/tests/unit/test_utils_subprocess.py b/tests/unit/test_utils_subprocess.py +index 8a67d7d8da..7b9900cd62 100644 +--- a/tests/unit/test_utils_subprocess.py ++++ b/tests/unit/test_utils_subprocess.py +@@ -48,7 +48,7 @@ def test_make_subprocess_output_error(): + line2 + line3 + ----------------------------------------""") +- assert actual == expected, 'actual: {}'.format(actual) ++ assert actual == expected, f'actual: {actual}' + + + def test_make_subprocess_output_error__non_ascii_command_arg(monkeypatch): +@@ -76,7 +76,7 @@ def test_make_subprocess_output_error__non_ascii_command_arg(monkeypatch): + cwd: /path/to/cwd + Complete output (0 lines): + ----------------------------------------""") +- assert actual == expected, 'actual: {}'.format(actual) ++ assert actual == expected, f'actual: {actual}' + + + @pytest.mark.skipif("sys.version_info < (3,)") +@@ -98,7 +98,7 @@ def test_make_subprocess_output_error__non_ascii_cwd_python_3(monkeypatch): + cwd: /path/to/cwd/déf + Complete output (0 lines): + ----------------------------------------""") +- assert actual == expected, 'actual: {}'.format(actual) ++ assert actual == expected, f'actual: {actual}' + + + @pytest.mark.parametrize('encoding', [ +@@ -128,7 +128,7 @@ def test_make_subprocess_output_error__non_ascii_cwd_python_2( + cwd: /path/to/cwd/déf + Complete output (0 lines): + ----------------------------------------""") +- assert actual == expected, 'actual: {}'.format(actual) ++ assert actual == expected, f'actual: {actual}' + + + # This test is mainly important for checking unicode in Python 2. +@@ -150,7 +150,7 @@ def test_make_subprocess_output_error__non_ascii_line(): + Complete output (1 lines): + curly-quote: \u2018 + ----------------------------------------""") +- assert actual == expected, 'actual: {}'.format(actual) ++ assert actual == expected, f'actual: {actual}' + + + class FakeSpinner(SpinnerInterface): +@@ -205,7 +205,7 @@ def check_result( + + records = caplog.record_tuples + if len(records) != len(expected_records): +- raise RuntimeError('{} != {}'.format(records, expected_records)) ++ raise RuntimeError(f'{records} != {expected_records}') + + for record, expected_record in zip(records, expected_records): + # Check the logger_name and log level parts exactly. +@@ -316,7 +316,7 @@ def test_info_logging__subprocess_error(self, capfd, caplog): + 'Hello', + 'fail', + 'world', +- ], 'lines: {}'.format(actual) # Show the full output on failure. ++ ], f'lines: {actual}' # Show the full output on failure. 
+ + assert command_line.startswith(' command: ') + assert command_line.endswith('print("world"); exit("fail")\'') +@@ -375,7 +375,7 @@ def test_spinner_finish( + expected_spin_count = expected[2] + + command = ( +- 'print("Hello"); print("world"); exit({})'.format(exit_status) ++ f'print("Hello"); print("world"); exit({exit_status})' + ) + args, spinner = self.prepare_call(caplog, log_level, command=command) + try: +diff --git a/tests/unit/test_utils_unpacking.py b/tests/unit/test_utils_unpacking.py +index 94121acff1..aea70efbc0 100644 +--- a/tests/unit/test_utils_unpacking.py ++++ b/tests/unit/test_utils_unpacking.py +@@ -66,14 +66,14 @@ def confirm_files(self): + if expected_contents is not None: + with open(path, mode='rb') as f: + contents = f.read() +- assert contents == expected_contents, 'fname: {}'.format(fname) ++ assert contents == expected_contents, f'fname: {fname}' + if sys.platform == 'win32': + # the permissions tests below don't apply in windows + # due to os.chmod being a noop + continue + mode = self.mode(path) + assert mode == expected_mode, ( +- "mode: {}, expected mode: {}".format(mode, expected_mode) ++ f"mode: {mode}, expected mode: {expected_mode}" + ) + + def make_zip_file(self, filename, file_list): +diff --git a/tests/unit/test_utils_wheel.py b/tests/unit/test_utils_wheel.py +index abd3011480..a73ecd6c3d 100644 +--- a/tests/unit/test_utils_wheel.py ++++ b/tests/unit/test_utils_wheel.py +@@ -112,7 +112,7 @@ def test_wheel_version_fails_on_no_wheel_version(): + def test_wheel_version_fails_on_bad_wheel_version(version): + with pytest.raises(UnsupportedWheel) as e: + wheel.wheel_version( +- message_from_string("Wheel-Version: {}".format(version)) ++ message_from_string(f"Wheel-Version: {version}") + ) + assert "invalid Wheel-Version" in str(e.value) + +diff --git a/tools/automation/release/__init__.py b/tools/automation/release/__init__.py +index c1364cfc46..738214491c 100644 +--- a/tools/automation/release/__init__.py ++++ b/tools/automation/release/__init__.py +@@ -98,13 +98,13 @@ def update_version_file(version: str, filepath: str) -> None: + with open(filepath, "w", encoding="utf-8") as f: + for line in content: + if line.startswith("__version__ ="): +- f.write('__version__ = "{}"\n'.format(version)) ++ f.write(f'__version__ = "{version}"\n') + file_modified = True + else: + f.write(line) + + assert file_modified, \ +- "Version file {} did not get modified".format(filepath) ++ f"Version file {filepath} did not get modified" + + + def create_git_tag(session: Session, tag_name: str, *, message: str) -> None: -- Gitee From c1f9dba23ff6f4c4a049e9188a66072930c2c93f Mon Sep 17 00:00:00 2001 From: openeuler-ci-bot <80474298@qq.com> Date: Sat, 26 Dec 2020 18:59:03 +0800 Subject: [PATCH 08/10] [patch tracking] 20201226185849754648 - https://github.com/pypa/pip/commit/a48ad5385b234097d51283b08c3d933fd81ef534 --- ...d5385b234097d51283b08c3d933fd81ef534.patch | 81 +++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 a48ad5385b234097d51283b08c3d933fd81ef534.patch diff --git a/a48ad5385b234097d51283b08c3d933fd81ef534.patch b/a48ad5385b234097d51283b08c3d933fd81ef534.patch new file mode 100644 index 0000000..bc9559f --- /dev/null +++ b/a48ad5385b234097d51283b08c3d933fd81ef534.patch @@ -0,0 +1,81 @@ +diff --git a/news/dc9e5ecc-9fc9-4762-914e-34014e8d09bf.trivial.rst b/news/dc9e5ecc-9fc9-4762-914e-34014e8d09bf.trivial.rst +new file mode 100644 +index 0000000000..e69de29bb2 +diff --git a/setup.py b/setup.py +index b7d0e8f51d..b3aaa36139 100644 +--- a/setup.py 
++++ b/setup.py
+@@ -1,7 +1,6 @@
+ # The following comment should be removed at some point in the future.
+ # mypy: disallow-untyped-defs=False
+ 
+-import codecs
+ import os
+ import sys
+ 
+@@ -12,7 +11,7 @@ def read(rel_path):
+     here = os.path.abspath(os.path.dirname(__file__))
+     # intentionally *not* adding an encoding option to open, See:
+     # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
+-    with codecs.open(os.path.join(here, rel_path), 'r') as fp:
++    with open(os.path.join(here, rel_path), 'r') as fp:
+         return fp.read()
+ 
+ 
+diff --git a/src/pip/_internal/pyproject.py b/src/pip/_internal/pyproject.py
+index 38192b8507..68ca53bf0b 100644
+--- a/src/pip/_internal/pyproject.py
++++ b/src/pip/_internal/pyproject.py
+@@ -1,4 +1,3 @@
+-import io
+ import os
+ from collections import namedtuple
+ 
+@@ -62,7 +61,7 @@ def load_pyproject_toml(
+     has_setup = os.path.isfile(setup_py)
+ 
+     if has_pyproject:
+-        with io.open(pyproject_toml, encoding="utf-8") as f:
++        with open(pyproject_toml, encoding="utf-8") as f:
+             pp_toml = toml.load(f)
+         build_system = pp_toml.get("build-system")
+     else:
+diff --git a/src/pip/_internal/utils/virtualenv.py b/src/pip/_internal/utils/virtualenv.py
+index b387ec0b08..acaceee281 100644
+--- a/src/pip/_internal/utils/virtualenv.py
++++ b/src/pip/_internal/utils/virtualenv.py
+@@ -1,4 +1,3 @@
+-import io
+ import logging
+ import os
+ import re
+@@ -52,7 +51,7 @@ def _get_pyvenv_cfg_lines():
+     try:
+         # Although PEP 405 does not specify, the built-in venv module always
+         # writes with UTF-8. (pypa/pip#8717)
+-        with io.open(pyvenv_cfg_file, encoding='utf-8') as f:
++        with open(pyvenv_cfg_file, encoding='utf-8') as f:
+             return f.read().splitlines()  # avoids trailing newlines
+     except IOError:
+         return None
+diff --git a/tools/automation/release/__init__.py b/tools/automation/release/__init__.py
+index 738214491c..20775d5e21 100644
+--- a/tools/automation/release/__init__.py
++++ b/tools/automation/release/__init__.py
+@@ -4,7 +4,6 @@
+ """
+ 
+ import contextlib
+-import io
+ import os
+ import pathlib
+ import subprocess
+@@ -75,7 +74,7 @@ def generate_authors(filename: str) -> None:
+     authors = get_author_list()
+ 
+     # Write our authors to the AUTHORS file
+-    with io.open(filename, "w", encoding="utf-8") as fp:
++    with open(filename, "w", encoding="utf-8") as fp:
+         fp.write("\n".join(authors))
+         fp.write("\n")
+ 
-- 
Gitee

From 804510742b96b53ff58ddecc2f72025c022185c4 Mon Sep 17 00:00:00 2001
From: openeuler-ci-bot <80474298@qq.com>
Date: Sat, 26 Dec 2020 18:59:04 +0800
Subject: [PATCH 09/10] [patch tracking] 20201226185849754648 - https://github.com/pypa/pip/commit/c383ec3b41cb7ba919ec9c646ea75dbc3d5f0563

---
 ...ec3b41cb7ba919ec9c646ea75dbc3d5f0563.patch | 61 +++++++++++++++++++
 1 file changed, 61 insertions(+)
 create mode 100644 c383ec3b41cb7ba919ec9c646ea75dbc3d5f0563.patch

diff --git a/c383ec3b41cb7ba919ec9c646ea75dbc3d5f0563.patch b/c383ec3b41cb7ba919ec9c646ea75dbc3d5f0563.patch
new file mode 100644
index 0000000..3187d87
--- /dev/null
+++ b/c383ec3b41cb7ba919ec9c646ea75dbc3d5f0563.patch
@@ -0,0 +1,61 @@
+diff --git a/docs/html/conf.py b/docs/html/conf.py
+index fd124ebb69..f81cb6b7da 100644
+--- a/docs/html/conf.py
++++ b/docs/html/conf.py
+@@ -11,6 +11,7 @@
+ 
+ import glob
+ import os
++import pathlib
+ import re
+ import sys
+ 
+@@ -34,6 +35,7 @@
+     'sphinx.ext.intersphinx',
+     # third-party:
+     'sphinx_inline_tabs',
++    'sphinxcontrib.towncrier',
+     # in-tree:
+     'docs_feedback_sphinxext',
+     'pip_sphinxext',
+@@ -314,3 +316,10 @@ def to_document_name(path, base_dir):
+     'What content was useful?',
+     'What content was not useful?',
+ )
++
++# -- Options for towncrier_draft extension -----------------------------------
++
++towncrier_draft_autoversion_mode = 'draft'  # or: 'sphinx-release', 'sphinx-version'
++towncrier_draft_include_empty = True
++towncrier_draft_working_directory = pathlib.Path(docs_dir).parent
++# Not yet supported: towncrier_draft_config_path = 'pyproject.toml'  # relative to cwd
+diff --git a/docs/html/news.rst b/docs/html/news.rst
+index 137cddf36a..8b54a02e63 100644
+--- a/docs/html/news.rst
++++ b/docs/html/news.rst
+@@ -7,4 +7,6 @@ Changelog
+ Major and minor releases of pip also include changes listed within
+ prior beta releases.
+ 
++.. towncrier-draft-entries:: |release|, unreleased as on
++
+ .. include:: ../../NEWS.rst
+diff --git a/news/9172.doc.rst b/news/9172.doc.rst
+new file mode 100644
+index 0000000000..fc0063766b
+--- /dev/null
++++ b/news/9172.doc.rst
+@@ -0,0 +1 @@
++Render the unreleased pip version change notes on the news page in docs.
+diff --git a/tools/requirements/docs.txt b/tools/requirements/docs.txt
+index 0c5103d0a2..a5aae67c10 100644
+--- a/tools/requirements/docs.txt
++++ b/tools/requirements/docs.txt
+@@ -1,6 +1,7 @@
+ sphinx == 3.2.1
+ furo
+ sphinx-inline-tabs
++sphinxcontrib-towncrier
+ 
+ # `docs.pipext` uses pip's internals to generate documentation. So, we install
+ # the current directory to make it work.
-- 
Gitee

From 125c3200a578da72db27ddfe3be3a2c68b67e054 Mon Sep 17 00:00:00 2001
From: openeuler-ci-bot <80474298@qq.com>
Date: Sat, 26 Dec 2020 18:59:04 +0800
Subject: [PATCH 10/10] [patch tracking] 20201226185849754648 - update spec file

---
 python-pip.spec | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

diff --git a/python-pip.spec b/python-pip.spec
index 3d035b5..714a56e 100644
--- a/python-pip.spec
+++ b/python-pip.spec
@@ -6,7 +6,7 @@ pip is the package installer for Python. 
You can use pip to install packages fro %global bashcompdir %(b=$(pkg-config --variable=completionsdir bash-completion 2>/dev/null); echo ${b:-%{_sysconfdir}/bash_completion.d}) Name: python-%{srcname} Version: 20.2.2 -Release: 3 +Release: 4 Summary: A tool for installing and managing Python packages License: MIT and Python and ASL 2.0 and BSD and ISC and LGPLv2 and MPLv2.0 and (ASL 2.0 or BSD) URL: http://www.pip-installer.org @@ -16,6 +16,15 @@ Patch1: allow-stripping-given-prefix-from-wheel-RECORD-files.patch Patch2: emit-a-warning-when-running-with-root-privileges.patch Patch3: remove-existing-dist-only-if-path-conflicts.patch Patch6000: dummy-certifi.patch +Patch6001: 7a6b1a580529f737069342849255b165f69c4110.patch +Patch6002: 49c898e4b64ff7ba4fc42d047b53bf23d57ee59e.patch +Patch6003: 31eb524ff0d18d6190b30a0c7821c0c857a7523c.patch +Patch6004: 7407bc1e8a7e3482241f0038acbfa3b76e4a0d83.patch +Patch6005: f2d2d10e4f76fca69efe593e7aecd2717b3de06c.patch +Patch6006: fecfa11f5a859aac362cd83978d5a09a5ee4cd11.patch +Patch6007: 86afa8904382f8939612750315ad2e5389d31ec5.patch +Patch6008: a48ad5385b234097d51283b08c3d933fd81ef534.patch +Patch6009: c383ec3b41cb7ba919ec9c646ea75dbc3d5f0563.patch Source10: pip-allow-older-versions.patch %description %{_description} @@ -112,6 +121,9 @@ install -p dist/%{python_wheelname} -t %{buildroot}%{python_wheeldir} %{python_wheeldir}/%{python_wheelname} %changelog +* 20201226185849754648 patch-tracking 20.2.2-4 +- append patch file of upstream repository from <7a6b1a580529f737069342849255b165f69c4110> to + * Wed Nov 4 2020 wangjie -20.2.2-3 - Type:NA - ID:NA @@ -161,4 +173,4 @@ install -p dist/%{python_wheelname} -t %{buildroot}%{python_wheeldir} - DESC: Synchronize a patch * Mon Sep 23 2019 openEuler Buildteam - 18.0-6 -- Package init +- Package init \ No newline at end of file -- Gitee