subscription table shows more than one app

2025-02-03 13:10:12 -05:00
parent ef5f57e678
commit d3d0806d5a
1841 changed files with 2390 additions and 3359 deletions

View File

@@ -1,6 +1,5 @@
from typing import List, Optional
import pip._internal.utils.inject_securetransport # noqa
from pip._internal.utils import _log
# init_logging() must be called before any call to logging.getLogger()

View File

@@ -78,12 +78,10 @@ class Cache:
if can_not_cache:
return []
candidates = []
path = self.get_path_for_link(link)
if os.path.isdir(path):
for candidate in os.listdir(path):
candidates.append((candidate, path))
return candidates
return [(candidate, path) for candidate in os.listdir(path)]
return []
def get_path_for_link(self, link: Link) -> str:
"""Return a directory to store cached items in for link."""

View File

@@ -71,8 +71,9 @@ def autocomplete() -> None:
for opt in subcommand.parser.option_list_all:
if opt.help != optparse.SUPPRESS_HELP:
for opt_str in opt._long_opts + opt._short_opts:
options.append((opt_str, opt.nargs))
options += [
(opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
]
# filter out previously specified options from available options
prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
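For context, the completion candidates come straight from optparse's option objects. A standalone sketch (plain optparse, not pip's parser classes) of where _long_opts, _short_opts and nargs come from:

    import optparse

    parser = optparse.OptionParser()
    opt = parser.add_option("-r", "--requirement", help="Install from the given requirements file.")
    # These attributes are what the autocomplete loop above iterates over.
    print(opt._long_opts + opt._short_opts)  # ['--requirement', '-r']
    print(opt.nargs)                         # 1 (one value expected after the flag)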

View File

@@ -181,7 +181,7 @@ class Command(CommandContextMixIn):
assert isinstance(status, int)
return status
except DiagnosticPipError as exc:
logger.error("[present-rich] %s", exc)
logger.error("%s", exc, extra={"rich": True})
logger.debug("Exception information:", exc_info=True)
return ERROR
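The extra={"rich": True} call attaches a "rich" attribute to the emitted LogRecord, which a handler can then inspect. A minimal, self-contained sketch of that mechanism (the handler below is hypothetical, not pip's actual rich-aware handler):

    import logging

    class RichAwareHandler(logging.StreamHandler):
        def emit(self, record: logging.LogRecord) -> None:
            # Attributes passed via `extra=` show up directly on the record.
            if getattr(record, "rich", False):
                record.msg = f"[rendered with rich] {record.msg}"
            super().emit(record)

    logger = logging.getLogger("demo")
    logger.addHandler(RichAwareHandler())
    logger.propagate = False
    logger.error("%s", "something went wrong", extra={"rich": True})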

View File

@@ -92,10 +92,10 @@ def check_dist_restriction(options: Values, check_target: bool = False) -> None:
)
if check_target:
if dist_restriction_set and not options.target_dir:
if not options.dry_run and dist_restriction_set and not options.target_dir:
raise CommandError(
"Can not use any platform or abi specific options unless "
"installing via '--target'"
"installing via '--target' or using '--dry-run'"
)
@@ -582,10 +582,7 @@ def _handle_python_version(
"""
version_info, error_msg = _convert_python_version(value)
if error_msg is not None:
msg = "invalid --python-version value: {!r}: {}".format(
value,
error_msg,
)
msg = f"invalid --python-version value: {value!r}: {error_msg}"
raise_option_error(parser, option=option, msg=msg)
parser.values.python_version = version_info
@@ -670,7 +667,10 @@ def prefer_binary() -> Option:
dest="prefer_binary",
action="store_true",
default=False,
help="Prefer older binary packages over newer source packages.",
help=(
"Prefer binary packages over source packages, even if the "
"source packages are newer."
),
)
@@ -823,7 +823,7 @@ def _handle_config_settings(
) -> None:
key, sep, val = value.partition("=")
if sep != "=":
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
dest = getattr(parser.values, option.dest)
if dest is None:
dest = {}
@@ -892,7 +892,7 @@ disable_pip_version_check: Callable[..., Option] = partial(
"--disable-pip-version-check",
dest="disable_pip_version_check",
action="store_true",
default=False,
default=True,
help="Don't periodically check PyPI to determine whether a new version "
"of pip is available for download. Implied with --no-index.",
)
@@ -918,13 +918,13 @@ def _handle_merge_hash(
algo, digest = value.split(":", 1)
except ValueError:
parser.error(
"Arguments to {} must be a hash name " # noqa
f"Arguments to {opt_str} must be a hash name "
"followed by a value, like --hash=sha256:"
"abcde...".format(opt_str)
"abcde..."
)
if algo not in STRONG_HASHES:
parser.error(
"Allowed hash algorithms for {} are {}.".format( # noqa
"Allowed hash algorithms for {} are {}.".format(
opt_str, ", ".join(STRONG_HASHES)
)
)
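Roughly, the callback above expects values of the form <algorithm>:<hexdigest>. A small stand-alone illustration of the same parsing (names and values are made up; this is not pip's code):

    STRONG_HASHES = ["sha256", "sha384", "sha512"]

    def parse_hash_option(value: str) -> tuple:
        try:
            algo, digest = value.split(":", 1)
        except ValueError:
            raise ValueError("must be a hash name followed by a value, like --hash=sha256:abcde...")
        if algo not in STRONG_HASHES:
            raise ValueError(f"allowed hash algorithms are {', '.join(STRONG_HASHES)}")
        return algo, digest

    print(parse_hash_option("sha256:deadbeefcafe"))  # ('sha256', 'deadbeefcafe')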

View File

@@ -229,9 +229,9 @@ class ConfigOptionParser(CustomOptionParser):
val = strtobool(val)
except ValueError:
self.error(
"{} is not a valid value for {} option, " # noqa
f"{val} is not a valid value for {key} option, "
"please specify a boolean value like yes/no, "
"true/false or 1/0 instead.".format(val, key)
"true/false or 1/0 instead."
)
elif option.action == "count":
with suppress(ValueError):
@@ -240,10 +240,10 @@ class ConfigOptionParser(CustomOptionParser):
val = int(val)
if not isinstance(val, int) or val < 0:
self.error(
"{} is not a valid value for {} option, " # noqa
f"{val} is not a valid value for {key} option, "
"please instead specify either a non-negative integer "
"or a boolean value like yes/no or false/true "
"which is equivalent to 1/0.".format(val, key)
"which is equivalent to 1/0."
)
elif option.action == "append":
val = val.split()

View File

@@ -58,12 +58,9 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]:
return None
try:
import truststore
except ImportError:
raise CommandError(
"To use the truststore feature, 'truststore' must be installed into "
"pip's current environment."
)
from pip._vendor import truststore
except ImportError as e:
raise CommandError(f"The truststore feature is unavailable: {e}")
return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
@@ -123,7 +120,7 @@ class SessionCommandMixin(CommandContextMixIn):
ssl_context = None
session = PipSession(
cache=os.path.join(cache_dir, "http") if cache_dir else None,
cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
retries=retries if retries is not None else options.retries,
trusted_hosts=options.trusted_hosts,
index_urls=self._get_index_urls(options),
@@ -268,7 +265,7 @@ class RequirementCommand(IndexGroupCommand):
if "legacy-resolver" in options.deprecated_features_enabled:
return "legacy"
return "2020-resolver"
return "resolvelib"
@classmethod
def make_requirement_preparer(
@@ -290,7 +287,7 @@ class RequirementCommand(IndexGroupCommand):
legacy_resolver = False
resolver_variant = cls.determine_resolver_variant(options)
if resolver_variant == "2020-resolver":
if resolver_variant == "resolvelib":
lazy_wheel = "fast-deps" in options.features_enabled
if lazy_wheel:
logger.warning(
@@ -352,7 +349,7 @@ class RequirementCommand(IndexGroupCommand):
# The long import name and duplicated invocation are needed to convince
# Mypy to typecheck this correctly. Otherwise it would complain about the
# "Resolver" class being redefined.
if resolver_variant == "2020-resolver":
if resolver_variant == "resolvelib":
import pip._internal.resolution.resolvelib.resolver
return pip._internal.resolution.resolvelib.resolver.Resolver(

View File

@@ -3,10 +3,10 @@ import textwrap
from optparse import Values
from typing import Any, List
import pip._internal.utils.filesystem as filesystem
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils import filesystem
from pip._internal.utils.logging import getLogger
logger = getLogger(__name__)
@@ -93,24 +93,30 @@ class CacheCommand(Command):
num_http_files = len(self._find_http_files(options))
num_packages = len(self._find_wheels(options, "*"))
http_cache_location = self._cache_dir(options, "http")
http_cache_location = self._cache_dir(options, "http-v2")
old_http_cache_location = self._cache_dir(options, "http")
wheels_cache_location = self._cache_dir(options, "wheels")
http_cache_size = filesystem.format_directory_size(http_cache_location)
http_cache_size = filesystem.format_size(
filesystem.directory_size(http_cache_location)
+ filesystem.directory_size(old_http_cache_location)
)
wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
message = (
textwrap.dedent(
"""
Package index page cache location: {http_cache_location}
Package index page cache location (pip v23.3+): {http_cache_location}
Package index page cache location (older pips): {old_http_cache_location}
Package index page cache size: {http_cache_size}
Number of HTTP files: {num_http_files}
Locally built wheels location: {wheels_cache_location}
Locally built wheels size: {wheels_cache_size}
Number of locally built wheels: {package_count}
"""
""" # noqa: E501
)
.format(
http_cache_location=http_cache_location,
old_http_cache_location=old_http_cache_location,
http_cache_size=http_cache_size,
num_http_files=num_http_files,
wheels_cache_location=wheels_cache_location,
@@ -151,14 +157,8 @@ class CacheCommand(Command):
logger.info("\n".join(sorted(results)))
def format_for_abspath(self, files: List[str]) -> None:
if not files:
return
results = []
for filename in files:
results.append(filename)
logger.info("\n".join(sorted(results)))
if files:
logger.info("\n".join(sorted(files)))
def remove_cache_items(self, options: Values, args: List[Any]) -> None:
if len(args) > 1:
@@ -175,7 +175,7 @@ class CacheCommand(Command):
files += self._find_http_files(options)
else:
# Add the pattern to the log message
no_matching_msg += ' for pattern "{}"'.format(args[0])
no_matching_msg += f' for pattern "{args[0]}"'
if not files:
logger.warning(no_matching_msg)
@@ -195,8 +195,11 @@ class CacheCommand(Command):
return os.path.join(options.cache_dir, subdir)
def _find_http_files(self, options: Values) -> List[str]:
http_dir = self._cache_dir(options, "http")
return filesystem.find_files(http_dir, "*")
old_http_dir = self._cache_dir(options, "http")
new_http_dir = self._cache_dir(options, "http-v2")
return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
new_http_dir, "*"
)
def _find_wheels(self, options: Values, pattern: str) -> List[str]:
wheel_dir = self._cache_dir(options, "wheels")
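The size shown for the HTTP cache is now the sum of the new "http-v2" directory and the legacy "http" directory. A rough stand-in for the filesystem.directory_size helper used above (assumed behavior; the cache path is an example):

    import os

    def directory_size(path: str) -> int:
        # Walk the tree and add up file sizes; a missing directory simply contributes 0.
        total = 0
        for root, _dirs, files in os.walk(path):
            for name in files:
                total += os.path.getsize(os.path.join(root, name))
        return total

    cache_root = os.path.expanduser("~/.cache/pip")
    combined = directory_size(os.path.join(cache_root, "http-v2")) + directory_size(
        os.path.join(cache_root, "http")
    )
    print(f"{combined} bytes of cached HTTP responses")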

View File

@@ -23,9 +23,18 @@ COMPLETION_SCRIPTS = {
""",
"zsh": """
#compdef -P pip[0-9.]#
compadd $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$((CURRENT-1)) \\
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
__pip() {{
compadd $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$((CURRENT-1)) \\
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
}}
if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
# autoload from fpath, call function directly
__pip "$@"
else
# eval/source/. command, register function for later
compdef __pip -P 'pip[0-9.]#'
fi
""",
"fish": """
function __fish_complete_pip
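All of these shell snippets rely on the same protocol: when PIP_AUTO_COMPLETE=1 is set, pip reads COMP_WORDS and COMP_CWORD and prints completion candidates instead of running a command. A rough way to poke at that protocol from Python (assuming pip is on PATH; actual output will vary):

    import os
    import subprocess

    env = dict(os.environ, PIP_AUTO_COMPLETE="1", COMP_WORDS="pip inst", COMP_CWORD="1")
    result = subprocess.run(["pip"], env=env, capture_output=True, text=True)
    print(result.stdout)  # expected to include "install" among the suggestions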

View File

@@ -242,17 +242,15 @@ class ConfigurationCommand(Command):
e.filename = editor
raise
except subprocess.CalledProcessError as e:
raise PipError(
"Editor Subprocess exited with exit code {}".format(e.returncode)
)
raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
"""Helper to make sure the command got the right number of arguments"""
if len(args) != n:
msg = (
"Got unexpected number of arguments, expected {}. "
'(example: "{} config {}")'
).format(n, get_prog(), example)
f"Got unexpected number of arguments, expected {n}. "
f'(example: "{get_prog()} config {example}")'
)
raise PipError(msg)
if n == 1:

View File

@@ -46,22 +46,29 @@ def create_vendor_txt_map() -> Dict[str, str]:
return dict(line.split("==", 1) for line in lines)
def get_module_from_module_name(module_name: str) -> ModuleType:
def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
# Module name can be uppercase in vendor.txt for some reason...
module_name = module_name.lower().replace("-", "_")
# PATCH: setuptools is actually only pkg_resources.
if module_name == "setuptools":
module_name = "pkg_resources"
__import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
return getattr(pip._vendor, module_name)
try:
__import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
return getattr(pip._vendor, module_name)
except ImportError:
# We allow 'truststore' to fail to import due
# to being unavailable on Python 3.9 and earlier.
if module_name == "truststore" and sys.version_info < (3, 10):
return None
raise
def get_vendor_version_from_module(module_name: str) -> Optional[str]:
module = get_module_from_module_name(module_name)
version = getattr(module, "__version__", None)
if not version:
if module and not version:
# Try to find version in debundled module info.
assert module.__file__ is not None
env = get_environment([os.path.dirname(module.__file__)])
@@ -88,7 +95,7 @@ def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
elif parse_version(actual_version) != parse_version(expected_version):
extra_message = (
" (CONFLICT: vendor.txt suggests version should"
" be {})".format(expected_version)
f" be {expected_version})"
)
logger.info("%s==%s%s", module_name, actual_version, extra_message)
@@ -105,7 +112,7 @@ def show_tags(options: Values) -> None:
tag_limit = 10
target_python = make_target_python(options)
tags = target_python.get_tags()
tags = target_python.get_sorted_tags()
# Display the target options that were explicitly provided.
formatted_target = target_python.format_given()
@@ -113,7 +120,7 @@ def show_tags(options: Values) -> None:
if formatted_target:
suffix = f" (target: {formatted_target})"
msg = "Compatible tags: {}{}".format(len(tags), suffix)
msg = f"Compatible tags: {len(tags)}{suffix}"
logger.info(msg)
if options.verbose < 1 and len(tags) > tag_limit:
@@ -127,17 +134,12 @@ def show_tags(options: Values) -> None:
logger.info(str(tag))
if tags_limited:
msg = (
"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
).format(tag_limit=tag_limit)
msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
logger.info(msg)
def ca_bundle_info(config: Configuration) -> str:
levels = set()
for key, _ in config.items():
levels.add(key.split(".")[0])
levels = {key.split(".", 1)[0] for key, _ in config.items()}
if not levels:
return "Not specified"

View File

@@ -18,6 +18,7 @@ def _dev_pkgs() -> AbstractSet[str]:
if _should_suppress_build_backends():
pkgs |= {"setuptools", "distribute", "wheel"}
pkgs |= {"setuptools", "distribute", "wheel", "pkg-resources"}
return pkgs

View File

@@ -128,12 +128,12 @@ class IndexCommand(IndexGroupCommand):
if not versions:
raise DistributionNotFound(
"No matching distribution found for {}".format(query)
f"No matching distribution found for {query}"
)
formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
latest = formatted_versions[0]
write_output("{} ({})".format(query, latest))
write_output(f"{query} ({latest})")
write_output("Available versions: {}".format(", ".join(formatted_versions)))
print_dist_installation_info(query, latest)

View File

@@ -501,7 +501,7 @@ class InstallCommand(RequirementCommand):
show_traceback,
options.use_user_site,
)
logger.error(message, exc_info=show_traceback) # noqa
logger.error(message, exc_info=show_traceback)
return ERROR
@@ -595,7 +595,7 @@ class InstallCommand(RequirementCommand):
"source of the following dependency conflicts."
)
else:
assert resolver_variant == "2020-resolver"
assert resolver_variant == "resolvelib"
parts.append(
"pip's dependency resolver does not currently take into account "
"all the packages that are installed. This behaviour is the "
@@ -607,12 +607,8 @@ class InstallCommand(RequirementCommand):
version = package_set[project_name][0]
for dependency in missing[project_name]:
message = (
"{name} {version} requires {requirement}, "
f"{project_name} {version} requires {dependency[1]}, "
"which is not installed."
).format(
name=project_name,
version=version,
requirement=dependency[1],
)
parts.append(message)
@@ -628,7 +624,7 @@ class InstallCommand(RequirementCommand):
requirement=req,
dep_name=dep_name,
dep_version=dep_version,
you=("you" if resolver_variant == "2020-resolver" else "you'll"),
you=("you" if resolver_variant == "resolvelib" else "you'll"),
)
parts.append(message)

View File

@@ -33,6 +33,8 @@ if TYPE_CHECKING:
_ProcessedDists = Sequence[_DistWithLatestInfo]
from pip._vendor.packaging.version import parse
logger = logging.getLogger(__name__)
@@ -201,7 +203,7 @@ class ListCommand(IndexGroupCommand):
return [
dist
for dist in self.iter_packages_latest_infos(packages, options)
if dist.latest_version > dist.version
if parse(str(dist.latest_version)) > parse(str(dist.version))
]
def get_uptodate(
@@ -210,7 +212,7 @@ class ListCommand(IndexGroupCommand):
return [
dist
for dist in self.iter_packages_latest_infos(packages, options)
if dist.latest_version == dist.version
if parse(str(dist.latest_version)) == parse(str(dist.version))
]
def get_not_required(
@@ -297,7 +299,7 @@ class ListCommand(IndexGroupCommand):
# Create and add a separator.
if len(data) > 0:
pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))
pkg_strings.insert(1, " ".join("-" * x for x in sizes))
for val in pkg_strings:
write_output(val)
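The switch to parse(str(...)) guards against comparing mismatched version types or plain strings. A quick illustration of why the explicit parse matters:

    from pip._vendor.packaging.version import parse

    print("1.10.0" > "1.9.0")                # False: string comparison is lexicographic
    print(parse("1.10.0") > parse("1.9.0"))  # True: Version objects compare numerically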

View File

@@ -59,8 +59,8 @@ def _disassemble_key(name: str) -> List[str]:
if "." not in name:
error_message = (
"Key does not contain dot separated section and key. "
"Perhaps you wanted to use 'global.{}' instead?"
).format(name)
f"Perhaps you wanted to use 'global.{name}' instead?"
)
raise ConfigurationError(error_message)
return name.split(".", 1)
@@ -327,33 +327,35 @@ class Configuration:
def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
"""Yields variant and configuration files associated with it.
This should be treated like items of a dictionary.
This should be treated like items of a dictionary. The order
here doesn't affect what gets overridden; that is controlled
by OVERRIDE_ORDER. However, this does control the order they are
displayed to the user. It's probably most ergonomic to display
things in the same order as OVERRIDE_ORDER.
"""
# SMELL: Move the conditions out of this function
# environment variables have the lowest priority
config_file = os.environ.get("PIP_CONFIG_FILE", None)
if config_file is not None:
yield kinds.ENV, [config_file]
else:
yield kinds.ENV, []
env_config_file = os.environ.get("PIP_CONFIG_FILE", None)
config_files = get_configuration_files()
# at the base we have any global configuration
yield kinds.GLOBAL, config_files[kinds.GLOBAL]
# per-user configuration next
# per-user config is not loaded when env_config_file exists
should_load_user_config = not self.isolated and not (
config_file and os.path.exists(config_file)
env_config_file and os.path.exists(env_config_file)
)
if should_load_user_config:
# The legacy config file is overridden by the new config file
yield kinds.USER, config_files[kinds.USER]
# finally virtualenv configuration first trumping others
# virtualenv config
yield kinds.SITE, config_files[kinds.SITE]
if env_config_file is not None:
yield kinds.ENV, [env_config_file]
else:
yield kinds.ENV, []
def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
"""Get values present in a config file"""
return self._config[variant]
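To make the docstring above concrete: the later a variant appears in OVERRIDE_ORDER, the more it wins when values are merged. A toy sketch of that merge (the variant names follow pip's usual global → user → site → env-file → env-var order; the values are invented):

    OVERRIDE_ORDER = ["global", "user", "site", "env", "env-var"]
    values_per_variant = {
        "global": {"global.timeout": "60"},
        "user": {"global.timeout": "15"},
        "env": {"global.index-url": "https://example.invalid/simple"},
    }

    merged = {}
    for variant in OVERRIDE_ORDER:
        merged.update(values_per_variant.get(variant, {}))
    print(merged)  # {'global.timeout': '15', 'global.index-url': 'https://example.invalid/simple'}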

View File

@@ -1,4 +1,5 @@
import abc
from typing import Optional
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata.base import BaseDistribution
@@ -19,12 +20,23 @@ class AbstractDistribution(metaclass=abc.ABCMeta):
- we must be able to create a Distribution object exposing the
above metadata.
- if we need to do work in the build tracker, we must be able to generate a unique
string to identify the requirement in the build tracker.
"""
def __init__(self, req: InstallRequirement) -> None:
super().__init__()
self.req = req
@abc.abstractproperty
def build_tracker_id(self) -> Optional[str]:
"""A string that uniquely identifies this requirement to the build tracker.
If None, then this dist has no work to do in the build tracker, and
``.prepare_distribution_metadata()`` will not be called."""
raise NotImplementedError()
@abc.abstractmethod
def get_metadata_distribution(self) -> BaseDistribution:
raise NotImplementedError()

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution
@@ -10,6 +12,10 @@ class InstalledDistribution(AbstractDistribution):
been computed.
"""
@property
def build_tracker_id(self) -> Optional[str]:
return None
def get_metadata_distribution(self) -> BaseDistribution:
assert self.req.satisfied_by is not None, "not actually installed"
return self.req.satisfied_by

View File

@@ -1,5 +1,5 @@
import logging
from typing import Iterable, Set, Tuple
from typing import Iterable, Optional, Set, Tuple
from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution
@@ -18,6 +18,12 @@ class SourceDistribution(AbstractDistribution):
generated, either using PEP 517 or using the legacy `setup.py egg_info`.
"""
@property
def build_tracker_id(self) -> Optional[str]:
"""Identify this requirement uniquely by its link."""
assert self.req.link
return self.req.link.url_without_fragment
def get_metadata_distribution(self) -> BaseDistribution:
return self.req.get_dist()
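A hypothetical sketch of how a unique build_tracker_id lets a tracker refuse to prepare the same requirement twice (illustrative only, not pip's actual BuildTracker):

    from typing import Optional

    class ToyBuildTracker:
        def __init__(self) -> None:
            self._in_progress: set = set()

        def add(self, tracker_id: Optional[str]) -> None:
            if tracker_id is None:
                # Wheels and already-installed dists have no build work to track.
                return
            if tracker_id in self._in_progress:
                raise LookupError(f"{tracker_id} is already being built")
            self._in_progress.add(tracker_id)

    tracker = ToyBuildTracker()
    tracker.add("https://example.invalid/pkg-1.0.tar.gz")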

View File

@@ -1,3 +1,5 @@
from typing import Optional
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.distributions.base import AbstractDistribution
@@ -15,6 +17,10 @@ class WheelDistribution(AbstractDistribution):
This does not need any preparation as wheels can be directly unpacked.
"""
@property
def build_tracker_id(self) -> Optional[str]:
return None
def get_metadata_distribution(self) -> BaseDistribution:
"""Loads the metadata from the wheel file into memory and returns a
Distribution that uses it, not relying on the wheel file or

View File

@@ -247,10 +247,7 @@ class NoneMetadataError(PipError):
def __str__(self) -> str:
# Use `dist` in the error message because its stringification
# includes more information, like the version and location.
return "None {} metadata found for distribution: {}".format(
self.metadata_name,
self.dist,
)
return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
class UserInstallationInvalid(InstallationError):
@@ -594,7 +591,7 @@ class HashMismatch(HashError):
self.gots = gots
def body(self) -> str:
return " {}:\n{}".format(self._requirement_name(), self._hash_comparison())
return f" {self._requirement_name()}:\n{self._hash_comparison()}"
def _hash_comparison(self) -> str:
"""
@@ -616,11 +613,9 @@ class HashMismatch(HashError):
lines: List[str] = []
for hash_name, expecteds in self.allowed.items():
prefix = hash_then_or(hash_name)
lines.extend(
(" Expected {} {}".format(next(prefix), e)) for e in expecteds
)
lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds)
lines.append(
" Got {}\n".format(self.gots[hash_name].hexdigest())
f" Got {self.gots[hash_name].hexdigest()}\n"
)
return "\n".join(lines)
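The joined lines end up reading as an expected/got report. A toy version with made-up digests:

    import hashlib

    allowed = {"sha256": ["0123abcd" * 8]}
    got = hashlib.sha256(b"the bytes that were actually downloaded")
    lines = []
    for hash_name, expecteds in allowed.items():
        lines.extend(f"        Expected {hash_name} {e}" for e in expecteds)
        lines.append(f"             Got        {got.hexdigest()}\n")
    print("\n".join(lines))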

View File

@@ -473,6 +473,7 @@ class LinkCollector:
page_validator=self.session.is_secure_origin,
expand_dir=False,
cache_link_parsing=False,
project_name=project_name,
)
for loc in self.search_scope.get_index_urls_locations(project_name)
).values()
@@ -483,6 +484,7 @@ class LinkCollector:
page_validator=self.session.is_secure_origin,
expand_dir=True,
cache_link_parsing=True,
project_name=project_name,
)
for loc in self.find_links
).values()

View File

@@ -198,7 +198,7 @@ class LinkEvaluator:
reason = f"wrong project name (not {self.project_name})"
return (LinkType.different_project, reason)
supported_tags = self._target_python.get_tags()
supported_tags = self._target_python.get_unsorted_tags()
if not wheel.supported(supported_tags):
# Include the wheel's tags in the reason string to
# simplify troubleshooting compatibility issues.
@@ -414,7 +414,7 @@ class CandidateEvaluator:
if specifier is None:
specifier = specifiers.SpecifierSet()
supported_tags = target_python.get_tags()
supported_tags = target_python.get_sorted_tags()
return cls(
project_name=project_name,
@@ -533,8 +533,8 @@ class CandidateEvaluator:
)
except ValueError:
raise UnsupportedWheel(
"{} is not a supported wheel for this platform. It "
"can't be sorted.".format(wheel.filename)
f"{wheel.filename} is not a supported wheel for this platform. It "
"can't be sorted."
)
if self._prefer_binary:
binary_preference = 1
@@ -939,9 +939,7 @@ class PackageFinder:
_format_versions(best_candidate_result.iter_all()),
)
raise DistributionNotFound(
"No matching distribution found for {}".format(req)
)
raise DistributionNotFound(f"No matching distribution found for {req}")
def _should_install_candidate(
candidate: Optional[InstallationCandidate],

View File

@@ -1,8 +1,17 @@
import logging
import mimetypes
import os
import pathlib
from typing import Callable, Iterable, Optional, Tuple
from collections import defaultdict
from typing import Callable, Dict, Iterable, List, Optional, Tuple
from pip._vendor.packaging.utils import (
InvalidSdistFilename,
InvalidVersion,
InvalidWheelFilename,
canonicalize_name,
parse_sdist_filename,
parse_wheel_filename,
)
from pip._internal.models.candidate import InstallationCandidate
from pip._internal.models.link import Link
@@ -36,6 +45,53 @@ def _is_html_file(file_url: str) -> bool:
return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
class _FlatDirectoryToUrls:
"""Scans directory and caches results"""
def __init__(self, path: str) -> None:
self._path = path
self._page_candidates: List[str] = []
self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list)
self._scanned_directory = False
def _scan_directory(self) -> None:
"""Scans directory once and populates both page_candidates
and project_name_to_urls at the same time
"""
for entry in os.scandir(self._path):
url = path_to_url(entry.path)
if _is_html_file(url):
self._page_candidates.append(url)
continue
# File must have a valid wheel or sdist name,
# otherwise not worth considering as a package
try:
project_filename = parse_wheel_filename(entry.name)[0]
except (InvalidWheelFilename, InvalidVersion):
try:
project_filename = parse_sdist_filename(entry.name)[0]
except (InvalidSdistFilename, InvalidVersion):
continue
self._project_name_to_urls[project_filename].append(url)
self._scanned_directory = True
@property
def page_candidates(self) -> List[str]:
if not self._scanned_directory:
self._scan_directory()
return self._page_candidates
@property
def project_name_to_urls(self) -> Dict[str, List[str]]:
if not self._scanned_directory:
self._scan_directory()
return self._project_name_to_urls
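The bucketing above keys files by the project name parsed out of wheel and sdist filenames. A short illustration of those parsers (the file names are examples):

    from pip._vendor.packaging.utils import parse_sdist_filename, parse_wheel_filename

    name, version, build, tags = parse_wheel_filename("requests-2.31.0-py3-none-any.whl")
    print(name, version)  # requests 2.31.0
    name, version = parse_sdist_filename("requests-2.31.0.tar.gz")
    print(name, version)  # requests 2.31.0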
class _FlatDirectorySource(LinkSource):
"""Link source specified by ``--find-links=<path-to-dir>``.
@@ -45,30 +101,34 @@ class _FlatDirectorySource(LinkSource):
* ``file_candidates``: Archives in the directory.
"""
_paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {}
def __init__(
self,
candidates_from_page: CandidatesFromPage,
path: str,
project_name: str,
) -> None:
self._candidates_from_page = candidates_from_page
self._path = pathlib.Path(os.path.realpath(path))
self._project_name = canonicalize_name(project_name)
# Get existing instance of _FlatDirectoryToUrls if it exists
if path in self._paths_to_urls:
self._path_to_urls = self._paths_to_urls[path]
else:
self._path_to_urls = _FlatDirectoryToUrls(path=path)
self._paths_to_urls[path] = self._path_to_urls
@property
def link(self) -> Optional[Link]:
return None
def page_candidates(self) -> FoundCandidates:
for path in self._path.iterdir():
url = path_to_url(str(path))
if not _is_html_file(url):
continue
for url in self._path_to_urls.page_candidates:
yield from self._candidates_from_page(Link(url))
def file_links(self) -> FoundLinks:
for path in self._path.iterdir():
url = path_to_url(str(path))
if _is_html_file(url):
continue
for url in self._path_to_urls.project_name_to_urls[self._project_name]:
yield Link(url)
@@ -170,6 +230,7 @@ def build_source(
page_validator: PageValidator,
expand_dir: bool,
cache_link_parsing: bool,
project_name: str,
) -> Tuple[Optional[str], Optional[LinkSource]]:
path: Optional[str] = None
url: Optional[str] = None
@@ -203,6 +264,7 @@ def build_source(
source = _FlatDirectorySource(
candidates_from_page=candidates_from_page,
path=path,
project_name=project_name,
)
else:
source = _IndexDirectorySource(

View File

@@ -56,8 +56,7 @@ def distutils_scheme(
try:
d.parse_config_files()
except UnicodeDecodeError:
# Typeshed does not include find_config_files() for some reason.
paths = d.find_config_files() # type: ignore
paths = d.find_config_files()
logger.warning(
"Ignore distutils configs in %s due to encoding errors.",
", ".join(os.path.basename(p) for p in paths),
@@ -89,7 +88,7 @@ def distutils_scheme(
# finalize_options(); we only want to override here if the user
# has explicitly requested it, hence going back to the config
if "install_lib" in d.get_option_dict("install"):
scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})
if running_under_virtualenv():
if home:

View File

@@ -9,7 +9,7 @@ from pip._internal.utils.misc import strtobool
from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
if TYPE_CHECKING:
from typing import Protocol
from typing import Literal, Protocol
else:
Protocol = object
@@ -50,6 +50,7 @@ def _should_use_importlib_metadata() -> bool:
class Backend(Protocol):
NAME: 'Literal["importlib", "pkg_resources"]'
Distribution: Type[BaseDistribution]
Environment: Type[BaseEnvironment]

View File

@@ -64,10 +64,10 @@ def msg_to_json(msg: Message) -> Dict[str, Any]:
key = json_name(field)
if multi:
value: Union[str, List[str]] = [
sanitise_header(v) for v in msg.get_all(field)
sanitise_header(v) for v in msg.get_all(field) # type: ignore
]
else:
value = sanitise_header(msg.get(field))
value = sanitise_header(msg.get(field)) # type: ignore
if key == "keywords":
# Accept both comma-separated and space-separated
# forms, for better compatibility with old data.

View File

@@ -24,7 +24,7 @@ from typing import (
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
from pip._vendor.packaging.utils import NormalizedName
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import LegacyVersion, Version
from pip._internal.exceptions import NoneMetadataError
@@ -37,7 +37,6 @@ from pip._internal.models.direct_url import (
from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
from pip._internal.utils.egg_link import egg_link_path_from_sys_path
from pip._internal.utils.misc import is_local, normalize_path
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.urls import url_to_path
from ._json import msg_to_json
@@ -460,6 +459,19 @@ class BaseDistribution(Protocol):
For modern .dist-info distributions, this is the collection of
"Provides-Extra:" entries in distribution metadata.
The return value of this function is not particularly useful for anything
other than display purposes, because of backward compatibility issues and
because extra names were poorly normalized prior to PEP 685. If you want to
perform logic operations on extras, use :func:`is_extra_provided` instead.
"""
raise NotImplementedError()
def is_extra_provided(self, extra: str) -> bool:
"""Check whether an extra is provided by this distribution.
This is needed mostly for compatibility issues with pkg_resources not
following the extra normalization rules defined in PEP 685.
"""
raise NotImplementedError()
@@ -537,10 +549,11 @@ class BaseDistribution(Protocol):
"""Get extras from the egg-info directory."""
known_extras = {""}
for entry in self._iter_requires_txt_entries():
if entry.extra in known_extras:
extra = canonicalize_name(entry.extra)
if extra in known_extras:
continue
known_extras.add(entry.extra)
yield entry.extra
known_extras.add(extra)
yield extra
def _iter_egg_info_dependencies(self) -> Iterable[str]:
"""Get distribution dependencies from the egg-info directory.
@@ -556,10 +569,11 @@ class BaseDistribution(Protocol):
all currently available PEP 517 backends, although not standardized.
"""
for entry in self._iter_requires_txt_entries():
if entry.extra and entry.marker:
marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"'
elif entry.extra:
marker = f'extra == "{safe_extra(entry.extra)}"'
extra = canonicalize_name(entry.extra)
if extra and entry.marker:
marker = f'({entry.marker}) and extra == "{extra}"'
elif extra:
marker = f'extra == "{extra}"'
elif entry.marker:
marker = entry.marker
else:

View File

@@ -1,4 +1,6 @@
from ._dists import Distribution
from ._envs import Environment
__all__ = ["Distribution", "Environment"]
__all__ = ["NAME", "Distribution", "Environment"]
NAME = "importlib"

View File

@@ -27,7 +27,6 @@ from pip._internal.metadata.base import (
Wheel,
)
from pip._internal.utils.misc import normalize_path
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
@@ -208,12 +207,16 @@ class Distribution(BaseDistribution):
return cast(email.message.Message, self._dist.metadata)
def iter_provided_extras(self) -> Iterable[str]:
return (
safe_extra(extra) for extra in self.metadata.get_all("Provides-Extra", [])
return self.metadata.get_all("Provides-Extra", [])
def is_extra_provided(self, extra: str) -> bool:
return any(
canonicalize_name(provided_extra) == canonicalize_name(extra)
for provided_extra in self.metadata.get_all("Provides-Extra", [])
)
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras]
contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras]
for req_string in self.metadata.get_all("Requires-Dist", []):
req = Requirement(req_string)
if not req.marker:

View File

@@ -151,7 +151,8 @@ def _emit_egg_deprecation(location: Optional[str]) -> None:
deprecated(
reason=f"Loading egg at {location} is deprecated.",
replacement="to use pip for package installation.",
gone_in="23.3",
gone_in="24.3",
issue=12330,
)

View File

@@ -24,8 +24,12 @@ from .base import (
Wheel,
)
__all__ = ["NAME", "Distribution", "Environment"]
logger = logging.getLogger(__name__)
NAME = "pkg_resources"
class EntryPoint(NamedTuple):
name: str
@@ -212,12 +216,16 @@ class Distribution(BaseDistribution):
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
if extras: # pkg_resources raises on invalid extras, so we sanitize.
extras = frozenset(extras).intersection(self._dist.extras)
extras = frozenset(pkg_resources.safe_extra(e) for e in extras)
extras = extras.intersection(self._dist.extras)
return self._dist.requires(extras)
def iter_provided_extras(self) -> Iterable[str]:
return self._dist.extras
def is_extra_provided(self, extra: str) -> bool:
return pkg_resources.safe_extra(extra) in self._dist.extras
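The reason is_extra_provided goes through safe_extra here is that pkg_resources normalizes extras differently from PEP 685. A small comparison (the extra name is invented):

    from pip._vendor import pkg_resources
    from pip._vendor.packaging.utils import canonicalize_name

    print(pkg_resources.safe_extra("Dev_Lint"))  # dev_lint  (pkg_resources rules)
    print(canonicalize_name("Dev_Lint"))         # dev-lint  (PEP 685 rules)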
class Environment(BaseEnvironment):
def __init__(self, ws: pkg_resources.WorkingSet) -> None:

Some files were not shown because too many files have changed in this diff.