subscription table shows more than one app
12 binary files changed (contents not shown).
@@ -51,10 +51,22 @@ def get_build_tracker() -> Generator["BuildTracker", None, None]:
             yield tracker
 
 
+class TrackerId(str):
+    """Uniquely identifying string provided to the build tracker."""
+
+
 class BuildTracker:
+    """Ensure that an sdist cannot request itself as a setup requirement.
+
+    When an sdist is prepared, it identifies its setup requirements in the
+    context of ``BuildTracker.track()``. If a requirement shows up recursively, this
+    raises an exception.
+
+    This stops fork bombs embedded in malicious packages."""
+
     def __init__(self, root: str) -> None:
         self._root = root
-        self._entries: Set[InstallRequirement] = set()
+        self._entries: Dict[TrackerId, InstallRequirement] = {}
         logger.debug("Created build tracker: %s", self._root)
 
     def __enter__(self) -> "BuildTracker":
@@ -69,16 +81,15 @@ class BuildTracker:
     ) -> None:
         self.cleanup()
 
-    def _entry_path(self, link: Link) -> str:
-        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
+    def _entry_path(self, key: TrackerId) -> str:
+        hashed = hashlib.sha224(key.encode()).hexdigest()
         return os.path.join(self._root, hashed)
 
-    def add(self, req: InstallRequirement) -> None:
+    def add(self, req: InstallRequirement, key: TrackerId) -> None:
         """Add an InstallRequirement to build tracking."""
 
-        assert req.link
         # Get the file to write information about this requirement.
-        entry_path = self._entry_path(req.link)
+        entry_path = self._entry_path(key)
 
         # Try reading from the file. If it exists and can be read from, a build
         # is already in progress, so a LookupError is raised.
@@ -92,33 +103,37 @@ class BuildTracker:
             raise LookupError(message)
 
         # If we're here, req should really not be building already.
-        assert req not in self._entries
+        assert key not in self._entries
 
         # Start tracking this requirement.
         with open(entry_path, "w", encoding="utf-8") as fp:
             fp.write(str(req))
-        self._entries.add(req)
+        self._entries[key] = req
 
         logger.debug("Added %s to build tracker %r", req, self._root)
 
-    def remove(self, req: InstallRequirement) -> None:
+    def remove(self, req: InstallRequirement, key: TrackerId) -> None:
         """Remove an InstallRequirement from build tracking."""
 
-        assert req.link
-        # Delete the created file and the corresponding entries.
-        os.unlink(self._entry_path(req.link))
-        self._entries.remove(req)
+        # Delete the created file and the corresponding entry.
+        os.unlink(self._entry_path(key))
+        del self._entries[key]
 
         logger.debug("Removed %s from build tracker %r", req, self._root)
 
     def cleanup(self) -> None:
-        for req in set(self._entries):
-            self.remove(req)
+        for key, req in list(self._entries.items()):
+            self.remove(req, key)
 
         logger.debug("Removed build tracker: %r", self._root)
 
     @contextlib.contextmanager
-    def track(self, req: InstallRequirement) -> Generator[None, None, None]:
-        self.add(req)
+    def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
+        """Ensure that `key` cannot install itself as a setup requirement.
+
+        :raises LookupError: If `key` was already provided in a parent invocation of
+        the context introduced by this method."""
+        tracker_id = TrackerId(key)
+        self.add(req, tracker_id)
         yield
-        self.remove(req)
+        self.remove(req, tracker_id)
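For context, the hunks above key the build tracker by an explicit TrackerId instead of the requirement's link. The standalone sketch below is illustrative only (SimpleTracker and its names are invented for this note, not pip code); it shows the same idea: entries keyed by an identifier, where re-entering the context with a key that is already present raises LookupError, which is what stops an sdist from recursively requesting itself as its own setup requirement.

import contextlib
from typing import Dict, Generator


class SimpleTracker:
    """Toy keyed build tracker (illustrative only; not pip's implementation)."""

    def __init__(self) -> None:
        # Mirrors the Dict[TrackerId, InstallRequirement] mapping in the diff above.
        self._entries: Dict[str, str] = {}

    @contextlib.contextmanager
    def track(self, description: str, key: str) -> Generator[None, None, None]:
        if key in self._entries:
            # A parent invocation already provided this key: the build is requesting itself.
            raise LookupError(f"{description!r} is already being built under key {key!r}")
        self._entries[key] = description
        try:
            yield
        finally:
            del self._entries[key]


tracker = SimpleTracker()
with tracker.track("demo-pkg sdist build", key="demo-pkg"):
    try:
        # A recursive request with the same key is rejected instead of looping forever.
        with tracker.track("demo-pkg again", key="demo-pkg"):
            pass
    except LookupError as exc:
        print("blocked:", exc)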
@@ -168,7 +168,7 @@ def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
                     f"release a version with a conforming version number"
                 ),
                 issue=12063,
-                gone_in="23.3",
+                gone_in="24.1",
             )
         for dep in package_details.dependencies:
             if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
@@ -183,5 +183,5 @@ def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
                         f"release a version with a conforming dependency specifiers"
                     ),
                     issue=12063,
-                    gone_in="23.3",
+                    gone_in="24.1",
                 )
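The two hunks above only push back the removal target for pip's legacy-version deprecation warnings from 23.3 to 24.1. As a quick illustration of what a "non-standard version number" means here, the snippet below uses the standalone packaging library (an assumption for this note; pip itself relies on its vendored copy) to show which version strings conform to PEP 440:

from packaging.version import InvalidVersion, Version  # assumes the 'packaging' distribution is installed

for candidate in ["1.0.2", "2004d"]:
    try:
        Version(candidate)
        print(candidate, "conforms to PEP 440")
    except InvalidVersion:
        print(candidate, "is a legacy, non-standard version number")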
3 binary files changed (contents not shown).
@@ -164,16 +164,14 @@ def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
     for parent_dir, dir_scripts in warn_for.items():
         sorted_scripts: List[str] = sorted(dir_scripts)
         if len(sorted_scripts) == 1:
-            start_text = "script {} is".format(sorted_scripts[0])
+            start_text = f"script {sorted_scripts[0]} is"
         else:
             start_text = "scripts {} are".format(
                 ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
             )
 
         msg_lines.append(
-            "The {} installed in '{}' which is not on PATH.".format(
-                start_text, parent_dir
-            )
+            f"The {start_text} installed in '{parent_dir}' which is not on PATH."
         )
 
     last_line_fmt = (
@@ -267,9 +265,9 @@ def get_csv_rows_for_installed(
         path = _fs_to_record_path(f, lib_dir)
         digest, length = rehash(f)
         installed_rows.append((path, digest, length))
-    for installed_record_path in installed.values():
-        installed_rows.append((installed_record_path, "", ""))
-    return installed_rows
+    return installed_rows + [
+        (installed_record_path, "", "") for installed_record_path in installed.values()
+    ]
 
 
 def get_console_script_specs(console: Dict[str, str]) -> List[str]:
@@ -321,9 +319,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]:
             scripts_to_generate.append("pip = " + pip_script)
 
         if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
-            scripts_to_generate.append(
-                "pip{} = {}".format(sys.version_info[0], pip_script)
-            )
+            scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}")
 
         scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
         # Delete any other versioned pip entry points
@@ -336,9 +332,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]:
             scripts_to_generate.append("easy_install = " + easy_install_script)
 
         scripts_to_generate.append(
-            "easy_install-{} = {}".format(
-                get_major_minor_version(), easy_install_script
-            )
+            f"easy_install-{get_major_minor_version()} = {easy_install_script}"
         )
         # Delete any other versioned easy_install entry points
         easy_install_ep = [
@@ -408,10 +402,10 @@ class ScriptFile:
 class MissingCallableSuffix(InstallationError):
     def __init__(self, entry_point: str) -> None:
         super().__init__(
-            "Invalid script entry point: {} - A callable "
+            f"Invalid script entry point: {entry_point} - A callable "
             "suffix is required. Cf https://packaging.python.org/"
             "specifications/entry-points/#use-for-scripts for more "
-            "information.".format(entry_point)
+            "information."
         )
 
 
@@ -712,7 +706,7 @@ def req_error_context(req_description: str) -> Generator[None, None, None]:
     try:
        yield
     except InstallationError as e:
-        message = "For req: {}. {}".format(req_description, e.args[0])
+        message = f"For req: {req_description}. {e.args[0]}"
         raise InstallationError(message) from e
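Most of the hunks above are mechanical str.format to f-string conversions; the get_csv_rows_for_installed hunk additionally folds an append loop into a list comprehension. The sketch below is illustrative only (the helper names and sample data are invented) and checks that the two shapes of that RECORD-row construction produce the same result:

from typing import Dict, List, Tuple

Row = Tuple[str, str, str]  # (path, hash, size), the shape of a RECORD row


def rows_with_loop(hashed_rows: List[Row], installed: Dict[str, str]) -> List[Row]:
    # Old shape: append the unhashed entries one by one.
    rows = list(hashed_rows)
    for installed_record_path in installed.values():
        rows.append((installed_record_path, "", ""))
    return rows


def rows_with_comprehension(hashed_rows: List[Row], installed: Dict[str, str]) -> List[Row]:
    # New shape: concatenate with a list comprehension.
    return list(hashed_rows) + [
        (installed_record_path, "", "") for installed_record_path in installed.values()
    ]


hashed = [("demo/__init__.py", "sha256=abc123", "42")]
installed = {"demo-cli": "../../bin/demo-cli"}
assert rows_with_loop(hashed, installed) == rows_with_comprehension(hashed, installed)
print(rows_with_comprehension(hashed, installed))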
@@ -4,10 +4,10 @@
 # The following comment should be removed at some point in the future.
 # mypy: strict-optional=False
 
-import logging
 import mimetypes
 import os
 import shutil
+from pathlib import Path
 from typing import Dict, Iterable, List, Optional
 
 from pip._vendor.packaging.utils import canonicalize_name
@@ -21,7 +21,6 @@ from pip._internal.exceptions import (
     InstallationError,
     MetadataInconsistent,
     NetworkConnectionError,
-    PreviousBuildDirError,
     VcsHashUnsupported,
 )
 from pip._internal.index.package_finder import PackageFinder
@@ -37,6 +36,7 @@ from pip._internal.network.lazy_wheel import (
 from pip._internal.network.session import PipSession
 from pip._internal.operations.build.build_tracker import BuildTracker
 from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils._log import getLogger
 from pip._internal.utils.direct_url_helpers import (
     direct_url_for_editable,
     direct_url_from_link
@@ -47,13 +47,13 @@ from pip._internal.utils.misc import (
     display_path,
     hash_file,
     hide_url,
-    is_installable_dir,
+    redact_auth_from_requirement,
 )
 from pip._internal.utils.temp_dir import TempDirectory
 from pip._internal.utils.unpacking import unpack_file
 from pip._internal.vcs import vcs
 
-logger = logging.getLogger(__name__)
+logger = getLogger(__name__)
 
 
 def _get_prepared_distribution(
@@ -65,10 +65,12 @@ def _get_prepared_distribution(
 ) -> BaseDistribution:
     """Prepare a distribution for installation."""
     abstract_dist = make_distribution_for_install_requirement(req)
-    with build_tracker.track(req):
-        abstract_dist.prepare_distribution_metadata(
-            finder, build_isolation, check_build_deps
-        )
+    tracker_id = abstract_dist.build_tracker_id
+    if tracker_id is not None:
+        with build_tracker.track(req, tracker_id):
+            abstract_dist.prepare_distribution_metadata(
+                finder, build_isolation, check_build_deps
+            )
     return abstract_dist.get_metadata_distribution()
@@ -276,7 +278,7 @@ class RequirementPreparer:
             information = str(display_path(req.link.file_path))
         else:
             message = "Collecting %s"
-            information = str(req.req or req)
+            information = redact_auth_from_requirement(req.req) if req.req else str(req)
 
         # If we used req.req, inject requirement source if available (this
         # would already be included if we used req directly)
@@ -317,21 +319,7 @@ class RequirementPreparer:
             autodelete=True,
             parallel_builds=parallel_builds,
         )
-
-        # If a checkout exists, it's unwise to keep going. version
-        # inconsistencies are logged later, but do not fail the
-        # installation.
-        # FIXME: this won't upgrade when there's an existing
-        # package unpacked in `req.source_dir`
-        # TODO: this check is now probably dead code
-        if is_installable_dir(req.source_dir):
-            raise PreviousBuildDirError(
-                "pip can't proceed with requirements '{}' due to a"
-                "pre-existing build directory ({}). This is likely "
-                "due to a previous installation that failed . pip is "
-                "being responsible and not assuming it can delete this. "
-                "Please delete it and try again.".format(req, req.source_dir)
-            )
+        req.ensure_pristine_source_checkout()
 
     def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
         # By the time this is called, the requirement's link should have
@@ -394,7 +382,7 @@ class RequirementPreparer:
         if metadata_link is None:
             return None
         assert req.req is not None
-        logger.info(
+        logger.verbose(
             "Obtaining dependency information for %s from %s",
             req.req,
             metadata_link,
@@ -479,20 +467,19 @@ class RequirementPreparer:
         for link, (filepath, _) in batch_download:
             logger.debug("Downloading link %s to %s", link, filepath)
             req = links_to_fully_download[link]
+            # Record the downloaded file path so wheel reqs can extract a Distribution
+            # in .get_dist().
             req.local_file_path = filepath
-            # TODO: This needs fixing for sdists
-            # This is an emergency fix for #11847, which reports that
-            # distributions get downloaded twice when metadata is loaded
-            # from a PEP 658 standalone metadata file. Setting _downloaded
-            # fixes this for wheels, but breaks the sdist case (tests
-            # test_download_metadata). As PyPI is currently only serving
-            # metadata for wheels, this is not an immediate issue.
-            # Fixing the problem properly looks like it will require a
-            # complete refactoring of the `prepare_linked_requirements_more`
-            # logic, and I haven't a clue where to start on that, so for now
-            # I have fixed the issue *just* for wheels.
-            if req.is_wheel:
-                self._downloaded[req.link.url] = filepath
+            # Record that the file is downloaded so we don't do it again in
+            # _prepare_linked_requirement().
+            self._downloaded[req.link.url] = filepath
+
+            # If this is an sdist, we need to unpack it after downloading, but the
+            # .source_dir won't be set up until we are in _prepare_linked_requirement().
+            # Add the downloaded archive to the install requirement to unpack after
+            # preparing the source dir.
+            if not req.is_wheel:
+                req.needs_unpacked_archive(Path(filepath))
 
         # This step is necessary to ensure all lazy wheels are processed
         # successfully by the 'download', 'wheel', and 'install' commands.
@@ -616,8 +603,8 @@ class RequirementPreparer:
                 )
             except NetworkConnectionError as exc:
                 raise InstallationError(
-                    "Could not install requirement {} because of HTTP "
-                    "error {} for URL {}".format(req, exc, link)
+                    f"Could not install requirement {req} because of HTTP "
+                    f"error {exc} for URL {link}"
                 )
         else:
             file_path = self._downloaded[link.url]
@@ -697,9 +684,9 @@ class RequirementPreparer:
         with indent_log():
             if self.require_hashes:
                 raise InstallationError(
-                    "The editable requirement {} cannot be installed when "
+                    f"The editable requirement {req} cannot be installed when "
                     "requiring hashes, because there is no single file to "
-                    "hash.".format(req)
+                    "hash."
                 )
             req.ensure_has_source_dir(self.src_dir)
             req.update_editable()
@@ -727,7 +714,7 @@ class RequirementPreparer:
         assert req.satisfied_by, "req should have been satisfied but isn't"
         assert skip_reason is not None, (
             "did not get skip reason skipped but req.satisfied_by "
-            "is set to {}".format(req.satisfied_by)
+            f"is set to {req.satisfied_by}"
         )
         logger.info(
             "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
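In the _get_prepared_distribution hunk above, build tracking now runs only when the distribution reports a non-None build_tracker_id; otherwise the tracking context is skipped entirely. The self-contained sketch below illustrates that gating pattern only (DemoDistribution, track, and prepare are invented names for this note, not pip's API, and the assumption that a None id means no tracked build is needed is mine):

import contextlib
from typing import Generator, Optional


class DemoDistribution:
    """Stand-in distribution: build_tracker_id is None when no tracked build is needed (assumption)."""

    def __init__(self, name: str, needs_build: bool) -> None:
        self.name = name
        self.build_tracker_id: Optional[str] = name if needs_build else None

    def prepare_metadata(self) -> str:
        return f"metadata for {self.name}"


@contextlib.contextmanager
def track(key: str) -> Generator[None, None, None]:
    print(f"tracking build of {key}")
    yield
    print(f"finished build of {key}")


def prepare(dist: DemoDistribution) -> str:
    # Mirrors the diff: only enter the tracker when there is an id to track.
    tracker_id = dist.build_tracker_id
    if tracker_id is not None:
        with track(tracker_id):
            return dist.prepare_metadata()
    return dist.prepare_metadata()


print(prepare(DemoDistribution("demo-sdist", needs_build=True)))
print(prepare(DemoDistribution("demo-wheel", needs_build=False)))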