Add python venv
@@ -0,0 +1,20 @@
from typing import Callable, List, Optional

from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet

InstallRequirementProvider = Callable[
    [str, Optional[InstallRequirement]], InstallRequirement
]


class BaseResolver:
    def resolve(
        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
    ) -> RequirementSet:
        raise NotImplementedError()

    def get_installation_order(
        self, req_set: RequirementSet
    ) -> List[InstallRequirement]:
        raise NotImplementedError()
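
The two methods above are the whole contract between pip's commands and a resolver: resolve() turns the root requirements into a RequirementSet, and get_installation_order() sorts that set so dependencies are installed before their dependents. A minimal standalone sketch of that flow, using made-up stand-in types rather than pip's real collaborators:

from typing import Dict, List


class ToyRequirementSet:  # illustrative stand-in for RequirementSet
    def __init__(self, names: List[str]) -> None:
        self.requirements: Dict[str, str] = {name: name for name in names}


class ToyResolver:  # mirrors BaseResolver's two-method contract
    def resolve(
        self, root_reqs: List[str], check_supported_wheels: bool
    ) -> ToyRequirementSet:
        # A real resolver also downloads and prepares each requirement here.
        return ToyRequirementSet(root_reqs)

    def get_installation_order(self, req_set: ToyRequirementSet) -> List[str]:
        # A real resolver puts dependencies before their dependents.
        return list(req_set.requirements)


resolver = ToyResolver()
req_set = resolver.resolve(["requests", "idna"], check_supported_wheels=True)
print(resolver.get_installation_order(req_set))  # ['requests', 'idna']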
@@ -0,0 +1,467 @@
|
||||
"""Dependency Resolution
|
||||
|
||||
The dependency resolution in pip is performed as follows:
|
||||
|
||||
for top-level requirements:
|
||||
a. only one spec is allowed per project, regardless of whether it conflicts.
|
||||
otherwise a "double requirement" exception is raised
|
||||
b. they override sub-dependency requirements.
|
||||
for sub-dependencies:
|
||||
a. "first found, wins" (where the order is breadth first)
|
||||
"""
|
||||
|
||||
# The following comment should be removed at some point in the future.
|
||||
# mypy: strict-optional=False
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from itertools import chain
|
||||
from typing import DefaultDict, Iterable, List, Optional, Set, Tuple
|
||||
|
||||
from pip._vendor.packaging import specifiers
|
||||
from pip._vendor.packaging.requirements import Requirement
|
||||
|
||||
from pip._internal.cache import WheelCache
|
||||
from pip._internal.exceptions import (
|
||||
BestVersionAlreadyInstalled,
|
||||
DistributionNotFound,
|
||||
HashError,
|
||||
HashErrors,
|
||||
NoneMetadataError,
|
||||
UnsupportedPythonVersion,
|
||||
)
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.metadata import BaseDistribution
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.operations.prepare import RequirementPreparer
|
||||
from pip._internal.req.req_install import (
|
||||
InstallRequirement,
|
||||
check_invalid_constraint_type,
|
||||
)
|
||||
from pip._internal.req.req_set import RequirementSet
|
||||
from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
|
||||
from pip._internal.utils.compatibility_tags import get_supported
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import dist_in_usersite, normalize_version_info
|
||||
from pip._internal.utils.packaging import check_requires_python
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]
|
||||
|
||||
|
||||
def _check_dist_requires_python(
|
||||
dist: BaseDistribution,
|
||||
version_info: Tuple[int, int, int],
|
||||
ignore_requires_python: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Check whether the given Python version is compatible with a distribution's
|
||||
"Requires-Python" value.
|
||||
|
||||
:param version_info: A 3-tuple of ints representing the Python
|
||||
major-minor-micro version to check.
|
||||
:param ignore_requires_python: Whether to ignore the "Requires-Python"
|
||||
value if the given Python version isn't compatible.
|
||||
|
||||
:raises UnsupportedPythonVersion: When the given Python version isn't
|
||||
compatible.
|
||||
"""
|
||||
# This idiosyncratically converts the SpecifierSet to str and lets
|
||||
# check_requires_python parse it again into a SpecifierSet. But this
|
||||
# is the legacy resolver so I'm just not going to bother refactoring.
|
||||
try:
|
||||
requires_python = str(dist.requires_python)
|
||||
except FileNotFoundError as e:
|
||||
raise NoneMetadataError(dist, str(e))
|
||||
try:
|
||||
is_compatible = check_requires_python(
|
||||
requires_python,
|
||||
version_info=version_info,
|
||||
)
|
||||
except specifiers.InvalidSpecifier as exc:
|
||||
logger.warning(
|
||||
"Package %r has an invalid Requires-Python: %s", dist.raw_name, exc
|
||||
)
|
||||
return
|
||||
|
||||
if is_compatible:
|
||||
return
|
||||
|
||||
version = ".".join(map(str, version_info))
|
||||
if ignore_requires_python:
|
||||
logger.debug(
|
||||
"Ignoring failed Requires-Python check for package %r: %s not in %r",
|
||||
dist.raw_name,
|
||||
version,
|
||||
requires_python,
|
||||
)
|
||||
return
|
||||
|
||||
raise UnsupportedPythonVersion(
|
||||
"Package {!r} requires a different Python: {} not in {!r}".format(
|
||||
dist.raw_name, version, requires_python
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class Resolver(BaseResolver):
|
||||
"""Resolves which packages need to be installed/uninstalled to perform \
|
||||
the requested operation without breaking the requirements of any package.
|
||||
"""
|
||||
|
||||
_allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
preparer: RequirementPreparer,
|
||||
finder: PackageFinder,
|
||||
wheel_cache: Optional[WheelCache],
|
||||
make_install_req: InstallRequirementProvider,
|
||||
use_user_site: bool,
|
||||
ignore_dependencies: bool,
|
||||
ignore_installed: bool,
|
||||
ignore_requires_python: bool,
|
||||
force_reinstall: bool,
|
||||
upgrade_strategy: str,
|
||||
py_version_info: Optional[Tuple[int, ...]] = None,
|
||||
) -> None:
|
||||
super().__init__()
|
||||
assert upgrade_strategy in self._allowed_strategies
|
||||
|
||||
if py_version_info is None:
|
||||
py_version_info = sys.version_info[:3]
|
||||
else:
|
||||
py_version_info = normalize_version_info(py_version_info)
|
||||
|
||||
self._py_version_info = py_version_info
|
||||
|
||||
self.preparer = preparer
|
||||
self.finder = finder
|
||||
self.wheel_cache = wheel_cache
|
||||
|
||||
self.upgrade_strategy = upgrade_strategy
|
||||
self.force_reinstall = force_reinstall
|
||||
self.ignore_dependencies = ignore_dependencies
|
||||
self.ignore_installed = ignore_installed
|
||||
self.ignore_requires_python = ignore_requires_python
|
||||
self.use_user_site = use_user_site
|
||||
self._make_install_req = make_install_req
|
||||
|
||||
self._discovered_dependencies: DiscoveredDependencies = defaultdict(list)
|
||||
|
||||
def resolve(
|
||||
self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
|
||||
) -> RequirementSet:
|
||||
"""Resolve what operations need to be done
|
||||
|
||||
As a side-effect of this method, the packages (and their dependencies)
|
||||
are downloaded, unpacked and prepared for installation. This
|
||||
preparation is done by ``pip.operations.prepare``.
|
||||
|
||||
Once PyPI has static dependency metadata available, it would be
|
||||
possible to move the preparation to become a step separated from
|
||||
dependency resolution.
|
||||
"""
|
||||
requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels)
|
||||
for req in root_reqs:
|
||||
if req.constraint:
|
||||
check_invalid_constraint_type(req)
|
||||
requirement_set.add_requirement(req)
|
||||
|
||||
# Actually prepare the files, and collect any exceptions. Most hash
|
||||
# exceptions cannot be checked ahead of time, because
|
||||
# _populate_link() needs to be called before we can make decisions
|
||||
# based on link type.
|
||||
discovered_reqs: List[InstallRequirement] = []
|
||||
hash_errors = HashErrors()
|
||||
for req in chain(requirement_set.all_requirements, discovered_reqs):
|
||||
try:
|
||||
discovered_reqs.extend(self._resolve_one(requirement_set, req))
|
||||
except HashError as exc:
|
||||
exc.req = req
|
||||
hash_errors.append(exc)
|
||||
|
||||
if hash_errors:
|
||||
raise hash_errors
|
||||
|
||||
return requirement_set
|
||||
|
||||
def _is_upgrade_allowed(self, req: InstallRequirement) -> bool:
|
||||
if self.upgrade_strategy == "to-satisfy-only":
|
||||
return False
|
||||
elif self.upgrade_strategy == "eager":
|
||||
return True
|
||||
else:
|
||||
assert self.upgrade_strategy == "only-if-needed"
|
||||
return req.user_supplied or req.constraint
|
||||
|
||||
def _set_req_to_reinstall(self, req: InstallRequirement) -> None:
|
||||
"""
|
||||
Set a requirement to be reinstalled.
|
||||
"""
|
||||
# Don't uninstall the conflict if doing a user install and the
|
||||
# conflict is not a user install.
|
||||
if not self.use_user_site or dist_in_usersite(req.satisfied_by):
|
||||
req.should_reinstall = True
|
||||
req.satisfied_by = None
|
||||
|
||||
def _check_skip_installed(
|
||||
self, req_to_install: InstallRequirement
|
||||
) -> Optional[str]:
|
||||
"""Check if req_to_install should be skipped.
|
||||
|
||||
This will check if the req is installed, and whether we should upgrade
|
||||
or reinstall it, taking into account all the relevant user options.
|
||||
|
||||
After calling this req_to_install will only have satisfied_by set to
|
||||
None if the req_to_install is to be upgraded/reinstalled etc. Any
|
||||
other value will be a dist recording the current thing installed that
|
||||
satisfies the requirement.
|
||||
|
||||
Note that for vcs urls and the like we can't assess skipping in this
|
||||
routine - we simply identify that we need to pull the thing down,
|
||||
then later on it is pulled down and introspected to assess upgrade/
|
||||
reinstalls etc.
|
||||
|
||||
:return: A text reason for why it was skipped, or None.
|
||||
"""
|
||||
if self.ignore_installed:
|
||||
return None
|
||||
|
||||
req_to_install.check_if_exists(self.use_user_site)
|
||||
if not req_to_install.satisfied_by:
|
||||
return None
|
||||
|
||||
if self.force_reinstall:
|
||||
self._set_req_to_reinstall(req_to_install)
|
||||
return None
|
||||
|
||||
if not self._is_upgrade_allowed(req_to_install):
|
||||
if self.upgrade_strategy == "only-if-needed":
|
||||
return "already satisfied, skipping upgrade"
|
||||
return "already satisfied"
|
||||
|
||||
# Check for the possibility of an upgrade. For link-based
|
||||
# requirements we have to pull the tree down and inspect to assess
|
||||
# the version #, so it's handled way down.
|
||||
if not req_to_install.link:
|
||||
try:
|
||||
self.finder.find_requirement(req_to_install, upgrade=True)
|
||||
except BestVersionAlreadyInstalled:
|
||||
# Then the best version is installed.
|
||||
return "already up-to-date"
|
||||
except DistributionNotFound:
|
||||
# No distribution found, so we squash the error. It will
|
||||
# be raised later when we re-try the install.
|
||||
# Why don't we just raise here?
|
||||
pass
|
||||
|
||||
self._set_req_to_reinstall(req_to_install)
|
||||
return None
|
||||
|
||||
def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]:
|
||||
upgrade = self._is_upgrade_allowed(req)
|
||||
best_candidate = self.finder.find_requirement(req, upgrade)
|
||||
if not best_candidate:
|
||||
return None
|
||||
|
||||
# Log a warning per PEP 592 if necessary before returning.
|
||||
link = best_candidate.link
|
||||
if link.is_yanked:
|
||||
reason = link.yanked_reason or "<none given>"
|
||||
msg = (
|
||||
# Mark this as a unicode string to prevent
|
||||
# "UnicodeEncodeError: 'ascii' codec can't encode character"
|
||||
# in Python 2 when the reason contains non-ascii characters.
|
||||
"The candidate selected for download or install is a "
|
||||
"yanked version: {candidate}\n"
|
||||
"Reason for being yanked: {reason}"
|
||||
).format(candidate=best_candidate, reason=reason)
|
||||
logger.warning(msg)
|
||||
|
||||
return link
|
||||
|
||||
def _populate_link(self, req: InstallRequirement) -> None:
|
||||
"""Ensure that if a link can be found for this, that it is found.
|
||||
|
||||
Note that req.link may still be None - if the requirement is already
|
||||
installed and not needed to be upgraded based on the return value of
|
||||
_is_upgrade_allowed().
|
||||
|
||||
If preparer.require_hashes is True, don't use the wheel cache, because
|
||||
cached wheels, always built locally, have different hashes than the
|
||||
files downloaded from the index server and thus throw false hash
|
||||
mismatches. Furthermore, cached wheels at present have nondeterministic
|
||||
contents due to file modification times.
|
||||
"""
|
||||
if req.link is None:
|
||||
req.link = self._find_requirement_link(req)
|
||||
|
||||
if self.wheel_cache is None or self.preparer.require_hashes:
|
||||
return
|
||||
cache_entry = self.wheel_cache.get_cache_entry(
|
||||
link=req.link,
|
||||
package_name=req.name,
|
||||
supported_tags=get_supported(),
|
||||
)
|
||||
if cache_entry is not None:
|
||||
logger.debug("Using cached wheel link: %s", cache_entry.link)
|
||||
if req.link is req.original_link and cache_entry.persistent:
|
||||
req.original_link_is_in_wheel_cache = True
|
||||
req.link = cache_entry.link
|
||||
|
||||
def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
|
||||
"""Takes a InstallRequirement and returns a single AbstractDist \
|
||||
representing a prepared variant of the same.
|
||||
"""
|
||||
if req.editable:
|
||||
return self.preparer.prepare_editable_requirement(req)
|
||||
|
||||
# satisfied_by is only evaluated by calling _check_skip_installed,
|
||||
# so it must be None here.
|
||||
assert req.satisfied_by is None
|
||||
skip_reason = self._check_skip_installed(req)
|
||||
|
||||
if req.satisfied_by:
|
||||
return self.preparer.prepare_installed_requirement(req, skip_reason)
|
||||
|
||||
# We eagerly populate the link, since that's our "legacy" behavior.
|
||||
self._populate_link(req)
|
||||
dist = self.preparer.prepare_linked_requirement(req)
|
||||
|
||||
# NOTE
|
||||
# The following portion is for determining if a certain package is
|
||||
# going to be re-installed/upgraded or not and reporting to the user.
|
||||
# This should probably get cleaned up in a future refactor.
|
||||
|
||||
# req.req is only available after unpack for URL pkgs;
|
||||
# repeat check_if_exists to uninstall-on-upgrade
|
||||
# (#14)
|
||||
if not self.ignore_installed:
|
||||
req.check_if_exists(self.use_user_site)
|
||||
|
||||
if req.satisfied_by:
|
||||
should_modify = (
|
||||
self.upgrade_strategy != "to-satisfy-only"
|
||||
or self.force_reinstall
|
||||
or self.ignore_installed
|
||||
or req.link.scheme == "file"
|
||||
)
|
||||
if should_modify:
|
||||
self._set_req_to_reinstall(req)
|
||||
else:
|
||||
logger.info(
|
||||
"Requirement already satisfied (use --upgrade to upgrade): %s",
|
||||
req,
|
||||
)
|
||||
return dist
|
||||
|
||||
def _resolve_one(
|
||||
self,
|
||||
requirement_set: RequirementSet,
|
||||
req_to_install: InstallRequirement,
|
||||
) -> List[InstallRequirement]:
|
||||
"""Prepare a single requirements file.
|
||||
|
||||
:return: A list of additional InstallRequirements to also install.
|
||||
"""
|
||||
# Tell user what we are doing for this requirement:
|
||||
# obtain (editable), skipping, processing (local url), collecting
|
||||
# (remote url or package name)
|
||||
if req_to_install.constraint or req_to_install.prepared:
|
||||
return []
|
||||
|
||||
req_to_install.prepared = True
|
||||
|
||||
# Parse and return dependencies
|
||||
dist = self._get_dist_for(req_to_install)
|
||||
# This will raise UnsupportedPythonVersion if the given Python
|
||||
# version isn't compatible with the distribution's Requires-Python.
|
||||
_check_dist_requires_python(
|
||||
dist,
|
||||
version_info=self._py_version_info,
|
||||
ignore_requires_python=self.ignore_requires_python,
|
||||
)
|
||||
|
||||
more_reqs: List[InstallRequirement] = []
|
||||
|
||||
def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None:
|
||||
# This idiosyncratically converts the Requirement to str and lets
|
||||
# make_install_req parse it again into a Requirement. But this is
|
||||
# the legacy resolver so I'm just not going to bother refactoring.
|
||||
sub_install_req = self._make_install_req(str(subreq), req_to_install)
|
||||
parent_req_name = req_to_install.name
|
||||
to_scan_again, add_to_parent = requirement_set.add_requirement(
|
||||
sub_install_req,
|
||||
parent_req_name=parent_req_name,
|
||||
extras_requested=extras_requested,
|
||||
)
|
||||
if parent_req_name and add_to_parent:
|
||||
self._discovered_dependencies[parent_req_name].append(add_to_parent)
|
||||
more_reqs.extend(to_scan_again)
|
||||
|
||||
with indent_log():
|
||||
# We add req_to_install before its dependencies, so that we
|
||||
# can refer to it when adding dependencies.
|
||||
if not requirement_set.has_requirement(req_to_install.name):
|
||||
# 'unnamed' requirements will get added here
|
||||
# 'unnamed' requirements can only come from being directly
|
||||
# provided by the user.
|
||||
assert req_to_install.user_supplied
|
||||
requirement_set.add_requirement(req_to_install, parent_req_name=None)
|
||||
|
||||
if not self.ignore_dependencies:
|
||||
if req_to_install.extras:
|
||||
logger.debug(
|
||||
"Installing extra requirements: %r",
|
||||
",".join(req_to_install.extras),
|
||||
)
|
||||
missing_requested = sorted(
|
||||
set(req_to_install.extras) - set(dist.iter_provided_extras())
|
||||
)
|
||||
for missing in missing_requested:
|
||||
logger.warning(
|
||||
"%s %s does not provide the extra '%s'",
|
||||
dist.raw_name,
|
||||
dist.version,
|
||||
missing,
|
||||
)
|
||||
|
||||
available_requested = sorted(
|
||||
set(dist.iter_provided_extras()) & set(req_to_install.extras)
|
||||
)
|
||||
for subreq in dist.iter_dependencies(available_requested):
|
||||
add_req(subreq, extras_requested=available_requested)
|
||||
|
||||
return more_reqs
|
||||
|
||||
def get_installation_order(
|
||||
self, req_set: RequirementSet
|
||||
) -> List[InstallRequirement]:
|
||||
"""Create the installation order.
|
||||
|
||||
The installation order is topological - requirements are installed
|
||||
before the requiring thing. We break cycles at an arbitrary point,
|
||||
and make no other guarantees.
|
||||
"""
|
||||
# The current implementation, which we may change at any point
|
||||
# installs the user specified things in the order given, except when
|
||||
# dependencies must come earlier to achieve topological order.
|
||||
order = []
|
||||
ordered_reqs: Set[InstallRequirement] = set()
|
||||
|
||||
def schedule(req: InstallRequirement) -> None:
|
||||
if req.satisfied_by or req in ordered_reqs:
|
||||
return
|
||||
if req.constraint:
|
||||
return
|
||||
ordered_reqs.add(req)
|
||||
for dep in self._discovered_dependencies[req.name]:
|
||||
schedule(dep)
|
||||
order.append(req)
|
||||
|
||||
for install_req in req_set.requirements.values():
|
||||
schedule(install_req)
|
||||
return order
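
get_installation_order() above is a depth-first, post-order walk over the discovered-dependencies map, so each requirement is appended only after everything it depends on, and the ordered_reqs set is what breaks cycles. A standalone sketch of the same scheduling idea over a made-up dependency graph (project names are illustrative):

from collections import defaultdict
from typing import DefaultDict, List, Set

# Made-up dependency graph: project -> projects it depends on.
discovered: DefaultDict[str, List[str]] = defaultdict(list)
discovered["flask"] = ["werkzeug", "jinja2"]
discovered["jinja2"] = ["markupsafe"]

order: List[str] = []
seen: Set[str] = set()


def schedule(name: str) -> None:
    if name in seen:  # the visited set is also what breaks cycles
        return
    seen.add(name)
    for dep in discovered[name]:  # dependencies first (post-order)
        schedule(dep)
    order.append(name)


for root in ["flask"]:
    schedule(root)

print(order)  # ['werkzeug', 'markupsafe', 'jinja2', 'flask']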
@@ -0,0 +1,141 @@
|
||||
from typing import FrozenSet, Iterable, Optional, Tuple, Union
|
||||
|
||||
from pip._vendor.packaging.specifiers import SpecifierSet
|
||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||
from pip._vendor.packaging.version import LegacyVersion, Version
|
||||
|
||||
from pip._internal.models.link import Link, links_equivalent
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.utils.hashes import Hashes
|
||||
|
||||
CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
|
||||
CandidateVersion = Union[LegacyVersion, Version]
|
||||
|
||||
|
||||
def format_name(project: str, extras: FrozenSet[str]) -> str:
|
||||
if not extras:
|
||||
return project
|
||||
canonical_extras = sorted(canonicalize_name(e) for e in extras)
|
||||
return "{}[{}]".format(project, ",".join(canonical_extras))
|
||||
|
||||
|
||||
class Constraint:
|
||||
def __init__(
|
||||
self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link]
|
||||
) -> None:
|
||||
self.specifier = specifier
|
||||
self.hashes = hashes
|
||||
self.links = links
|
||||
|
||||
@classmethod
|
||||
def empty(cls) -> "Constraint":
|
||||
return Constraint(SpecifierSet(), Hashes(), frozenset())
|
||||
|
||||
@classmethod
|
||||
def from_ireq(cls, ireq: InstallRequirement) -> "Constraint":
|
||||
links = frozenset([ireq.link]) if ireq.link else frozenset()
|
||||
return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
return bool(self.specifier) or bool(self.hashes) or bool(self.links)
|
||||
|
||||
def __and__(self, other: InstallRequirement) -> "Constraint":
|
||||
if not isinstance(other, InstallRequirement):
|
||||
return NotImplemented
|
||||
specifier = self.specifier & other.specifier
|
||||
hashes = self.hashes & other.hashes(trust_internet=False)
|
||||
links = self.links
|
||||
if other.link:
|
||||
links = links.union([other.link])
|
||||
return Constraint(specifier, hashes, links)
|
||||
|
||||
def is_satisfied_by(self, candidate: "Candidate") -> bool:
|
||||
# Reject if there are any mismatched URL constraints on this package.
|
||||
if self.links and not all(_match_link(link, candidate) for link in self.links):
|
||||
return False
|
||||
# We can safely always allow prereleases here since PackageFinder
|
||||
# already implements the prerelease logic, and would have filtered out
|
||||
# prerelease candidates if the user does not expect them.
|
||||
return self.specifier.contains(candidate.version, prereleases=True)
|
||||
|
||||
|
||||
class Requirement:
|
||||
@property
|
||||
def project_name(self) -> NormalizedName:
|
||||
"""The "project name" of a requirement.
|
||||
|
||||
This is different from ``name`` if this requirement contains extras,
|
||||
in which case ``name`` would contain the ``[...]`` part, while this
|
||||
refers to the name of the project.
|
||||
"""
|
||||
raise NotImplementedError("Subclass should override")
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""The name identifying this requirement in the resolver.
|
||||
|
||||
This is different from ``project_name`` if this requirement contains
|
||||
extras, where ``project_name`` would not contain the ``[...]`` part.
|
||||
"""
|
||||
raise NotImplementedError("Subclass should override")
|
||||
|
||||
def is_satisfied_by(self, candidate: "Candidate") -> bool:
|
||||
return False
|
||||
|
||||
def get_candidate_lookup(self) -> CandidateLookup:
|
||||
raise NotImplementedError("Subclass should override")
|
||||
|
||||
def format_for_error(self) -> str:
|
||||
raise NotImplementedError("Subclass should override")
|
||||
|
||||
|
||||
def _match_link(link: Link, candidate: "Candidate") -> bool:
|
||||
if candidate.source_link:
|
||||
return links_equivalent(link, candidate.source_link)
|
||||
return False
|
||||
|
||||
|
||||
class Candidate:
|
||||
@property
|
||||
def project_name(self) -> NormalizedName:
|
||||
"""The "project name" of the candidate.
|
||||
|
||||
This is different from ``name`` if this candidate contains extras,
|
||||
in which case ``name`` would contain the ``[...]`` part, while this
|
||||
refers to the name of the project.
|
||||
"""
|
||||
raise NotImplementedError("Override in subclass")
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""The name identifying this candidate in the resolver.
|
||||
|
||||
This is different from ``project_name`` if this candidate contains
|
||||
extras, where ``project_name`` would not contain the ``[...]`` part.
|
||||
"""
|
||||
raise NotImplementedError("Override in subclass")
|
||||
|
||||
@property
|
||||
def version(self) -> CandidateVersion:
|
||||
raise NotImplementedError("Override in subclass")
|
||||
|
||||
@property
|
||||
def is_installed(self) -> bool:
|
||||
raise NotImplementedError("Override in subclass")
|
||||
|
||||
@property
|
||||
def is_editable(self) -> bool:
|
||||
raise NotImplementedError("Override in subclass")
|
||||
|
||||
@property
|
||||
def source_link(self) -> Optional[Link]:
|
||||
raise NotImplementedError("Override in subclass")
|
||||
|
||||
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
|
||||
raise NotImplementedError("Override in subclass")
|
||||
|
||||
def get_install_requirement(self) -> Optional[InstallRequirement]:
|
||||
raise NotImplementedError("Override in subclass")
|
||||
|
||||
def format_for_error(self) -> str:
|
||||
raise NotImplementedError("Subclass should override")
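
Constraint.is_satisfied_by() above passes prereleases=True because PackageFinder has already filtered out pre-releases the user did not ask for. A small standalone illustration of that specifier behaviour, using the stand-alone packaging library (the same API as the vendored pip._vendor.packaging imported above):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=2.0,<3.0")

# By default a pre-release only matches if the specifier itself asks for one...
print(spec.contains(Version("2.1rc1")))                    # False
# ...but the resolver opts in, trusting PackageFinder's earlier filtering.
print(spec.contains(Version("2.1rc1"), prereleases=True))  # True
print(spec.contains(Version("3.1"), prereleases=True))     # False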
@@ -0,0 +1,540 @@
|
||||
import logging
|
||||
import sys
|
||||
from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast
|
||||
|
||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||
from pip._vendor.packaging.version import Version
|
||||
|
||||
from pip._internal.exceptions import HashError, MetadataInconsistent
|
||||
from pip._internal.metadata import BaseDistribution
|
||||
from pip._internal.models.link import Link, links_equivalent
|
||||
from pip._internal.models.wheel import Wheel
|
||||
from pip._internal.req.constructors import (
|
||||
install_req_from_editable,
|
||||
install_req_from_line,
|
||||
)
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.utils.misc import normalize_version_info
|
||||
|
||||
from .base import Candidate, CandidateVersion, Requirement, format_name
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .factory import Factory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
BaseCandidate = Union[
|
||||
"AlreadyInstalledCandidate",
|
||||
"EditableCandidate",
|
||||
"LinkCandidate",
|
||||
]
|
||||
|
||||
# Avoid conflicting with the PyPI package "Python".
|
||||
REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "<Python from Requires-Python>")
|
||||
|
||||
|
||||
def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]:
|
||||
"""The runtime version of BaseCandidate."""
|
||||
base_candidate_classes = (
|
||||
AlreadyInstalledCandidate,
|
||||
EditableCandidate,
|
||||
LinkCandidate,
|
||||
)
|
||||
if isinstance(candidate, base_candidate_classes):
|
||||
return candidate
|
||||
return None
|
||||
|
||||
|
||||
def make_install_req_from_link(
|
||||
link: Link, template: InstallRequirement
|
||||
) -> InstallRequirement:
|
||||
assert not template.editable, "template is editable"
|
||||
if template.req:
|
||||
line = str(template.req)
|
||||
else:
|
||||
line = link.url
|
||||
ireq = install_req_from_line(
|
||||
line,
|
||||
user_supplied=template.user_supplied,
|
||||
comes_from=template.comes_from,
|
||||
use_pep517=template.use_pep517,
|
||||
isolated=template.isolated,
|
||||
constraint=template.constraint,
|
||||
options=dict(
|
||||
install_options=template.install_options,
|
||||
global_options=template.global_options,
|
||||
hashes=template.hash_options,
|
||||
),
|
||||
)
|
||||
ireq.original_link = template.original_link
|
||||
ireq.link = link
|
||||
return ireq
|
||||
|
||||
|
||||
def make_install_req_from_editable(
|
||||
link: Link, template: InstallRequirement
|
||||
) -> InstallRequirement:
|
||||
assert template.editable, "template not editable"
|
||||
return install_req_from_editable(
|
||||
link.url,
|
||||
user_supplied=template.user_supplied,
|
||||
comes_from=template.comes_from,
|
||||
use_pep517=template.use_pep517,
|
||||
isolated=template.isolated,
|
||||
constraint=template.constraint,
|
||||
permit_editable_wheels=template.permit_editable_wheels,
|
||||
options=dict(
|
||||
install_options=template.install_options,
|
||||
global_options=template.global_options,
|
||||
hashes=template.hash_options,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _make_install_req_from_dist(
|
||||
dist: BaseDistribution, template: InstallRequirement
|
||||
) -> InstallRequirement:
|
||||
from pip._internal.metadata.pkg_resources import Distribution as _Dist
|
||||
|
||||
if template.req:
|
||||
line = str(template.req)
|
||||
elif template.link:
|
||||
line = f"{dist.canonical_name} @ {template.link.url}"
|
||||
else:
|
||||
line = f"{dist.canonical_name}=={dist.version}"
|
||||
ireq = install_req_from_line(
|
||||
line,
|
||||
user_supplied=template.user_supplied,
|
||||
comes_from=template.comes_from,
|
||||
use_pep517=template.use_pep517,
|
||||
isolated=template.isolated,
|
||||
constraint=template.constraint,
|
||||
options=dict(
|
||||
install_options=template.install_options,
|
||||
global_options=template.global_options,
|
||||
hashes=template.hash_options,
|
||||
),
|
||||
)
|
||||
ireq.satisfied_by = cast(_Dist, dist)._dist
|
||||
return ireq
|
||||
|
||||
|
||||
class _InstallRequirementBackedCandidate(Candidate):
|
||||
"""A candidate backed by an ``InstallRequirement``.
|
||||
|
||||
This represents a package request with the target not being already
|
||||
in the environment, and needs to be fetched and installed. The backing
|
||||
``InstallRequirement`` is responsible for most of the leg work; this
|
||||
class exposes appropriate information to the resolver.
|
||||
|
||||
:param link: The link passed to the ``InstallRequirement``. The backing
|
||||
``InstallRequirement`` will use this link to fetch the distribution.
|
||||
:param source_link: The link this candidate "originates" from. This is
|
||||
different from ``link`` when the link is found in the wheel cache.
|
||||
``link`` would point to the wheel cache, while this points to the
|
||||
found remote link (e.g. from pypi.org).
|
||||
"""
|
||||
|
||||
dist: BaseDistribution
|
||||
is_installed = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
link: Link,
|
||||
source_link: Link,
|
||||
ireq: InstallRequirement,
|
||||
factory: "Factory",
|
||||
name: Optional[NormalizedName] = None,
|
||||
version: Optional[CandidateVersion] = None,
|
||||
) -> None:
|
||||
self._link = link
|
||||
self._source_link = source_link
|
||||
self._factory = factory
|
||||
self._ireq = ireq
|
||||
self._name = name
|
||||
self._version = version
|
||||
self.dist = self._prepare()
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.name} {self.version}"
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "{class_name}({link!r})".format(
|
||||
class_name=self.__class__.__name__,
|
||||
link=str(self._link),
|
||||
)
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash((self.__class__, self._link))
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
if isinstance(other, self.__class__):
|
||||
return links_equivalent(self._link, other._link)
|
||||
return False
|
||||
|
||||
@property
|
||||
def source_link(self) -> Optional[Link]:
|
||||
return self._source_link
|
||||
|
||||
@property
|
||||
def project_name(self) -> NormalizedName:
|
||||
"""The normalised name of the project the candidate refers to"""
|
||||
if self._name is None:
|
||||
self._name = self.dist.canonical_name
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self.project_name
|
||||
|
||||
@property
|
||||
def version(self) -> CandidateVersion:
|
||||
if self._version is None:
|
||||
self._version = self.dist.version
|
||||
return self._version
|
||||
|
||||
def format_for_error(self) -> str:
|
||||
return "{} {} (from {})".format(
|
||||
self.name,
|
||||
self.version,
|
||||
self._link.file_path if self._link.is_file else self._link,
|
||||
)
|
||||
|
||||
def _prepare_distribution(self) -> BaseDistribution:
|
||||
raise NotImplementedError("Override in subclass")
|
||||
|
||||
def _check_metadata_consistency(self, dist: BaseDistribution) -> None:
|
||||
"""Check for consistency of project name and version of dist."""
|
||||
if self._name is not None and self._name != dist.canonical_name:
|
||||
raise MetadataInconsistent(
|
||||
self._ireq,
|
||||
"name",
|
||||
self._name,
|
||||
dist.canonical_name,
|
||||
)
|
||||
if self._version is not None and self._version != dist.version:
|
||||
raise MetadataInconsistent(
|
||||
self._ireq,
|
||||
"version",
|
||||
str(self._version),
|
||||
str(dist.version),
|
||||
)
|
||||
|
||||
def _prepare(self) -> BaseDistribution:
|
||||
try:
|
||||
dist = self._prepare_distribution()
|
||||
except HashError as e:
|
||||
# Provide HashError the underlying ireq that caused it. This
|
||||
# provides context for the resulting error message to show the
|
||||
# offending line to the user.
|
||||
e.req = self._ireq
|
||||
raise
|
||||
self._check_metadata_consistency(dist)
|
||||
return dist
|
||||
|
||||
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
|
||||
requires = self.dist.iter_dependencies() if with_requires else ()
|
||||
for r in requires:
|
||||
yield self._factory.make_requirement_from_spec(str(r), self._ireq)
|
||||
yield self._factory.make_requires_python_requirement(self.dist.requires_python)
|
||||
|
||||
def get_install_requirement(self) -> Optional[InstallRequirement]:
|
||||
return self._ireq
|
||||
|
||||
|
||||
class LinkCandidate(_InstallRequirementBackedCandidate):
|
||||
is_editable = False
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
link: Link,
|
||||
template: InstallRequirement,
|
||||
factory: "Factory",
|
||||
name: Optional[NormalizedName] = None,
|
||||
version: Optional[CandidateVersion] = None,
|
||||
) -> None:
|
||||
source_link = link
|
||||
cache_entry = factory.get_wheel_cache_entry(link, name)
|
||||
if cache_entry is not None:
|
||||
logger.debug("Using cached wheel link: %s", cache_entry.link)
|
||||
link = cache_entry.link
|
||||
ireq = make_install_req_from_link(link, template)
|
||||
assert ireq.link == link
|
||||
if ireq.link.is_wheel and not ireq.link.is_file:
|
||||
wheel = Wheel(ireq.link.filename)
|
||||
wheel_name = canonicalize_name(wheel.name)
|
||||
assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel"
|
||||
# Version may not be present for PEP 508 direct URLs
|
||||
if version is not None:
|
||||
wheel_version = Version(wheel.version)
|
||||
assert version == wheel_version, "{!r} != {!r} for wheel {}".format(
|
||||
version, wheel_version, name
|
||||
)
|
||||
|
||||
if (
|
||||
cache_entry is not None
|
||||
and cache_entry.persistent
|
||||
and template.link is template.original_link
|
||||
):
|
||||
ireq.original_link_is_in_wheel_cache = True
|
||||
|
||||
super().__init__(
|
||||
link=link,
|
||||
source_link=source_link,
|
||||
ireq=ireq,
|
||||
factory=factory,
|
||||
name=name,
|
||||
version=version,
|
||||
)
|
||||
|
||||
def _prepare_distribution(self) -> BaseDistribution:
|
||||
preparer = self._factory.preparer
|
||||
return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True)
|
||||
|
||||
|
||||
class EditableCandidate(_InstallRequirementBackedCandidate):
|
||||
is_editable = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
link: Link,
|
||||
template: InstallRequirement,
|
||||
factory: "Factory",
|
||||
name: Optional[NormalizedName] = None,
|
||||
version: Optional[CandidateVersion] = None,
|
||||
) -> None:
|
||||
super().__init__(
|
||||
link=link,
|
||||
source_link=link,
|
||||
ireq=make_install_req_from_editable(link, template),
|
||||
factory=factory,
|
||||
name=name,
|
||||
version=version,
|
||||
)
|
||||
|
||||
def _prepare_distribution(self) -> BaseDistribution:
|
||||
return self._factory.preparer.prepare_editable_requirement(self._ireq)
|
||||
|
||||
|
||||
class AlreadyInstalledCandidate(Candidate):
|
||||
is_installed = True
|
||||
source_link = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
dist: BaseDistribution,
|
||||
template: InstallRequirement,
|
||||
factory: "Factory",
|
||||
) -> None:
|
||||
self.dist = dist
|
||||
self._ireq = _make_install_req_from_dist(dist, template)
|
||||
self._factory = factory
|
||||
|
||||
# This is just logging some messages, so we can do it eagerly.
|
||||
# The returned dist would be exactly the same as self.dist because we
|
||||
# set satisfied_by in _make_install_req_from_dist.
|
||||
# TODO: Supply reason based on force_reinstall and upgrade_strategy.
|
||||
skip_reason = "already satisfied"
|
||||
factory.preparer.prepare_installed_requirement(self._ireq, skip_reason)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self.dist)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "{class_name}({distribution!r})".format(
|
||||
class_name=self.__class__.__name__,
|
||||
distribution=self.dist,
|
||||
)
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash((self.__class__, self.name, self.version))
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
if isinstance(other, self.__class__):
|
||||
return self.name == other.name and self.version == other.version
|
||||
return False
|
||||
|
||||
@property
|
||||
def project_name(self) -> NormalizedName:
|
||||
return self.dist.canonical_name
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self.project_name
|
||||
|
||||
@property
|
||||
def version(self) -> CandidateVersion:
|
||||
return self.dist.version
|
||||
|
||||
@property
|
||||
def is_editable(self) -> bool:
|
||||
return self.dist.editable
|
||||
|
||||
def format_for_error(self) -> str:
|
||||
return f"{self.name} {self.version} (Installed)"
|
||||
|
||||
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
|
||||
if not with_requires:
|
||||
return
|
||||
for r in self.dist.iter_dependencies():
|
||||
yield self._factory.make_requirement_from_spec(str(r), self._ireq)
|
||||
|
||||
def get_install_requirement(self) -> Optional[InstallRequirement]:
|
||||
return None
|
||||
|
||||
|
||||
class ExtrasCandidate(Candidate):
|
||||
"""A candidate that has 'extras', indicating additional dependencies.
|
||||
|
||||
Requirements can be for a project with dependencies, something like
|
||||
foo[extra]. The extras don't affect the project/version being installed
|
||||
directly, but indicate that we need additional dependencies. We model that
|
||||
by having an artificial ExtrasCandidate that wraps the "base" candidate.
|
||||
|
||||
The ExtrasCandidate differs from the base in the following ways:
|
||||
|
||||
1. It has a unique name, of the form foo[extra]. This causes the resolver
|
||||
to treat it as a separate node in the dependency graph.
|
||||
2. When we're getting the candidate's dependencies,
|
||||
a) We specify that we want the extra dependencies as well.
|
||||
b) We add a dependency on the base candidate.
|
||||
See below for why this is needed.
|
||||
3. We return None for the underlying InstallRequirement, as the base
|
||||
candidate will provide it, and we don't want to end up with duplicates.
|
||||
|
||||
The dependency on the base candidate is needed so that the resolver can't
|
||||
decide that it should recommend foo[extra1] version 1.0 and foo[extra2]
|
||||
version 2.0. Having those candidates depend on foo=1.0 and foo=2.0
|
||||
respectively forces the resolver to recognise that this is a conflict.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
base: BaseCandidate,
|
||||
extras: FrozenSet[str],
|
||||
) -> None:
|
||||
self.base = base
|
||||
self.extras = extras
|
||||
|
||||
def __str__(self) -> str:
|
||||
name, rest = str(self.base).split(" ", 1)
|
||||
return "{}[{}] {}".format(name, ",".join(self.extras), rest)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "{class_name}(base={base!r}, extras={extras!r})".format(
|
||||
class_name=self.__class__.__name__,
|
||||
base=self.base,
|
||||
extras=self.extras,
|
||||
)
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash((self.base, self.extras))
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
if isinstance(other, self.__class__):
|
||||
return self.base == other.base and self.extras == other.extras
|
||||
return False
|
||||
|
||||
@property
|
||||
def project_name(self) -> NormalizedName:
|
||||
return self.base.project_name
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""The normalised name of the project the candidate refers to"""
|
||||
return format_name(self.base.project_name, self.extras)
|
||||
|
||||
@property
|
||||
def version(self) -> CandidateVersion:
|
||||
return self.base.version
|
||||
|
||||
def format_for_error(self) -> str:
|
||||
return "{} [{}]".format(
|
||||
self.base.format_for_error(), ", ".join(sorted(self.extras))
|
||||
)
|
||||
|
||||
@property
|
||||
def is_installed(self) -> bool:
|
||||
return self.base.is_installed
|
||||
|
||||
@property
|
||||
def is_editable(self) -> bool:
|
||||
return self.base.is_editable
|
||||
|
||||
@property
|
||||
def source_link(self) -> Optional[Link]:
|
||||
return self.base.source_link
|
||||
|
||||
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
|
||||
factory = self.base._factory
|
||||
|
||||
# Add a dependency on the exact base
|
||||
# (See note 2b in the class docstring)
|
||||
yield factory.make_requirement_from_candidate(self.base)
|
||||
if not with_requires:
|
||||
return
|
||||
|
||||
# The user may have specified extras that the candidate doesn't
|
||||
# support. We ignore any unsupported extras here.
|
||||
valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
|
||||
invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
|
||||
for extra in sorted(invalid_extras):
|
||||
logger.warning(
|
||||
"%s %s does not provide the extra '%s'",
|
||||
self.base.name,
|
||||
self.version,
|
||||
extra,
|
||||
)
|
||||
|
||||
for r in self.base.dist.iter_dependencies(valid_extras):
|
||||
requirement = factory.make_requirement_from_spec(
|
||||
str(r), self.base._ireq, valid_extras
|
||||
)
|
||||
if requirement:
|
||||
yield requirement
|
||||
|
||||
def get_install_requirement(self) -> Optional[InstallRequirement]:
|
||||
# We don't return anything here, because we always
|
||||
# depend on the base candidate, and we'll get the
|
||||
# install requirement from that.
|
||||
return None
|
||||
|
||||
|
||||
class RequiresPythonCandidate(Candidate):
|
||||
is_installed = False
|
||||
source_link = None
|
||||
|
||||
def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None:
|
||||
if py_version_info is not None:
|
||||
version_info = normalize_version_info(py_version_info)
|
||||
else:
|
||||
version_info = sys.version_info[:3]
|
||||
self._version = Version(".".join(str(c) for c in version_info))
|
||||
|
||||
# We don't need to implement __eq__() and __ne__() since there is always
|
||||
# only one RequiresPythonCandidate in a resolution, i.e. the host Python.
|
||||
# The built-in object.__eq__() and object.__ne__() do exactly what we want.
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Python {self._version}"
|
||||
|
||||
@property
|
||||
def project_name(self) -> NormalizedName:
|
||||
return REQUIRES_PYTHON_IDENTIFIER
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return REQUIRES_PYTHON_IDENTIFIER
|
||||
|
||||
@property
|
||||
def version(self) -> CandidateVersion:
|
||||
return self._version
|
||||
|
||||
def format_for_error(self) -> str:
|
||||
return f"Python {self.version}"
|
||||
|
||||
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
|
||||
return ()
|
||||
|
||||
def get_install_requirement(self) -> Optional[InstallRequirement]:
|
||||
return None
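
The ExtrasCandidate docstring above explains that foo[extra1] and foo[extra2] become separate nodes that both also depend on the plain foo node, which is how conflicting version picks are surfaced. A standalone sketch of the naming side of that scheme, mirroring format_name() from base.py above (canonicalize_name here comes from the stand-alone packaging library, and the project names are made up):

from typing import FrozenSet

from packaging.utils import canonicalize_name


def format_name(project: str, extras: FrozenSet[str]) -> str:
    # Same shape as the helper shown in resolution/resolvelib/base.py above.
    if not extras:
        return project
    joined = ",".join(sorted(canonicalize_name(e) for e in extras))
    return "{}[{}]".format(project, joined)


# The two extras-variants get distinct node names in the dependency graph...
print(format_name("requests", frozenset({"socks"})))     # requests[socks]
print(format_name("requests", frozenset({"security"})))  # requests[security]
# ...but each ExtrasCandidate also depends on the plain "requests" candidate,
# so the resolver cannot silently choose different versions for them.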
@@ -0,0 +1,701 @@
|
||||
import contextlib
|
||||
import functools
|
||||
import logging
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Dict,
|
||||
FrozenSet,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Mapping,
|
||||
NamedTuple,
|
||||
Optional,
|
||||
Sequence,
|
||||
Set,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
cast,
|
||||
)
|
||||
|
||||
from pip._vendor.packaging.requirements import InvalidRequirement
|
||||
from pip._vendor.packaging.specifiers import SpecifierSet
|
||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||
from pip._vendor.resolvelib import ResolutionImpossible
|
||||
|
||||
from pip._internal.cache import CacheEntry, WheelCache
|
||||
from pip._internal.exceptions import (
|
||||
DistributionNotFound,
|
||||
InstallationError,
|
||||
InstallationSubprocessError,
|
||||
MetadataInconsistent,
|
||||
UnsupportedPythonVersion,
|
||||
UnsupportedWheel,
|
||||
)
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.metadata import BaseDistribution, get_default_environment
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.models.wheel import Wheel
|
||||
from pip._internal.operations.prepare import RequirementPreparer
|
||||
from pip._internal.req.constructors import install_req_from_link_and_ireq
|
||||
from pip._internal.req.req_install import (
|
||||
InstallRequirement,
|
||||
check_invalid_constraint_type,
|
||||
)
|
||||
from pip._internal.resolution.base import InstallRequirementProvider
|
||||
from pip._internal.utils.compatibility_tags import get_supported
|
||||
from pip._internal.utils.hashes import Hashes
|
||||
from pip._internal.utils.packaging import get_requirement
|
||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||
|
||||
from .base import Candidate, CandidateVersion, Constraint, Requirement
|
||||
from .candidates import (
|
||||
AlreadyInstalledCandidate,
|
||||
BaseCandidate,
|
||||
EditableCandidate,
|
||||
ExtrasCandidate,
|
||||
LinkCandidate,
|
||||
RequiresPythonCandidate,
|
||||
as_base_candidate,
|
||||
)
|
||||
from .found_candidates import FoundCandidates, IndexCandidateInfo
|
||||
from .requirements import (
|
||||
ExplicitRequirement,
|
||||
RequiresPythonRequirement,
|
||||
SpecifierRequirement,
|
||||
UnsatisfiableRequirement,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Protocol
|
||||
|
||||
class ConflictCause(Protocol):
|
||||
requirement: RequiresPythonRequirement
|
||||
parent: Candidate
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
C = TypeVar("C")
|
||||
Cache = Dict[Link, C]
|
||||
|
||||
|
||||
class CollectedRootRequirements(NamedTuple):
|
||||
requirements: List[Requirement]
|
||||
constraints: Dict[str, Constraint]
|
||||
user_requested: Dict[str, int]
|
||||
|
||||
|
||||
class Factory:
|
||||
def __init__(
|
||||
self,
|
||||
finder: PackageFinder,
|
||||
preparer: RequirementPreparer,
|
||||
make_install_req: InstallRequirementProvider,
|
||||
wheel_cache: Optional[WheelCache],
|
||||
use_user_site: bool,
|
||||
force_reinstall: bool,
|
||||
ignore_installed: bool,
|
||||
ignore_requires_python: bool,
|
||||
py_version_info: Optional[Tuple[int, ...]] = None,
|
||||
) -> None:
|
||||
self._finder = finder
|
||||
self.preparer = preparer
|
||||
self._wheel_cache = wheel_cache
|
||||
self._python_candidate = RequiresPythonCandidate(py_version_info)
|
||||
self._make_install_req_from_spec = make_install_req
|
||||
self._use_user_site = use_user_site
|
||||
self._force_reinstall = force_reinstall
|
||||
self._ignore_requires_python = ignore_requires_python
|
||||
|
||||
self._build_failures: Cache[InstallationError] = {}
|
||||
self._link_candidate_cache: Cache[LinkCandidate] = {}
|
||||
self._editable_candidate_cache: Cache[EditableCandidate] = {}
|
||||
self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
|
||||
self._extras_candidate_cache: Dict[
|
||||
Tuple[int, FrozenSet[str]], ExtrasCandidate
|
||||
] = {}
|
||||
|
||||
if not ignore_installed:
|
||||
env = get_default_environment()
|
||||
self._installed_dists = {
|
||||
dist.canonical_name: dist
|
||||
for dist in env.iter_installed_distributions(local_only=False)
|
||||
}
|
||||
else:
|
||||
self._installed_dists = {}
|
||||
|
||||
@property
|
||||
def force_reinstall(self) -> bool:
|
||||
return self._force_reinstall
|
||||
|
||||
def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None:
|
||||
if not link.is_wheel:
|
||||
return
|
||||
wheel = Wheel(link.filename)
|
||||
if wheel.supported(self._finder.target_python.get_tags()):
|
||||
return
|
||||
msg = f"{link.filename} is not a supported wheel on this platform."
|
||||
raise UnsupportedWheel(msg)
|
||||
|
||||
def _make_extras_candidate(
|
||||
self, base: BaseCandidate, extras: FrozenSet[str]
|
||||
) -> ExtrasCandidate:
|
||||
cache_key = (id(base), extras)
|
||||
try:
|
||||
candidate = self._extras_candidate_cache[cache_key]
|
||||
except KeyError:
|
||||
candidate = ExtrasCandidate(base, extras)
|
||||
self._extras_candidate_cache[cache_key] = candidate
|
||||
return candidate
|
||||
|
||||
def _make_candidate_from_dist(
|
||||
self,
|
||||
dist: BaseDistribution,
|
||||
extras: FrozenSet[str],
|
||||
template: InstallRequirement,
|
||||
) -> Candidate:
|
||||
try:
|
||||
base = self._installed_candidate_cache[dist.canonical_name]
|
||||
except KeyError:
|
||||
base = AlreadyInstalledCandidate(dist, template, factory=self)
|
||||
self._installed_candidate_cache[dist.canonical_name] = base
|
||||
if not extras:
|
||||
return base
|
||||
return self._make_extras_candidate(base, extras)
|
||||
|
||||
def _make_candidate_from_link(
|
||||
self,
|
||||
link: Link,
|
||||
extras: FrozenSet[str],
|
||||
template: InstallRequirement,
|
||||
name: Optional[NormalizedName],
|
||||
version: Optional[CandidateVersion],
|
||||
) -> Optional[Candidate]:
|
||||
# TODO: Check already installed candidate, and use it if the link and
|
||||
# editable flag match.
|
||||
|
||||
if link in self._build_failures:
|
||||
# We already tried this candidate before, and it does not build.
|
||||
# Don't bother trying again.
|
||||
return None
|
||||
|
||||
if template.editable:
|
||||
if link not in self._editable_candidate_cache:
|
||||
try:
|
||||
self._editable_candidate_cache[link] = EditableCandidate(
|
||||
link,
|
||||
template,
|
||||
factory=self,
|
||||
name=name,
|
||||
version=version,
|
||||
)
|
||||
except (InstallationSubprocessError, MetadataInconsistent) as e:
|
||||
logger.warning("Discarding %s. %s", link, e)
|
||||
self._build_failures[link] = e
|
||||
return None
|
||||
base: BaseCandidate = self._editable_candidate_cache[link]
|
||||
else:
|
||||
if link not in self._link_candidate_cache:
|
||||
try:
|
||||
self._link_candidate_cache[link] = LinkCandidate(
|
||||
link,
|
||||
template,
|
||||
factory=self,
|
||||
name=name,
|
||||
version=version,
|
||||
)
|
||||
except (InstallationSubprocessError, MetadataInconsistent) as e:
|
||||
logger.warning("Discarding %s. %s", link, e)
|
||||
self._build_failures[link] = e
|
||||
return None
|
||||
base = self._link_candidate_cache[link]
|
||||
|
||||
if not extras:
|
||||
return base
|
||||
return self._make_extras_candidate(base, extras)
|
||||
|
||||
def _iter_found_candidates(
|
||||
self,
|
||||
ireqs: Sequence[InstallRequirement],
|
||||
specifier: SpecifierSet,
|
||||
hashes: Hashes,
|
||||
prefers_installed: bool,
|
||||
incompatible_ids: Set[int],
|
||||
) -> Iterable[Candidate]:
|
||||
if not ireqs:
|
||||
return ()
|
||||
|
||||
# The InstallRequirement implementation requires us to give it a
|
||||
# "template". Here we just choose the first requirement to represent
|
||||
# all of them.
|
||||
# Hopefully the Project model can correct this mismatch in the future.
|
||||
template = ireqs[0]
|
||||
assert template.req, "Candidates found on index must be PEP 508"
|
||||
name = canonicalize_name(template.req.name)
|
||||
|
||||
extras: FrozenSet[str] = frozenset()
|
||||
for ireq in ireqs:
|
||||
assert ireq.req, "Candidates found on index must be PEP 508"
|
||||
specifier &= ireq.req.specifier
|
||||
hashes &= ireq.hashes(trust_internet=False)
|
||||
extras |= frozenset(ireq.extras)
|
||||
|
||||
def _get_installed_candidate() -> Optional[Candidate]:
|
||||
"""Get the candidate for the currently-installed version."""
|
||||
# If --force-reinstall is set, we want the version from the index
|
||||
# instead, so we "pretend" there is nothing installed.
|
||||
if self._force_reinstall:
|
||||
return None
|
||||
try:
|
||||
installed_dist = self._installed_dists[name]
|
||||
except KeyError:
|
||||
return None
|
||||
# Don't use the installed distribution if its version does not fit
|
||||
# the current dependency graph.
|
||||
if not specifier.contains(installed_dist.version, prereleases=True):
|
||||
return None
|
||||
candidate = self._make_candidate_from_dist(
|
||||
dist=installed_dist,
|
||||
extras=extras,
|
||||
template=template,
|
||||
)
|
||||
# The candidate is a known incompatibility. Don't use it.
|
||||
if id(candidate) in incompatible_ids:
|
||||
return None
|
||||
return candidate
|
||||
|
||||
def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]:
|
||||
result = self._finder.find_best_candidate(
|
||||
project_name=name,
|
||||
specifier=specifier,
|
||||
hashes=hashes,
|
||||
)
|
||||
icans = list(result.iter_applicable())
|
||||
|
||||
# PEP 592: Yanked releases must be ignored unless only yanked
|
||||
# releases can satisfy the version range. So if this is false,
|
||||
# all yanked icans need to be skipped.
|
||||
all_yanked = all(ican.link.is_yanked for ican in icans)
|
||||
|
||||
# PackageFinder returns earlier versions first, so we reverse.
|
||||
for ican in reversed(icans):
|
||||
if not all_yanked and ican.link.is_yanked:
|
||||
continue
|
||||
func = functools.partial(
|
||||
self._make_candidate_from_link,
|
||||
link=ican.link,
|
||||
extras=extras,
|
||||
template=template,
|
||||
name=name,
|
||||
version=ican.version,
|
||||
)
|
||||
yield ican.version, func
|
||||
|
||||
return FoundCandidates(
|
||||
iter_index_candidate_infos,
|
||||
_get_installed_candidate(),
|
||||
prefers_installed,
|
||||
incompatible_ids,
|
||||
)
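    # Editorial note, not part of pip: the all_yanked check above implements
    # PEP 592's rule that yanked releases are only used as a last resort.
    # For example, if the applicable candidates are 1.0 (not yanked) and
    # 1.1 (yanked), only 1.0 is yielded; if every applicable candidate is
    # yanked, all of them are yielded so resolution can still succeed.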
|
||||
|
||||
def _iter_explicit_candidates_from_base(
|
||||
self,
|
||||
base_requirements: Iterable[Requirement],
|
||||
extras: FrozenSet[str],
|
||||
) -> Iterator[Candidate]:
|
||||
"""Produce explicit candidates from the base given an extra-ed package.
|
||||
|
||||
:param base_requirements: Requirements known to the resolver. The
|
||||
requirements are guaranteed to not have extras.
|
||||
:param extras: The extras to inject into the explicit requirements'
|
||||
candidates.
|
||||
"""
|
||||
for req in base_requirements:
|
||||
lookup_cand, _ = req.get_candidate_lookup()
|
||||
if lookup_cand is None: # Not explicit.
|
||||
continue
|
||||
# We've stripped extras from the identifier, and should always
|
||||
# get a BaseCandidate here, unless there's a bug elsewhere.
|
||||
base_cand = as_base_candidate(lookup_cand)
|
||||
assert base_cand is not None, "no extras here"
|
||||
yield self._make_extras_candidate(base_cand, extras)
|
||||
|
||||
def _iter_candidates_from_constraints(
|
||||
self,
|
||||
identifier: str,
|
||||
constraint: Constraint,
|
||||
template: InstallRequirement,
|
||||
) -> Iterator[Candidate]:
|
||||
"""Produce explicit candidates from constraints.
|
||||
|
||||
This creates "fake" InstallRequirement objects that are basically clones
|
||||
of what "should" be the template, but with original_link set to link.
|
||||
"""
|
||||
for link in constraint.links:
|
||||
self._fail_if_link_is_unsupported_wheel(link)
|
||||
candidate = self._make_candidate_from_link(
|
||||
link,
|
||||
extras=frozenset(),
|
||||
template=install_req_from_link_and_ireq(link, template),
|
||||
name=canonicalize_name(identifier),
|
||||
version=None,
|
||||
)
|
||||
if candidate:
|
||||
yield candidate
|
||||
|
||||
def find_candidates(
|
||||
self,
|
||||
identifier: str,
|
||||
requirements: Mapping[str, Iterable[Requirement]],
|
||||
incompatibilities: Mapping[str, Iterator[Candidate]],
|
||||
constraint: Constraint,
|
||||
prefers_installed: bool,
|
||||
) -> Iterable[Candidate]:
|
||||
# Collect basic lookup information from the requirements.
|
||||
explicit_candidates: Set[Candidate] = set()
|
||||
ireqs: List[InstallRequirement] = []
|
||||
for req in requirements[identifier]:
|
||||
cand, ireq = req.get_candidate_lookup()
|
||||
if cand is not None:
|
||||
explicit_candidates.add(cand)
|
||||
if ireq is not None:
|
||||
ireqs.append(ireq)
|
||||
|
||||
# If the current identifier contains extras, add explicit candidates
|
||||
# from entries from extra-less identifier.
|
||||
with contextlib.suppress(InvalidRequirement):
|
||||
parsed_requirement = get_requirement(identifier)
|
||||
explicit_candidates.update(
|
||||
self._iter_explicit_candidates_from_base(
|
||||
requirements.get(parsed_requirement.name, ()),
|
||||
frozenset(parsed_requirement.extras),
|
||||
),
|
||||
)
|
||||
|
||||
# Add explicit candidates from constraints. We only do this if there are
|
||||
# known ireqs, which represent requirements not already explicit. If
|
||||
# there are no ireqs, we're constraining already-explicit requirements,
|
||||
# which is handled later when we return the explicit candidates.
|
||||
if ireqs:
|
||||
try:
|
||||
explicit_candidates.update(
|
||||
self._iter_candidates_from_constraints(
|
||||
identifier,
|
||||
constraint,
|
||||
template=ireqs[0],
|
||||
),
|
||||
)
|
||||
except UnsupportedWheel:
|
||||
# If we're constrained to install a wheel incompatible with the
|
||||
# target architecture, no candidates will ever be valid.
|
||||
return ()
|
||||
|
||||
# Since we cache all the candidates, incompatibility identification
|
||||
# can be made quicker by comparing only the id() values.
|
||||
incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())}
|
||||
|
||||
# If none of the requirements want an explicit candidate, we can ask
|
||||
# the finder for candidates.
|
||||
if not explicit_candidates:
|
||||
return self._iter_found_candidates(
|
||||
ireqs,
|
||||
constraint.specifier,
|
||||
constraint.hashes,
|
||||
prefers_installed,
|
||||
incompat_ids,
|
||||
)
|
||||
|
||||
return (
|
||||
c
|
||||
for c in explicit_candidates
|
||||
if id(c) not in incompat_ids
|
||||
and constraint.is_satisfied_by(c)
|
||||
and all(req.is_satisfied_by(c) for req in requirements[identifier])
|
||||
)
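# Illustrative sketch (not part of pip): because candidates are cached and
# re-used, the identity-based filter returned above only needs id() values.
# Assumed REPL session, with plain strings standing in for Candidate objects:
#
# >>> cached = ["cand-a", "cand-b", "cand-c"]
# >>> incompatible_ids = {id(cached[1])}
# >>> [c for c in cached if id(c) not in incompatible_ids]
# ['cand-a', 'cand-c']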
|
||||
|
||||
def _make_requirement_from_install_req(
|
||||
self, ireq: InstallRequirement, requested_extras: Iterable[str]
|
||||
) -> Optional[Requirement]:
|
||||
if not ireq.match_markers(requested_extras):
|
||||
logger.info(
|
||||
"Ignoring %s: markers '%s' don't match your environment",
|
||||
ireq.name,
|
||||
ireq.markers,
|
||||
)
|
||||
return None
|
||||
if not ireq.link:
|
||||
return SpecifierRequirement(ireq)
|
||||
self._fail_if_link_is_unsupported_wheel(ireq.link)
|
||||
cand = self._make_candidate_from_link(
|
||||
ireq.link,
|
||||
extras=frozenset(ireq.extras),
|
||||
template=ireq,
|
||||
name=canonicalize_name(ireq.name) if ireq.name else None,
|
||||
version=None,
|
||||
)
|
||||
if cand is None:
|
||||
# There's no way we can satisfy a URL requirement if the underlying
|
||||
# candidate fails to build. An unnamed URL must be user-supplied, so
|
||||
# we fail eagerly. If the URL is named, an unsatisfiable requirement
|
||||
# can make the resolver do the right thing, either backtrack (and
|
||||
# maybe find some other requirement that's buildable) or raise a
|
||||
# ResolutionImpossible eventually.
|
||||
if not ireq.name:
|
||||
raise self._build_failures[ireq.link]
|
||||
return UnsatisfiableRequirement(canonicalize_name(ireq.name))
|
||||
return self.make_requirement_from_candidate(cand)
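# Rough illustration (not pip's own code) of the marker check that makes this
# method return None: ireq.match_markers() ultimately evaluates a PEP 508
# marker against the running interpreter, e.g.:
#
# >>> from pip._vendor.packaging.markers import Marker
# >>> Marker('python_version < "3"').evaluate()
# False
# >>> Marker('python_version >= "3"').evaluate()
# True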
|
||||
|
||||
def collect_root_requirements(
|
||||
self, root_ireqs: List[InstallRequirement]
|
||||
) -> CollectedRootRequirements:
|
||||
collected = CollectedRootRequirements([], {}, {})
|
||||
for i, ireq in enumerate(root_ireqs):
|
||||
if ireq.constraint:
|
||||
# Ensure we only accept valid constraints
|
||||
problem = check_invalid_constraint_type(ireq)
|
||||
if problem:
|
||||
raise InstallationError(problem)
|
||||
if not ireq.match_markers():
|
||||
continue
|
||||
assert ireq.name, "Constraint must be named"
|
||||
name = canonicalize_name(ireq.name)
|
||||
if name in collected.constraints:
|
||||
collected.constraints[name] &= ireq
|
||||
else:
|
||||
collected.constraints[name] = Constraint.from_ireq(ireq)
|
||||
else:
|
||||
req = self._make_requirement_from_install_req(
|
||||
ireq,
|
||||
requested_extras=(),
|
||||
)
|
||||
if req is None:
|
||||
continue
|
||||
if ireq.user_supplied and req.name not in collected.user_requested:
|
||||
collected.user_requested[req.name] = i
|
||||
collected.requirements.append(req)
|
||||
return collected
|
||||
|
||||
def make_requirement_from_candidate(
|
||||
self, candidate: Candidate
|
||||
) -> ExplicitRequirement:
|
||||
return ExplicitRequirement(candidate)
|
||||
|
||||
def make_requirement_from_spec(
|
||||
self,
|
||||
specifier: str,
|
||||
comes_from: Optional[InstallRequirement],
|
||||
requested_extras: Iterable[str] = (),
|
||||
) -> Optional[Requirement]:
|
||||
ireq = self._make_install_req_from_spec(specifier, comes_from)
|
||||
return self._make_requirement_from_install_req(ireq, requested_extras)
|
||||
|
||||
def make_requires_python_requirement(
|
||||
self,
|
||||
specifier: SpecifierSet,
|
||||
) -> Optional[Requirement]:
|
||||
if self._ignore_requires_python:
|
||||
return None
|
||||
# Don't bother creating a dependency for an empty Requires-Python.
|
||||
if not str(specifier):
|
||||
return None
|
||||
return RequiresPythonRequirement(specifier, self._python_candidate)
|
||||
|
||||
def get_wheel_cache_entry(
|
||||
self, link: Link, name: Optional[str]
|
||||
) -> Optional[CacheEntry]:
|
||||
"""Look up the link in the wheel cache.
|
||||
|
||||
If ``preparer.require_hashes`` is True, don't use the wheel cache,
|
||||
because cached wheels, always built locally, have different hashes
|
||||
than the files downloaded from the index server and thus throw false
|
||||
hash mismatches. Furthermore, cached wheels at present have
|
||||
nondeterministic contents due to file modification times.
|
||||
"""
|
||||
if self._wheel_cache is None or self.preparer.require_hashes:
|
||||
return None
|
||||
return self._wheel_cache.get_cache_entry(
|
||||
link=link,
|
||||
package_name=name,
|
||||
supported_tags=get_supported(),
|
||||
)
|
||||
|
||||
def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]:
|
||||
# TODO: Are there more cases this needs to return True? Editable?
|
||||
dist = self._installed_dists.get(candidate.project_name)
|
||||
if dist is None: # Not installed, no uninstallation required.
|
||||
return None
|
||||
|
||||
# We're installing into global site. The current installation must
|
||||
# be uninstalled, no matter whether it's in the global or user site, because
|
||||
# user site installation has precedence over global.
|
||||
if not self._use_user_site:
|
||||
return dist
|
||||
|
||||
# We're installing into user site. Remove the user site installation.
|
||||
if dist.in_usersite:
|
||||
return dist
|
||||
|
||||
# We're installing into user site, but the installed incompatible
|
||||
# package is in global site. We can't uninstall that, and would let
|
||||
# the new user installation to "shadow" it. But shadowing won't work
|
||||
# in virtual environments, so we error out.
|
||||
if running_under_virtualenv() and dist.in_site_packages:
|
||||
message = (
|
||||
f"Will not install to the user site because it will lack "
|
||||
f"sys.path precedence to {dist.raw_name} in {dist.location}"
|
||||
)
|
||||
raise InstallationError(message)
|
||||
return None
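# Summary of the branches above (existing installation vs. target site):
#
#   target site   existing install        action
#   -----------   ---------------------   -------------------------------
#   global        anywhere                uninstall (return dist)
#   user          user site               uninstall (return dist)
#   user          global site, in venv    raise InstallationError
#   user          global site, no venv    shadow silently (return None)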
|
||||
|
||||
def _report_requires_python_error(
|
||||
self, causes: Sequence["ConflictCause"]
|
||||
) -> UnsupportedPythonVersion:
|
||||
assert causes, "Requires-Python error reported with no cause"
|
||||
|
||||
version = self._python_candidate.version
|
||||
|
||||
if len(causes) == 1:
|
||||
specifier = str(causes[0].requirement.specifier)
|
||||
message = (
|
||||
f"Package {causes[0].parent.name!r} requires a different "
|
||||
f"Python: {version} not in {specifier!r}"
|
||||
)
|
||||
return UnsupportedPythonVersion(message)
|
||||
|
||||
message = f"Packages require a different Python. {version} not in:"
|
||||
for cause in causes:
|
||||
package = cause.parent.format_for_error()
|
||||
specifier = str(cause.requirement.specifier)
|
||||
message += f"\n{specifier!r} (required by {package})"
|
||||
return UnsupportedPythonVersion(message)
|
||||
|
||||
def _report_single_requirement_conflict(
|
||||
self, req: Requirement, parent: Optional[Candidate]
|
||||
) -> DistributionNotFound:
|
||||
if parent is None:
|
||||
req_disp = str(req)
|
||||
else:
|
||||
req_disp = f"{req} (from {parent.name})"
|
||||
|
||||
cands = self._finder.find_all_candidates(req.project_name)
|
||||
versions = [str(v) for v in sorted({c.version for c in cands})]
|
||||
|
||||
logger.critical(
|
||||
"Could not find a version that satisfies the requirement %s "
|
||||
"(from versions: %s)",
|
||||
req_disp,
|
||||
", ".join(versions) or "none",
|
||||
)
|
||||
if str(req) == "requirements.txt":
|
||||
logger.info(
|
||||
"HINT: You are attempting to install a package literally "
|
||||
'named "requirements.txt" (which cannot exist). Consider '
|
||||
"using the '-r' flag to install the packages listed in "
|
||||
"requirements.txt"
|
||||
)
|
||||
|
||||
return DistributionNotFound(f"No matching distribution found for {req}")
|
||||
|
||||
def get_installation_error(
|
||||
self,
|
||||
e: "ResolutionImpossible[Requirement, Candidate]",
|
||||
constraints: Dict[str, Constraint],
|
||||
) -> InstallationError:
|
||||
|
||||
assert e.causes, "Installation error reported with no cause"
|
||||
|
||||
# If one of the things we can't solve is "we need Python X.Y",
|
||||
# that is what we report.
|
||||
requires_python_causes = [
|
||||
cause
|
||||
for cause in e.causes
|
||||
if isinstance(cause.requirement, RequiresPythonRequirement)
|
||||
and not cause.requirement.is_satisfied_by(self._python_candidate)
|
||||
]
|
||||
if requires_python_causes:
|
||||
# The comprehension above makes sure all Requirement instances are
|
||||
# RequiresPythonRequirement, so let's cast for convenience.
|
||||
return self._report_requires_python_error(
|
||||
cast("Sequence[ConflictCause]", requires_python_causes),
|
||||
)
|
||||
|
||||
# Otherwise, we have a set of causes which can't all be satisfied
|
||||
# at once.
|
||||
|
||||
# The simplest case is when we have *one* cause that can't be
|
||||
# satisfied. We just report that case.
|
||||
if len(e.causes) == 1:
|
||||
req, parent = e.causes[0]
|
||||
if req.name not in constraints:
|
||||
return self._report_single_requirement_conflict(req, parent)
|
||||
|
||||
# OK, we now have a list of requirements that can't all be
|
||||
# satisfied at once.
|
||||
|
||||
# A couple of formatting helpers
|
||||
def text_join(parts: List[str]) -> str:
|
||||
if len(parts) == 1:
|
||||
return parts[0]
|
||||
|
||||
return ", ".join(parts[:-1]) + " and " + parts[-1]
|
||||
|
||||
def describe_trigger(parent: Candidate) -> str:
|
||||
ireq = parent.get_install_requirement()
|
||||
if not ireq or not ireq.comes_from:
|
||||
return f"{parent.name}=={parent.version}"
|
||||
if isinstance(ireq.comes_from, InstallRequirement):
|
||||
return str(ireq.comes_from.name)
|
||||
return str(ireq.comes_from)
|
||||
|
||||
triggers = set()
|
||||
for req, parent in e.causes:
|
||||
if parent is None:
|
||||
# This is a root requirement, so we can report it directly
|
||||
trigger = req.format_for_error()
|
||||
else:
|
||||
trigger = describe_trigger(parent)
|
||||
triggers.add(trigger)
|
||||
|
||||
if triggers:
|
||||
info = text_join(sorted(triggers))
|
||||
else:
|
||||
info = "the requested packages"
|
||||
|
||||
msg = (
|
||||
"Cannot install {} because these package versions "
|
||||
"have conflicting dependencies.".format(info)
|
||||
)
|
||||
logger.critical(msg)
|
||||
msg = "\nThe conflict is caused by:"
|
||||
|
||||
relevant_constraints = set()
|
||||
for req, parent in e.causes:
|
||||
if req.name in constraints:
|
||||
relevant_constraints.add(req.name)
|
||||
msg = msg + "\n "
|
||||
if parent:
|
||||
msg = msg + f"{parent.name} {parent.version} depends on "
|
||||
else:
|
||||
msg = msg + "The user requested "
|
||||
msg = msg + req.format_for_error()
|
||||
for key in relevant_constraints:
|
||||
spec = constraints[key].specifier
|
||||
msg += f"\n The user requested (constraint) {key}{spec}"
|
||||
|
||||
msg = (
|
||||
msg
|
||||
+ "\n\n"
|
||||
+ "To fix this you could try to:\n"
|
||||
+ "1. loosen the range of package versions you've specified\n"
|
||||
+ "2. remove package versions to allow pip attempt to solve "
|
||||
+ "the dependency conflict\n"
|
||||
)
|
||||
|
||||
logger.info(msg)
|
||||
|
||||
return DistributionNotFound(
|
||||
"ResolutionImpossible: for help visit "
|
||||
"https://pip.pypa.io/en/latest/user_guide/"
|
||||
"#fixing-conflicting-dependencies"
|
||||
)
|
@ -0,0 +1,155 @@
|
||||
"""Utilities to lazily create and visit candidates found.
|
||||
|
||||
Creating and visiting a candidate is a *very* costly operation. It involves
|
||||
fetching, extracting, potentially building modules from source, and verifying
|
||||
distribution metadata. It is therefore crucial for performance to keep
|
||||
everything here lazy all the way down, so we only touch candidates that we
|
||||
absolutely need, and not "download the world" when we only need one version of
|
||||
something.
|
||||
"""
|
||||
|
||||
import functools
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple
|
||||
|
||||
from pip._vendor.packaging.version import _BaseVersion
|
||||
|
||||
from .base import Candidate
|
||||
|
||||
IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]
|
||||
|
||||
if TYPE_CHECKING:
|
||||
SequenceCandidate = Sequence[Candidate]
|
||||
else:
|
||||
# For compatibility: Python before 3.9 does not support using [] on the
|
||||
# Sequence class.
|
||||
#
|
||||
# >>> from collections.abc import Sequence
|
||||
# >>> Sequence[str]
|
||||
# Traceback (most recent call last):
|
||||
# File "<stdin>", line 1, in <module>
|
||||
# TypeError: 'ABCMeta' object is not subscriptable
|
||||
#
|
||||
# TODO: Remove this block after dropping Python 3.8 support.
|
||||
SequenceCandidate = Sequence
|
||||
|
||||
|
||||
def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
|
||||
"""Iterator for ``FoundCandidates``.
|
||||
|
||||
This iterator is used when the package is not already installed. Candidates
|
||||
from the index come later in their normal ordering.
|
||||
"""
|
||||
versions_found: Set[_BaseVersion] = set()
|
||||
for version, func in infos:
|
||||
if version in versions_found:
|
||||
continue
|
||||
candidate = func()
|
||||
if candidate is None:
|
||||
continue
|
||||
yield candidate
|
||||
versions_found.add(version)
|
||||
|
||||
|
||||
def _iter_built_with_prepended(
|
||||
installed: Candidate, infos: Iterator[IndexCandidateInfo]
|
||||
) -> Iterator[Candidate]:
|
||||
"""Iterator for ``FoundCandidates``.
|
||||
|
||||
This iterator is used when the resolver prefers the already-installed
|
||||
candidate and NOT to upgrade. The installed candidate is therefore
|
||||
always yielded first, and candidates from the index come later in their
|
||||
normal ordering, except skipped when the version is already installed.
|
||||
"""
|
||||
yield installed
|
||||
versions_found: Set[_BaseVersion] = {installed.version}
|
||||
for version, func in infos:
|
||||
if version in versions_found:
|
||||
continue
|
||||
candidate = func()
|
||||
if candidate is None:
|
||||
continue
|
||||
yield candidate
|
||||
versions_found.add(version)
|
||||
|
||||
|
||||
def _iter_built_with_inserted(
|
||||
installed: Candidate, infos: Iterator[IndexCandidateInfo]
|
||||
) -> Iterator[Candidate]:
|
||||
"""Iterator for ``FoundCandidates``.
|
||||
|
||||
This iterator is used when the resolver prefers to upgrade an
|
||||
already-installed package. Candidates from the index are returned in their
|
||||
normal ordering, except replaced when the version is already installed.
|
||||
|
||||
The implementation iterates through and yields other candidates, inserting
|
||||
the installed candidate exactly once before we start yielding older or
|
||||
equivalent candidates, or after all other candidates if they are all newer.
|
||||
"""
|
||||
versions_found: Set[_BaseVersion] = set()
|
||||
for version, func in infos:
|
||||
if version in versions_found:
|
||||
continue
|
||||
# If the installed candidate is better, yield it first.
|
||||
if installed.version >= version:
|
||||
yield installed
|
||||
versions_found.add(installed.version)
|
||||
candidate = func()
|
||||
if candidate is None:
|
||||
continue
|
||||
yield candidate
|
||||
versions_found.add(version)
|
||||
|
||||
# If the installed candidate is older than all other candidates.
|
||||
if installed.version not in versions_found:
|
||||
yield installed
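# Illustrative example (not part of pip): suppose the index offers versions
# 2.0 and 1.0 (newest first) and version 1.5 is already installed. The three
# iterators above then yield candidates in these orders:
#
#   _iter_built:                 2.0, 1.0              (installed is ignored)
#   _iter_built_with_prepended:  1.5 (installed), 2.0, 1.0
#   _iter_built_with_inserted:   2.0, 1.5 (installed), 1.0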
|
||||
|
||||
|
||||
class FoundCandidates(SequenceCandidate):
|
||||
"""A lazy sequence to provide candidates to the resolver.
|
||||
|
||||
The intended usage is to return this from `find_matches()` so the resolver
|
||||
can iterate through the sequence multiple times, but only access the index
|
||||
page when remote packages are actually needed. This improves performance
|
||||
when suitable candidates are already installed on disk.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
get_infos: Callable[[], Iterator[IndexCandidateInfo]],
|
||||
installed: Optional[Candidate],
|
||||
prefers_installed: bool,
|
||||
incompatible_ids: Set[int],
|
||||
):
|
||||
self._get_infos = get_infos
|
||||
self._installed = installed
|
||||
self._prefers_installed = prefers_installed
|
||||
self._incompatible_ids = incompatible_ids
|
||||
|
||||
def __getitem__(self, index: Any) -> Any:
|
||||
# Implemented to satisfy the ABC check. This is not needed by the
|
||||
# resolver, and should not be used by the provider either (for
|
||||
# performance reasons).
|
||||
raise NotImplementedError("don't do this")
|
||||
|
||||
def __iter__(self) -> Iterator[Candidate]:
|
||||
infos = self._get_infos()
|
||||
if not self._installed:
|
||||
iterator = _iter_built(infos)
|
||||
elif self._prefers_installed:
|
||||
iterator = _iter_built_with_prepended(self._installed, infos)
|
||||
else:
|
||||
iterator = _iter_built_with_inserted(self._installed, infos)
|
||||
return (c for c in iterator if id(c) not in self._incompatible_ids)
|
||||
|
||||
def __len__(self) -> int:
|
||||
# Implemented to satisfy the ABC check. This is not needed by the
|
||||
# resolver, and should not be used by the provider either (for
|
||||
# performance reasons).
|
||||
raise NotImplementedError("don't do this")
|
||||
|
||||
@functools.lru_cache(maxsize=1)
|
||||
def __bool__(self) -> bool:
|
||||
if self._prefers_installed and self._installed:
|
||||
return True
|
||||
return any(self)
|
@ -0,0 +1,215 @@
|
||||
import collections
|
||||
import math
|
||||
from typing import TYPE_CHECKING, Dict, Iterable, Iterator, Mapping, Sequence, Union
|
||||
|
||||
from pip._vendor.resolvelib.providers import AbstractProvider
|
||||
|
||||
from .base import Candidate, Constraint, Requirement
|
||||
from .candidates import REQUIRES_PYTHON_IDENTIFIER
|
||||
from .factory import Factory
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._vendor.resolvelib.providers import Preference
|
||||
from pip._vendor.resolvelib.resolvers import RequirementInformation
|
||||
|
||||
PreferenceInformation = RequirementInformation[Requirement, Candidate]
|
||||
|
||||
_ProviderBase = AbstractProvider[Requirement, Candidate, str]
|
||||
else:
|
||||
_ProviderBase = AbstractProvider
|
||||
|
||||
# Notes on the relationship between the provider, the factory, and the
|
||||
# candidate and requirement classes.
|
||||
#
|
||||
# The provider is a direct implementation of the resolvelib class. Its role
|
||||
# is to deliver the API that resolvelib expects.
|
||||
#
|
||||
# Rather than work with completely abstract "requirement" and "candidate"
|
||||
# concepts as resolvelib does, pip has concrete classes implementing these two
|
||||
# ideas. The API of Requirement and Candidate objects are defined in the base
|
||||
# classes, but essentially map fairly directly to the equivalent provider
|
||||
# methods. In particular, `find_matches` and `is_satisfied_by` are
|
||||
# requirement methods, and `get_dependencies` is a candidate method.
|
||||
#
|
||||
# The factory is the interface to pip's internal mechanisms. It is stateless,
|
||||
# and is created by the resolver and held as a property of the provider. It is
|
||||
# responsible for creating Requirement and Candidate objects, and provides
|
||||
# services to those objects (access to pip's finder and preparer).
|
||||
|
||||
|
||||
class PipProvider(_ProviderBase):
|
||||
"""Pip's provider implementation for resolvelib.
|
||||
|
||||
:params constraints: A mapping of constraints specified by the user. Keys
|
||||
are canonicalized project names.
|
||||
:params ignore_dependencies: Whether the user specified ``--no-deps``.
|
||||
:params upgrade_strategy: The user-specified upgrade strategy.
|
||||
:params user_requested: A set of canonicalized package names that the user
|
||||
supplied for pip to install/upgrade.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
factory: Factory,
|
||||
constraints: Dict[str, Constraint],
|
||||
ignore_dependencies: bool,
|
||||
upgrade_strategy: str,
|
||||
user_requested: Dict[str, int],
|
||||
) -> None:
|
||||
self._factory = factory
|
||||
self._constraints = constraints
|
||||
self._ignore_dependencies = ignore_dependencies
|
||||
self._upgrade_strategy = upgrade_strategy
|
||||
self._user_requested = user_requested
|
||||
self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf)
|
||||
|
||||
def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
|
||||
return requirement_or_candidate.name
|
||||
|
||||
def get_preference( # type: ignore
|
||||
self,
|
||||
identifier: str,
|
||||
resolutions: Mapping[str, Candidate],
|
||||
candidates: Mapping[str, Iterator[Candidate]],
|
||||
information: Mapping[str, Iterable["PreferenceInformation"]],
|
||||
backtrack_causes: Sequence["PreferenceInformation"],
|
||||
) -> "Preference":
|
||||
"""Produce a sort key for given requirement based on preference.
|
||||
|
||||
The lower the return value is, the more preferred this group of
|
||||
arguments is.
|
||||
|
||||
Currently pip considers the following in order:
|
||||
|
||||
* Prefer if any of the known requirements is "direct", e.g. points to an
|
||||
explicit URL.
|
||||
* If equal, prefer if any requirement is "pinned", i.e. contains
|
||||
operator ``===`` or ``==``.
|
||||
* If equal, calculate an approximate "depth" and resolve requirements
|
||||
closer to the user-specified requirements first.
|
||||
* Order user-specified requirements by the order they are specified.
|
||||
* If equal, prefers "non-free" requirements, i.e. contains at least one
|
||||
operator, such as ``>=`` or ``<``.
|
||||
* If equal, order alphabetically for consistency (helps debuggability).
|
||||
"""
|
||||
lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
|
||||
candidate, ireqs = zip(*lookups)
|
||||
operators = [
|
||||
specifier.operator
|
||||
for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
|
||||
for specifier in specifier_set
|
||||
]
|
||||
|
||||
direct = candidate is not None
|
||||
pinned = any(op[:2] == "==" for op in operators)
|
||||
unfree = bool(operators)
|
||||
|
||||
try:
|
||||
requested_order: Union[int, float] = self._user_requested[identifier]
|
||||
except KeyError:
|
||||
requested_order = math.inf
|
||||
parent_depths = (
|
||||
self._known_depths[parent.name] if parent is not None else 0.0
|
||||
for _, parent in information[identifier]
|
||||
)
|
||||
inferred_depth = min(d for d in parent_depths) + 1.0
|
||||
else:
|
||||
inferred_depth = 1.0
|
||||
self._known_depths[identifier] = inferred_depth
|
||||
|
||||
requested_order = self._user_requested.get(identifier, math.inf)
|
||||
|
||||
# Requires-Python has only one candidate and the check is basically
|
||||
# free, so we always do it first to avoid needless work if it fails.
|
||||
requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER
|
||||
|
||||
# HACK: Setuptools has a very long and solid backward compatibility
|
||||
# track record, and extremely few projects would request a narrow,
|
||||
# non-recent version range of it since that would break a lot of things.
|
||||
# (Most projects specify it only to request an installer feature,
|
||||
# which does not work, but that's another topic.) Intentionally
|
||||
# delaying Setuptools helps reduce branches the resolver has to check.
|
||||
# This serves as a temporary fix for issues like "apache-airlfow[all]"
|
||||
# while we work on "proper" branch pruning techniques.
|
||||
delay_this = identifier == "setuptools"
|
||||
|
||||
# Prefer the causes of backtracking on the assumption that the problem
|
||||
# resolving the dependency tree is related to the failures that caused
|
||||
# the backtracking
|
||||
backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)
|
||||
|
||||
return (
|
||||
not requires_python,
|
||||
delay_this,
|
||||
not direct,
|
||||
not pinned,
|
||||
not backtrack_cause,
|
||||
inferred_depth,
|
||||
requested_order,
|
||||
not unfree,
|
||||
identifier,
|
||||
)
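# The key above sorts lexicographically, and False sorts before True, so
# (illustrative only):
#
# >>> sorted([(True, "z"), (False, "a")])
# [(False, 'a'), (True, 'z')]
#
# e.g. "not requires_python" is False only for the Requires-Python
# pseudo-requirement, which therefore always sorts to the front.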
|
||||
|
||||
def _get_constraint(self, identifier: str) -> Constraint:
|
||||
if identifier in self._constraints:
|
||||
return self._constraints[identifier]
|
||||
|
||||
# HACK: Theoretically we should check whether this identifier is a valid
|
||||
# "NAME[EXTRAS]" format, and parse out the name part with packaging or
|
||||
# some regular expression. But since pip's resolver only spits out
|
||||
# three kinds of identifiers: normalized PEP 503 names, normalized names
|
||||
# plus extras, and Requires-Python, we can cheat a bit here.
|
||||
name, open_bracket, _ = identifier.partition("[")
|
||||
if open_bracket and name in self._constraints:
|
||||
return self._constraints[name]
|
||||
|
||||
return Constraint.empty()
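# For reference, the partition() trick above behaves like this
# (illustrative only):
#
# >>> "idna[all]".partition("[")
# ('idna', '[', 'all]')
# >>> "idna".partition("[")
# ('idna', '', '')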
|
||||
|
||||
def find_matches(
|
||||
self,
|
||||
identifier: str,
|
||||
requirements: Mapping[str, Iterator[Requirement]],
|
||||
incompatibilities: Mapping[str, Iterator[Candidate]],
|
||||
) -> Iterable[Candidate]:
|
||||
def _eligible_for_upgrade(name: str) -> bool:
|
||||
"""Are upgrades allowed for this project?
|
||||
|
||||
This checks the upgrade strategy, and whether the project was one
|
||||
that the user specified in the command line, in order to decide
|
||||
whether we should upgrade if there's a newer version available.
|
||||
|
||||
(Note that we don't need access to the `--upgrade` flag, because
|
||||
an upgrade strategy of "to-satisfy-only" means that `--upgrade`
|
||||
was not specified).
|
||||
"""
|
||||
if self._upgrade_strategy == "eager":
|
||||
return True
|
||||
elif self._upgrade_strategy == "only-if-needed":
|
||||
return name in self._user_requested
|
||||
return False
|
||||
|
||||
return self._factory.find_candidates(
|
||||
identifier=identifier,
|
||||
requirements=requirements,
|
||||
constraint=self._get_constraint(identifier),
|
||||
prefers_installed=(not _eligible_for_upgrade(identifier)),
|
||||
incompatibilities=incompatibilities,
|
||||
)
|
||||
|
||||
def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool:
|
||||
return requirement.is_satisfied_by(candidate)
|
||||
|
||||
def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
|
||||
with_requires = not self._ignore_dependencies
|
||||
return [r for r in candidate.iter_dependencies(with_requires) if r is not None]
|
||||
|
||||
@staticmethod
|
||||
def is_backtrack_cause(
|
||||
identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
|
||||
) -> bool:
|
||||
for backtrack_cause in backtrack_causes:
|
||||
if identifier == backtrack_cause.requirement.name:
|
||||
return True
|
||||
if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
|
||||
return True
|
||||
return False
|
@ -0,0 +1,68 @@
|
||||
from collections import defaultdict
|
||||
from logging import getLogger
|
||||
from typing import Any, DefaultDict
|
||||
|
||||
from pip._vendor.resolvelib.reporters import BaseReporter
|
||||
|
||||
from .base import Candidate, Requirement
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
|
||||
class PipReporter(BaseReporter):
|
||||
def __init__(self) -> None:
|
||||
self.backtracks_by_package: DefaultDict[str, int] = defaultdict(int)
|
||||
|
||||
self._messages_at_backtrack = {
|
||||
1: (
|
||||
"pip is looking at multiple versions of {package_name} to "
|
||||
"determine which version is compatible with other "
|
||||
"requirements. This could take a while."
|
||||
),
|
||||
8: (
|
||||
"pip is looking at multiple versions of {package_name} to "
|
||||
"determine which version is compatible with other "
|
||||
"requirements. This could take a while."
|
||||
),
|
||||
13: (
|
||||
"This is taking longer than usual. You might need to provide "
|
||||
"the dependency resolver with stricter constraints to reduce "
|
||||
"runtime. See https://pip.pypa.io/warnings/backtracking for "
|
||||
"guidance. If you want to abort this run, press Ctrl + C."
|
||||
),
|
||||
}
|
||||
|
||||
def backtracking(self, candidate: Candidate) -> None:
|
||||
self.backtracks_by_package[candidate.name] += 1
|
||||
|
||||
count = self.backtracks_by_package[candidate.name]
|
||||
if count not in self._messages_at_backtrack:
|
||||
return
|
||||
|
||||
message = self._messages_at_backtrack[count]
|
||||
logger.info("INFO: %s", message.format(package_name=candidate.name))
|
||||
|
||||
|
||||
class PipDebuggingReporter(BaseReporter):
|
||||
"""A reporter that does an info log for every event it sees."""
|
||||
|
||||
def starting(self) -> None:
|
||||
logger.info("Reporter.starting()")
|
||||
|
||||
def starting_round(self, index: int) -> None:
|
||||
logger.info("Reporter.starting_round(%r)", index)
|
||||
|
||||
def ending_round(self, index: int, state: Any) -> None:
|
||||
logger.info("Reporter.ending_round(%r, state)", index)
|
||||
|
||||
def ending(self, state: Any) -> None:
|
||||
logger.info("Reporter.ending(%r)", state)
|
||||
|
||||
def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
|
||||
logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)
|
||||
|
||||
def backtracking(self, candidate: Candidate) -> None:
|
||||
logger.info("Reporter.backtracking(%r)", candidate)
|
||||
|
||||
def pinning(self, candidate: Candidate) -> None:
|
||||
logger.info("Reporter.pinning(%r)", candidate)
|
@ -0,0 +1,166 @@
|
||||
from pip._vendor.packaging.specifiers import SpecifierSet
|
||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
|
||||
from .base import Candidate, CandidateLookup, Requirement, format_name
|
||||
|
||||
|
||||
class ExplicitRequirement(Requirement):
|
||||
def __init__(self, candidate: Candidate) -> None:
|
||||
self.candidate = candidate
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self.candidate)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "{class_name}({candidate!r})".format(
|
||||
class_name=self.__class__.__name__,
|
||||
candidate=self.candidate,
|
||||
)
|
||||
|
||||
@property
|
||||
def project_name(self) -> NormalizedName:
|
||||
# No need to canonicalise - the candidate did this
|
||||
return self.candidate.project_name
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
# No need to canonicalise - the candidate did this
|
||||
return self.candidate.name
|
||||
|
||||
def format_for_error(self) -> str:
|
||||
return self.candidate.format_for_error()
|
||||
|
||||
def get_candidate_lookup(self) -> CandidateLookup:
|
||||
return self.candidate, None
|
||||
|
||||
def is_satisfied_by(self, candidate: Candidate) -> bool:
|
||||
return candidate == self.candidate
|
||||
|
||||
|
||||
class SpecifierRequirement(Requirement):
|
||||
def __init__(self, ireq: InstallRequirement) -> None:
|
||||
assert ireq.link is None, "This is a link, not a specifier"
|
||||
self._ireq = ireq
|
||||
self._extras = frozenset(ireq.extras)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self._ireq.req)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "{class_name}({requirement!r})".format(
|
||||
class_name=self.__class__.__name__,
|
||||
requirement=str(self._ireq.req),
|
||||
)
|
||||
|
||||
@property
|
||||
def project_name(self) -> NormalizedName:
|
||||
assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
|
||||
return canonicalize_name(self._ireq.req.name)
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return format_name(self.project_name, self._extras)
|
||||
|
||||
def format_for_error(self) -> str:
|
||||
|
||||
# Convert comma-separated specifiers into "A, B, ..., F and G"
|
||||
# This makes the specifier a bit more "human readable", without
|
||||
# risking a change in meaning. (Hopefully! Not all edge cases have
|
||||
# been checked)
|
||||
parts = [s.strip() for s in str(self).split(",")]
|
||||
if len(parts) == 0:
|
||||
return ""
|
||||
elif len(parts) == 1:
|
||||
return parts[0]
|
||||
|
||||
return ", ".join(parts[:-1]) + " and " + parts[-1]
|
||||
|
||||
def get_candidate_lookup(self) -> CandidateLookup:
|
||||
return None, self._ireq
|
||||
|
||||
def is_satisfied_by(self, candidate: Candidate) -> bool:
|
||||
assert candidate.name == self.name, (
|
||||
f"Internal issue: Candidate is not for this requirement "
|
||||
f"{candidate.name} vs {self.name}"
|
||||
)
|
||||
# We can safely always allow prereleases here since PackageFinder
|
||||
# already implements the prerelease logic, and would have filtered out
|
||||
# prerelease candidates if the user does not expect them.
|
||||
assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
|
||||
spec = self._ireq.req.specifier
|
||||
return spec.contains(candidate.version, prereleases=True)
|
||||
|
||||
|
||||
class RequiresPythonRequirement(Requirement):
|
||||
"""A requirement representing Requires-Python metadata."""
|
||||
|
||||
def __init__(self, specifier: SpecifierSet, match: Candidate) -> None:
|
||||
self.specifier = specifier
|
||||
self._candidate = match
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Python {self.specifier}"
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "{class_name}({specifier!r})".format(
|
||||
class_name=self.__class__.__name__,
|
||||
specifier=str(self.specifier),
|
||||
)
|
||||
|
||||
@property
|
||||
def project_name(self) -> NormalizedName:
|
||||
return self._candidate.project_name
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._candidate.name
|
||||
|
||||
def format_for_error(self) -> str:
|
||||
return str(self)
|
||||
|
||||
def get_candidate_lookup(self) -> CandidateLookup:
|
||||
if self.specifier.contains(self._candidate.version, prereleases=True):
|
||||
return self._candidate, None
|
||||
return None, None
|
||||
|
||||
def is_satisfied_by(self, candidate: Candidate) -> bool:
|
||||
assert candidate.name == self._candidate.name, "Not Python candidate"
|
||||
# We can safely always allow prereleases here since PackageFinder
|
||||
# already implements the prerelease logic, and would have filtered out
|
||||
# prerelease candidates if the user does not expect them.
|
||||
return self.specifier.contains(candidate.version, prereleases=True)
|
||||
|
||||
|
||||
class UnsatisfiableRequirement(Requirement):
|
||||
"""A requirement that cannot be satisfied."""
|
||||
|
||||
def __init__(self, name: NormalizedName) -> None:
|
||||
self._name = name
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self._name} (unavailable)"
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "{class_name}({name!r})".format(
|
||||
class_name=self.__class__.__name__,
|
||||
name=str(self._name),
|
||||
)
|
||||
|
||||
@property
|
||||
def project_name(self) -> NormalizedName:
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
def format_for_error(self) -> str:
|
||||
return str(self)
|
||||
|
||||
def get_candidate_lookup(self) -> CandidateLookup:
|
||||
return None, None
|
||||
|
||||
def is_satisfied_by(self, candidate: Candidate) -> bool:
|
||||
return False
|
@ -0,0 +1,251 @@
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
|
||||
from pip._vendor.resolvelib import Resolver as RLResolver
|
||||
from pip._vendor.resolvelib.structs import DirectedGraph
|
||||
|
||||
from pip._internal.cache import WheelCache
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.operations.prepare import RequirementPreparer
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.req.req_set import RequirementSet
|
||||
from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
|
||||
from pip._internal.resolution.resolvelib.provider import PipProvider
|
||||
from pip._internal.resolution.resolvelib.reporter import (
|
||||
PipDebuggingReporter,
|
||||
PipReporter,
|
||||
)
|
||||
|
||||
from .base import Candidate, Requirement
|
||||
from .factory import Factory
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._vendor.resolvelib.resolvers import Result as RLResult
|
||||
|
||||
Result = RLResult[Requirement, Candidate, str]
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Resolver(BaseResolver):
|
||||
_allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
preparer: RequirementPreparer,
|
||||
finder: PackageFinder,
|
||||
wheel_cache: Optional[WheelCache],
|
||||
make_install_req: InstallRequirementProvider,
|
||||
use_user_site: bool,
|
||||
ignore_dependencies: bool,
|
||||
ignore_installed: bool,
|
||||
ignore_requires_python: bool,
|
||||
force_reinstall: bool,
|
||||
upgrade_strategy: str,
|
||||
py_version_info: Optional[Tuple[int, ...]] = None,
|
||||
):
|
||||
super().__init__()
|
||||
assert upgrade_strategy in self._allowed_strategies
|
||||
|
||||
self.factory = Factory(
|
||||
finder=finder,
|
||||
preparer=preparer,
|
||||
make_install_req=make_install_req,
|
||||
wheel_cache=wheel_cache,
|
||||
use_user_site=use_user_site,
|
||||
force_reinstall=force_reinstall,
|
||||
ignore_installed=ignore_installed,
|
||||
ignore_requires_python=ignore_requires_python,
|
||||
py_version_info=py_version_info,
|
||||
)
|
||||
self.ignore_dependencies = ignore_dependencies
|
||||
self.upgrade_strategy = upgrade_strategy
|
||||
self._result: Optional[Result] = None
|
||||
|
||||
def resolve(
|
||||
self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
|
||||
) -> RequirementSet:
|
||||
collected = self.factory.collect_root_requirements(root_reqs)
|
||||
provider = PipProvider(
|
||||
factory=self.factory,
|
||||
constraints=collected.constraints,
|
||||
ignore_dependencies=self.ignore_dependencies,
|
||||
upgrade_strategy=self.upgrade_strategy,
|
||||
user_requested=collected.user_requested,
|
||||
)
|
||||
if "PIP_RESOLVER_DEBUG" in os.environ:
|
||||
reporter: BaseReporter = PipDebuggingReporter()
|
||||
else:
|
||||
reporter = PipReporter()
|
||||
resolver: RLResolver[Requirement, Candidate, str] = RLResolver(
|
||||
provider,
|
||||
reporter,
|
||||
)
|
||||
|
||||
try:
|
||||
try_to_avoid_resolution_too_deep = 2000000
|
||||
result = self._result = resolver.resolve(
|
||||
collected.requirements, max_rounds=try_to_avoid_resolution_too_deep
|
||||
)
|
||||
|
||||
except ResolutionImpossible as e:
|
||||
error = self.factory.get_installation_error(
|
||||
cast("ResolutionImpossible[Requirement, Candidate]", e),
|
||||
collected.constraints,
|
||||
)
|
||||
raise error from e
|
||||
|
||||
req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
|
||||
for candidate in result.mapping.values():
|
||||
ireq = candidate.get_install_requirement()
|
||||
if ireq is None:
|
||||
continue
|
||||
|
||||
# Check if there is already an installation under the same name,
|
||||
# and set a flag for later stages to uninstall it, if needed.
|
||||
installed_dist = self.factory.get_dist_to_uninstall(candidate)
|
||||
if installed_dist is None:
|
||||
# There is no existing installation -- nothing to uninstall.
|
||||
ireq.should_reinstall = False
|
||||
elif self.factory.force_reinstall:
|
||||
# The --force-reinstall flag is set -- reinstall.
|
||||
ireq.should_reinstall = True
|
||||
elif installed_dist.version != candidate.version:
|
||||
# The installation is different in version -- reinstall.
|
||||
ireq.should_reinstall = True
|
||||
elif candidate.is_editable or installed_dist.editable:
|
||||
# The incoming distribution is editable, or different in
|
||||
# editable-ness to installation -- reinstall.
|
||||
ireq.should_reinstall = True
|
||||
elif candidate.source_link and candidate.source_link.is_file:
|
||||
# The incoming distribution is under file://
|
||||
if candidate.source_link.is_wheel:
|
||||
# is a local wheel -- do nothing.
|
||||
logger.info(
|
||||
"%s is already installed with the same version as the "
|
||||
"provided wheel. Use --force-reinstall to force an "
|
||||
"installation of the wheel.",
|
||||
ireq.name,
|
||||
)
|
||||
continue
|
||||
|
||||
# is a local sdist or path -- reinstall
|
||||
ireq.should_reinstall = True
|
||||
else:
|
||||
continue
|
||||
|
||||
link = candidate.source_link
|
||||
if link and link.is_yanked:
|
||||
# The reason can contain non-ASCII characters, Unicode
|
||||
# is required for Python 2.
|
||||
msg = (
|
||||
"The candidate selected for download or install is a "
|
||||
"yanked version: {name!r} candidate (version {version} "
|
||||
"at {link})\nReason for being yanked: {reason}"
|
||||
).format(
|
||||
name=candidate.name,
|
||||
version=candidate.version,
|
||||
link=link,
|
||||
reason=link.yanked_reason or "<none given>",
|
||||
)
|
||||
logger.warning(msg)
|
||||
|
||||
req_set.add_named_requirement(ireq)
|
||||
|
||||
reqs = req_set.all_requirements
|
||||
self.factory.preparer.prepare_linked_requirements_more(reqs)
|
||||
return req_set
|
||||
|
||||
def get_installation_order(
|
||||
self, req_set: RequirementSet
|
||||
) -> List[InstallRequirement]:
|
||||
"""Get order for installation of requirements in RequirementSet.
|
||||
|
||||
The returned list contains a requirement before another that depends on
|
||||
it. This helps ensure that the environment is kept consistent as they
|
||||
get installed one-by-one.
|
||||
|
||||
The current implementation creates a topological ordering of the
|
||||
dependency graph, while breaking any cycles in the graph at arbitrary
|
||||
points. We make no guarantees about where the cycle would be broken,
|
||||
other than that it will be broken.
|
||||
"""
|
||||
assert self._result is not None, "must call resolve() first"
|
||||
|
||||
graph = self._result.graph
|
||||
weights = get_topological_weights(
|
||||
graph,
|
||||
expected_node_count=len(self._result.mapping) + 1,
|
||||
)
|
||||
|
||||
sorted_items = sorted(
|
||||
req_set.requirements.items(),
|
||||
key=functools.partial(_req_set_item_sorter, weights=weights),
|
||||
reverse=True,
|
||||
)
|
||||
return [ireq for _, ireq in sorted_items]
|
||||
|
||||
|
||||
def get_topological_weights(
|
||||
graph: "DirectedGraph[Optional[str]]", expected_node_count: int
|
||||
) -> Dict[Optional[str], int]:
|
||||
"""Assign weights to each node based on how "deep" they are.
|
||||
|
||||
This implementation may change at any point in the future without prior
|
||||
notice.
|
||||
|
||||
We take the length of the longest path from the root to each node, ignoring any
|
||||
paths that contain a single node twice (i.e. cycles). This is done through
|
||||
a depth-first search through the graph, while keeping track of the path to
|
||||
the node.
|
||||
|
||||
Cycles in the graph would result in a node being revisited while it is also
|
||||
on its own path. In this case, take no action. This helps ensure we
|
||||
don't get stuck in a cycle.
|
||||
|
||||
When assigning weight, the longer path (i.e. larger length) is preferred.
|
||||
"""
|
||||
path: Set[Optional[str]] = set()
|
||||
weights: Dict[Optional[str], int] = {}
|
||||
|
||||
def visit(node: Optional[str]) -> None:
|
||||
if node in path:
|
||||
# We hit a cycle, so we'll break it here.
|
||||
return
|
||||
|
||||
# Time to visit the children!
|
||||
path.add(node)
|
||||
for child in graph.iter_children(node):
|
||||
visit(child)
|
||||
path.remove(node)
|
||||
|
||||
last_known_parent_count = weights.get(node, 0)
|
||||
weights[node] = max(last_known_parent_count, len(path))
|
||||
|
||||
# `None` is guaranteed to be the root node by resolvelib.
|
||||
visit(None)
|
||||
|
||||
# Sanity checks
|
||||
assert weights[None] == 0
|
||||
assert len(weights) == expected_node_count
|
||||
|
||||
return weights
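# Illustrative example (not part of pip): for a graph where the virtual root
# (None) requires "a", and "a" requires "b", the DFS above assigns each node
# the length of the longest root path leading to it:
#
#   weights == {None: 0, "a": 1, "b": 2}
#
# Deeper nodes (dependencies) get larger weights, which get_installation_order()
# relies on (via reverse=True) to install them before their dependents.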
|
||||
|
||||
|
||||
def _req_set_item_sorter(
|
||||
item: Tuple[str, InstallRequirement],
|
||||
weights: Dict[Optional[str], int],
|
||||
) -> Tuple[int, str]:
|
||||
"""Key function used to sort install requirements for installation.
|
||||
|
||||
Based on the "weight" mapping calculated in ``get_installation_order()``.
|
||||
The canonical package name is returned as the second member as a tie-
|
||||
breaker to ensure the result is predictable, which is useful in tests.
|
||||
"""
|
||||
name = canonicalize_name(item[0])
|
||||
return weights[name], name
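# A small usage sketch (illustrative only, with hypothetical weights):
#
# >>> import functools
# >>> weights = {"a": 1, "b": 2}
# >>> items = [("A", "ireq-a"), ("B", "ireq-b")]
# >>> sorted(items, key=functools.partial(_req_set_item_sorter, weights=weights), reverse=True)
# [('B', 'ireq-b'), ('A', 'ireq-a')]
#
# The deeper requirement ("b") sorts first, so it is installed before its
# dependent.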
|