author    Tzu-ping Chung <uranusjr@gmail.com>    2022-03-10 15:41:05 +0800
committer GitHub <noreply@github.com>            2022-03-10 15:41:05 +0800
commit    2e5e9e54bc3c80c93adc8525bf9ae9cbb168a655 (patch)
tree      321da314c4bbef000bc7a5145c1162f28fe44366 /src/pip/_internal/resolution
parent    22ea509025b8ad1412af2bc3f740b839269b1e7d (diff)
parent    bc45b93eb679963d23f100be4ebb5d0f1568ceee (diff)
download  pip-2e5e9e54bc3c80c93adc8525bf9ae9cbb168a655.tar.gz
Merge branch 'main' into requires-python
Diffstat (limited to 'src/pip/_internal/resolution')
-rw-r--r--  src/pip/_internal/resolution/base.py                        6
-rw-r--r--  src/pip/_internal/resolution/legacy/resolver.py             4
-rw-r--r--  src/pip/_internal/resolution/resolvelib/candidates.py      16
-rw-r--r--  src/pip/_internal/resolution/resolvelib/factory.py         72
-rw-r--r--  src/pip/_internal/resolution/resolvelib/provider.py        97
-rw-r--r--  src/pip/_internal/resolution/resolvelib/reporter.py         4
-rw-r--r--  src/pip/_internal/resolution/resolvelib/requirements.py     4
-rw-r--r--  src/pip/_internal/resolution/resolvelib/resolver.py       100
8 files changed, 214 insertions, 89 deletions
diff --git a/src/pip/_internal/resolution/base.py b/src/pip/_internal/resolution/base.py
index 3f83ef0f5..42dade18c 100644
--- a/src/pip/_internal/resolution/base.py
+++ b/src/pip/_internal/resolution/base.py
@@ -1,9 +1,11 @@
-from typing import Callable, List
+from typing import Callable, List, Optional
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet
-InstallRequirementProvider = Callable[[str, InstallRequirement], InstallRequirement]
+InstallRequirementProvider = Callable[
+ [str, Optional[InstallRequirement]], InstallRequirement
+]
class BaseResolver:
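
The widened alias above means a provider callable must now tolerate comes_from being None, e.g. for a requirement typed directly on the command line with no parent requirement. A minimal sketch of a conforming provider, assuming pip's internal install_req_from_req_string constructor; the wiring is illustrative only, not how pip itself builds the provider:

from typing import Callable, Optional

from pip._internal.req.constructors import install_req_from_req_string
from pip._internal.req.req_install import InstallRequirement

InstallRequirementProvider = Callable[
    [str, Optional[InstallRequirement]], InstallRequirement
]

def _make_ireq(spec: str, comes_from: Optional[InstallRequirement]) -> InstallRequirement:
    # comes_from may be None after this change; pass it through unchanged.
    return install_req_from_req_string(spec, comes_from=comes_from)

provider: InstallRequirementProvider = _make_ireq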
diff --git a/src/pip/_internal/resolution/legacy/resolver.py b/src/pip/_internal/resolution/legacy/resolver.py
index 09caaa6d5..8c149d437 100644
--- a/src/pip/_internal/resolution/legacy/resolver.py
+++ b/src/pip/_internal/resolution/legacy/resolver.py
@@ -43,7 +43,7 @@ from pip._internal.req.req_set import RequirementSet
from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
from pip._internal.utils.compatibility_tags import get_supported
from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import dist_in_usersite, normalize_version_info
+from pip._internal.utils.misc import normalize_version_info
from pip._internal.utils.packaging import check_requires_python
logger = logging.getLogger(__name__)
@@ -203,7 +203,7 @@ class Resolver(BaseResolver):
"""
# Don't uninstall the conflict if doing a user install and the
# conflict is not a user install.
- if not self.use_user_site or dist_in_usersite(req.satisfied_by):
+ if not self.use_user_site or req.satisfied_by.in_usersite:
req.should_reinstall = True
req.satisfied_by = None
diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py
index 5c6f9a04a..9b8450e86 100644
--- a/src/pip/_internal/resolution/resolvelib/candidates.py
+++ b/src/pip/_internal/resolution/resolvelib/candidates.py
@@ -5,7 +5,11 @@ from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Uni
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import Version
-from pip._internal.exceptions import HashError, MetadataInconsistent
+from pip._internal.exceptions import (
+ HashError,
+ InstallationSubprocessError,
+ MetadataInconsistent,
+)
from pip._internal.metadata import BaseDistribution
from pip._internal.models.link import Link, links_equivalent
from pip._internal.models.wheel import Wheel
@@ -82,6 +86,7 @@ def make_install_req_from_editable(
use_pep517=template.use_pep517,
isolated=template.isolated,
constraint=template.constraint,
+ permit_editable_wheels=template.permit_editable_wheels,
options=dict(
install_options=template.install_options,
global_options=template.global_options,
@@ -93,8 +98,6 @@ def make_install_req_from_editable(
def _make_install_req_from_dist(
dist: BaseDistribution, template: InstallRequirement
) -> InstallRequirement:
- from pip._internal.metadata.pkg_resources import Distribution as _Dist
-
if template.req:
line = str(template.req)
elif template.link:
@@ -114,7 +117,7 @@ def _make_install_req_from_dist(
hashes=template.hash_options,
),
)
- ireq.satisfied_by = cast(_Dist, dist)._dist
+ ireq.satisfied_by = dist
return ireq
@@ -228,6 +231,11 @@ class _InstallRequirementBackedCandidate(Candidate):
# offending line to the user.
e.req = self._ireq
raise
+ except InstallationSubprocessError as exc:
+ # The output has been presented already, so don't duplicate it.
+ exc.context = "See above for output."
+ raise
+
self._check_metadata_consistency(dist)
return dist
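
The new except clause above attaches a short context message instead of re-printing build output that was already streamed to the console. A hedged, generic sketch of the same annotate-and-reraise pattern; BuildError and run_build are made-up names, not pip APIs:

class BuildError(Exception):
    def __init__(self, message: str) -> None:
        super().__init__(message)
        self.context = ""

def prepare(run_build) -> None:
    try:
        run_build()
    except BuildError as exc:
        # The backend's output was already shown; just point the user at it.
        exc.context = "See above for output."
        raise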
diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py
index 82c4492a7..d3648e158 100644
--- a/src/pip/_internal/resolution/resolvelib/factory.py
+++ b/src/pip/_internal/resolution/resolvelib/factory.py
@@ -19,7 +19,6 @@ from typing import (
)
from pip._vendor.packaging.requirements import InvalidRequirement
-from pip._vendor.packaging.requirements import Requirement as PackagingRequirement
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.resolvelib import ResolutionImpossible
@@ -46,6 +45,7 @@ from pip._internal.req.req_install import (
from pip._internal.resolution.base import InstallRequirementProvider
from pip._internal.utils.compatibility_tags import get_supported
from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.packaging import get_requirement
from pip._internal.utils.virtualenv import running_under_virtualenv
from .base import Candidate, CandidateVersion, Constraint, Requirement
@@ -97,6 +97,7 @@ class Factory:
force_reinstall: bool,
ignore_installed: bool,
ignore_requires_python: bool,
+ suppress_build_failures: bool,
py_version_info: Optional[Tuple[int, ...]] = None,
) -> None:
self._finder = finder
@@ -107,6 +108,7 @@ class Factory:
self._use_user_site = use_user_site
self._force_reinstall = force_reinstall
self._ignore_requires_python = ignore_requires_python
+ self._suppress_build_failures = suppress_build_failures
self._build_failures: Cache[InstallationError] = {}
self._link_candidate_cache: Cache[LinkCandidate] = {}
@@ -190,10 +192,22 @@ class Factory:
name=name,
version=version,
)
- except (InstallationSubprocessError, MetadataInconsistent) as e:
- logger.warning("Discarding %s. %s", link, e)
+ except MetadataInconsistent as e:
+ logger.info(
+ "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
+ link,
+ e,
+ extra={"markup": True},
+ )
+ self._build_failures[link] = e
+ return None
+ except InstallationSubprocessError as e:
+ if not self._suppress_build_failures:
+ raise
+ logger.warning("Discarding %s due to build failure: %s", link, e)
self._build_failures[link] = e
return None
+
base: BaseCandidate = self._editable_candidate_cache[link]
else:
if link not in self._link_candidate_cache:
@@ -205,8 +219,19 @@ class Factory:
name=name,
version=version,
)
- except (InstallationSubprocessError, MetadataInconsistent) as e:
- logger.warning("Discarding %s. %s", link, e)
+ except MetadataInconsistent as e:
+ logger.info(
+ "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
+ link,
+ e,
+ extra={"markup": True},
+ )
+ self._build_failures[link] = e
+ return None
+ except InstallationSubprocessError as e:
+ if not self._suppress_build_failures:
+ raise
+ logger.warning("Discarding %s due to build failure: %s", link, e)
self._build_failures[link] = e
return None
base = self._link_candidate_cache[link]
@@ -260,7 +285,7 @@ class Factory:
extras=extras,
template=template,
)
- # The candidate is a known incompatiblity. Don't use it.
+ # The candidate is a known incompatibility. Don't use it.
if id(candidate) in incompatible_ids:
return None
return candidate
@@ -273,14 +298,27 @@ class Factory:
)
icans = list(result.iter_applicable())
- # PEP 592: Yanked releases must be ignored unless only yanked
- # releases can satisfy the version range. So if this is false,
- # all yanked icans need to be skipped.
+ # PEP 592: Yanked releases are ignored unless the specifier
+ # explicitly pins a version (via '==' or '===') that can be
+ # solely satisfied by a yanked release.
all_yanked = all(ican.link.is_yanked for ican in icans)
+ def is_pinned(specifier: SpecifierSet) -> bool:
+ for sp in specifier:
+ if sp.operator == "===":
+ return True
+ if sp.operator != "==":
+ continue
+ if sp.version.endswith(".*"):
+ continue
+ return True
+ return False
+
+ pinned = is_pinned(specifier)
+
# PackageFinder returns earlier versions first, so we reverse.
for ican in reversed(icans):
- if not all_yanked and ican.link.is_yanked:
+ if not (all_yanked and pinned) and ican.link.is_yanked:
continue
func = functools.partial(
self._make_candidate_from_link,
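
To make the pinning rule above concrete, here is a hedged illustration, assuming the local is_pinned helper is in scope: only '==' without a trailing '.*', or '===', counts as a pin when deciding whether yanked releases may still be used. SpecifierSet comes from the packaging project, the same API pip vendors:

from packaging.specifiers import SpecifierSet

for spec in ("==2.28.1", "===2.28.1", "==2.28.*", ">=2,<3"):
    print(spec, is_pinned(SpecifierSet(spec)))
# ==2.28.1  True
# ===2.28.1 True
# ==2.28.*  False
# >=2,<3    False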
@@ -347,7 +385,7 @@ class Factory:
def find_candidates(
self,
identifier: str,
- requirements: Mapping[str, Iterator[Requirement]],
+ requirements: Mapping[str, Iterable[Requirement]],
incompatibilities: Mapping[str, Iterator[Candidate]],
constraint: Constraint,
prefers_installed: bool,
@@ -365,7 +403,7 @@ class Factory:
# If the current identifier contains extras, add explicit candidates
# from entries from extra-less identifier.
with contextlib.suppress(InvalidRequirement):
- parsed_requirement = PackagingRequirement(identifier)
+ parsed_requirement = get_requirement(identifier)
explicit_candidates.update(
self._iter_explicit_candidates_from_base(
requirements.get(parsed_requirement.name, ()),
@@ -374,7 +412,7 @@ class Factory:
)
# Add explicit candidates from constraints. We only do this if there are
- # kown ireqs, which represent requirements not already explicit. If
+ # known ireqs, which represent requirements not already explicit. If
# there are no ireqs, we're constraining already-explicit requirements,
# which is handled later when we return the explicit candidates.
if ireqs:
@@ -484,7 +522,7 @@ class Factory:
def make_requirement_from_spec(
self,
specifier: str,
- comes_from: InstallRequirement,
+ comes_from: Optional[InstallRequirement],
requested_extras: Iterable[str] = (),
) -> Optional[Requirement]:
ireq = self._make_install_req_from_spec(specifier, comes_from)
@@ -622,7 +660,7 @@ class Factory:
]
if requires_python_causes:
# The comprehension above makes sure all Requirement instances are
- # RequiresPythonRequirement, so let's cast for convinience.
+ # RequiresPythonRequirement, so let's cast for convenience.
return self._report_requires_python_error(
cast("Sequence[ConflictCause]", requires_python_causes),
)
@@ -703,6 +741,6 @@ class Factory:
return DistributionNotFound(
"ResolutionImpossible: for help visit "
- "https://pip.pypa.io/en/latest/user_guide/"
- "#fixing-conflicting-dependencies"
+ "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
+ "#dealing-with-dependency-conflicts"
)
diff --git a/src/pip/_internal/resolution/resolvelib/provider.py b/src/pip/_internal/resolution/resolvelib/provider.py
index 632854d3b..e6ec9594f 100644
--- a/src/pip/_internal/resolution/resolvelib/provider.py
+++ b/src/pip/_internal/resolution/resolvelib/provider.py
@@ -1,6 +1,15 @@
import collections
import math
-from typing import TYPE_CHECKING, Dict, Iterable, Iterator, Mapping, Sequence, Union
+from typing import (
+ TYPE_CHECKING,
+ Dict,
+ Iterable,
+ Iterator,
+ Mapping,
+ Sequence,
+ TypeVar,
+ Union,
+)
from pip._vendor.resolvelib.providers import AbstractProvider
@@ -37,6 +46,35 @@ else:
# services to those objects (access to pip's finder and preparer).
+D = TypeVar("D")
+V = TypeVar("V")
+
+
+def _get_with_identifier(
+ mapping: Mapping[str, V],
+ identifier: str,
+ default: D,
+) -> Union[D, V]:
+ """Get item from a package name lookup mapping with a resolver identifier.
+
+ This extra logic is needed when the target mapping is keyed by package
+ name, which cannot be directly looked up with an identifier (which may
+ contain requested extras). Additional logic is added to also look up a value
+ by "cleaning up" the extras from the identifier.
+ """
+ if identifier in mapping:
+ return mapping[identifier]
+ # HACK: Theoretically we should check whether this identifier is a valid
+ # "NAME[EXTRAS]" format, and parse out the name part with packaging or
+ # some regular expression. But since pip's resolver only spits out three
+ # kinds of identifiers: normalized PEP 503 names, normalized names plus
+ # extras, and Requires-Python, we can cheat a bit here.
+ name, open_bracket, _ = identifier.partition("[")
+ if open_bracket and name in mapping:
+ return mapping[name]
+ return default
+
+
class PipProvider(_ProviderBase):
"""Pip's provider implementation for resolvelib.
@@ -66,12 +104,13 @@ class PipProvider(_ProviderBase):
def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
return requirement_or_candidate.name
- def get_preference(
+ def get_preference( # type: ignore
self,
identifier: str,
resolutions: Mapping[str, Candidate],
candidates: Mapping[str, Iterator[Candidate]],
- information: Mapping[str, Iterator["PreferenceInformation"]],
+ information: Mapping[str, Iterable["PreferenceInformation"]],
+ backtrack_causes: Sequence["PreferenceInformation"],
) -> "Preference":
"""Produce a sort key for given requirement based on preference.
@@ -112,9 +151,9 @@ class PipProvider(_ProviderBase):
for _, parent in information[identifier]
)
inferred_depth = min(d for d in parent_depths) + 1.0
- self._known_depths[identifier] = inferred_depth
else:
inferred_depth = 1.0
+ self._known_depths[identifier] = inferred_depth
requested_order = self._user_requested.get(identifier, math.inf)
@@ -128,43 +167,34 @@ class PipProvider(_ProviderBase):
# (Most projects specify it only to request for an installer feature,
# which does not work, but that's another topic.) Intentionally
# delaying Setuptools helps reduce branches the resolver has to check.
- # This serves as a temporary fix for issues like "apache-airlfow[all]"
+ # This serves as a temporary fix for issues like "apache-airflow[all]"
# while we work on "proper" branch pruning techniques.
delay_this = identifier == "setuptools"
+ # Prefer the causes of backtracking on the assumption that the problem
+ # resolving the dependency tree is related to the failures that caused
+ # the backtracking
+ backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)
+
return (
not requires_python,
delay_this,
not direct,
not pinned,
+ not backtrack_cause,
inferred_depth,
requested_order,
not unfree,
identifier,
)
- def _get_constraint(self, identifier: str) -> Constraint:
- if identifier in self._constraints:
- return self._constraints[identifier]
-
- # HACK: Theoratically we should check whether this identifier is a valid
- # "NAME[EXTRAS]" format, and parse out the name part with packaging or
- # some regular expression. But since pip's resolver only spits out
- # three kinds of identifiers: normalized PEP 503 names, normalized names
- # plus extras, and Requires-Python, we can cheat a bit here.
- name, open_bracket, _ = identifier.partition("[")
- if open_bracket and name in self._constraints:
- return self._constraints[name]
-
- return Constraint.empty()
-
def find_matches(
self,
identifier: str,
requirements: Mapping[str, Iterator[Requirement]],
incompatibilities: Mapping[str, Iterator[Candidate]],
) -> Iterable[Candidate]:
- def _eligible_for_upgrade(name: str) -> bool:
+ def _eligible_for_upgrade(identifier: str) -> bool:
"""Are upgrades allowed for this project?
This checks the upgrade strategy, and whether the project was one
@@ -178,13 +208,23 @@ class PipProvider(_ProviderBase):
if self._upgrade_strategy == "eager":
return True
elif self._upgrade_strategy == "only-if-needed":
- return name in self._user_requested
+ user_order = _get_with_identifier(
+ self._user_requested,
+ identifier,
+ default=None,
+ )
+ return user_order is not None
return False
+ constraint = _get_with_identifier(
+ self._constraints,
+ identifier,
+ default=Constraint.empty(),
+ )
return self._factory.find_candidates(
identifier=identifier,
requirements=requirements,
- constraint=self._get_constraint(identifier),
+ constraint=constraint,
prefers_installed=(not _eligible_for_upgrade(identifier)),
incompatibilities=incompatibilities,
)
@@ -195,3 +235,14 @@ class PipProvider(_ProviderBase):
def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
with_requires = not self._ignore_dependencies
return [r for r in candidate.iter_dependencies(with_requires) if r is not None]
+
+ @staticmethod
+ def is_backtrack_cause(
+ identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
+ ) -> bool:
+ for backtrack_cause in backtrack_causes:
+ if identifier == backtrack_cause.requirement.name:
+ return True
+ if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
+ return True
+ return False
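
Because get_preference returns a plain tuple, Python's element-wise tuple comparison does the prioritisation: False sorts before True, so the new "not backtrack_cause" field moves backtracking culprits ahead of unrelated requirements once the earlier fields tie. A hedged illustration with made-up preference values in the tuple's field order (not requires_python, delay_this, not direct, not pinned, not backtrack_cause, inferred_depth, requested_order, not unfree, identifier):

import math

prefs = {
    "requests":   (False, False, False, True, True, 1.0, 1, False, "requests"),
    "urllib3":    (False, False, True, False, False, 2.0, math.inf, True, "urllib3"),
    "setuptools": (False, True, True, True, True, 2.0, math.inf, True, "setuptools"),
}

for name in sorted(prefs, key=lambda n: prefs[n]):
    print(name)
# requests (direct user request), then urllib3 (pinned backtrack cause),
# then setuptools (intentionally delayed)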
diff --git a/src/pip/_internal/resolution/resolvelib/reporter.py b/src/pip/_internal/resolution/resolvelib/reporter.py
index 4e16b7720..6ced5329b 100644
--- a/src/pip/_internal/resolution/resolvelib/reporter.py
+++ b/src/pip/_internal/resolution/resolvelib/reporter.py
@@ -27,8 +27,8 @@ class PipReporter(BaseReporter):
13: (
"This is taking longer than usual. You might need to provide "
"the dependency resolver with stricter constraints to reduce "
- "runtime. If you want to abort this run, you can press "
- "Ctrl + C to do so."
+ "runtime. See https://pip.pypa.io/warnings/backtracking for "
+ "guidance. If you want to abort this run, press Ctrl + C."
),
}
diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py
index c19f83c17..f561f1f1e 100644
--- a/src/pip/_internal/resolution/resolvelib/requirements.py
+++ b/src/pip/_internal/resolution/resolvelib/requirements.py
@@ -21,12 +21,12 @@ class ExplicitRequirement(Requirement):
@property
def project_name(self) -> NormalizedName:
- # No need to canonicalise - the candidate did this
+ # No need to canonicalize - the candidate did this
return self.candidate.project_name
@property
def name(self) -> str:
- # No need to canonicalise - the candidate did this
+ # No need to canonicalize - the candidate did this
return self.candidate.name
def format_for_error(self) -> str:
diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py
index f89afaf43..32ef7899b 100644
--- a/src/pip/_internal/resolution/resolvelib/resolver.py
+++ b/src/pip/_internal/resolution/resolvelib/resolver.py
@@ -19,8 +19,6 @@ from pip._internal.resolution.resolvelib.reporter import (
PipDebuggingReporter,
PipReporter,
)
-from pip._internal.utils.deprecation import deprecated
-from pip._internal.utils.filetypes import is_archive_file
from .base import Candidate, Requirement
from .factory import Factory
@@ -49,6 +47,7 @@ class Resolver(BaseResolver):
ignore_requires_python: bool,
force_reinstall: bool,
upgrade_strategy: str,
+ suppress_build_failures: bool,
py_version_info: Optional[Tuple[int, ...]] = None,
):
super().__init__()
@@ -63,6 +62,7 @@ class Resolver(BaseResolver):
force_reinstall=force_reinstall,
ignore_installed=ignore_installed,
ignore_requires_python=ignore_requires_python,
+ suppress_build_failures=suppress_build_failures,
py_version_info=py_version_info,
)
self.ignore_dependencies = ignore_dependencies
@@ -136,25 +136,6 @@ class Resolver(BaseResolver):
)
continue
- looks_like_sdist = (
- is_archive_file(candidate.source_link.file_path)
- and candidate.source_link.ext != ".zip"
- )
- if looks_like_sdist:
- # is a local sdist -- show a deprecation warning!
- reason = (
- "Source distribution is being reinstalled despite an "
- "installed package having the same name and version as "
- "the installed package."
- )
- replacement = "use --force-reinstall"
- deprecated(
- reason=reason,
- replacement=replacement,
- gone_in="21.3",
- issue=8711,
- )
-
# is a local sdist or path -- reinstall
ireq.should_reinstall = True
else:
@@ -192,17 +173,19 @@ class Resolver(BaseResolver):
get installed one-by-one.
The current implementation creates a topological ordering of the
- dependency graph, while breaking any cycles in the graph at arbitrary
- points. We make no guarantees about where the cycle would be broken,
- other than they would be broken.
+ dependency graph, giving more weight to packages with less
+ or no dependencies, while breaking any cycles in the graph at
+ arbitrary points. We make no guarantees about where the cycle
+ would be broken, other than it *would* be broken.
"""
assert self._result is not None, "must call resolve() first"
+ if not req_set.requirements:
+ # Nothing is left to install, so we do not need an order.
+ return []
+
graph = self._result.graph
- weights = get_topological_weights(
- graph,
- expected_node_count=len(self._result.mapping) + 1,
- )
+ weights = get_topological_weights(graph, set(req_set.requirements.keys()))
sorted_items = sorted(
req_set.requirements.items(),
@@ -213,23 +196,32 @@ class Resolver(BaseResolver):
def get_topological_weights(
- graph: "DirectedGraph[Optional[str]]", expected_node_count: int
+ graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str]
) -> Dict[Optional[str], int]:
"""Assign weights to each node based on how "deep" they are.
This implementation may change at any point in the future without prior
notice.
- We take the length for the longest path to any node from root, ignoring any
- paths that contain a single node twice (i.e. cycles). This is done through
- a depth-first search through the graph, while keeping track of the path to
- the node.
+ We first simplify the dependency graph by pruning any leaves and giving them
+ the highest weight: a package without any dependencies should be installed
+ first. This is done again and again in the same way, giving ever less weight
+ to the newly found leaves. The loop stops when no leaves are left: all
+ remaining packages have at least one dependency left in the graph.
+
+ Then we continue with the remaining graph, by taking the length for the
+ longest path to any node from root, ignoring any paths that contain a single
+ node twice (i.e. cycles). This is done through a depth-first search through
+ the graph, while keeping track of the path to the node.
Cycles in the graph result would result in node being revisited while also
- being it's own path. In this case, take no action. This helps ensure we
+ being on its own path. In this case, take no action. This helps ensure we
don't get stuck in a cycle.
When assigning weight, the longer path (i.e. larger length) is preferred.
+
+ We are only interested in the weights of packages that are in the
+ requirement_keys.
"""
path: Set[Optional[str]] = set()
weights: Dict[Optional[str], int] = {}
@@ -245,15 +237,49 @@ def get_topological_weights(
visit(child)
path.remove(node)
+ if node not in requirement_keys:
+ return
+
last_known_parent_count = weights.get(node, 0)
weights[node] = max(last_known_parent_count, len(path))
+ # Simplify the graph, pruning leaves that have no dependencies.
+ # This is needed for large graphs (say over 200 packages) because the
+ # `visit` function is exponentially slower then, taking minutes.
+ # See https://github.com/pypa/pip/issues/10557
+ # We will loop until we explicitly break the loop.
+ while True:
+ leaves = set()
+ for key in graph:
+ if key is None:
+ continue
+ for _child in graph.iter_children(key):
+ # This means we have at least one child
+ break
+ else:
+ # No child.
+ leaves.add(key)
+ if not leaves:
+ # We are done simplifying.
+ break
+ # Calculate the weight for the leaves.
+ weight = len(graph) - 1
+ for leaf in leaves:
+ if leaf not in requirement_keys:
+ continue
+ weights[leaf] = weight
+ # Remove the leaves from the graph, making it simpler.
+ for leaf in leaves:
+ graph.remove(leaf)
+
+ # Visit the remaining graph.
# `None` is guaranteed to be the root node by resolvelib.
visit(None)
- # Sanity checks
- assert weights[None] == 0
- assert len(weights) == expected_node_count
+ # Sanity check: all requirement keys should be in the weights,
+ # and no other keys should be in the weights.
+ difference = set(weights.keys()).difference(requirement_keys)
+ assert not difference, difference
return weights
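
For intuition about the leaf-pruning step above, here is a hedged, simplified standalone sketch that uses a plain dict of children instead of resolvelib's DirectedGraph and omits the requirement_keys filtering and the depth-first visit of any remaining cyclic part; the package names are made up. Leaves get the highest weight, and the resolver installs higher-weighted packages first:

graph = {
    None: {"flask"},                 # None is the virtual root node
    "flask": {"click", "jinja2"},
    "jinja2": {"markupsafe"},
    "click": set(),
    "markupsafe": set(),
}

weights = {}
while True:
    leaves = {k for k, children in graph.items() if k is not None and not children}
    if not leaves:
        break
    weight = len(graph) - 1
    for leaf in leaves:
        weights[leaf] = weight
    # Remove the leaves, which may turn their parents into new leaves.
    for key in list(graph):
        if key in leaves:
            del graph[key]
        else:
            graph[key] -= leaves

print(dict(sorted(weights.items())))
# {'click': 4, 'flask': 1, 'jinja2': 2, 'markupsafe': 4}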