Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 10 additions & 10 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

7 changes: 6 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ include = [
[tool.poetry.dependencies]
python = "^3.7"
poetry = "^1.2.0b3"
poetry-core = "^1.1.0b3"

[tool.poetry.dev-dependencies]
pre-commit = "^2.18"
Expand All @@ -43,7 +44,11 @@ use_parentheses = true
[tool.mypy]
namespace_packages = true
show_error_codes = true
enable_error_code = ["ignore-without-code"]
enable_error_code = [
"ignore-without-code",
"redundant-expr",
"truthy-bool",
]
strict = true
files = ["src", "tests"]
exclude = ["^tests/fixtures/"]
Expand Down
5 changes: 4 additions & 1 deletion src/poetry_plugin_export/exporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@
from poetry.core.packages.dependency_group import MAIN_GROUP
from poetry.repositories.http import HTTPRepository

from poetry_plugin_export.walker import get_project_dependency_packages


if TYPE_CHECKING:
from pathlib import Path
Expand Down Expand Up @@ -80,7 +82,8 @@ def _export_requirements_txt(self, cwd: Path, output: IO | str) -> None:
list(self._groups), only=True
)

for dependency_package in self._poetry.locker.get_project_dependency_packages(
for dependency_package in get_project_dependency_packages(
self._poetry.locker,
project_requires=root.all_requires,
project_python_marker=root.python_marker,
extras=self._extras,
Expand Down
242 changes: 242 additions & 0 deletions src/poetry_plugin_export/walker.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,242 @@
from __future__ import annotations

from typing import TYPE_CHECKING

from poetry.core.semver.util import constraint_regions
from poetry.core.version.markers import AnyMarker
from poetry.core.version.markers import SingleMarker
from poetry.packages import DependencyPackage
from poetry.utils.extras import get_extra_package_names


if TYPE_CHECKING:
from collections.abc import Iterable
from collections.abc import Iterator
from collections.abc import Sequence

from poetry.core.packages.dependency import Dependency
from poetry.core.packages.package import Package
from poetry.core.version.markers import BaseMarker
from poetry.packages import Locker


def get_python_version_region_markers(packages: list[Package]) -> list[BaseMarker]:
    """Return one marker per contiguous python-version region covered by *packages*.

    The python constraints of all *packages* are split into disjoint regions
    (via ``constraint_regions``); each region is converted into a marker
    combining its lower and upper bound.  An unbounded region yields
    ``AnyMarker``.
    """

    def bound_marker(version: object, operator: str) -> BaseMarker:
        # python_version only carries two version components, so a bound with
        # micro-level precision must be expressed as python_full_version.
        name = "python_full_version" if version.precision > 2 else "python_version"
        return SingleMarker(name, f"{operator} {version}")

    result: list[BaseMarker] = []
    region_list = constraint_regions(
        [package.python_constraint for package in packages]
    )
    for region in region_list:
        marker: BaseMarker = AnyMarker()
        if region.min is not None:
            marker = marker.intersect(
                bound_marker(region.min, ">=" if region.include_min else ">")
            )
        if region.max is not None:
            marker = marker.intersect(
                bound_marker(region.max, "<=" if region.include_max else "<")
            )
        result.append(marker)

    return result


def get_project_dependency_packages(
    locker: Locker,
    project_requires: list[Dependency],
    project_python_marker: BaseMarker | None = None,
    extras: bool | Sequence[str] | None = None,
) -> Iterator[DependencyPackage]:
    """Yield a ``DependencyPackage`` for every locked package the project needs.

    Top-level requirements are narrowed by the project's python marker,
    filtered by the selected *extras*, then walked transitively against the
    lock file.
    """
    # Narrow every top-level requirement by the project's own python marker.
    if project_python_marker is not None:
        narrowed: list[Dependency] = []
        for original in project_requires:
            clone = original.clone()
            clone.marker = clone.marker.intersect(project_python_marker)
            narrowed.append(clone)
        project_requires = narrowed

    repository = locker.locked_repository()

    # ``extras is True`` means "all extras": no filtering at all.  Otherwise
    # compute the set of package names activated by the requested extras
    # (possibly empty when no extras were requested).
    extra_package_names: set[str] | None = None
    if extras is not True:
        extra_package_names = set(
            get_extra_package_names(
                repository.packages,
                locker.lock_data.get("extras", {}),
                extras or (),
            )
        )

    # Drop optional requirements that were not activated via an extra.
    selected: list[Dependency] = []
    for dependency in project_requires:
        found = repository.find_packages(dependency=dependency)
        if not found:
            # requirement has no locked counterpart; nothing to export for it
            continue
        package = found[0]

        if (
            extra_package_names is not None
            and package.optional
            and package.name not in extra_package_names
        ):
            # a package is locked as optional, but is not activated via extras
            continue

        selected.append(dependency)

    for package, dependency in get_project_dependencies(
        project_requires=selected,
        locked_packages=repository.packages,
    ):
        yield DependencyPackage(dependency=dependency, package=package)


def get_project_dependencies(
    project_requires: list[Dependency],
    locked_packages: list[Package],
) -> Iterable[tuple[Package, Dependency]]:
    """Resolve *project_requires* transitively against *locked_packages*.

    Returns ``(package, dependency)`` pairs for the whole dependency closure.
    """
    # The same package name may be locked several times under different
    # constraints, so index the locked packages by name first.
    by_name: dict[str, list[Package]] = {}
    for locked in locked_packages:
        by_name.setdefault(locked.name, []).append(locked)

    # Put higher versions first so that we prefer them.
    for versions in by_name.values():
        versions.sort(key=lambda locked: locked.version, reverse=True)

    return walk_dependencies(
        dependencies=project_requires,
        packages_by_name=by_name,
    ).items()


def walk_dependencies(
    dependencies: list[Dependency],
    packages_by_name: dict[str, list[Package]],
) -> dict[Package, Dependency]:
    """Walk the locked dependency graph breadth-first from *dependencies*.

    Returns a mapping from each reached locked package to a single dependency
    whose marker is the union of all markers under which that package is
    required.  Raises ``RuntimeError`` if a requirement cannot be matched to
    any locked package.
    """
    nested_dependencies: dict[Package, Dependency] = {}

    # Keyed by (dependency, marker) so the same requirement re-entering the
    # queue with a different marker is still processed.
    visited: set[tuple[Dependency, BaseMarker]] = set()
    while dependencies:
        requirement = dependencies.pop(0)
        if (requirement, requirement.marker) in visited:
            continue
        visited.add((requirement, requirement.marker))

        # Choose the locked package for this requirement, respecting versions
        # already decided for overlapping markers.
        locked_package = get_locked_package(
            requirement, packages_by_name, nested_dependencies
        )

        if not locked_package:
            raise RuntimeError(f"Dependency walk failed at {requirement}")

        if requirement.extras:
            locked_package = locked_package.with_features(requirement.extras)

        # create dependency from locked package to retain dependency metadata
        # if this is not done, we can end-up with incorrect nested dependencies
        constraint = requirement.constraint
        marker = requirement.marker
        requirement = locked_package.to_dependency()
        requirement.marker = requirement.marker.intersect(marker)

        requirement.constraint = constraint

        # Enqueue the locked package's own requirements.
        for require in locked_package.requires:
            # Skip optional sub-dependencies not activated by any selected
            # feature of this package.
            if require.is_optional() and not any(
                require in locked_package.extras[feature]
                for feature in locked_package.features
            ):
                continue

            # Propagate the parent's marker (minus extras) onto the child.
            base_marker = require.marker.intersect(requirement.marker.without_extras())

            if not base_marker.is_empty():
                # So as to give ourselves enough flexibility in choosing a solution,
                # we need to split the world up into the python version ranges that
                # this package might care about.
                #
                # We create a marker for all of the possible regions, and add a
                # requirement for each separately.
                candidates = packages_by_name.get(require.name, [])
                region_markers = get_python_version_region_markers(candidates)
                for region_marker in region_markers:
                    marker = region_marker.intersect(base_marker)
                    if not marker.is_empty():
                        require2 = require.clone()
                        require2.marker = marker
                        dependencies.append(require2)

        # Merge markers when the same locked package is reached again via a
        # different path.
        key = locked_package
        if key not in nested_dependencies:
            nested_dependencies[key] = requirement
        else:
            nested_dependencies[key].marker = nested_dependencies[key].marker.union(
                requirement.marker
            )

    return nested_dependencies


def get_locked_package(
    dependency: Dependency,
    packages_by_name: dict[str, list[Package]],
    decided: dict[Package, Dependency] | None = None,
) -> Package | None:
    """Pick the locked package that satisfies *dependency*, if any.

    Honours earlier decisions recorded in *decided*: once a version of a
    package has been chosen for an overlapping marker, that same version must
    be reused, so we never end up exporting two versions of one package for
    the same environment.  Returns ``None`` when no consistent choice exists.
    """
    decided = decided or {}

    candidates = packages_by_name.get(dependency.name, [])

    # Collect previously decided packages whose markers overlap this
    # dependency's marker: those choices are binding.
    overlapping_candidates = set()
    for candidate in candidates:
        previous_choice = decided.get(candidate)
        if previous_choice is None:
            continue
        if previous_choice.marker.intersect(dependency.marker).is_empty():
            continue
        overlapping_candidates.add(candidate)

    # More than one binding prior choice: no single version can satisfy all.
    if len(overlapping_candidates) > 1:
        return None

    # Keep only candidates consistent with this dependency's constraints.
    compatible = [
        candidate
        for candidate in candidates
        if candidate.python_constraint.allows_all(dependency.python_constraint)
        and dependency.constraint.allows(candidate.version)
    ]

    # A binding prior choice must be used if it is compatible.
    if overlapping_candidates:
        compatible = [
            candidate for candidate in compatible if candidate in overlapping_candidates
        ]

    # Candidates arrive sorted highest-version-first, so the first match wins.
    return compatible[0] if compatible else None
11 changes: 8 additions & 3 deletions tests/markers.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,16 +8,21 @@
# Shared environment-marker fixtures for the test suite (PEP 508 markers).
# Platform markers.
MARKER_LINUX = parse_marker('sys_platform == "linux"')
MARKER_DARWIN = parse_marker('sys_platform == "darwin"')

# Implementation marker.
MARKER_CPYTHON = parse_marker('implementation_name == "cpython"')

# Python version-range markers.  Names encode the half-open range, e.g.
# MARKER_PY36_38 covers python_version in [3.6, 3.8).
MARKER_PY27 = parse_marker('python_version >= "2.7" and python_version < "2.8"')

MARKER_PY36 = parse_marker('python_version >= "3.6" and python_version < "4.0"')
MARKER_PY36_38 = parse_marker('python_version >= "3.6" and python_version < "3.8"')
# Bounds with micro-level precision use python_full_version instead of
# python_version (see PEP 508 marker variables).
MARKER_PY36_PY362 = parse_marker(
    'python_version >= "3.6" and python_full_version < "3.6.2"'
)
MARKER_PY362_PY40 = parse_marker(
    'python_full_version >= "3.6.2" and python_version < "4.0"'
)
MARKER_PY36_ONLY = parse_marker('python_version >= "3.6" and python_version < "3.7"')

MARKER_PY37 = parse_marker('python_version >= "3.7" and python_version < "4.0"')
MARKER_PY37_PY400 = parse_marker(
    'python_version >= "3.7" and python_full_version < "4.0.0"'
)

# Union of the 2.7 and 3.6+ ranges: "any supported python".
MARKER_PY = MARKER_PY27.union(MARKER_PY36)

Expand Down
Loading