Skip to content

Commit

Permalink
Add Requires-Dist and Requires-Python to locks.
Browse files Browse the repository at this point in the history
These bits of metadata are needed to consume the lock without needing to
resolve through Pip.

Work towards #1583.
  • Loading branch information
jsirois committed Jan 24, 2022
1 parent 8db598b commit 2ec7a64
Show file tree
Hide file tree
Showing 9 changed files with 212 additions and 120 deletions.
44 changes: 35 additions & 9 deletions pex/cli/commands/lockfile/json_codec.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
)
from pex.resolve.resolver_configuration import ResolverVersion
from pex.third_party.packaging import tags
from pex.third_party.packaging.specifiers import InvalidSpecifier, SpecifierSet
from pex.third_party.pkg_resources import Requirement, RequirementParseError
from pex.typing import TYPE_CHECKING, cast

Expand Down Expand Up @@ -131,6 +132,18 @@ def parse_requirement(
"The requirement string at '{path}' is invalid: {err}".format(path=path, err=e)
)

def parse_version_specifier(
    raw_version_specifier,  # type: str
    path,  # type: str
):
    # type: (...) -> SpecifierSet
    """Parse a raw version specifier string into a ``SpecifierSet``.

    :param raw_version_specifier: The raw PEP 440 version specifier text (e.g. ">=3.6").
    :param path: The JSON path of the value being parsed, used for error reporting.
    :raises ParseError: If the raw text is not a valid version specifier.
    """
    try:
        specifier_set = SpecifierSet(raw_version_specifier)
    except InvalidSpecifier as e:
        # Surface vendored-packaging parse failures as this codec's ParseError,
        # pointing at the offending JSON path.
        raise ParseError(
            "The version specifier at '{path}' is invalid: {err}".format(path=path, err=e)
        )
    return specifier_set

requirements = [
parse_requirement(req, path=".requirements[{index}]".format(index=index))
for index, req in enumerate(get("requirements", list))
Expand Down Expand Up @@ -178,11 +191,6 @@ def assemble_tag(
):
req_path = "{lock_path}[{req_index}]".format(lock_path=lock_path, req_index=req_index)

requirement = parse_requirement(
raw_requirement=get("requirement", data=req, path=req_path),
path='{path}["requirement"]'.format(path=req_path),
)

artifacts = []
for i, artifact in enumerate(get("artifacts", list, data=req, path=req_path)):
ap = '{path}["artifacts"][{index}]'.format(path=req_path, index=i)
Expand All @@ -202,15 +210,31 @@ def assemble_tag(
path=req_path, source=source
)
)

requires_python = None
version_specifier = get("requires_python", data=req, path=req_path, optional=True)
if version_specifier:
requires_python = parse_version_specifier(
version_specifier, path='{path}["requires_python"]'.format(path=req_path)
)

locked_reqs.append(
LockedRequirement.create(
pin=Pin(
project_name=ProjectName(get("project_name", data=req, path=req_path)),
version=Version(get("version", data=req, path=req_path)),
),
requirement=requirement,
requires_python=requires_python,
requires_dists=[
parse_requirement(
requires_dist,
path='{path}["requires_dists"][{index}]'.format(path=req_path, index=i),
)
for i, requires_dist in enumerate(
get("requires_dists", list, data=req, path=req_path)
)
],
artifact=artifacts[0],
via=tuple(get("via", list, data=req, path=req_path)),
additional_artifacts=artifacts[1:],
)
)
Expand Down Expand Up @@ -288,8 +312,10 @@ def as_json_data(lockfile):
{
"project_name": str(req.pin.project_name),
"version": str(req.pin.version),
"requirement": str(req.requirement),
"via": req.via,
"requires_dists": [str(dependency) for dependency in req.requires_dists],
"requires_python": str(req.requires_python)
if req.requires_python
else None,
"artifacts": [
{
"url": artifact.url,
Expand Down
6 changes: 0 additions & 6 deletions pex/cli/commands/lockfile/updater.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,12 +187,6 @@ def update_resolve(
elif project_name in self.update_constraints_by_project_name:
updates[project_name] = None

updated_requirements_by_project_name[project_name] = attr.evolve(
updated_requirement,
requirement=locked_requirement.requirement,
via=locked_requirement.via,
)

return ResolveUpdate(
updated_resolve=attr.evolve(
locked_resolve,
Expand Down
34 changes: 28 additions & 6 deletions pex/pip.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
from pex import dist_metadata, third_party
from pex.common import atomic_directory, is_python_script, safe_mkdtemp
from pex.compatibility import MODE_READ_UNIVERSAL_NEWLINES, get_stdout_bytes_buffer, urlparse
from pex.dist_metadata import ProjectNameAndVersion
from pex.dist_metadata import DistMetadata, ProjectNameAndVersion
from pex.distribution_target import DistributionTarget
from pex.fetcher import URLFetcher
from pex.interpreter import PythonInterpreter
Expand Down Expand Up @@ -327,6 +327,7 @@ class ResolvedRequirement(object):
def lock_all(
cls,
resolved_requirements, # type: Iterable[ResolvedRequirement]
dist_metadatas, # type: Iterable[DistMetadata]
url_fetcher, # type: URLFetcher
):
# type: (...) -> Iterator[LockedRequirement]
Expand Down Expand Up @@ -355,16 +356,32 @@ def resolve_fingerprint(partial_artifact):
or fingerprint_by_url[partial_artifact.url],
)

dist_metadata_by_pin = {
Pin(dist_info.project_name, dist_info.version): dist_info
for dist_info in dist_metadatas
}
for resolved_requirement in resolved_requirements:
distribution_metadata = dist_metadata_by_pin.get(resolved_requirement.pin)
if distribution_metadata is None:
raise ValueError(
"No distribution metadata found for {project}.\n"
"Given distribution metadata for:\n"
"{projects}".format(
project=resolved_requirement.pin.as_requirement(),
projects="\n".join(
sorted(str(pin.as_requirement()) for pin in dist_metadata_by_pin)
),
)
)
yield LockedRequirement.create(
pin=resolved_requirement.pin,
artifact=resolve_fingerprint(resolved_requirement.artifact),
requirement=resolved_requirement.requirement,
requires_dists=distribution_metadata.requires_dists,
requires_python=distribution_metadata.requires_python,
additional_artifacts=(
resolve_fingerprint(artifact)
for artifact in resolved_requirement.additional_artifacts
),
via=resolved_requirement.via,
)

pin = attr.ib() # type: Pin
Expand Down Expand Up @@ -506,8 +523,11 @@ def analysis_completed(self):
# type: () -> None
self._analysis_completed = True

def lock(self):
# type: () -> LockedResolve
def lock(
self,
dist_metadatas, # type: Iterable[DistMetadata]
):
# type: (...) -> LockedResolve
if not self._analysis_completed:
raise self.StateError(
"Lock retrieval was attempted before Pip log analysis was complete."
Expand All @@ -516,7 +536,9 @@ def lock(self):
self._locked_resolve = LockedResolve.from_target(
target=self._target,
locked_requirements=tuple(
ResolvedRequirement.lock_all(self._resolved_requirements, self._url_fetcher)
ResolvedRequirement.lock_all(
self._resolved_requirements, dist_metadatas, self._url_fetcher
)
),
)
return self._locked_resolve
Expand Down
28 changes: 12 additions & 16 deletions pex/resolve/locked_resolve.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,14 @@
from pex.pep_503 import ProjectName
from pex.sorted_tuple import SortedTuple
from pex.third_party.packaging import tags
from pex.third_party.packaging.specifiers import SpecifierSet
from pex.third_party.pkg_resources import Requirement
from pex.typing import TYPE_CHECKING, cast
from pex.typing import TYPE_CHECKING
from pex.util import CacheHelper

if TYPE_CHECKING:
import attr # vendor:skip
from typing import BinaryIO, IO, Iterable, Iterator, Tuple
from typing import BinaryIO, IO, Iterable, Iterator, Optional
else:
from pex.third_party import attr

Expand Down Expand Up @@ -86,24 +87,24 @@ def create(
cls,
pin, # type: Pin
artifact, # type: Artifact
requirement, # type: Requirement
requires_dists=(), # type: Iterable[Requirement]
requires_python=None, # type: Optional[SpecifierSet]
additional_artifacts=(), # type: Iterable[Artifact]
via=(), # type: Iterable[str]
):
# type: (...) -> LockedRequirement
return cls(
pin=pin,
artifact=artifact,
requirement=requirement,
requires_dists=SortedTuple(requires_dists, key=lambda req: str(req)),
requires_python=requires_python,
additional_artifacts=SortedTuple(additional_artifacts),
via=tuple(via),
)

pin = attr.ib() # type: Pin
artifact = attr.ib() # type: Artifact
requirement = attr.ib(order=str) # type: Requirement
additional_artifacts = attr.ib(default=()) # type: SortedTuple[Artifact]
via = attr.ib(default=()) # type: Tuple[str, ...]
requires_dists = attr.ib(default=SortedTuple()) # type: SortedTuple[Requirement]
requires_python = attr.ib(default=None) # type: Optional[SpecifierSet]
additional_artifacts = attr.ib(default=SortedTuple()) # type: SortedTuple[Artifact]

def iter_artifacts(self):
# type: () -> Iterator[Artifact]
Expand Down Expand Up @@ -146,25 +147,20 @@ def emit_artifact(
):
# type: (...) -> None
stream.write(
" --hash:{algorithm}={hash} # {url}{line_continuation}\n".format(
" --hash={algorithm}:{hash} {line_continuation}\n".format(
algorithm=artifact.fingerprint.algorithm,
hash=artifact.fingerprint.hash,
url=artifact.url,
line_continuation=" \\" if line_continuation else "",
)
)

for locked_requirement in self.locked_requirements:
stream.write(
"{project_name}=={version} # {requirement}".format(
"{project_name}=={version} \\\n".format(
project_name=locked_requirement.pin.project_name,
version=locked_requirement.pin.version,
requirement=locked_requirement.requirement,
)
)
if locked_requirement.via:
stream.write(" via -> {}".format(" via -> ".join(locked_requirement.via)))
stream.write(" \\\n")
emit_artifact(
locked_requirement.artifact,
line_continuation=bool(locked_requirement.additional_artifacts),
Expand Down
11 changes: 0 additions & 11 deletions pex/resolve/testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,8 @@

from __future__ import absolute_import

from pex.pep_503 import ProjectName
from pex.resolve.locked_resolve import Artifact, LockedRequirement, LockedResolve
from pex.sorted_tuple import SortedTuple
from pex.third_party.pkg_resources import Requirement
from pex.typing import TYPE_CHECKING

if TYPE_CHECKING:
Expand All @@ -29,23 +27,14 @@ def normalize_locked_requirement(
skip_urls=False, # type: bool
):
# type: (...) -> LockedRequirement

# We always normalize the following:
# 1. If an input requirement is not pinned, its locked equivalent always will be; so just check
# matching project names.
# 2. Creating a lock using a lock file as input will differ from a creating a lock using
# requirement strings in its via descriptions for each requirement; so don't compare vias at
# all.
return attr.evolve(
locked_req,
artifact=normalize_artifact(locked_req.artifact, skip_urls=skip_urls),
requirement=Requirement.parse(str(ProjectName(locked_req.requirement.project_name))),
additional_artifacts=()
if skip_additional_artifacts
else SortedTuple(
normalize_artifact(a, skip_urls=skip_urls) for a in locked_req.additional_artifacts
),
via=(),
)


Expand Down
60 changes: 53 additions & 7 deletions pex/resolver.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@
from collections import OrderedDict, defaultdict

from pex import environment
from pex.common import AtomicDirectory, atomic_directory, safe_mkdtemp
from pex.common import AtomicDirectory, atomic_directory, pluralize, safe_mkdtemp
from pex.dist_metadata import DistMetadata
from pex.distribution_target import DistributionTarget, DistributionTargets
from pex.environment import FingerprintedDistribution, PEXEnvironment
from pex.jobs import Raise, SpawnedJob, execute_parallel
Expand Down Expand Up @@ -139,7 +140,7 @@ def download_distributions(self, dest=None, max_parallel_jobs=None):
return []

dest = dest or safe_mkdtemp()
spawn_download = functools.partial(self._spawn_download, dest)
spawn_download = functools.partial(self._spawn_download, dest, max_parallel_jobs)
with TRACER.timed("Resolving for:\n {}".format("\n ".join(map(str, self.targets)))):
return list(
execute_parallel(
Expand All @@ -153,6 +154,7 @@ def download_distributions(self, dest=None, max_parallel_jobs=None):
def _spawn_download(
self,
resolved_dists_dir, # type: str
max_parallel_jobs, # type: Optional[int]
target, # type: DistributionTarget
):
# type: (...) -> SpawnedJob[DownloadResult]
Expand Down Expand Up @@ -185,13 +187,57 @@ def _spawn_download(
build_isolation=self.build_isolation,
locker=locker,
)
return SpawnedJob.and_then(
job=download_job,
result_func=lambda: DownloadResult(
target, download_dir, locked_resolve=locker.lock() if locker else None
),

wheel_builder = WheelBuilder(
package_index_configuration=self.package_index_configuration,
cache=self.cache,
prefer_older_binary=self.prefer_older_binary,
use_pep517=self.use_pep517,
build_isolation=self.build_isolation,
)

def result_func():
return DownloadResult(target, download_dir)

if locker:
result_func = functools.partial(
self._finalize_lock,
locker=locker,
download_result=result_func(),
wheel_builder=wheel_builder,
max_parallel_jobs=max_parallel_jobs,
)

return SpawnedJob.and_then(job=download_job, result_func=result_func)

@staticmethod
def _finalize_lock(
    locker,  # type: Locker
    download_result,  # type: DownloadResult
    wheel_builder,  # type: WheelBuilder
    max_parallel_jobs,  # type: Optional[int]
):
    # type: (...) -> DownloadResult
    """Build any downloaded sdists to wheels, gather distribution metadata and lock.

    Source distributions carry no usable Requires-Dist / Requires-Python metadata until
    built; so all source distributions in the download result are built to wheels first,
    then `DistMetadata` is extracted from every wheel (pre-built and freshly built) and
    handed to the locker to produce the final locked resolve.

    :param locker: The Pip log analyzer that accumulated resolved requirements.
    :param download_result: The completed download to finalize a lock for.
    :param wheel_builder: Used to build any downloaded source distributions.
    :param max_parallel_jobs: The maximum number of parallel wheel build jobs, or None
        for the default.
    :return: A copy of ``download_result`` with its ``locked_resolve`` populated.
    """
    build_requests = tuple(download_result.build_requests())
    with TRACER.timed(
        "Building {count} source {distributions} to gather metadata for lock.".format(
            count=len(build_requests), distributions=pluralize(build_requests, "distribution")
        )
    ):
        build_results = wheel_builder.build_wheels(
            build_requests=build_requests,
            max_parallel_jobs=max_parallel_jobs,
        )
    # NOTE(review): build_results appears to map build requests to install requests —
    # confirm against WheelBuilder.build_wheels.
    dist_metadatas = tuple(
        DistMetadata.for_dist(install_request.wheel_path)
        for install_request in itertools.chain(
            # N.B.: chain accepts any iterable; no need to materialize install_requests()
            # into a tuple first.
            download_result.install_requests(),
            build_results.values(),
        )
    )
    return attr.evolve(download_result, locked_resolve=locker.lock(dist_metadatas))


@attr.s(frozen=True)
class DownloadResult(object):
Expand Down
Loading

0 comments on commit 2ec7a64

Please sign in to comment.