Reproducible package builds (and requirements.txt support) (#3469)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
This commit is contained in:
Joe Marshall 2023-02-04 13:55:09 +00:00 committed by GitHub
parent c221532c8c
commit ba3a8d7eb8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 262 additions and 46 deletions

View File

@ -61,6 +61,8 @@ myst:
- {{ Enhancement }} Improved logging in `pyodide-build` with rich.
{pr}`3442`
- {{ Enhancement }} Added `pyodide-build` support for building a list of packages from a requirements.txt file with `pyodide build -r <requirements.txt>`. Also can output a list of chosen dependencies in the same format when building a package and dependencies using the `--output-lockfile <lockfile.txt>` argument. This enables repeatable builds of packages. {pr}`3469`
- {{ Enhancement }} Added `package/tag` key to the `meta.yaml` spec to group
packages.
{pr}`3444`

View File

@ -11,7 +11,11 @@ import typer
from .. import common
from ..out_of_tree import build
from ..out_of_tree.pypi import build_dependencies_for_wheel, fetch_pypi_package
from ..out_of_tree.pypi import (
build_dependencies_for_wheel,
build_wheels_from_pypi_requirements,
fetch_pypi_package,
)
from ..out_of_tree.utils import initialize_pyodide_root
@ -123,6 +127,12 @@ def main(
"",
help="Build source, can be source folder, pypi version specification, or url to a source dist archive or wheel file. If this is blank, it will build the current directory.",
),
requirements_txt: str = typer.Option(
"",
"--requirements",
"-r",
help="Build a list of package requirements from a requirements.txt file",
),
exports: str = typer.Option(
"requested",
help="Which symbols should be exported when linking .so files?",
@ -130,6 +140,10 @@ def main(
build_dependencies: bool = typer.Option(
False, help="Fetch non-pyodide dependencies from pypi and build them too."
),
output_lockfile: str = typer.Option(
"",
help="Output list of resolved dependencies to a file in requirements.txt format",
),
skip_dependency: list[str] = typer.Option(
[],
help="Skip building or resolving a single dependency. Use multiple times or provide a comma separated list to skip multiple dependencies.",
@ -138,11 +152,52 @@ def main(
) -> None:
"""Use pypa/build to build a Python package from source, pypi or url."""
extras: list[str] = []
if len(requirements_txt) > 0:
# a requirements.txt - build it (and optionally deps)
if not Path(requirements_txt).exists():
raise RuntimeError(
f"Couldn't find requirements text file {requirements_txt}"
)
reqs = []
with open(requirements_txt) as f:
raw_reqs = [x.strip() for x in f.readlines()]
for x in raw_reqs:
# remove comments
comment_pos = x.find("#")
if comment_pos != -1:
x = x[:comment_pos].strip()
if len(x) > 0:
if x[0] == "-":
raise RuntimeError(
f"pyodide build only supports name-based PEP508 requirements. [{x}] will not work."
)
if x.find("@") != -1:
raise RuntimeError(
f"pyodide build does not support URL based requirements. [{x}] will not work"
)
reqs.append(x)
try:
build_wheels_from_pypi_requirements(
reqs,
Path("./dist").resolve(),
build_dependencies,
skip_dependency,
exports,
ctx.args,
output_lockfile=output_lockfile,
)
except BaseException as e:
import traceback
print("Failed building multiple wheels:", traceback.format_exc())
raise e
return
if source_location is not None:
extras = re.findall(r"\[(\w+)\]", source_location)
if len(extras) != 0:
source_location = source_location[0 : source_location.find("[")]
if not source_location:
# build the current folder
wheel = source(".", exports, ctx)
@ -160,7 +215,12 @@ def main(
if build_dependencies:
try:
build_dependencies_for_wheel(
wheel, extras, skip_dependency, exports, ctx.args
wheel,
extras,
skip_dependency,
exports,
ctx.args,
output_lockfile=output_lockfile,
)
except BaseException as e:
import traceback

View File

@ -44,6 +44,9 @@ def stream_redirected(to=os.devnull, stream=None):
if stream is None:
stream = sys.stdout
try:
if not hasattr(stream, "fileno"):
yield
return
stream_fd = stream.fileno()
except io.UnsupportedOperation:
# in case we're already capturing to something that isn't really a file
@ -227,6 +230,9 @@ class PyPIProvider(APBase):
BUILD_SKIP: list[str] = []
BUILD_EXPORTS: str = ""
def __init__(self, build_dependencies: bool):
    """Create a provider.

    build_dependencies controls whether ``get_dependencies`` expands each
    candidate's dependency list or returns only extras-related entries.
    """
    # Stored flag is consulted by get_dependencies() during resolution.
    self.build_dependencies = build_dependencies
def identify(self, requirement_or_candidate):
    """Return the canonicalized project name used as the resolver's key."""
    return canonicalize_name(requirement_or_candidate.name)
@ -274,9 +280,10 @@ class PyPIProvider(APBase):
def get_dependencies(self, candidate):
deps = []
for d in candidate.dependencies:
if d.name not in PyPIProvider.BUILD_SKIP:
deps.append(d)
if self.build_dependencies:
for d in candidate.dependencies:
if d.name not in PyPIProvider.BUILD_SKIP:
deps.append(d)
if candidate.extras:
# add the base package as a dependency too, so we can avoid conflicts between same package
# but with different extras
@ -297,21 +304,7 @@ def _get_json_package_list(fname: Path) -> Generator[str, None, None]:
yield k
def build_dependencies_for_wheel(
wheel: Path,
extras: list[str],
skip_dependency: list[str],
exports: str,
build_flags: list[str],
) -> None:
"""Extract dependencies from this wheel and build pypi dependencies
for each one in ./dist/
n.b. because dependency resolution may need to backtrack, this
is potentially quite slow in the case that one needs to build an
sdist in order to discover dependencies of a candidate sub-dependency.
"""
metadata = None
def _parse_skip_list(skip_dependency: list[str]) -> None:
PyPIProvider.BUILD_SKIP = []
for skip in skip_dependency:
split_deps = skip.split(",")
@ -324,6 +317,95 @@ def build_dependencies_for_wheel(
else:
PyPIProvider.BUILD_SKIP.append(dep)
def _resolve_and_build(
    deps: list[str],
    target_folder: Path,
    build_dependencies: bool,
    extras: list[str],
    output_lockfile: str | None,
) -> None:
    """Resolve *deps* against PyPI and download or build a wheel for each.

    Parameters
    ----------
    deps
        PEP 508 requirement strings to resolve.
    target_folder
        Directory receiving the wheels; created if it does not exist.
    build_dependencies
        If True, the provider also walks each candidate's own dependencies.
    extras
        Extras used when evaluating environment markers on *deps*.
    output_lockfile
        When non-empty, path of a requirements.txt-style lockfile that will
        list every resolved ``name[extras]==version`` pin.
    """
    requirements = []
    # Marker-evaluation environment describing the Pyodide (Emscripten)
    # target platform, not the host interpreter running this build.
    target_env = {
        "python_version": f'{common.get_make_flag("PYMAJOR")}.{common.get_make_flag("PYMINOR")}',
        "sys_platform": common.platform().split("_")[0],
        "extra": ",".join(extras),
    }
    for d in deps:
        r = Requirement(d)
        # Drop explicitly skipped packages and requirements whose environment
        # marker does not apply to the target platform.
        if (r.name not in PyPIProvider.BUILD_SKIP) and (
            (not r.marker) or r.marker.evaluate(target_env)
        ):
            requirements.append(r)
    # Create the (reusable) resolver.
    provider = PyPIProvider(build_dependencies=build_dependencies)
    reporter = BaseReporter()
    resolver: Resolver[Requirement, Candidate, str] = Resolver(provider, reporter)
    # Kick off the resolution process, and get the final result.
    result = resolver.resolve(requirements)
    target_folder.mkdir(parents=True, exist_ok=True)
    # BUGFIX: the lockfile handle was previously left open (leaked) if
    # download_or_build_wheel raised mid-loop; close it in a finally block.
    version_file = open(output_lockfile, "w") if output_lockfile else None
    try:
        for x in result.mapping.values():
            download_or_build_wheel(x.url, target_folder)
            if version_file:
                extratxt = "[" + ",".join(x.extras) + "]" if x.extras else ""
                version_file.write(f"{x.name}{extratxt}=={x.version}\n")
    finally:
        if version_file:
            version_file.close()
def build_wheels_from_pypi_requirements(
    reqs: list[str],
    target_folder: Path,
    build_dependencies: bool,
    skip_dependency: list[str],
    exports: str,
    build_flags: list[str],
    output_lockfile: str | None,
) -> None:
    """Build or fetch wheels for a list of package requirements.

    When *build_dependencies* is true, each package's dependencies are built
    or fetched as well. *skip_dependency*, *exports* and *build_flags* are
    recorded on PyPIProvider as the global build configuration before
    resolution starts.
    """
    # Publish the build configuration consulted by the provider/resolver.
    _parse_skip_list(skip_dependency)
    PyPIProvider.BUILD_FLAGS = build_flags
    PyPIProvider.BUILD_EXPORTS = exports
    # Top-level requirements carry no extras; pass them straight through.
    _resolve_and_build(
        reqs,
        target_folder,
        build_dependencies,
        extras=[],
        output_lockfile=output_lockfile,
    )
def build_dependencies_for_wheel(
wheel: Path,
extras: list[str],
skip_dependency: list[str],
exports: str,
build_flags: list[str],
output_lockfile: str | None,
) -> None:
"""Extract dependencies from this wheel and build pypi dependencies
for each one in ./dist/
n.b. because dependency resolution may need to backtrack, this
is potentially quite slow in the case that one needs to build an
sdist in order to discover dependencies of a candidate sub-dependency.
"""
metadata = None
_parse_skip_list(skip_dependency)
PyPIProvider.BUILD_EXPORTS = exports
PyPIProvider.BUILD_FLAGS = build_flags
with ZipFile(wheel) as z:
@ -335,30 +417,24 @@ def build_dependencies_for_wheel(
raise RuntimeError(f"Can't find package metadata in {wheel}")
deps: list[str] = metadata.get_all("Requires-Dist", [])
requirements = []
target_env = {
"extra": ",".join(extras),
"python_version": f'{common.get_make_flag("PYMAJOR")}.{common.get_make_flag("PYMINOR")}',
"sys_platform": common.platform().split("_")[0],
}
for d in deps:
r = Requirement(d)
if (r.name not in PyPIProvider.BUILD_SKIP) and (
(not r.marker) or r.marker.evaluate(target_env)
):
requirements.append(r)
# Create the (reusable) resolver.
provider = PyPIProvider()
reporter = BaseReporter()
resolver: Resolver[Requirement, Candidate, str] = Resolver(provider, reporter)
# Kick off the resolution process, and get the final result.
result = resolver.resolve(requirements)
for x in result.mapping.values():
download_or_build_wheel(x.url, wheel.parent)
metadata.get("version")
_resolve_and_build(
deps,
wheel.parent,
build_dependencies=True,
extras=extras,
output_lockfile=output_lockfile,
)
# add the current wheel to the package-versions.txt
if output_lockfile is not None and len(output_lockfile) > 0:
with open(output_lockfile, "a") as version_txt:
name = metadata.get("Name")
version = metadata.get("Version")
if extras:
extratxt = "[" + ",".join(extras) + "]"
else:
extratxt = ""
version_txt.write(f"{name}{extratxt}=={version}\n")
def fetch_pypi_package(package_spec: str, destdir: Path) -> Path:

View File

@ -113,7 +113,11 @@ def _make_fake_package(
f.write(f'print("Hello from compiled module {name}")')
with open(build_path / "setup.py", "w") as sf:
sf.write(
f'from setuptools import setup\nfrom Cython.Build import cythonize\nsetup(ext_modules=cythonize("src/{module_name}/*.pyx",language_level=3))'
f"""
from setuptools import setup
from Cython.Build import cythonize
setup(ext_modules=cythonize("src/{module_name}/*.pyx",language_level=3))
"""
)
with open(build_path / "MANIFEST.in", "w") as mf:
mf.write("global-include *.pyx\n")
@ -307,3 +311,77 @@ def test_fake_pypi_extras_build(selenium, tmp_path, fake_pypi_url):
assert result.exit_code == 0, result.stdout
built_wheels = set(output_dir.glob("*.whl"))
assert len(built_wheels) == 2
def test_fake_pypi_repeatable_build(selenium, tmp_path, fake_pypi_url):
"""Lockfile round-trip: build from requirements.txt with --output-lockfile,
then rebuild from that lockfile and check the same versions come out."""
# TODO: - make test run without pyodide
output_dir = tmp_path / "dist"
# build package that resolves right
app = typer.Typer()
app.command()(build.main)
# override a dependency version and build
# pkg-a
# Requirements pin pkg-c to ~=1.0.0 and leave pkg-a unpinned; comments in
# the file exercise the comment-stripping in the -r parser.
with open(tmp_path / "requirements.txt", "w") as req_file:
req_file.write(
"""
# Whole line comment
pkg-c~=1.0.0 # end of line comment
pkg-a
"""
)
# First build: resolve, build dependencies, and emit lockfile.txt.
with chdir(tmp_path):
result = runner.invoke(
app,
[
"-r",
"requirements.txt",
"--build-dependencies",
"--output-lockfile",
"lockfile.txt",
],
)
# this should work
assert result.exit_code == 0, result.stdout
built_wheels = list(output_dir.glob("*.whl"))
assert result.exit_code == 0, result.stdout
built_wheels = list(output_dir.glob("*.whl"))
assert len(built_wheels) == 2, result.stdout
# should have built version 1.0.0 of pkg-c
for x in built_wheels:
if x.name.startswith("pkg_c"):
assert x.name.find("1.0.0") != -1, x.name
# delete each wheel so the lockfile rebuild must recreate it
x.unlink()
# rebuild from package-versions lockfile and
# check it outputs the same version number
with chdir(tmp_path):
result = runner.invoke(
app,
["-r", str(tmp_path / "lockfile.txt")],
)
# should still have built 1.0.0 of pkg-c
built_wheels = list(output_dir.glob("*.whl"))
for x in built_wheels:
if x.name.startswith("pkg_c"):
assert x.name.find("1.0.0") != -1, x.name
assert len(built_wheels) == 2, result.stdout
def test_bad_requirements_text(selenium, tmp_path):
    """``pyodide build -r`` must reject requirement lines it cannot honour."""
    app = typer.Typer()
    app.command()(build.main)
    # test 1 - error on URL location in requirements
    # test 2 - error on advanced options
    # test 3 - error on editable install of package
    bad_lines = [" pkg-c@http://www.pkg-c.org", " -r bob.txt", " -e pkg-c"]
    for line in bad_lines:
        req_path = tmp_path / "requirements.txt"
        with open(req_path, "w") as req_file:
            req_file.write(line + "\n")
        with chdir(tmp_path):
            result = runner.invoke(
                app,
                ["-r", "requirements.txt"],
            )
        # The CLI must fail AND echo the offending requirement back.
        assert result.exit_code != 0 and line.strip() in str(result)