`pyodide_build.buildpkg`: Build in `$PYODIDE_RECIPE_BUILD_DIR` if set (#4405)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
This commit is contained in:
Matthias Köppe 2024-01-24 04:27:41 -08:00 committed by GitHub
parent 5093db325f
commit 9ca93345cf
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 75 additions and 40 deletions

View File

@@ -18,7 +18,7 @@ echo "conda activate pyodide-env" >> ~/.bashrc
# https://pyodide.org/en/stable/development/building-from-sources.html#using-docker
export EMSDK_NUM_CORE=12 EMCC_CORES=12 PYODIDE_JOBS=12
echo "export EMSDK_NUM_CORE=12 EMCC_CORES=12 PYODIDE_JOBS=12" >> ~/.bashrc
echo "export PYODIDE_BUILD_TMP=/tmp/pyodide-build" >> ~/.bashrc
echo "export PYODIDE_RECIPE_BUILD_DIR=/tmp/pyodide-build" >> ~/.bashrc
conda run -n pyodide-env --live-stream pip install -r requirements.txt

View File

@@ -6,6 +6,8 @@ set -e
# https://pyodide.org/en/stable/development/new-packages.html#prerequisites
pip install -e ./pyodide-build
echo "export PYODIDE_RECIPE_BUILD_DIR=/tmp/pyodide-build" >> ~/.bashrc
# Building emsdk and cpython takes a few minutes to run, so we do not run it here.
#
# make -C emsdk

View File

@@ -78,10 +78,13 @@ class BasePackage:
def __repr__(self) -> str:
return f"{type(self).__name__}({self.name})"
def needs_rebuild(self) -> bool:
return needs_rebuild(self.pkgdir, self.pkgdir / "build", self.meta.source)
def build_path(self, build_dir: Path) -> Path:
return build_dir / self.name / "build"
def build(self, build_args: BuildArgs) -> None:
def needs_rebuild(self, build_dir: Path) -> bool:
return needs_rebuild(self.pkgdir, self.build_path(build_dir), self.meta.source)
def build(self, build_args: BuildArgs, build_dir: Path) -> None:
raise NotImplementedError()
def dist_artifact_path(self) -> Path:
@@ -134,7 +137,7 @@ class Package(BasePackage):
return tests[0]
return None
def build(self, build_args: BuildArgs) -> None:
def build(self, build_args: BuildArgs, build_dir: Path) -> None:
p = subprocess.run(
[
"pyodide",
@@ -147,6 +150,7 @@ class Package(BasePackage):
f"--ldflags={build_args.ldflags}",
f"--target-install-dir={build_args.target_install_dir}",
f"--host-install-dir={build_args.host_install_dir}",
f"--build-dir={build_dir}",
# Either this package has been updated and this doesn't
# matter, or this package is dependent on a package that has
# been updated and should be rebuilt even though its own
@@ -472,7 +476,9 @@ def mark_package_needs_build(
mark_package_needs_build(pkg_map, pkg_map[dep], needs_build)
def generate_needs_build_set(pkg_map: dict[str, BasePackage]) -> set[str]:
def generate_needs_build_set(
pkg_map: dict[str, BasePackage], build_dir: Path
) -> set[str]:
"""
Generate the set of packages that need to be rebuilt.
@@ -484,7 +490,7 @@ def generate_needs_build_set(pkg_map: dict[str, BasePackage]) -> set[str]:
needs_build: set[str] = set()
for pkg in pkg_map.values():
# Otherwise, rebuild packages that have been updated and their dependents.
if pkg.needs_rebuild():
if pkg.needs_rebuild(build_dir):
mark_package_needs_build(pkg_map, pkg, needs_build)
return needs_build
@@ -492,6 +498,7 @@ def generate_needs_build_set(pkg_map: dict[str, BasePackage]) -> set[str]:
def build_from_graph(
pkg_map: dict[str, BasePackage],
build_args: BuildArgs,
build_dir: Path,
n_jobs: int = 1,
force_rebuild: bool = False,
) -> None:
@@ -520,7 +527,7 @@ def build_from_graph(
# If "force_rebuild" is set, just rebuild everything
needs_build = set(pkg_map.keys())
else:
needs_build = generate_needs_build_set(pkg_map)
needs_build = generate_needs_build_set(pkg_map, build_dir)
# We won't rebuild the complement of the packages that we will build.
already_built = set(pkg_map.keys()).difference(needs_build)
@@ -589,7 +596,7 @@ def build_from_graph(
success = True
try:
pkg.build(build_args)
pkg.build(build_args, build_dir)
except Exception as e:
built_queue.put(e)
success = False
@@ -747,6 +754,7 @@ def build_packages(
packages_dir: Path,
targets: str,
build_args: BuildArgs,
build_dir: Path,
n_jobs: int = 1,
force_rebuild: bool = False,
) -> dict[str, BasePackage]:
@@ -756,7 +764,7 @@ def build_packages(
packages_dir, set(requested_packages.keys()), disabled
)
build_from_graph(pkg_map, build_args, n_jobs, force_rebuild)
build_from_graph(pkg_map, build_args, build_dir, n_jobs, force_rebuild)
for pkg in pkg_map.values():
assert isinstance(pkg, Package)

View File

@@ -612,7 +612,7 @@ def needs_rebuild(
$PYODIDE_ROOT/packages/<PACKAGES>
buildpath
The path to the build directory. Generally will be
The path to the build directory. By default, it will be
$(PYODIDE_ROOT)/packages/<PACKAGE>/build/.
src_metadata
@@ -620,6 +620,9 @@ def needs_rebuild(
"""
packaged_token = buildpath / ".packaged"
if not packaged_token.is_file():
logger.debug(
f"{pkg_root} needs rebuild because {packaged_token} does not exist"
)
return True
package_time = packaged_token.stat().st_mtime
@@ -643,6 +646,7 @@ def _build_package_inner(
pkg_root: Path,
pkg: MetaConfig,
build_args: BuildArgs,
build_dir: Path,
*,
force_rebuild: bool = False,
continue_: bool = False,
@@ -659,15 +663,19 @@ def _build_package_inner(
build_args
The extra build arguments passed to the build script.
build_dir
The directory where build directories for packages are created.
"""
source_metadata = pkg.source
build_metadata = pkg.build
name = pkg.package.name
version = pkg.package.version
build_dir = pkg_root / "build"
build_path = build_dir / name / "build"
build_path.mkdir(parents=True, exist_ok=True)
dist_dir = pkg_root / "dist"
src_dir_name: str = f"{name}-{version}"
srcpath = build_dir / src_dir_name
srcpath = build_path / src_dir_name
src_dist_dir = srcpath / "dist"
# Python produces output .whl or .so files in src_dist_dir.
# We copy them to dist_dir later
@@ -686,7 +694,7 @@ def _build_package_inner(
if post:
assert package_type == "package"
if not force_rebuild and not needs_rebuild(pkg_root, build_dir, source_metadata):
if not force_rebuild and not needs_rebuild(pkg_root, build_path, source_metadata):
return
if continue_ and not srcpath.exists():
@@ -695,10 +703,6 @@ def _build_package_inner(
f"directory at the path {srcpath}, but that path does not exist."
)
import os
import subprocess
import sys
try:
stdout_fileno = sys.stdout.fileno()
stderr_fileno = sys.stderr.fileno()
@@ -719,7 +723,7 @@ def _build_package_inner(
bash_runner.env["PKG_BUILD_DIR"] = str(srcpath)
bash_runner.env["DISTDIR"] = str(src_dist_dir)
if not continue_:
prepare_source(build_dir, srcpath, source_metadata)
prepare_source(build_path, srcpath, source_metadata)
patch(pkg_root, srcpath, source_metadata)
src_dist_dir.mkdir(exist_ok=True, parents=True)
@@ -756,7 +760,7 @@ def _build_package_inner(
shutil.rmtree(dist_dir, ignore_errors=True)
shutil.copytree(src_dist_dir, dist_dir)
create_packaged_token(build_dir)
create_packaged_token(build_path)
def _load_package_config(package_dir: Path) -> tuple[Path, MetaConfig]:
@@ -811,6 +815,7 @@ def _check_executables(pkg: MetaConfig) -> None:
def build_package(
package: str | Path,
build_args: BuildArgs,
build_dir: str | Path,
force_rebuild: bool = False,
continue_: bool = False,
) -> None:
@@ -831,11 +836,15 @@ def build_package(
continue_
If True, continue a build from the middle. For debugging. Implies "--force-rebuild".
build_dir
The directory where build directories for packages are created.
"""
force_rebuild = force_rebuild or continue_
meta_file = Path(package).resolve()
build_dir = Path(build_dir)
pkg_root, pkg = _load_package_config(meta_file)
_check_executables(pkg)
@@ -854,6 +863,7 @@ def build_package(
pkg_root,
pkg,
build_args,
build_dir,
force_rebuild=force_rebuild,
continue_=continue_,
)

View File

@@ -17,6 +17,12 @@ def recipe(
help="The directory containing the recipe of packages. "
"If not specified, the default is ``./packages``",
),
build_dir: str = typer.Option(
None,
envvar="PYODIDE_RECIPE_BUILD_DIR",
help="The directory where build directories for packages are created. "
"Default: recipe_dir.",
),
no_deps: bool = typer.Option(
False, help="If true, do not build dependencies of the specified packages. "
),
@@ -80,6 +86,7 @@ def recipe(
root = Path.cwd()
recipe_dir_ = root / "packages" if not recipe_dir else Path(recipe_dir).resolve()
build_dir_ = recipe_dir_ if not build_dir else Path(build_dir).resolve()
install_dir_ = root / "dist" if not install_dir else Path(install_dir).resolve()
log_dir_ = None if not log_dir else Path(log_dir).resolve()
n_jobs = n_jobs or get_num_cores()
@@ -105,7 +112,9 @@ def recipe(
# TODO: use multiprocessing?
for package in packages:
package_path = recipe_dir_ / package
buildpkg.build_package(package_path, build_args, force_rebuild, continue_)
buildpkg.build_package(
package_path, build_args, build_dir_, force_rebuild, continue_
)
else:
if len(packages) == 1 and "," in packages[0]:
@@ -116,7 +125,12 @@ def recipe(
targets = ",".join(packages)
pkg_map = buildall.build_packages(
recipe_dir_, targets, build_args, n_jobs, force_rebuild
recipe_dir_,
targets,
build_args,
build_dir_,
n_jobs,
force_rebuild,
)
if log_dir_:

View File

@@ -10,6 +10,7 @@ from pyodide_build import buildall
from pyodide_build.pywasmcross import BuildArgs
RECIPE_DIR = Path(__file__).parent / "_test_recipes"
BUILD_DIR = RECIPE_DIR
def test_generate_dependency_graph():
@@ -102,14 +103,16 @@ def test_build_dependencies(n_jobs, monkeypatch):
build_list = []
class MockPackage(buildall.Package):
def build(self, args: Any) -> None:
def build(self, args: Any, build_dir: Path) -> None:
build_list.append(self.name)
monkeypatch.setattr(buildall, "Package", MockPackage)
pkg_map = buildall.generate_dependency_graph(RECIPE_DIR, {"pkg_1", "pkg_2"})
buildall.build_from_graph(pkg_map, BuildArgs(), n_jobs=n_jobs, force_rebuild=True)
buildall.build_from_graph(
pkg_map, BuildArgs(), BUILD_DIR, n_jobs=n_jobs, force_rebuild=True
)
assert set(build_list) == {
"pkg_1",
@@ -129,7 +132,7 @@ def test_build_error(n_jobs, monkeypatch):
"""Try building all the dependency graph, without the actual build operations"""
class MockPackage(buildall.Package):
def build(self, args: Any) -> None:
def build(self, args: Any, build_dir: Path) -> None:
raise ValueError("Failed build")
monkeypatch.setattr(buildall, "Package", MockPackage)
@@ -138,7 +141,7 @@ def test_build_error(n_jobs, monkeypatch):
with pytest.raises(ValueError, match="Failed build"):
buildall.build_from_graph(
pkg_map, BuildArgs(), n_jobs=n_jobs, force_rebuild=True
pkg_map, BuildArgs(), BUILD_DIR, n_jobs=n_jobs, force_rebuild=True
)

View File

@@ -102,8 +102,7 @@ def test_prepare_source(monkeypatch, tmp_path):
for pkg in test_pkgs:
source_dir_name = pkg.package.name + "-" + pkg.package.version
pkg_root = Path(pkg.package.name)
buildpath = tmp_path / pkg_root / "build"
buildpath = tmp_path / pkg.package.name / "build"
src_metadata = pkg.source
srcpath = buildpath / source_dir_name
buildpkg.prepare_source(buildpath, srcpath, src_metadata)
@@ -153,11 +152,10 @@ def test_unvendor_tests(tmpdir):
def test_needs_rebuild(tmpdir):
pkg_root = tmpdir
pkg_root = Path(pkg_root)
builddir = pkg_root / "build"
pkg_root = Path(tmpdir)
buildpath = pkg_root / "build"
meta_yaml = pkg_root / "meta.yaml"
packaged = builddir / ".packaged"
packaged = buildpath / ".packaged"
patch_file = pkg_root / "patch"
extra_file = pkg_root / "extra"
@@ -179,7 +177,7 @@ def test_needs_rebuild(tmpdir):
path=str(src_path),
)
builddir.mkdir()
buildpath.mkdir()
meta_yaml.touch()
patch_file.touch()
extra_file.touch()
@@ -187,39 +185,39 @@ def test_needs_rebuild(tmpdir):
src_path_file.touch()
# No .packaged file, rebuild
assert buildpkg.needs_rebuild(pkg_root, builddir, source_metadata) is True
assert buildpkg.needs_rebuild(pkg_root, buildpath, source_metadata) is True
# .packaged file exists, no rebuild
packaged.touch()
assert buildpkg.needs_rebuild(pkg_root, builddir, source_metadata) is False
assert buildpkg.needs_rebuild(pkg_root, buildpath, source_metadata) is False
# newer meta.yaml file, rebuild
packaged.touch()
time.sleep(0.01)
meta_yaml.touch()
assert buildpkg.needs_rebuild(pkg_root, builddir, source_metadata) is True
assert buildpkg.needs_rebuild(pkg_root, buildpath, source_metadata) is True
# newer patch file, rebuild
packaged.touch()
time.sleep(0.01)
patch_file.touch()
assert buildpkg.needs_rebuild(pkg_root, builddir, source_metadata) is True
assert buildpkg.needs_rebuild(pkg_root, buildpath, source_metadata) is True
# newer extra file, rebuild
packaged.touch()
time.sleep(0.01)
extra_file.touch()
assert buildpkg.needs_rebuild(pkg_root, builddir, source_metadata) is True
assert buildpkg.needs_rebuild(pkg_root, buildpath, source_metadata) is True
# newer source path, rebuild
packaged.touch()
time.sleep(0.01)
src_path_file.touch()
assert buildpkg.needs_rebuild(pkg_root, builddir, source_metadata) is True
assert buildpkg.needs_rebuild(pkg_root, buildpath, source_metadata) is True
# newer .packaged file, no rebuild
packaged.touch()
assert buildpkg.needs_rebuild(pkg_root, builddir, source_metadata) is False
assert buildpkg.needs_rebuild(pkg_root, buildpath, source_metadata) is False
def test_copy_sharedlib(tmp_path):