#!/usr/bin/env python3
"""
Build all of the packages in a given directory.
"""

import argparse
import json
import os
import shutil
import subprocess
import sys
from functools import total_ordering
from pathlib import Path
from queue import PriorityQueue, Queue
from threading import Lock, Thread
from time import perf_counter, sleep
from typing import Any

from . import common
from .buildpkg import needs_rebuild
from .common import UNVENDORED_STDLIB_MODULES, find_matching_wheels
from .io import parse_package_config

class BuildError(Exception):
    def __init__(self, returncode):
        self.returncode = returncode
        super().__init__()

class BasePackage:
    pkgdir: Path
    name: str
    version: str
    meta: dict
    library: bool
    shared_library: bool
    dependencies: list[str]
    unbuilt_dependencies: set[str]
    dependents: set[str]
    unvendored_tests: Path | None = None
    file_name: str | None = None
    install_dir: str = "site"

    # We use this in the priority queue, which pops off the smallest element.
    # So we want the smallest element to have the largest number of dependents.
    def __lt__(self, other) -> bool:
        return len(self.dependents) > len(other.dependents)

    def __eq__(self, other) -> bool:
        return len(self.dependents) == len(other.dependents)

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.name})"

    def needs_rebuild(self) -> bool:
        return needs_rebuild(
            self.pkgdir, self.pkgdir / "build", self.meta.get("source", {})
        )

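# Ordering sketch: PriorityQueue pops its smallest element, and BasePackage
# inverts __lt__, so "smallest" means "most dependents". E.g. (hypothetical
# packages) if pkg_a has 3 dependents and pkg_b has 1, then pkg_a < pkg_b
# here, and pkg_a is scheduled first.
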
@total_ordering
class StdLibPackage(BasePackage):
    def __init__(self, pkgdir: Path):
        self.pkgdir = pkgdir
        self.meta = {}
        self.name = pkgdir.stem
        self.version = "1.0"
        self.library = False
        self.shared_library = False
        self.dependencies = []
        self.unbuilt_dependencies = set()
        self.dependents = set()
        self.install_dir = "lib"

    def build(self, outputdir: Path, args) -> None:
        # All build / packaging steps are already done in the main Makefile
        return

@total_ordering
class Package(BasePackage):
    def __init__(self, pkgdir: Path):
        self.pkgdir = pkgdir

        pkgpath = pkgdir / "meta.yaml"
        if not pkgpath.is_file():
            raise ValueError(f"Directory {pkgdir} does not contain meta.yaml")

        self.meta = parse_package_config(pkgpath)
        self.name = self.meta["package"]["name"]
        self.version = self.meta["package"]["version"]
        self.meta["build"] = self.meta.get("build", {})
        self.meta["requirements"] = self.meta.get("requirements", {})

        self.library = self.meta["build"].get("library", False)
        self.shared_library = self.meta["build"].get("sharedlibrary", False)

        assert self.name == pkgdir.stem

        self.dependencies = self.meta["requirements"].get("run", [])
        self.unbuilt_dependencies = set(self.dependencies)
        self.dependents = set()

    def wheel_path(self) -> Path:
        dist_dir = self.pkgdir / "dist"
        wheels = list(find_matching_wheels(dist_dir.glob("*.whl")))
        if len(wheels) != 1:
            raise Exception(
                f"Unexpected number of wheels {len(wheels)} when building {self.name}"
            )
        return wheels[0]

    def tests_path(self) -> Path | None:
        tests = list((self.pkgdir / "dist").glob("*-tests.tar"))
        assert len(tests) <= 1
        if tests:
            return tests[0]
        return None
    def build(self, outputdir: Path, args) -> None:
        p = subprocess.run(
            [
                sys.executable,
                "-m",
                "pyodide_build",
                "buildpkg",
                str(self.pkgdir / "meta.yaml"),
                "--cflags",
                args.cflags,
                "--cxxflags",
                args.cxxflags,
                "--ldflags",
                args.ldflags,
                "--target-install-dir",
                args.target_install_dir,
                "--host-install-dir",
                args.host_install_dir,
                # Either this package has been updated and this doesn't
                # matter, or this package is dependent on a package that has
                # been updated and should be rebuilt even though its own
                # files haven't been updated.
                "--force-rebuild",
            ],
            check=False,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )

        log_dir = Path(args.log_dir).resolve() if args.log_dir else None
        if log_dir and (self.pkgdir / "build.log").exists():
            log_dir.mkdir(exist_ok=True, parents=True)
            shutil.copy(
                self.pkgdir / "build.log",
                log_dir / f"{self.name}.log",
            )

        if p.returncode != 0:
            print(f"Error building {self.name}. Printing build logs.")

            with open(self.pkgdir / "build.log") as f:
                shutil.copyfileobj(f, sys.stdout)

            print("ERROR: cancelling buildall")
            raise BuildError(p.returncode)

        if self.library:
            return
        if self.shared_library:
            file_path = Path(self.pkgdir / f"{self.name}-{self.version}.zip")
            shutil.copy(file_path, outputdir)
            file_path.unlink()
            return

        shutil.copy(self.wheel_path(), outputdir)
        test_path = self.tests_path()
        if test_path:
            shutil.copy(test_path, outputdir)

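# Note on Package.build above: each package is compiled in its own subprocess
# running `python -m pyodide_build buildpkg <pkgdir>/meta.yaml ...`, so a
# crash in one build cannot take down the orchestrator, and parallel builds
# share no interpreter state. Roughly the same build can be run by hand
# (illustrative; the flag defaults normally come from the Makefile):
#
#   python -m pyodide_build buildpkg packages/regex/meta.yaml --force-rebuild
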
def generate_dependency_graph(
    packages_dir: Path, packages: set[str]
) -> dict[str, BasePackage]:
    """This generates a dependency graph for listed packages.

    A node in the graph is a BasePackage object defined above, which maintains
    a list of dependencies and also dependents. That is, each node stores both
    incoming and outgoing edges.

    The dependencies and dependents are stored via their name, and we have a
    lookup table pkg_map: Dict[str, BasePackage] to look up the corresponding
    BasePackage object. The function returns pkg_map, which contains all
    packages in the graph as its values.

    Parameters:
     - packages_dir: directory that contains packages
     - packages: set of packages to build. If it contains "*", all packages in
       packages_dir are compiled.

    Returns:
     - pkg_map: dictionary mapping package names to BasePackage objects
    """
    pkg_map: dict[str, BasePackage] = {}

    if "*" in packages:
        packages.discard("*")
        packages.update(
            str(x.name) for x in packages_dir.iterdir() if (x / "meta.yaml").is_file()
        )

    no_numpy_dependents = "no-numpy-dependents" in packages
    if no_numpy_dependents:
        packages.discard("no-numpy-dependents")

    # Entries prefixed with "!" exclude that package: drop both the "!" entry
    # and the bare package name.
    packages_exclude = list(filter(lambda pkg: pkg.startswith("!"), packages))
    for pkg_exclude in packages_exclude:
        packages.discard(pkg_exclude)
        packages.discard(pkg_exclude[1:])

    while packages:
        pkgname = packages.pop()

        pkg: BasePackage
        if pkgname in UNVENDORED_STDLIB_MODULES:
            pkg = StdLibPackage(packages_dir / pkgname)
        else:
            pkg = Package(packages_dir / pkgname)
        if no_numpy_dependents and "numpy" in pkg.dependencies:
            continue
        pkg_map[pkg.name] = pkg

        for dep in pkg.dependencies:
            if pkg_map.get(dep) is None:
                packages.add(dep)

    # Compute dependents
    for pkg in pkg_map.values():
        for dep in pkg.dependencies:
            pkg_map[dep].dependents.add(pkg.name)

    return pkg_map

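# Usage sketch (hypothetical package names): requesting one package pulls in
# its whole dependency closure, since unseen dependencies are pushed back
# onto the work set:
#
#   pkg_map = generate_dependency_graph(Path("packages"), {"micropip"})
#   # pkg_map now also contains micropip's transitive dependencies, and each
#   # node's `dependents` set holds the reverse edges.
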
def job_priority(pkg: BasePackage):
    if pkg.name == "numpy":
        return 0
    else:
        return 1

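# This priority is the first element of the tuples pushed onto build_queue,
# so numpy (priority 0) is scheduled ahead of everything else (priority 1);
# ties at priority 1 fall back to BasePackage.__lt__, i.e. dependent count.
# numpy is special-cased because many of the scientific packages cannot start
# building until it finishes.
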
def print_with_progress_line(text, progress_line):
    if not sys.stdout.isatty():
        print(text)
        return
    # Blank out the current progress line, print the message above it, then
    # redraw the progress line (the trailing "\r" keeps the cursor on it so
    # it can be overwritten next time).
    twidth = os.get_terminal_size()[0]
    print(" " * twidth, end="\r")
    print(text)
    if progress_line:
        print(progress_line, end="\r")

def get_progress_line(package_set):
    if not package_set:
        return None
    return "In progress: " + ", ".join(package_set.keys())

def format_name_list(names: list[str]) -> str:
    """
    >>> format_name_list(["regex"])
    'regex'
    >>> format_name_list(["regex", "parso"])
    'regex and parso'
    >>> format_name_list(["regex", "parso", "jedi"])
    'regex, parso, and jedi'
    """
    if len(names) == 1:
        return names[0]
    most = names[:-1]
    if len(most) > 1:
        most = [x + "," for x in most]
    return " ".join(most) + " and " + names[-1]

def mark_package_needs_build(
    pkg_map: dict[str, BasePackage], pkg: BasePackage, needs_build: set[str]
):
    """
    Helper for generate_needs_build_set. Modifies needs_build in place.
    Recursively add pkg and all of its dependents to needs_build.
    """
    if isinstance(pkg, StdLibPackage):
        return
    if pkg.name in needs_build:
        return
    needs_build.add(pkg.name)
    for dep in pkg.dependents:
        mark_package_needs_build(pkg_map, pkg_map[dep], needs_build)

def generate_needs_build_set(pkg_map: dict[str, BasePackage]) -> set[str]:
    """
    Generate the set of packages that need to be rebuilt.

    This consists of:
    1. packages whose source files have changed since they were last built
       according to needs_rebuild, and
    2. packages which depend on case 1 packages.
    """
    needs_build: set[str] = set()
    for pkg in pkg_map.values():
        # Rebuild packages that have been updated, together with their dependents.
        if pkg.needs_rebuild():
            mark_package_needs_build(pkg_map, pkg, needs_build)
    return needs_build

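# Propagation sketch (illustrative): if only numpy's sources changed, the set
# returned here contains numpy plus every package reachable from it through
# `dependents` edges (e.g. scipy), since those must be rebuilt against the
# updated numpy even though their own sources are unchanged.
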
def build_from_graph(pkg_map: dict[str, BasePackage], outputdir: Path, args) -> None:
    """
    This builds packages in pkg_map in parallel, building at most args.n_jobs
    packages at once.

    We have a priority queue of packages we are ready to build (build_queue),
    where a package is ready to build if all its dependencies are built. The
    priority is based on the number of dependents --- we prefer to build
    packages with more dependents first.

    To build packages in parallel, we use a thread pool of args.n_jobs threads
    listening to build_queue. When a thread is free, it takes an item off
    build_queue and builds it. Once the package is built, it sends the package
    to built_queue. The main thread listens to built_queue and checks whether
    any dependents of the finished package are now ready to be built. If so,
    it adds them to build_queue.
    """
    # Insert packages into build_queue. We *must* do this after counting
    # dependents, because the ordering ought not to change after insertion.
    build_queue: PriorityQueue = PriorityQueue()

    if args.force_rebuild:
        # If "force_rebuild" is set, just rebuild everything
        needs_build = set(pkg_map.keys())
    else:
        needs_build = generate_needs_build_set(pkg_map)

    # We won't rebuild the complement of the packages that we will build.
    already_built = set(pkg_map.keys()).difference(needs_build)

    # Remove the packages we've already built from the dependency sets of
    # the remaining ones
    for pkg_name in needs_build:
        pkg_map[pkg_name].unbuilt_dependencies.difference_update(already_built)

    if already_built:
        print(
            f"The following packages are already built: {format_name_list(sorted(already_built))}\n"
        )
    if not needs_build:
        print("All packages already built. Quitting.")
        return
    print(f"Building the following packages: {format_name_list(sorted(needs_build))}")

    t0 = perf_counter()
    for pkg_name in needs_build:
        pkg = pkg_map[pkg_name]
        if len(pkg.unbuilt_dependencies) == 0:
            build_queue.put((job_priority(pkg), pkg))

    built_queue: Queue = Queue()
    thread_lock = Lock()
    queue_idx = 1
    # Using dict keys for insertion order preservation
    package_set: dict[str, None] = {}

    def builder(n):
        nonlocal queue_idx
        while True:
            pkg = build_queue.get()[1]
            with thread_lock:
                pkg._queue_idx = queue_idx
                queue_idx += 1
            package_set[pkg.name] = None
            msg = f"[{pkg._queue_idx}/{len(needs_build)}] (thread {n}) building {pkg.name}"
            print_with_progress_line(msg, get_progress_line(package_set))
            t0 = perf_counter()
            success = True
            try:
                pkg.build(outputdir, args)
            except Exception as e:
                built_queue.put(e)
                success = False
                return
            finally:
                del package_set[pkg.name]
                status = "built" if success else "failed"
                msg = (
                    f"[{pkg._queue_idx}/{len(needs_build)}] (thread {n}) "
                    f"{status} {pkg.name} in {perf_counter() - t0:.2f} s"
                )
                print_with_progress_line(msg, get_progress_line(package_set))
            built_queue.put(pkg)
            # Release the GIL so new packages get queued
            sleep(0.01)

    for n in range(0, args.n_jobs):
        Thread(target=builder, args=(n + 1,), daemon=True).start()

    num_built = len(already_built)
    while num_built < len(pkg_map):
        pkg = built_queue.get()
        if isinstance(pkg, BuildError):
            raise SystemExit(pkg.returncode)
        if isinstance(pkg, Exception):
            raise pkg

        num_built += 1

        for _dependent in pkg.dependents:
            dependent = pkg_map[_dependent]
            dependent.unbuilt_dependencies.remove(pkg.name)
            if len(dependent.unbuilt_dependencies) == 0:
                build_queue.put((job_priority(dependent), dependent))

    print(
        "\n===================================================\n"
        f"built all packages in {perf_counter() - t0:.2f} s"
    )

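# Scheduling sketch: for a graph where B and C both depend on A, only A is
# initially queued. When a worker reports A on built_queue, the main loop
# removes A from B's and C's unbuilt_dependencies, finds both sets empty, and
# queues B and C, which can then build concurrently on separate threads.
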
def generate_packages_json(pkg_map: dict[str, BasePackage]) -> dict:
    """Generate the data for the packages.json file"""
    # Build packages.json data.
    package_data: dict[str, dict[str, Any]] = {
        "info": {"arch": "wasm32", "platform": "Emscripten-1.0"},
        "packages": {},
    }

    libraries = [pkg.name for pkg in pkg_map.values() if pkg.library]

    for name, pkg in pkg_map.items():
        if not pkg.file_name:
            continue
        pkg_entry: Any = {
            "name": name,
            "version": pkg.version,
            "file_name": pkg.file_name,
            "install_dir": pkg.install_dir,
        }
        if pkg.shared_library:
            pkg_entry["shared_library"] = True
        pkg_entry["depends"] = [
            x.lower() for x in pkg.dependencies if x not in libraries
        ]
        pkg_entry["imports"] = pkg.meta.get("test", {}).get("imports", [name])

        package_data["packages"][name.lower()] = pkg_entry

        if pkg.unvendored_tests:
            package_data["packages"][name.lower()]["unvendored_tests"] = True

            # Create the test package if necessary
            pkg_entry = {
                "name": name + "-tests",
                "version": pkg.version,
                "depends": [name.lower()],
                "imports": [],
                "file_name": pkg.unvendored_tests.name,
                "install_dir": pkg.install_dir,
            }
            package_data["packages"][name.lower() + "-tests"] = pkg_entry

    # Workaround for circular dependency between soupsieve and beautifulsoup4
    # TODO: FIXME!!
    if "soupsieve" in package_data["packages"]:
        package_data["packages"]["soupsieve"]["depends"].append("beautifulsoup4")

    # re-order packages by name
    package_data["packages"] = dict(sorted(package_data["packages"].items()))

    return package_data

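# Illustrative shape of one generated entry (field values invented):
#
#   package_data["packages"]["regex"] == {
#       "name": "regex",
#       "version": "2022.1.18",
#       "file_name": "regex-2022.1.18-cp310-cp310-emscripten_wasm32.whl",
#       "install_dir": "site",
#       "depends": [],
#       "imports": ["regex"],
#   }
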
def build_packages(packages_dir: Path, outputdir: Path, args) -> None:
    packages = common._parse_package_subset(args.only)

    pkg_map = generate_dependency_graph(packages_dir, packages)

    build_from_graph(pkg_map, outputdir, args)
    # Record each package's artifact file name so it lands in packages.json.
    for pkg in pkg_map.values():
        if pkg.library:
            continue
        if isinstance(pkg, StdLibPackage):
            pkg.file_name = pkg.name + ".tar"
            continue
        if pkg.needs_rebuild():
            continue
        if pkg.shared_library:
            pkg.file_name = f"{pkg.name}-{pkg.version}.zip"
            continue
        assert isinstance(pkg, Package)
        pkg.file_name = pkg.wheel_path().name
        pkg.unvendored_tests = pkg.tests_path()

    package_data = generate_packages_json(pkg_map)

    with open(outputdir / "packages.json", "w") as fd:
        json.dump(package_data, fd)
        fd.write("\n")

def make_parser(parser):
    parser.description = (
        "Build all the packages in a given directory, "
        "unless the --only option is provided.\n\n"
        "Note: this is a private endpoint that should not be used "
        "outside of the pyodide Makefile."
    )
    parser.add_argument(
        "dir",
        type=str,
        nargs=1,
        help="Input directory containing a tree of package definitions",
    )
    parser.add_argument(
        "output",
        type=str,
        nargs=1,
        help="Output directory in which to put all built packages",
    )
    parser.add_argument(
        "--cflags",
        type=str,
        nargs="?",
        default=None,
        help="Extra compiling flags. Default: SIDE_MODULE_CFLAGS",
    )
    parser.add_argument(
        "--cxxflags",
        type=str,
        nargs="?",
        default=None,
        help="Extra C++ specific compiling flags. Default: SIDE_MODULE_CXXFLAGS",
    )
    parser.add_argument(
        "--ldflags",
        type=str,
        nargs="?",
        default=None,
        help="Extra linking flags. Default: SIDE_MODULE_LDFLAGS",
    )
    parser.add_argument(
        "--target-install-dir",
        type=str,
        nargs="?",
        default=None,
        help="The path to the target Python installation. Default: TARGETINSTALLDIR",
    )
    parser.add_argument(
        "--host-install-dir",
        type=str,
        nargs="?",
        default=None,
        help="Directory for installing built host packages. Default: HOSTINSTALLDIR",
    )
    parser.add_argument(
        "--log-dir",
        type=str,
        dest="log_dir",
        nargs="?",
        default=None,
        help="Directory to place log files",
    )
    parser.add_argument(
        "--only",
        type=str,
        nargs="?",
        default=None,
        help="Only build the specified packages, provided as a comma-separated list",
    )
    parser.add_argument(
        "--force-rebuild",
        action="store_true",
        help="Force rebuild of all packages regardless of whether they appear to have been updated",
    )
    parser.add_argument(
        "--n-jobs",
        type=int,
        nargs="?",
        default=4,
        help="Number of packages to build in parallel",
    )
    return parser

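# Example invocation (illustrative; this is normally driven by the pyodide
# Makefile, which supplies the flag defaults via get_make_flag):
#
#   python -m pyodide_build buildall packages dist --n-jobs 4 --only "regex,micropip"
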
def main(args):
    packages_dir = Path(args.dir[0]).resolve()
    outputdir = Path(args.output[0]).resolve()
    if args.cflags is None:
        args.cflags = common.get_make_flag("SIDE_MODULE_CFLAGS")
    if args.cxxflags is None:
        args.cxxflags = common.get_make_flag("SIDE_MODULE_CXXFLAGS")
    if args.ldflags is None:
        args.ldflags = common.get_make_flag("SIDE_MODULE_LDFLAGS")
    if args.target_install_dir is None:
        args.target_install_dir = common.get_make_flag("TARGETINSTALLDIR")
    if args.host_install_dir is None:
        args.host_install_dir = common.get_make_flag("HOSTINSTALLDIR")
    build_packages(packages_dir, outputdir, args)

if __name__ == "__main__":
    parser = make_parser(argparse.ArgumentParser())
    args = parser.parse_args()
    main(args)