mirror of https://github.com/pyodide/pyodide.git
MAINT Split CI build-packages step in two (#2017)
Split the build-packages CI job in two: a first job that builds everything up to and including numpy (excluding packages that depend on numpy), and a second job that builds the remaining numpy-dependent packages. Intended to prevent CI timeouts.
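The mechanics: the first job builds with PYODIDE_PACKAGES='*, no-numpy-dependents', where '*' expands to every package and the 'no-numpy-dependents' sentinel drops each package whose dependency list contains numpy (numpy itself still builds). The second job then runs with the full package set and only has the numpy-dependent remainder left to build. A rough sketch of the partition, with made-up dependency lists:

    # Illustrative only: package name -> direct dependencies.
    deps = {
        "numpy": [],
        "micropip": [],
        "scipy": ["numpy"],
        "matplotlib": ["numpy"],
    }

    first_job = {name for name, d in deps.items() if "numpy" not in d}
    second_job = set(deps) - first_job  # built once numpy already exists

    assert first_job == {"numpy", "micropip"}
    assert second_job == {"scipy", "matplotlib"}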
commit 418813de33
parent f22f679328
.circleci/config.yml
@@ -105,6 +105,56 @@ jobs:
       - store_artifacts:
           path: /root/repo/build/

+  build-packages-no-numpy-dependents:
+    <<: *defaults
+    resource_class: large
+
+    steps:
+      - checkout
+
+      - attach_workspace:
+          at: .
+
+      - restore_cache:
+          keys:
+            - -pkg-{{ checksum "Makefile.envs" }}-v20210911-
+
+      - run:
+          name: build packages
+          no_output_timeout: 1800
+          command: |
+            source pyodide_env.sh
+
+            # Set mtime for EM_CONFIG to avoid ccache cache misses
+            touch -m -d '1 Jan 2021 12:00' emsdk/emsdk/.emscripten
+
+            ccache -z
+            PYODIDE_PACKAGES='*, no-numpy-dependents' make -C packages
+            ccache -s
+          environment:
+            PYODIDE_JOBS: 5
+
+      - run:
+          name: check-size
+          command: ls -lh build/
+
+      - save_cache:
+          paths:
+            - /root/.ccache
+          key: -pkg-{{ checksum "Makefile.envs" }}-v20210911-
+
+      - persist_to_workspace:
+          root: .
+          paths:
+            - ./packages
+            - ./build
+
+      - store_artifacts:
+          path: /root/repo/build/
+
+      - store_artifacts:
+          path: /root/repo/packages/build-logs
+
   build-packages:
     <<: *defaults
     resource_class: large
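The touch line above pins the mtime of the Emscripten config file so that a fresh checkout does not perturb ccache's hash inputs from one CI run to the next. A rough Python equivalent of that touch invocation (same path as in the step above):

    import os
    from datetime import datetime

    # Pin both atime and mtime (plain `touch -m` changes mtime only) so
    # content-identical checkouts look identical to mtime-sensitive tools.
    ts = datetime(2021, 1, 1, 12, 0).timestamp()
    os.utime("emsdk/emsdk/.emscripten", (ts, ts))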
@@ -229,16 +279,15 @@ jobs:
             npx tsd
             npm ci
             npm test
-      - run:
+      - run:
           name: check if webpack cli works well with load-pyodide.js
-          command: |
+          command: |
             git clone https://github.com/pyodide/pyodide-webpack-example.git
             cd pyodide-webpack-example
-            npm ci
-            cp ../src/js/load-pyodide.js node_modules/pyodide/load-pyodide.js
+            npm ci
+            cp ../src/js/load-pyodide.js node_modules/pyodide/load-pyodide.js
             head -20 node_modules/pyodide/load-pyodide.js
             npx webpack
-

   benchmark:
     <<: *defaults
@@ -343,13 +392,20 @@ workflows:
           tags:
             only: /.*/

-      - build-packages:
+      - build-packages-no-numpy-dependents:
           requires:
             - build-core
           filters:
             tags:
               only: /.*/

+      - build-packages:
+          requires:
+            - build-packages-no-numpy-dependents
+          filters:
+            tags:
+              only: /.*/
+
       - test-main:
           name: test-core-chrome
           test-params: -k "chrome and not webworker" src packages/micropip
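The net effect on the workflow is a serial chain: build-core, then build-packages-no-numpy-dependents, then build-packages, so the second package build resumes from a persisted workspace in which numpy and everything that does not depend on it are already built.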
Makefile.envs
@@ -28,6 +28,7 @@ export PYODIDE_BASE_URL?=./
 export PYODIDE=1
+# This is the legacy environment variable used for the aforementioned purpose
 export PYODIDE_PACKAGE_ABI=1
 #

 export OPTFLAGS=-O2
 export CFLAGS_BASE=\
@@ -71,7 +72,7 @@ export MAIN_MODULE_LDFLAGS= $(LDFLAGS_BASE) \
    -s FORCE_FILESYSTEM=1 \
    -s TOTAL_MEMORY=20971520 \
    -s ALLOW_MEMORY_GROWTH=1 \
-   --use-preload-plugins \
+   --use-preload-plugins \
    --preload-file $(CPYTHONLIB)@/lib/python$(PYMAJOR).$(PYMINOR) \
    --preload-file src/py/lib@/lib/python$(PYMAJOR).$(PYMINOR)/\
    --preload-file src/py/@/lib/python$(PYMAJOR).$(PYMINOR)/site-packages/ \
@@ -84,14 +85,14 @@ export SIDE_MODULE_CXXFLAGS = $(CXXFLAGS_BASE)

 export SIDE_MODULE_CFLAGS= $(CFLAGS_BASE)
 export MAIN_MODULE_CFLAGS= $(CFLAGS_BASE) \
-   -Wall \
-   -Wno-warn-absolute-paths \
-   -Werror=unused-variable \
-   -Werror=sometimes-uninitialized \
-   -Werror=int-conversion \
-   -Werror=incompatible-pointer-types \
-   -Werror=unused-result \
-   -I$(PYTHONINCLUDE)
+   -Wall \
+   -Wno-warn-absolute-paths \
+   -Werror=unused-variable \
+   -Werror=sometimes-uninitialized \
+   -Werror=int-conversion \
+   -Werror=incompatible-pointer-types \
+   -Werror=unused-result \
+   -I$(PYTHONINCLUDE)


 .output_vars:
pyodide-build/pyodide_build/buildall.py
@@ -158,7 +158,7 @@ class Package(BasePackage):


 def generate_dependency_graph(
-    packages_dir: Path, packages: Optional[Set[str]]
+    packages_dir: Path, packages: Set[str]
 ) -> Dict[str, BasePackage]:
     """This generates a dependency graph for listed packages.

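After this change the caller always passes a concrete set; '*' survives parsing as a sentinel and is expanded inside generate_dependency_graph itself (next hunk). A hedged usage sketch, assuming pyodide-build is importable and packages/ is the package tree:

    from pathlib import Path
    from pyodide_build import buildall

    # "*" expands to every directory under packages/ with a meta.yaml;
    # "no-numpy-dependents" additionally drops packages depending on numpy.
    pkg_map = buildall.generate_dependency_graph(
        Path("packages"), {"*", "no-numpy-dependents"}
    )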
@@ -182,11 +182,16 @@ def generate_dependency_graph(

     pkg_map: Dict[str, BasePackage] = {}

-    if packages is None:
-        packages = set(
+    if "*" in packages:
+        packages.discard("*")
+        packages.update(
             str(x) for x in packages_dir.iterdir() if (x / "meta.yaml").is_file()
         )

+    no_numpy_dependents = "no-numpy-dependents" in packages
+    if no_numpy_dependents:
+        packages.discard("no-numpy-dependents")
+
     while packages:
         pkgname = packages.pop()

@@ -195,6 +200,8 @@ def generate_dependency_graph(
             pkg = StdLibPackage(packages_dir / pkgname)
         else:
             pkg = Package(packages_dir / pkgname)
+        if no_numpy_dependents and "numpy" in pkg.dependencies:
+            continue
         pkg_map[pkg.name] = pkg

         for dep in pkg.dependencies:
@@ -209,6 +216,13 @@ def generate_dependency_graph(
     return pkg_map


+def job_priority(pkg: BasePackage):
+    if pkg.name == "numpy":
+        return 0
+    else:
+        return 1
+
+
 def build_from_graph(pkg_map: Dict[str, BasePackage], outputdir: Path, args) -> None:
     """
     This builds packages in pkg_map in parallel, building at most args.n_jobs
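job_priority supplies the first element of the (priority, package) tuples that build_from_graph now places on its PriorityQueue, so numpy jumps ahead of any other ready package. One subtlety: on equal priorities, PriorityQueue falls back to comparing the package objects themselves, so they must be orderable; the dataclass below fakes that with name ordering, an assumption of this sketch rather than something shown in the diff:

    import queue
    from dataclasses import dataclass, field
    from typing import List

    @dataclass(order=True)
    class Pkg:
        name: str  # ties between equal priorities fall back to name order
        dependencies: List[str] = field(default_factory=list, compare=False)

    def job_priority(pkg: Pkg) -> int:
        return 0 if pkg.name == "numpy" else 1

    q: queue.PriorityQueue = queue.PriorityQueue()
    for p in (Pkg("scipy"), Pkg("numpy"), Pkg("micropip")):
        q.put((job_priority(p), p))

    assert q.get()[1].name == "numpy"  # numpy is always scheduled first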
@@ -232,10 +246,10 @@ def build_from_graph(pkg_map: Dict[str, BasePackage], outputdir: Path, args) ->
     build_queue: PriorityQueue = PriorityQueue()

     print("Building the following packages: " + ", ".join(sorted(pkg_map.keys())))

     t0 = perf_counter()
     for pkg in pkg_map.values():
         if len(pkg.dependencies) == 0:
-            build_queue.put(pkg)
+            build_queue.put((job_priority(pkg), pkg))

     built_queue: Queue = Queue()
     thread_lock = Lock()
@@ -244,7 +258,7 @@ def build_from_graph(pkg_map: Dict[str, BasePackage], outputdir: Path, args) ->
     def builder(n):
         nonlocal queue_idx
         while True:
-            pkg = build_queue.get()
+            pkg = build_queue.get()[1]
             with thread_lock:
                 pkg._queue_idx = queue_idx
                 queue_idx += 1
@@ -259,7 +273,7 @@ def build_from_graph(pkg_map: Dict[str, BasePackage], outputdir: Path, args) ->

             print(
                 f"[{pkg._queue_idx}/{len(pkg_map)}] (thread {n}) "
-                f"built {pkg.name} in {perf_counter() - t0:.1f} s"
+                f"built {pkg.name} in {perf_counter() - t0:.2f} s"
             )
             built_queue.put(pkg)
             # Release the GIL so new packages get queued
@@ -280,12 +294,17 @@ def build_from_graph(pkg_map: Dict[str, BasePackage], outputdir: Path, args) ->
                 dependent = pkg_map[_dependent]
                 dependent.unbuilt_dependencies.remove(pkg.name)
                 if len(dependent.unbuilt_dependencies) == 0:
-                    build_queue.put(dependent)
+                    build_queue.put((job_priority(dependent), dependent))

+    for name in list(pkg_map):
+        if (outputdir / (name + "-tests.js")).exists():
+            pkg_map[name].unvendored_tests = True
+
     print(
         "\n===================================================\n"
         f"built all packages in {perf_counter() - t0:.2f} s"
     )


 def generate_packages_json(pkg_map: Dict[str, BasePackage]) -> Dict:
     """Generate the package.json file"""
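The loop added at the end of build_from_graph flags packages whose unvendored test bundle (name-tests.js) landed in the output directory; presumably generate_packages_json, just below, uses that flag so packages.json can advertise the unvendored tests.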
pyodide-build/pyodide_build/common.py
@@ -6,7 +6,7 @@ import functools
 UNVENDORED_STDLIB_MODULES = ["test", "distutils"]


-def _parse_package_subset(query: Optional[str]) -> Optional[Set[str]]:
+def _parse_package_subset(query: Optional[str]) -> Set[str]:
     """Parse the list of packages specified with PYODIDE_PACKAGES env var.

     Also add the list of mandatory packages: ["pyparsing", "packaging",
@@ -45,13 +45,10 @@ def _parse_package_subset(query: Optional[str]) -> Optional[Set[str]]:
     packages = {el.strip() for el in query.split(",")}
     packages.update(["pyparsing", "packaging", "micropip"])
     # handle meta-packages
-    if "*" in packages:
-        # build all packages
-        return None
-    elif "core" in packages:
+    if "core" in packages:
         packages |= core_packages
         packages.discard("core")
-    elif "min-scipy-stack" in packages:
+    if "min-scipy-stack" in packages:
         packages |= core_packages | core_scipy_packages
         packages.discard("min-scipy-stack")

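With the early return None for '*' removed, _parse_package_subset always returns a set and leaves both '*' and 'no-numpy-dependents' in it for generate_dependency_graph to interpret. A simplified behavioral sketch of the new contract (the real function also handles the 'core' and 'min-scipy-stack' meta-packages shown above):

    from typing import Set

    def parse_sketch(query: str) -> Set[str]:
        # Simplified stand-in for _parse_package_subset after this change.
        packages = {el.strip() for el in query.split(",")}
        packages.update(["pyparsing", "packaging", "micropip"])
        return packages

    result = parse_sketch("*, no-numpy-dependents")
    assert "*" in result                    # sentinel survives parsing
    assert "no-numpy-dependents" in result  # interpreted later, in buildall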
pyodide-build/pyodide_build/tests/test_buildall.py
@@ -98,7 +98,7 @@ def test_build_all_dependencies(n_jobs, monkeypatch):

     monkeypatch.setattr(buildall, "Package", MockPackage)

-    pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, packages=None)
+    pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, packages={"*"})

     Args = namedtuple("args", ["n_jobs"])
     buildall.build_from_graph(pkg_map, Path("."), Args(n_jobs=n_jobs))
pyodide-build/pyodide_build/tests/test_common.py
@@ -34,8 +34,6 @@ def test_parse_package_subset():
         "c",
         "d",
     }
-    # "*" means select all packages
-    assert _parse_package_subset("*") == None

     assert _parse_package_subset("core") == {
         "pyparsing",