2022-05-26 03:31:04 +00:00
|
|
|
import argparse
|
2022-09-06 06:20:04 +00:00
|
|
|
import hashlib
|
|
|
|
import zipfile
|
2022-02-21 22:27:03 +00:00
|
|
|
from pathlib import Path
|
2020-12-10 19:37:08 +00:00
|
|
|
from time import sleep
|
2022-05-21 20:35:02 +00:00
|
|
|
from typing import Any
|
2020-12-10 19:37:08 +00:00
|
|
|
|
2022-02-21 22:27:03 +00:00
|
|
|
import pytest
|
2020-12-10 19:37:08 +00:00
|
|
|
|
2022-06-08 15:25:12 +00:00
|
|
|
from pyodide_build import buildall, io
|
2020-12-10 19:37:08 +00:00
|
|
|
|
2022-04-09 20:41:10 +00:00
|
|
|
# Directory of minimal fixture packages used by the dependency-graph tests.
PACKAGES_DIR = Path(__file__).parent.joinpath("_test_packages")
|
2020-12-10 19:37:08 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_generate_dependency_graph():
    """A circular run-dependency (beautifulsoup4 <-> soupsieve) must resolve cleanly."""
    pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, {"beautifulsoup4"})

    bs4 = pkg_map["beautifulsoup4"]
    assert bs4.run_dependencies == ["soupsieve"]
    assert bs4.host_dependencies == []
    assert bs4.host_dependents == set()
|
2020-12-10 19:37:08 +00:00
|
|
|
|
|
|
|
|
2022-06-08 15:25:12 +00:00
|
|
|
@pytest.mark.parametrize(
    "in_set, out_set",
    [
        ({"scipy"}, {"scipy", "numpy", "CLAPACK"}),
        ({"scipy", "!numpy"}, set()),
        ({"scipy", "!numpy", "CLAPACK"}, {"CLAPACK"}),
        ({"scikit-learn", "!numpy"}, set()),
        ({"scikit-learn", "scipy", "!joblib"}, {"scipy", "numpy", "CLAPACK"}),
        ({"scikit-learn", "no-numpy-dependents"}, set()),
        ({"scikit-learn", "no-numpy-dependents", "numpy"}, {"numpy"}),
    ],
)
def test_generate_dependency_graph2(in_set, out_set):
    """Requested-set resolution: "!pkg" excludes a package (and its dependents),
    "no-numpy-dependents" prunes anything that depends on numpy unless numpy
    itself is explicitly requested."""
    pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, in_set)
    # Iterating the dict yields its keys, so this compares the resolved names.
    assert set(pkg_map) == out_set
|
|
|
|
|
|
|
|
|
|
|
|
def test_generate_dependency_graph_disabled(monkeypatch):
    """A disabled package drops itself and its dependents from the graph."""

    class DisablingMetaConfig(io.MetaConfig):
        # Load the real meta.yaml, then flip numpy to disabled.
        @classmethod
        def from_yaml(cls, path):
            config = io.MetaConfig.from_yaml(path)
            if "numpy" in str(path):
                config.package.disabled = True
            return config

    monkeypatch.setattr(buildall, "MetaConfig", DisablingMetaConfig)

    # scipy depends on numpy, so disabling numpy empties the whole graph.
    pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, {"scipy"})
    assert not pkg_map
|
|
|
|
|
|
|
|
|
2022-06-20 21:46:11 +00:00
|
|
|
def test_generate_repodata(tmp_path):
    """Check generate_repodata's output structure, including per-package
    SHA-256 checksums computed from the package files on disk."""
    pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, {"pkg_1", "pkg_2"})

    hashes = {}
    for pkg in pkg_map.values():
        pkg.file_name = pkg.file_name or pkg.name + ".whl"
        # Write dummy package file for SHA-256 hash verification
        with zipfile.ZipFile(tmp_path / pkg.file_name, "w") as whlzip:
            whlzip.writestr(pkg.file_name, data=pkg.file_name)

        # Path.read_bytes() replaces the open/read/close dance.
        hashes[pkg.name] = hashlib.sha256(
            (tmp_path / pkg.file_name).read_bytes()
        ).hexdigest()

    package_data = buildall.generate_repodata(tmp_path, pkg_map)

    assert set(package_data.keys()) == {"info", "packages"}
    assert set(package_data["info"].keys()) == {"arch", "platform", "version", "python"}
    assert package_data["info"]["arch"] == "wasm32"
    assert package_data["info"]["platform"].startswith("emscripten")

    # The requested packages plus their transitive dependencies must appear.
    assert set(package_data["packages"]) == {
        "pkg_1",
        "pkg_1_1",
        "pkg_2",
        "pkg_3",
        "pkg_3_1",
    }
    assert package_data["packages"]["pkg_1"] == {
        "name": "pkg_1",
        "version": "1.0.0",
        "file_name": "pkg_1.whl",
        "depends": ["pkg_1_1", "pkg_3"],
        "imports": ["pkg_1"],
        "install_dir": "site",
        "sha256": hashes["pkg_1"],
    }
|
|
|
|
|
|
|
|
|
2020-12-10 19:37:08 +00:00
|
|
|
@pytest.mark.parametrize("n_jobs", [1, 4])
def test_build_dependencies(n_jobs, monkeypatch):
    """Packages must be built in dependency-first (topological) order."""
    built = []

    class RecordingPackage(buildall.Package):
        # Record the build order instead of performing a real build.
        def build(self, outputdir: Path, args: Any) -> None:
            built.append(self.name)

    monkeypatch.setattr(buildall, "Package", RecordingPackage)

    pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, {"pkg_1", "pkg_2"})

    buildall.build_from_graph(
        pkg_map, Path("."), argparse.Namespace(n_jobs=n_jobs, force_rebuild=True)
    )

    assert set(built) == {"pkg_1", "pkg_1_1", "pkg_2", "pkg_3", "pkg_3_1"}

    # Each dependency must have been built before its dependent.
    for dep, dependent in [
        ("pkg_1_1", "pkg_1"),
        ("pkg_3", "pkg_1"),
        ("pkg_3_1", "pkg_3"),
    ]:
        assert built.index(dep) < built.index(dependent)
|
2020-12-10 19:37:08 +00:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize("n_jobs", [1, 4])
def test_build_all_dependencies(n_jobs, monkeypatch):
    """Try building all the dependency graph, without the actual build operations"""

    class CountingPackage(buildall.Package):
        # Per-instance build counter; asserts each package builds exactly once.
        n_builds = 0

        def build(self, outputdir: Path, args: Any) -> None:
            sleep(0.005)
            self.n_builds += 1
            # check that each build is only run once
            assert self.n_builds == 1

    monkeypatch.setattr(buildall, "Package", CountingPackage)

    # "*" requests every package in the tree.
    pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, packages={"*"})

    buildall.build_from_graph(
        pkg_map, Path("."), argparse.Namespace(n_jobs=n_jobs, force_rebuild=False)
    )
|
2020-12-10 19:37:08 +00:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize("n_jobs", [1, 4])
def test_build_error(n_jobs, monkeypatch):
    """Check that a failure in one package build propagates out of
    build_from_graph as the original exception.

    (The previous docstring was copy-pasted from test_build_all_dependencies
    and described the wrong scenario.)
    """

    class MockPackage(buildall.Package):
        # Every build fails, so the first scheduled package aborts the run.
        def build(self, outputdir: Path, args: Any) -> None:
            raise ValueError("Failed build")

    monkeypatch.setattr(buildall, "Package", MockPackage)

    pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, {"pkg_1"})

    with pytest.raises(ValueError, match="Failed build"):
        buildall.build_from_graph(
            pkg_map, Path("."), argparse.Namespace(n_jobs=n_jobs, force_rebuild=True)
        )
|