ENH Remove hard-coded paths in `pyodide_build` (#2351)

We are pushing `pyodide_build` to PyPI as a Python package, but for now,
installing `pyodide_build` from PyPI (i.e. `pip install pyodide_build`) is almost
useless because:

- there are a bunch of hard-coded paths (e.g. `Path(__file__).parents[2]`),
- its dependencies are not specified in `setup.cfg`.

This PR mitigates this situation by removing hard-coded paths and adding
tests; it is also preparation for our new CLI
(https://github.com/pyodide/pyodide/issues/1977).
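
For illustration, a rough sketch of the approach taken here instead of relying on `Path(__file__).parents[2]`: walk up from the current directory until a `pyproject.toml` containing a `[tool.pyodide]` section is found. The real implementation is `search_pyodide_root` in `pyodide_build/common.py` (see the diff below); the function name and depth limit in this sketch simply mirror it.

```python
import os
from pathlib import Path

import tomli  # now declared as a pyodide_build dependency in setup.cfg


def find_pyodide_root(start: Path, max_depth: int = 5) -> Path:
    """Walk up from `start` looking for a pyproject.toml with a [tool.pyodide] section."""
    candidates = [start, *start.parents][:max_depth]
    for base in candidates:
        pyproject = base / "pyproject.toml"
        if not pyproject.is_file():
            continue
        config = tomli.loads(pyproject.read_text())
        if "pyodide" in config.get("tool", {}):
            return base
    raise FileNotFoundError(
        "Could not find the Pyodide root; set PYODIDE_ROOT explicitly instead."
    )
```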
Gyeongjae Choi 2022-04-10 05:41:10 +09:00 committed by GitHub
parent 374eef1721
commit 60d530801e
37 changed files with 346 additions and 235 deletions

View File

@ -6,7 +6,7 @@ defaults: &defaults
# Note: when updating the docker image version,
# make sure there are no extra old versions lying around.
# (e.g. `rg -F --hidden <old_tag>`)
- image: pyodide/pyodide-env:20220317-chrome99-firefox98-py310
- image: sihadan/pyodide-env-test:20220401-chrome99-firefox98
environment:
- EMSDK_NUM_CORES: 3
EMCC_CORES: 3
@ -284,7 +284,8 @@ jobs:
name: test
command: |
mkdir test-results
pytest \
python3 -m pip install -e ./pyodide-build
PYODIDE_ROOT=. pytest \
--junitxml=test-results/junit.xml \
--verbose \
-k 'not (chrome or firefox or node)' \

View File

@ -9,6 +9,7 @@ import subprocess
import sys
from pathlib import Path
from typing import Any
from unittest import mock
# -- Project information -----------------------------------------------------
@ -54,6 +55,9 @@ versionwarning_body_selector = "#main-content > div"
autosummary_generate = True
autodoc_default_flags = ["members", "inherited-members"]
# Add modules to be mocked.
mock_modules = ["ruamel.yaml", "tomli"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
@ -190,3 +194,6 @@ if IN_SPHINX:
delete_attrs(pyodide.webloop.WebLoop)
delete_attrs(pyodide.webloop.WebLoopPolicy)
delete_attrs(pyodide.console.PyodideConsole)
for module in mock_modules:
sys.modules[module] = mock.Mock()

View File

@ -45,9 +45,9 @@ any compilation commands.
If your package is on PyPI, the easiest place to start is with the
{ref}`mkpkg tool <pyodide-mkpkg>`. From the Pyodide root directory, install the
tool with `pip install -e pyodide-build`, then run:
tool with `pip install ./pyodide-build`, then run:
`pyodide-build mkpkg <package-name>`
`python -m pyodide_build mkpkg <package-name>`
This will generate a `meta.yaml` file under `packages/<package-name>/` (see
{ref}`meta-yaml-spec`) that should work out of the box for many simple Python
@ -77,9 +77,7 @@ Once the `meta.yaml` file is ready, build the package with the following
commands from inside the package directory `packages/<package-name>`
```sh
export PYTHONPATH="$PYTHONPATH:/path/to/pyodide/pyodide-build/"
python -m pyodide_build buildpkg meta.yaml
cp build/*.data build/*.js ../../build/
```
and see if there are any errors.

View File

@ -18,7 +18,7 @@ and check that they are in your `PATH`.
### Running the Python test suite
To run the pytest suite of tests, type on the command line:
To run the pytest suite of tests, from the root directory of Pyodide, type on the command line:
```bash
pytest

View File

@ -126,6 +126,9 @@ substitutions:
- {{ Enhancement }} Support ANSI escape codes in the Pyodide console.
{pr}`2345`
- {{ Fix }} `pyodide_build` can now be installed in non-editable ways.
{pr}`2351`
## Version 0.19.1
_February 19, 2022_

View File

@ -1,3 +1,5 @@
.PHONY=pyodide-build
export PYODIDE_ROOT=$(abspath ..)
include ../Makefile.envs
@ -6,20 +8,18 @@ else
ONLY_PACKAGES=--only "$(PYODIDE_PACKAGES)"
endif
all: .artifacts/bin/pyodide-build
PYTHONPATH="$(PYODIDE_ROOT)/pyodide-build/"\
python -m pyodide_build buildall . ../build \
$(ONLY_PACKAGES) --n-jobs $${PYODIDE_JOBS:-4} \
--log-dir=./build-logs
.artifacts/bin/pyodide-build: ../pyodide-build/pyodide_build/**
all: pyodide-build
mkdir -p $(HOSTINSTALLDIR)
$(HOSTPYTHON) -m pip install -e ../pyodide-build --no-deps --prefix $(HOSTINSTALLDIR)
PYODIDE_ROOT=$(PYODIDE_ROOT) python -m pyodide_build buildall . $(PYODIDE_ROOT)/build \
$(ONLY_PACKAGES) --n-jobs $${PYODIDE_JOBS:-4} \
--log-dir=./build-logs
pyodide-build: ../pyodide-build/pyodide_build/**
$(HOSTPYTHON) -m pip install --prefix=$(shell $(HOSTPYTHON) -m site --user-base) -e ../pyodide-build
update-all:
for pkg in $$(find . -maxdepth 1 ! -name ".*" -type d -exec basename {} \; | tail -n +2); do \
python -m pyodide_build mkpkg "$${pkg}" --update; \
PYODIDE_ROOT=$(PYODIDE_ROOT) python -m pyodide_build mkpkg "$${pkg}" --update; \
done
clean:

View File

@ -1,13 +1,11 @@
from pyodide_build.testing import PYVERSION
def test_uncaught_cpp_exceptions(selenium):
assert (
selenium.run_js(
f"""
"""
await pyodide.loadPackage("cpp-exceptions-test");
const Tests = pyodide._api.tests;
const idx = pyodide._module.LDSO.loadedLibNames["/lib/{PYVERSION}/site-packages/cpp-exceptions-test-throw.so"]
const sitePackages = pyodide.runPython("import site; site.getsitepackages()[0]")
const idx = pyodide._module.LDSO.loadedLibNames[sitePackages + "/cpp-exceptions-test-throw.so"]
const throwlib = pyodide._module.LDSO.loadedLibs[idx].module;
"""
"""\
@ -39,10 +37,11 @@ def test_uncaught_cpp_exceptions(selenium):
def test_cpp_exception_catching(selenium):
assert (
selenium.run_js(
f"""
"""
await pyodide.loadPackage("cpp-exceptions-test");
const Module = pyodide._module;
const idx = Module.LDSO.loadedLibNames["/lib/{PYVERSION}/site-packages/cpp-exceptions-test-catch.so"]
const sitePackages = pyodide.runPython("import site; site.getsitepackages()[0]")
const idx = Module.LDSO.loadedLibNames[sitePackages + "/cpp-exceptions-test-catch.so"]
const catchlib = Module.LDSO.loadedLibs[idx].module;
"""
"""\

View File

@ -5,7 +5,6 @@ import pytest
from conftest import ROOT_PATH, _package_is_built
from pyodide_build.io import parse_package_config
from pyodide_build.testing import PYVERSION
PKG_DIR = ROOT_PATH / "packages"
@ -60,12 +59,12 @@ def test_import(name, selenium_standalone):
)
)
selenium_standalone.run("import glob, os")
selenium_standalone.run("import glob, os, site")
baseline_pyc = selenium_standalone.run(
f"""
"""
len(list(glob.glob(
'/lib/{PYVERSION}/site-packages/**/*.pyc',
site.getsitepackages()[0] + '/**/*.pyc',
recursive=True)
))
"""
@ -76,9 +75,9 @@ def test_import(name, selenium_standalone):
# files
assert (
selenium_standalone.run(
f"""
"""
len(list(glob.glob(
'/lib/{PYVERSION}/site-packages/**/*.pyc',
site.getsitepackages()[0] + '/**/*.pyc',
recursive=True)
))
"""
@ -88,9 +87,9 @@ def test_import(name, selenium_standalone):
# Make sure no exe files were loaded!
assert (
selenium_standalone.run(
f"""
"""
len(list(glob.glob(
'/lib/{PYVERSION}/site-packages/**/*.exe',
site.getsitepackages()[0] + '/**/*.exe',
recursive=True)
))
"""

View File

@ -1,11 +1,10 @@
#!/usr/bin/env python3
import argparse
import os
import pathlib
import sys
from . import buildall, buildpkg, mkpkg, serve
from .common import get_make_environment_vars
from .common import get_hostsitepackages, get_make_environment_vars, search_pyodide_root
def make_parser() -> argparse.ArgumentParser:
@ -35,15 +34,17 @@ def make_parser() -> argparse.ArgumentParser:
def main():
if not os.environ.get("__LOADED_PYODIDE_ENV"):
from .common import get_hostsitepackages
# If we are building docs, we don't need to know the PYODIDE_ROOT
if "sphinx" in sys.modules:
os.environ["PYODIDE_ROOT"] = ""
if "PYODIDE_ROOT" not in os.environ:
os.environ["PYODIDE_ROOT"] = str(search_pyodide_root(os.getcwd()))
PYODIDE_ROOT = str(pathlib.Path(__file__).parents[2].resolve())
os.environ["PYODIDE_ROOT"] = PYODIDE_ROOT
os.environ.update(get_make_environment_vars())
hostsitepackages = get_hostsitepackages()
pythonpath = [
hostsitepackages,
f"{PYODIDE_ROOT}/pyodide-build/",
]
os.environ["PYTHONPATH"] = ":".join(pythonpath)
os.environ["BASH_ENV"] = ""

View File

@ -1,8 +1,10 @@
import functools
import os
import subprocess
from pathlib import Path
from typing import Iterable, Iterator
import tomli
from packaging.tags import Tag, compatible_tags, cpython_tags
from packaging.utils import parse_wheel_filename
@ -123,49 +125,6 @@ def _parse_package_subset(query: str | None) -> set[str]:
return packages
def file_packager_path() -> Path:
ROOTDIR = Path(__file__).parents[2].resolve()
return ROOTDIR / "emsdk/emsdk/upstream/emscripten/tools/file_packager"
def invoke_file_packager(
*,
name,
root_dir=".",
base_dir,
pyodidedir,
compress=False,
):
subprocess.run(
[
str(file_packager_path()),
f"{name}.data",
f"--js-output={name}.js",
"--preload",
f"{base_dir}@{pyodidedir}",
"--lz4",
"--export-name=globalThis.__pyodide_module",
"--exclude",
"*__pycache__*",
"--use-preload-plugins",
],
cwd=root_dir,
check=True,
)
if compress:
subprocess.run(
[
"npx",
"--no-install",
"terser",
root_dir / f"{name}.js",
"-o",
root_dir / f"{name}.js",
],
check=True,
)
def get_make_flag(name):
"""Get flags from makefile.envs.
@ -193,11 +152,15 @@ def get_make_environment_vars():
"""Load environment variables from Makefile.envs
This allows us to set all build vars in one place"""
# TODO: make this not rely on paths outside of pyodide-build
rootdir = Path(__file__).parents[2].resolve()
if "PYODIDE_ROOT" in os.environ:
PYODIDE_ROOT = Path(os.environ["PYODIDE_ROOT"])
else:
PYODIDE_ROOT = search_pyodide_root(os.getcwd())
environment = {}
result = subprocess.run(
["make", "-f", str(rootdir / "Makefile.envs"), ".output_vars"],
["make", "-f", str(PYODIDE_ROOT / "Makefile.envs"), ".output_vars"],
capture_output=True,
text=True,
)
@ -209,3 +172,33 @@ def get_make_environment_vars():
value = value.strip("'").strip()
environment[varname] = value
return environment
def search_pyodide_root(curdir: str | Path, *, max_depth: int = 5) -> Path:
"""
Recursively search for the root of the Pyodide repository,
by looking for the pyproject.toml file in the parent directories
which contains [tool.pyodide] section.
"""
# We want to include "curdir" in parent_dirs, so add a garbage suffix
parent_dirs = (Path(curdir) / "garbage").parents[:max_depth]
for base in parent_dirs:
pyproject_file = base / "pyproject.toml"
if not pyproject_file.is_file():
continue
try:
with pyproject_file.open("rb") as f:
configs = tomli.load(f)
except tomli.TOMLDecodeError:
raise ValueError(f"Could not parse {pyproject_file}.")
if "tool" in configs and "pyodide" in configs["tool"]:
return base
raise FileNotFoundError(
"Could not find Pyodide root directory. If you are not in the Pyodide directory, set `PYODIDE_ROOT=<pyodide-root-directory>`."
)

View File

@ -12,7 +12,7 @@ import warnings
from pathlib import Path
from typing import Any, Literal
PACKAGES_ROOT = Path(__file__).parents[2] / "packages"
from ruamel.yaml import YAML
class MkpkgFailedException(Exception):
@ -89,23 +89,12 @@ def _get_metadata(package: str, version: str | None = None) -> dict:
return pypi_metadata
def _import_ruamel_yaml():
"""Import ruamel.yaml with a better error message is not installed."""
try:
from ruamel.yaml import YAML
except ImportError as err:
raise ImportError(
"No module named 'ruamel'. "
"It can be installed with pip install ruamel.yaml"
) from err
return YAML
def run_prettier(meta_path):
subprocess.run(["npx", "prettier", "-w", meta_path])
def make_package(
packages_dir: Path,
package: str,
version: str | None = None,
source_fmt: Literal["wheel", "sdist"] | None = None,
@ -115,7 +104,6 @@ def make_package(
but will have to be edited for more complex things.
"""
print(f"Creating meta.yaml package for {package}")
YAML = _import_ruamel_yaml()
yaml = YAML()
@ -149,15 +137,20 @@ def make_package(
},
}
if not (PACKAGES_ROOT / package).is_dir():
os.makedirs(PACKAGES_ROOT / package)
meta_path = PACKAGES_ROOT / package / "meta.yaml"
with open(meta_path, "w") as fd:
yaml.dump(yaml_content, fd)
package_dir = packages_dir / package
package_dir.mkdir(parents=True, exist_ok=True)
meta_path = package_dir / "meta.yaml"
if meta_path.exists():
raise MkpkgFailedException(f"The package {package} already exists")
yaml.dump(yaml_content, meta_path)
run_prettier(meta_path)
success(f"Output written to {meta_path}")
# TODO: use rich for coloring outputs
class bcolors:
HEADER = "\033[95m"
OKBLUE = "\033[94m"
@ -184,30 +177,27 @@ def success(msg):
def update_package(
root: Path,
package: str,
version: str | None = None,
update_patched: bool = True,
source_fmt: Literal["wheel", "sdist"] | None = None,
):
YAML = _import_ruamel_yaml()
yaml = YAML()
meta_path = PACKAGES_ROOT / package / "meta.yaml"
meta_path = root / package / "meta.yaml"
if not meta_path.exists():
print(f"{meta_path} does not exist")
sys.exit(0)
with open(meta_path, "rb") as fd:
yaml_content = yaml.load(fd)
abort(f"{meta_path} does not exist")
yaml_content = yaml.load(meta_path.read_bytes())
if "url" not in yaml_content["source"]:
print(f"Skipping: {package} is a local package!")
sys.exit(0)
raise MkpkgFailedException(f"Skipping: {package} is a local package!")
build_info = yaml_content.get("build", {})
if build_info.get("library", False) or build_info.get("sharedlibrary", False):
print(f"Skipping: {package} is a library!")
sys.exit(0)
raise MkpkgFailedException(f"Skipping: {package} is a library!")
if yaml_content["source"]["url"].endswith("whl"):
old_fmt = "wheel"
@ -222,7 +212,7 @@ def update_package(
)
if already_up_to_date:
print(f"{package} already up to date. Local: {local_ver} PyPI: {pypi_ver}")
sys.exit(0)
return
print(f"{package} is out of date: {local_ver} <= {pypi_ver}.")
@ -233,7 +223,9 @@ def update_package(
"patches (if needed) to avoid build failing."
)
else:
abort(f"Pyodide applies patches to {package}. Skipping update.")
raise MkpkgFailedException(
f"Pyodide applies patches to {package}. Skipping update."
)
if source_fmt:
# require the type requested
@ -251,9 +243,10 @@ def update_package(
yaml_content["source"].pop("md5", None)
yaml_content["source"]["sha256"] = dist_metadata["digests"]["sha256"]
yaml_content["package"]["version"] = pypi_metadata["info"]["version"]
with open(meta_path, "wb") as fd:
yaml.dump(yaml_content, fd)
yaml.dump(yaml_content, meta_path)
run_prettier(meta_path)
success(f"Updated {package} from {local_ver} to {pypi_ver}.")
@ -286,10 +279,20 @@ complex things.""".strip()
def main(args):
PYODIDE_ROOT = os.environ.get("PYODIDE_ROOT")
if PYODIDE_ROOT is None:
raise ValueError("PYODIDE_ROOT is not set")
if shutil.which("npx") is None:
raise ValueError("npx is not installed")
PACKAGES_ROOT = Path(PYODIDE_ROOT) / "packages"
try:
package = args.package[0]
if args.update:
update_package(
PACKAGES_ROOT,
package,
args.version,
update_patched=True,
@ -298,13 +301,16 @@ def main(args):
return
if args.update_if_not_patched:
update_package(
PACKAGES_ROOT,
package,
args.version,
update_patched=False,
source_fmt=args.source_format,
)
return
make_package(package, args.version, source_fmt=args.source_format)
make_package(
PACKAGES_ROOT, package, args.version, source_fmt=args.source_format
)
except MkpkgFailedException as e:
# This produces two types of error messages:
#

View File

@ -27,10 +27,8 @@ from collections import namedtuple
from pathlib import Path, PurePosixPath
from typing import Any, MutableMapping, NoReturn, overload
# absolute import is necessary as this file will be symlinked
# under tools
from . import common
from ._f2c_fixes import fix_f2c_input, fix_f2c_output, scipy_fixes
from pyodide_build import common
from pyodide_build._f2c_fixes import fix_f2c_input, fix_f2c_output, scipy_fixes
symlinks = {"cc", "c++", "ld", "ar", "gcc", "gfortran"}

View File

@ -5,8 +5,6 @@ import pathlib
import socketserver
import sys
BUILD_PATH = pathlib.Path(__file__).resolve().parents[2] / "build"
class Handler(http.server.SimpleHTTPRequestHandler):
def end_headers(self):
@ -23,11 +21,15 @@ def make_parser(parser):
"--build_dir",
action="store",
type=str,
default=BUILD_PATH,
help="set the build directory",
default="build",
help="set the build directory (default: %(default)s)",
)
parser.add_argument(
"--port", action="store", type=int, default=8000, help="set the PORT number"
"--port",
action="store",
type=int,
default=8000,
help="set the PORT number (default: %(default)s)",
)
return parser
@ -38,11 +40,11 @@ def server(port):
def main(args):
build_dir = args.build_dir
build_dir = pathlib.Path(args.build_dir).resolve()
port = args.port
httpd = server(port)
os.chdir(build_dir)
print(f"serving from {build_dir} at localhost:" + str(port))
print(f"serving from {build_dir} at localhost:{port}")
try:
httpd.serve_forever()
except KeyboardInterrupt:

View File

@ -5,10 +5,6 @@ from typing import Callable, Collection
import pytest
from .common import get_pyversion
PYVERSION = get_pyversion()
def _run_in_pyodide_get_source(f):
lines, start_line = inspect.getsourcelines(f)

View File

@ -0,0 +1,12 @@
package:
name: beautifulsoup4
version: 4.10.0
requirements:
run:
- soupsieve
source:
sha256: 9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf
url: https://files.pythonhosted.org/packages/69/bf/f0f194d3379d3f3347478bd267f754fc68c11cbf2fe302a6ab69447b1417/beautifulsoup4-4.10.0-py3-none-any.whl
test:
imports:
- bs4

View File

@ -0,0 +1,13 @@
package:
name: micropip
version: "0.1"
requirements:
run:
- pyparsing
- packaging
- distutils # TODO: remove once there is a release with https://github.com/pypa/packaging/pull/396
source:
path: src
test:
imports:
- micropip

View File

@ -0,0 +1,12 @@
package:
name: packaging
version: "21.3"
source:
sha256: ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522
url: https://files.pythonhosted.org/packages/05/8e/8de486cbd03baba4deef4142bd643a3e7bbe954a784dc1bb17142572d127/packaging-21.3-py3-none-any.whl
requirements:
run:
- pyparsing
test:
imports:
- packaging

View File

@ -0,0 +1,13 @@
package:
name: pkg_1
version: 1.0.0
source:
sha256: deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef
url: https://dummy-url-that-not-exists.com/pkg_1-py3-none-any.whl
requirements:
run:
- pkg_1_1
- pkg_3
test:
imports:
- pkg_1

View File

@ -0,0 +1,9 @@
package:
name: pkg_1_1
version: 1.0.0
source:
sha256: deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef
url: https://dummy-url-that-not-exists.com/pkg_1_1-py3-none-any.whl
test:
imports:
- pkg_1_1

View File

@ -0,0 +1,12 @@
package:
name: pkg_2
version: 1.0.0
source:
sha256: deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef
url: https://dummy-url-that-not-exists.com/pkg_2-py3-none-any.whl
requirements:
run:
- pkg_3
test:
imports:
- pkg_2

View File

@ -0,0 +1,12 @@
package:
name: pkg_3
version: 1.0.0
source:
sha256: deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef
url: https://dummy-url-that-not-exists.com/pkg_3-py3-none-any.whls
requirements:
run:
- pkg_3_1
test:
imports:
- pkg_3

View File

@ -0,0 +1,9 @@
package:
name: pkg_3_1
version: 1.0.0
source:
sha256: deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef
url: https://dummy-url-that-not-exists.com/pkg_3_1-py3-none-any.whl
test:
imports:
- pkg_3_1

View File

@ -0,0 +1,9 @@
package:
name: pyparsing
version: 3.0.7
source:
sha256: a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484
url: https://files.pythonhosted.org/packages/80/c1/23fd82ad3121656b585351aba6c19761926bb0db2ebed9e4ff09a43a3fcc/pyparsing-3.0.7-py3-none-any.whl
test:
imports:
- pyparsing

View File

@ -0,0 +1,9 @@
package:
name: soupsieve
version: 2.3.1
source:
sha256: 1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb
url: https://files.pythonhosted.org/packages/72/a6/fd01694427f1c3fcadfdc5f1de901b813b9ac756f0806ef470cfed1de281/soupsieve-2.3.1-py3-none-any.whl
test:
imports:
- soupsieve

View File

@ -6,7 +6,7 @@ import pytest
from pyodide_build import buildall
PACKAGES_DIR = (Path(__file__).parents[3] / "packages").resolve()
PACKAGES_DIR = Path(__file__).parent / "_test_packages"
def test_generate_dependency_graph():
@ -23,9 +23,7 @@ def test_generate_dependency_graph():
def test_generate_packages_json():
pkg_map = buildall.generate_dependency_graph(
PACKAGES_DIR, {"beautifulsoup4", "micropip"}
)
pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, {"pkg_1", "pkg_2"})
for pkg in pkg_map.values():
pkg.file_name = pkg.file_name or pkg.name + ".file"
@ -33,19 +31,18 @@ def test_generate_packages_json():
assert set(package_data.keys()) == {"info", "packages"}
assert package_data["info"] == {"arch": "wasm32", "platform": "Emscripten-1.0"}
assert set(package_data["packages"]) == {
"distutils",
"pyparsing",
"packaging",
"soupsieve",
"beautifulsoup4",
"micropip",
"pkg_1",
"pkg_1_1",
"pkg_2",
"pkg_3",
"pkg_3_1",
}
assert package_data["packages"]["micropip"] == {
"name": "micropip",
"version": "0.1",
"file_name": "micropip.file",
"depends": ["pyparsing", "packaging", "distutils"],
"imports": ["micropip"],
assert package_data["packages"]["pkg_1"] == {
"name": "pkg_1",
"version": "1.0.0",
"file_name": "pkg_1.file",
"depends": ["pkg_1_1", "pkg_3"],
"imports": ["pkg_1"],
"install_dir": "site",
}
@ -60,7 +57,7 @@ def test_build_dependencies(n_jobs, monkeypatch):
monkeypatch.setattr(buildall, "Package", MockPackage)
pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, {"lxml", "micropip"})
pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, {"pkg_1", "pkg_2"})
Args = namedtuple("Args", ["n_jobs", "force_rebuild"])
buildall.build_from_graph(
@ -68,24 +65,15 @@ def test_build_dependencies(n_jobs, monkeypatch):
)
assert set(build_list) == {
"packaging",
"pyparsing",
"soupsieve",
"beautifulsoup4",
"micropip",
"webencodings",
"html5lib",
"cssselect",
"lxml",
"libxslt",
"libxml",
"zlib",
"libiconv",
"six",
"pkg_1",
"pkg_1_1",
"pkg_2",
"pkg_3",
"pkg_3_1",
}
assert build_list.index("pyparsing") < build_list.index("packaging")
assert build_list.index("packaging") < build_list.index("micropip")
assert build_list.index("soupsieve") < build_list.index("beautifulsoup4")
assert build_list.index("pkg_1_1") < build_list.index("pkg_1")
assert build_list.index("pkg_3") < build_list.index("pkg_1")
assert build_list.index("pkg_3_1") < build_list.index("pkg_3")
@pytest.mark.parametrize("n_jobs", [1, 4])
@ -121,7 +109,7 @@ def test_build_error(n_jobs, monkeypatch):
monkeypatch.setattr(buildall, "Package", MockPackage)
pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, {"lxml"})
pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, {"pkg_1"})
with pytest.raises(ValueError, match="Failed build"):
Args = namedtuple("Args", ["n_jobs", "force_rebuild"])

View File

@ -8,6 +8,8 @@ import pytest
from pyodide_build import buildpkg
from pyodide_build.io import parse_package_config
PACKAGES_DIR = Path(__file__).parent / "_test_packages"
def test_subprocess_with_shared_env():
with buildpkg.BashRunnerWithSharedEnvironment() as p:
@ -41,19 +43,8 @@ def test_prepare_source(monkeypatch):
test_pkgs = []
# tarballname == version
test_pkgs.append(parse_package_config("./packages/scipy/meta.yaml"))
test_pkgs.append(parse_package_config("./packages/numpy/meta.yaml"))
# tarballname != version
test_pkgs.append(
{
"package": {"name": "pyyaml", "version": "5.3.1"},
"source": {
"url": "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz"
},
}
)
test_pkgs.append(parse_package_config(PACKAGES_DIR / "packaging/meta.yaml"))
test_pkgs.append(parse_package_config(PACKAGES_DIR / "micropip/meta.yaml"))
for pkg in test_pkgs:
pkg["source"]["patches"] = []
@ -66,6 +57,8 @@ def test_prepare_source(monkeypatch):
srcpath = buildpath / source_dir_name
buildpkg.prepare_source(pkg_root, buildpath, srcpath, src_metadata)
assert srcpath.is_dir()
@pytest.mark.parametrize("is_library", [True, False])
def test_run_script(is_library, tmpdir):

View File

@ -1,3 +1,5 @@
import pytest
from pyodide_build.common import (
ALWAYS_PACKAGES,
CORE_PACKAGES,
@ -7,6 +9,7 @@ from pyodide_build.common import (
find_matching_wheels,
get_make_environment_vars,
get_make_flag,
search_pyodide_root,
)
@ -113,3 +116,15 @@ def test_wheel_paths():
"py3-none-any",
"py2.py3-none-any",
]
def test_search_pyodide_root(tmp_path):
pyproject_file = tmp_path / "pyproject.toml"
pyproject_file.write_text("[tool.pyodide]")
assert search_pyodide_root(tmp_path) == tmp_path
assert search_pyodide_root(tmp_path / "subdir") == tmp_path
assert search_pyodide_root(tmp_path / "subdir" / "subdir") == tmp_path
pyproject_file.unlink()
with pytest.raises(FileNotFoundError):
search_pyodide_root(tmp_path)

View File

@ -14,13 +14,10 @@ from pyodide_build.io import parse_package_config
@pytest.mark.parametrize("source_fmt", ["wheel", "sdist"])
def test_mkpkg(tmpdir, monkeypatch, capsys, source_fmt):
pytest.importorskip("ruamel")
assert pyodide_build.mkpkg.PACKAGES_ROOT.exists()
def test_mkpkg(tmpdir, capsys, source_fmt):
base_dir = Path(str(tmpdir))
monkeypatch.setattr(pyodide_build.mkpkg, "PACKAGES_ROOT", base_dir)
pyodide_build.mkpkg.make_package("idna", None, source_fmt)
pyodide_build.mkpkg.make_package(base_dir, "idna", None, source_fmt)
assert os.listdir(base_dir) == ["idna"]
meta_path = base_dir / "idna" / "meta.yaml"
assert meta_path.exists()
@ -38,10 +35,8 @@ def test_mkpkg(tmpdir, monkeypatch, capsys, source_fmt):
@pytest.mark.parametrize("old_dist_type", ["wheel", "sdist"])
@pytest.mark.parametrize("new_dist_type", ["wheel", "sdist", "same"])
def test_mkpkg_update(tmpdir, monkeypatch, old_dist_type, new_dist_type):
pytest.importorskip("ruamel")
def test_mkpkg_update(tmpdir, old_dist_type, new_dist_type):
base_dir = Path(str(tmpdir))
monkeypatch.setattr(pyodide_build.mkpkg, "PACKAGES_ROOT", base_dir)
old_ext = ".tar.gz" if old_dist_type == "sdist" else ".whl"
old_url = "https://<some>/idna-2.0" + old_ext
@ -54,14 +49,15 @@ def test_mkpkg_update(tmpdir, monkeypatch, old_dist_type, new_dist_type):
"test": {"imports": ["idna"]},
}
os.mkdir(base_dir / "idna")
meta_path = base_dir / "idna" / "meta.yaml"
with open(meta_path, "w") as fh:
yaml.dump(db_init, fh)
package_dir = base_dir / "idna"
package_dir.mkdir(parents=True)
meta_path = package_dir / "meta.yaml"
with open(meta_path, "w") as f:
yaml.dump(db_init, f)
source_fmt = new_dist_type
if new_dist_type == "same":
source_fmt = None
pyodide_build.mkpkg.update_package("idna", None, False, source_fmt)
pyodide_build.mkpkg.update_package(base_dir, "idna", None, False, source_fmt)
db = parse_package_config(meta_path)
assert list(db.keys()) == list(db_init.keys())

View File

@ -1,12 +1,13 @@
import os
import subprocess
from pathlib import Path
BASE_DIR = Path(__file__).parents[3]
PYODIDE_ROOT = Path(os.environ.get("PYODIDE_ROOT", os.getcwd()))
def test_run_docker_script():
res = subprocess.run(
["bash", str(BASE_DIR / "run_docker"), "--help"],
["bash", str(PYODIDE_ROOT / "run_docker"), "--help"],
check=False,
capture_output=True,
)
@ -14,7 +15,7 @@ def test_run_docker_script():
assert "Usage: run_docker" in res.stdout.decode("utf-8")
res = subprocess.run(
["bash", str(BASE_DIR / "run_docker"), "--invalid-param"],
["bash", str(PYODIDE_ROOT / "run_docker"), "--invalid-param"],
check=False,
capture_output=True,
)

View File

@ -22,11 +22,19 @@ python_requires = >=3.8
install_requires =
pyyaml
cython<3.0
packaging
ruamel.yaml
packaging
wheel
tomli
build==0.7.0
[options.entry_points]
console_scripts =
pyodide-build = pyodide_build.__main__:main
[options.extras_require]
test =
pytest
[options.packages.find]
where = .

View File

@ -55,3 +55,5 @@ known_first_party = [
known_third_party = [
"build",
]
[tool.pyodide]

View File

@ -1,9 +1,4 @@
# core
cython<3.0
packaging
pyyaml
ruamel.yaml
build==0.7.0
# lint
pre-commit
# testing

View File

@ -7,7 +7,7 @@ import pytest
from conftest import selenium_common
from pyodide import CodeRunner, console # noqa: E402
from pyodide.console import Console, _CommandCompiler, _Compile # noqa: E402
from pyodide_build.testing import PYVERSION, run_in_pyodide
from pyodide_build.testing import run_in_pyodide
def test_command_compiler():
@ -421,9 +421,10 @@ def test_console_html(console_html_fixture):
).strip()
)
result = re.sub(r"line \d+, in repr_shorten", "line xxx, in repr_shorten", result)
result = re.sub(r"/lib/python3.\d+/site-packages", "...", result)
answer = dedent(
f"""
"""
>>> class Test:
... def __repr__(self):
... raise TypeError(\"hi\")
@ -431,7 +432,7 @@ def test_console_html(console_html_fixture):
>>> Test()
[[;;;terminal-error]Traceback (most recent call last):
File \"/lib/{PYVERSION}/site-packages/pyodide/console.py\", line xxx, in repr_shorten
File \".../pyodide/console.py\", line xxx, in repr_shorten
text = repr(value)
File \"<console>\", line 3, in __repr__
TypeError: hi]

View File

@ -4,8 +4,6 @@ for a basic nodejs-based test, see src/js/test/filesystem.test.js
"""
import pytest
from pyodide_build.testing import PYVERSION
@pytest.mark.skip_refcount_check
@pytest.mark.skip_pyproxy_check
@ -16,11 +14,14 @@ def test_idbfs_persist_code(selenium_standalone):
fstype = "NODEFS"
else:
fstype = "IDBFS"
mount_dir = "/mount_test"
# create mount
selenium.run_js(
f"""
pyodide.FS.mkdir('/lib/{PYVERSION}/site-packages/test_idbfs');
pyodide.FS.mount(pyodide.FS.filesystems.{fstype}, {{root : "."}}, "/lib/{PYVERSION}/site-packages/test_idbfs");
let mountDir = '{mount_dir}';
pyodide.FS.mkdir(mountDir);
pyodide.FS.mount(pyodide.FS.filesystems.{fstype}, {{root : "."}}, "{mount_dir}");
"""
)
# create file in mount
@ -28,10 +29,13 @@ def test_idbfs_persist_code(selenium_standalone):
f"""
pyodide.runPython(`
import pathlib
p = pathlib.Path('/lib/{PYVERSION}/site-packages/test_idbfs/__init__.py')
p = pathlib.Path('{mount_dir}/test_idbfs/__init__.py')
p.parent.mkdir(exist_ok=True, parents=True)
p.write_text("def test(): return 7")
from importlib import invalidate_caches
invalidate_caches()
import sys
sys.path.append('{mount_dir}')
from test_idbfs import test
assert test() == 7
`);
@ -55,12 +59,14 @@ def test_idbfs_persist_code(selenium_standalone):
)
# idbfs isn't magically loaded
selenium.run_js(
"""
f"""
pyodide.runPython(`
from importlib import invalidate_caches
import sys
invalidate_caches()
err_type = None
try:
sys.path.append('{mount_dir}')
from test_idbfs import test
except Exception as err:
err_type = type(err)
@ -71,8 +77,8 @@ def test_idbfs_persist_code(selenium_standalone):
# re-mount
selenium.run_js(
f"""
pyodide.FS.mkdir('/lib/{PYVERSION}/site-packages/test_idbfs');
pyodide.FS.mount(pyodide.FS.filesystems.{fstype}, {{root : "."}}, "/lib/{PYVERSION}/site-packages/test_idbfs");
pyodide.FS.mkdir('{mount_dir}');
pyodide.FS.mount(pyodide.FS.filesystems.{fstype}, {{root : "."}}, "{mount_dir}");
"""
)
# sync FROM idbfs
@ -86,16 +92,16 @@ def test_idbfs_persist_code(selenium_standalone):
)
# import file persisted above
selenium.run_js(
"""
f"""
pyodide.runPython(`
from importlib import invalidate_caches
invalidate_caches()
import sys
sys.path.append('{mount_dir}')
from test_idbfs import test
assert test() == 7
`);
"""
)
# remove file
selenium.run_js(
f"""pyodide.FS.unlink("/lib/{PYVERSION}/site-packages/test_idbfs/__init__.py")"""
)
selenium.run_js(f"""pyodide.FS.unlink("{mount_dir}/test_idbfs/__init__.py")""")

View File

@ -5,7 +5,7 @@ from typing import Any
import pytest
from pyodide import CodeRunner, eval_code, find_imports, should_quiet # noqa: E402
from pyodide_build.testing import PYVERSION, run_in_pyodide
from pyodide_build.testing import run_in_pyodide
def test_find_imports():
@ -862,8 +862,10 @@ def test_js_stackframes(selenium):
def normalize_tb(t):
res = []
for [file, name] in t:
if file.endswith(".js") or file.endswith(".html"):
if file.endswith((".js", ".html")):
file = file.rpartition("/")[-1]
if file.endswith(".py"):
file = "/".join(file.split("/")[-2:])
if re.fullmatch(r"\:[0-9]*", file) or file == "evalmachine.<anonymous>":
file = "test.html"
res.append([file, name])
@ -876,14 +878,14 @@ def test_js_stackframes(selenium):
["test.html", "d2"],
["test.html", "d1"],
["pyodide.js", "runPython"],
[f"/lib/{PYVERSION}/site-packages/_pyodide/_base.py", "eval_code"],
[f"/lib/{PYVERSION}/site-packages/_pyodide/_base.py", "run"],
["_pyodide/_base.py", "eval_code"],
["_pyodide/_base.py", "run"],
["<exec>", "<module>"],
["<exec>", "c2"],
["<exec>", "c1"],
["test.html", "b"],
["pyodide.js", "pyimport"],
[f"/lib/{PYVERSION}/importlib/__init__.py", "import_module"],
["importlib/__init__.py", "import_module"],
]
assert normalize_tb(res[: len(frames)]) == frames

View File

@ -32,8 +32,8 @@ check_pkgconfig() {
check_binary_present "pkg-config"
}
check_md5sum() {
check_binary_present "md5sum"
check_shasum() {
check_binary_present "shasum"
}
check_fortran_dependencies() {
@ -41,17 +41,8 @@ check_fortran_dependencies() {
check_binary_present "f2c"
}
check_pyyaml() {
local pyyaml_import_check
pyyaml_import_check="$(python3 -c 'import yaml' 2>&1)"
if [ "${pyyaml_import_check}" ]; then
failure_exit "PyYAML"
fi
}
check_python_version
check_pkgconfig
#check_python_headers
check_fortran_dependencies
check_pyyaml
check_md5sum
check_shasum