mirror of https://github.com/pyodide/pyodide.git
build packages from pypi direct (#3196)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Hood Chatham <roberthoodchatham@gmail.com>
Co-authored-by: Gyeongjae Choi <def6488@gmail.com>
Co-authored-by: Roman Yurchak <rth.yurchak@gmail.com>
parent 54b469b104
commit 4a1913d3ad
@@ -612,7 +612,7 @@ workflows:

      - test-main:
          name: test-core-node
          test-params: --runtime=node-no-host src packages/micropip packages/fpcast-test packages/sharedlib-test-py/ packages/cpp-exceptions-test/
          test-params: --runtime=node-no-host src packages/micropip packages/fpcast-test packages/sharedlib-test-py/ packages/cpp-exceptions-test/ pyodide-build/pyodide_build/tests
          requires:
            - build-core
          filters:
@@ -31,6 +31,9 @@ packages/.artifacts
packages/.libs
packages/*/build.log*
packages/build-logs
dist/
pyodide-build/**/build.log
xbuildenv/
pytest-pyodide
tools/symlinks
xbuildenv/
@@ -59,10 +59,12 @@ repos:
          - types-docutils
          - types-pyyaml
          - types-setuptools
          - types-requests
          - numpy
          - build
          - pytest
          - pydantic
          - unearth
      - id: mypy
        name: mypy-tests
        args: [--ignore-missing-imports]
@@ -106,6 +106,18 @@ substitutions:
  `pyodide-build mkpkg` will be replaced by `pyodide skeleton pypi`.
  {pr}`3175`

- Added a new CLI command `pyodide build-recipes` which builds packages from a recipe folder.
  It replaces `pyodide-build buildall`.
  {pr}`3196`

- Added subcommands for `pyodide build` which build packages from various sources.
  | command | result |
  |-------------|-------|
  | `pyodide build pypi` | build or fetch a single package from pypi |
  | `pyodide build source` | build the current source folder (same as `pyodide build`) |
  | `pyodide build url` | build or fetch a package from a url (tgz, tar.gz, zip, or wheel) |
  {pr}`3196`

- {{ Fix }} Fixed a bug in the `split` argument of {any}`repr_shorten`. Added the {any}`shorten` function.
  {pr}`3178`
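The changelog entries above describe the new `pyodide build` dispatch. As a hedged illustration only (not part of this commit), the sub-command can be driven through typer's test runner, the same mechanism the new tests in this commit use; the requirement specifier and URL below are placeholders, and a configured Pyodide/Emscripten build environment is assumed.

```python
# Illustrative sketch, not from the commit. `build.app` is the Typer app
# defined in the new pyodide_build/cli/build.py; "main" is the catch-all
# `pyodide build` command that dispatches to pypi/url/source.
from typer.testing import CliRunner

from pyodide_build.cli import build

runner = CliRunner()

# Build the current directory (equivalent to plain `pyodide build`).
runner.invoke(build.app, ["main", "."])

# Fetch a wheel from PyPI, or build the sdist if no compatible wheel exists.
# The requirement specifier is just an example.
runner.invoke(build.app, ["main", "regex==2022.9.13"])

# Build or fetch from a URL (placeholder URL).
runner.invoke(build.app, ["main", "https://example.com/pkg-1.0.0.tar.gz"])
```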
@@ -3,15 +3,11 @@
export PYODIDE_ROOT=$(abspath ..)
include ../Makefile.envs

ifeq ($(strip $(PYODIDE_PACKAGES)),)
else
ONLY_PACKAGES=--only "$(PYODIDE_PACKAGES)"
endif

all: pyodide-build
	mkdir -p $(HOSTINSTALLDIR) $(WASM_LIBRARY_DIR)
	PYODIDE_ROOT=$(PYODIDE_ROOT) python -m pyodide_build buildall . $(PYODIDE_ROOT)/dist \
		$(ONLY_PACKAGES) --n-jobs $${PYODIDE_JOBS:-4} \
	PYODIDE_ROOT=$(PYODIDE_ROOT) pyodide build-recipes \
		"$(PYODIDE_PACKAGES)" \
		--n-jobs $${PYODIDE_JOBS:-4} \
		--log-dir=./build-logs

pyodide-build: ../pyodide-build/pyodide_build/**
@@ -5,6 +5,7 @@ Build all of the packages in a given directory.
"""

import argparse
import copy
import dataclasses
import hashlib
import json
@@ -569,6 +570,7 @@ def build_packages(

    pkg_map = generate_dependency_graph(packages_dir, packages)

    output_dir.mkdir(exist_ok=True, parents=True)
    build_from_graph(pkg_map, output_dir, args)
    for pkg in pkg_map.values():
        assert isinstance(pkg, Package)
@@ -675,10 +677,9 @@ def make_parser(parser: argparse.ArgumentParser) -> argparse.ArgumentParser:
    return parser


def main(args: argparse.Namespace) -> None:
    packages_dir = Path(args.dir[0]).resolve()
    outputdir = Path(args.output[0]).resolve()
    outputdir.mkdir(exist_ok=True)
def set_default_args(args: argparse.Namespace) -> argparse.Namespace:
    args = copy.deepcopy(args)

    if args.cflags is None:
        args.cflags = common.get_make_flag("SIDE_MODULE_CFLAGS")
    if args.cxxflags is None:
@@ -689,6 +690,14 @@ def main(args: argparse.Namespace) -> None:
        args.target_install_dir = common.get_make_flag("TARGETINSTALLDIR")
    if args.host_install_dir is None:
        args.host_install_dir = common.get_make_flag("HOSTINSTALLDIR")

    return args


def main(args: argparse.Namespace) -> None:
    packages_dir = Path(args.dir[0]).resolve()
    outputdir = Path(args.output[0]).resolve()
    args = set_default_args(args)
    build_packages(packages_dir, outputdir, args)
@@ -0,0 +1,245 @@
import argparse
import os
import shutil
import tempfile
from pathlib import Path
from typing import Optional
from urllib.parse import urlparse

import requests
import typer  # type: ignore[import]
from unearth.evaluator import TargetPython
from unearth.finder import PackageFinder

from .. import buildall, common
from ..out_of_tree import build
from ..out_of_tree.utils import initialize_pyodide_root

app = typer.Typer()


def _fetch_pypi_package(package_spec, destdir):
    PYMAJOR = common.get_make_flag("PYMAJOR")
    PYMINOR = common.get_make_flag("PYMINOR")
    tp = TargetPython(
        py_ver=(int(PYMAJOR), int(PYMINOR)),
        platforms=[common.platform()],
        abis=[f"cp{PYMAJOR}{PYMINOR}"],
    )
    pf = PackageFinder(index_urls=["https://pypi.org/simple/"], target_python=tp)
    match = pf.find_best_match(package_spec)
    if match.best is None:
        if len(match.candidates) != 0:
            error = f"""Can't find version matching {package_spec}
versions found:
"""
            for c in match.candidates:
                error += " " + str(c.version) + "\t"
            raise RuntimeError(error)
        else:
            raise RuntimeError(f"Can't find package: {package_spec}")
    with tempfile.TemporaryDirectory() as download_dir:
        return pf.download_and_unpack(
            link=match.best.link, location=destdir, download_dir=download_dir
        )


def pypi(
    package: str,
    exports: str = typer.Option(
        "requested",
        help="Which symbols should be exported when linking .so files?",
    ),
    ctx: typer.Context = typer.Context,
) -> None:
    """Fetch a wheel from pypi, or build from source if none available."""
    initialize_pyodide_root()
    common.check_emscripten_version()
    backend_flags = ctx.args
    curdir = Path.cwd()
    (curdir / "dist").mkdir(exist_ok=True)

    with tempfile.TemporaryDirectory() as tmpdir:
        temppath = Path(tmpdir)

        # get package from pypi
        package_path = _fetch_pypi_package(package, temppath)
        if not package_path.is_dir():
            # a pure-python wheel has been downloaded - just copy to dist folder
            shutil.copy(str(package_path), str(curdir / "dist"))
            print(f"Successfully fetched: {package_path.name}")
            return

        # sdist - needs building
        os.chdir(tmpdir)
        build.run(exports, backend_flags)
        for src in (temppath / "dist").iterdir():
            print(f"Built {str(src.name)}")
            shutil.copy(str(src), str(curdir / "dist"))


def url(
    package_url: str,
    exports: str = typer.Option(
        "requested",
        help="Which symbols should be exported when linking .so files?",
    ),
    ctx: typer.Context = typer.Context,
) -> None:
    """Fetch a wheel or build sdist from url."""
    initialize_pyodide_root()
    common.check_emscripten_version()
    backend_flags = ctx.args
    curdir = Path.cwd()
    (curdir / "dist").mkdir(exist_ok=True)

    with requests.get(package_url, stream=True) as response:
        parsed_url = urlparse(response.url)
        filename = os.path.basename(parsed_url.path)
        name_base, ext = os.path.splitext(filename)
        if ext == ".gz" and name_base.rfind(".") != -1:
            # handle two-part extensions such as ".tar.gz"
            ext = name_base[name_base.rfind(".") :] + ext
        if ext.lower() == ".whl":
            # just copy wheel into dist and return
            out_path = f"dist/{filename}"
            with open(out_path, "wb") as f:
                for chunk in response.iter_content(chunk_size=1048576):
                    f.write(chunk)
            return
        else:
            tf = tempfile.NamedTemporaryFile(suffix=ext, delete=False)
            for chunk in response.iter_content(chunk_size=1048576):
                tf.write(chunk)
            tf.close()
            with tempfile.TemporaryDirectory() as tmpdir:
                temppath = Path(tmpdir)
                shutil.unpack_archive(tf.name, tmpdir)
                folder_list = list(temppath.iterdir())
                if len(folder_list) == 1 and folder_list[0].is_dir():
                    # unzipped into subfolder
                    os.chdir(folder_list[0])
                else:
                    # unzipped here
                    os.chdir(tmpdir)
                print(os.listdir(tmpdir))
                build.run(exports, backend_flags)
                for src in (temppath / "dist").iterdir():
                    print(f"Built {str(src.name)}")
                    shutil.copy(str(src), str(curdir / "dist"))
            os.unlink(tf.name)


def source(
    source_location: "Optional[str]" = typer.Argument(None),
    exports: str = typer.Option(
        "requested",
        help="Which symbols should be exported when linking .so files?",
    ),
    ctx: typer.Context = typer.Context,
) -> None:
    """Use pypa/build to build a Python package from source"""
    initialize_pyodide_root()
    common.check_emscripten_version()
    backend_flags = [source_location] + ctx.args
    build.run(exports, backend_flags)


@app.command()  # type: ignore[misc]
def recipe(
    packages: list[str] = typer.Argument(
        ..., help="Packages to build, or * for all packages in recipe directory"
    ),
    output: str = typer.Option(
        None,
        help="Path to output built packages and repodata.json. "
        "If not specified, the default is `PYODIDE_ROOT/dist`.",
    ),
    cflags: str = typer.Option(
        None, help="Extra compiling flags. Default: SIDE_MODULE_CFLAGS"
    ),
    cxxflags: str = typer.Option(
        None, help="Extra compiling flags. Default: SIDE_MODULE_CXXFLAGS"
    ),
    ldflags: str = typer.Option(
        None, help="Extra linking flags. Default: SIDE_MODULE_LDFLAGS"
    ),
    target_install_dir: str = typer.Option(
        None,
        help="The path to the target Python installation. Default: TARGETINSTALLDIR",
    ),
    host_install_dir: str = typer.Option(
        None,
        help="Directory for installing built host packages. Default: HOSTINSTALLDIR",
    ),
    log_dir: str = typer.Option(None, help="Directory to place log files"),
    force_rebuild: bool = typer.Option(
        False,
        help="Force rebuild of all packages regardless of whether they appear to have been updated",
    ),
    n_jobs: int = typer.Option(4, help="Number of packages to build in parallel"),
    root: str = typer.Option(
        None, help="The root directory of the Pyodide repository.", envvar="PYODIDE_ROOT"
    ),
    recipe_dir: str = typer.Option(
        None,
        help="The directory containing the package recipes. "
        "If not specified, the default is `packages` in the root directory.",
    ),
    ctx: typer.Context = typer.Context,
) -> None:
    """Build packages using yaml recipes and create repodata.json"""
    pyodide_root = common.search_pyodide_root(Path.cwd()) if not root else Path(root)
    recipe_dir_ = pyodide_root / "packages" if not recipe_dir else Path(recipe_dir)
    output_dir = pyodide_root / "dist" if not output else Path(output)

    # Note: to make minimal changes to the existing pyodide-build entrypoint,
    # keep the arguments of buildall unchanged.
    # TODO: refactor this when we remove the pyodide-build entrypoint.
    args = argparse.Namespace(**ctx.params)
    args.dir = args.recipe_dir

    if len(args.packages) == 1 and "," in args.packages[0]:
        # Handle packages passed with the old comma-separated syntax.
        # This is to support `PYODIDE_PACKAGES="pkg1,pkg2,..." make` syntax.
        args.only = args.packages[0].replace(" ", "")
    else:
        args.only = ",".join(args.packages)

    args = buildall.set_default_args(args)

    buildall.build_packages(recipe_dir_, output_dir, args)


# simple 'pyodide build' command
@app.command()  # type: ignore[misc]
def main(
    source_location: "Optional[str]" = typer.Argument(
        "",
        help="Build source; can be a source folder, a pypi version specification, or a url to a source dist archive or wheel file. If this is blank, it will build the current directory.",
    ),
    exports: str = typer.Option(
        "requested",
        help="Which symbols should be exported when linking .so files?",
    ),
    ctx: typer.Context = typer.Context,
) -> None:
    """Use pypa/build to build a Python package from source, pypi or url."""
    if not source_location:
        # build the current folder
        source(".", exports, ctx)
    elif source_location.find("://") != -1:
        url(source_location, exports, ctx)
    elif Path(source_location).is_dir():
        # a folder, build it
        source(source_location, exports, ctx)
    else:
        # try fetch from pypi
        pypi(source_location, exports, ctx)


main.typer_kwargs = {
    "context_settings": {
        "ignore_unknown_options": True,
        "allow_extra_args": True,
    },
}
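For orientation, a hedged usage sketch of the `recipe` command defined in the new module above, using the same CliRunner pattern as the in-tree tests further down; the recipe name and directories are placeholders, and a full Pyodide checkout with an Emscripten toolchain is assumed.

```python
# Illustrative sketch, not from the commit: driving `pyodide build-recipes`
# (the `recipe` command on build.app) through typer's test runner.
from typer.testing import CliRunner

from pyodide_build.cli import build

runner = CliRunner()
result = runner.invoke(
    build.app,
    [
        "recipe",
        "my_package",               # placeholder recipe name under --recipe-dir
        "--recipe-dir", "./packages",
        "--output", "./dist",
        "--n-jobs", "4",
    ],
)
print(result.exit_code, result.stdout)
```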
@@ -1,27 +0,0 @@
import typer  # type: ignore[import]

from .. import common
from ..out_of_tree import build
from ..out_of_tree.utils import initialize_pyodide_root


def main(
    exports: str = typer.Option(
        "requested",
        help="Which symbols should be exported when linking .so files?",
    ),
    ctx: typer.Context = typer.Context,
) -> None:
    """Use pypa/build to build a Python package"""
    initialize_pyodide_root()
    common.check_emscripten_version()
    backend_flags = ctx.args
    build.run(exports, backend_flags)


main.typer_kwargs = {  # type: ignore[attr-defined]
    "context_settings": {
        "ignore_unknown_options": True,
        "allow_extra_args": True,
    },
}
@@ -1,4 +1,5 @@
import os
from pathlib import Path

from .. import common, pypabuild, pywasmcross
@@ -10,6 +11,9 @@ def run(exports, args):
    cxxflags += f" {os.environ.get('CXXFLAGS', '')}"
    ldflags = common.get_make_flag("SIDE_MODULE_LDFLAGS")
    ldflags += f" {os.environ.get('LDFLAGS', '')}"

    curdir = Path.cwd()
    (curdir / "dist").mkdir(exist_ok=True)
    build_env_ctx = pywasmcross.get_build_env(
        env=os.environ.copy(),
        pkgname="",
@@ -0,0 +1,8 @@
package:
  name: pkg_test_graph1
  version: "1.0.0"
requirements:
  run:
    - pkg_test_graph2
source:
  path: src
@@ -0,0 +1,8 @@
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "pkg_test_graph1"
version = "1.0.0"
authors = []
@@ -0,0 +1,5 @@
package:
  name: pkg_test_graph2
  version: "1.0.0"
source:
  path: src
@@ -0,0 +1,8 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[project]
name = "pkg_test_graph2"
version = "1.0.0"
authors = []
@@ -0,0 +1,5 @@
package:
  name: pkg_test_graph3
  version: "1.0.0"
source:
  path: src
@@ -0,0 +1,8 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[project]
name = "pkg_test_graph3"
version = "1.0.0"
authors = []
@@ -1,6 +1,18 @@
import os
import shutil
from pathlib import Path

import pytest
from typer.testing import CliRunner  # type: ignore[import]

from pyodide_build.cli import skeleton
from pyodide_build import __version__ as pyodide_build_version
from pyodide_build import common
from pyodide_build.cli import build, skeleton

only_node = pytest.mark.xfail_browsers(
    chrome="node only", firefox="node only", safari="node only"
)


runner = CliRunner()
@@ -44,3 +56,79 @@ def test_skeleton_pypi(tmp_path):
    )
    assert result.exit_code != 0
    assert "already exists" in str(result.exception)


def test_build_recipe_with_pyodide(tmp_path, monkeypatch, request, runtime):
    if runtime != "node":
        pytest.xfail("node only")
    test_build_recipe(tmp_path, monkeypatch, request)


def test_build_recipe(tmp_path, monkeypatch, request):
    if "dev" in pyodide_build_version:
        if "EMSDK" not in os.environ or "PYODIDE_ROOT" not in os.environ:
            pytest.skip(
                reason="Can't build recipe in dev mode without building pyodide first"
            )
    output_dir = tmp_path / "dist"
    recipe_dir = Path(__file__).parent / "_test_recipes"

    pkgs = {
        "pkg_test_graph1": {"pkg_test_graph2"},
        "pkg_test_graph3": {},
    }

    pkgs_to_build = pkgs.keys() | {p for v in pkgs.values() for p in v}

    monkeypatch.setattr(common, "ALWAYS_PACKAGES", {})

    for build_dir in recipe_dir.rglob("build"):
        shutil.rmtree(build_dir)

    result = runner.invoke(
        build.app,
        [
            "recipe",
            *pkgs.keys(),
            "--recipe-dir",
            recipe_dir,
            "--output",
            output_dir,
        ],
    )

    assert result.exit_code == 0, result.stdout

    for pkg in pkgs_to_build:
        assert f"built {pkg} in" in result.stdout

    built_wheels = set(output_dir.glob("*.whl"))
    assert len(built_wheels) == len(pkgs_to_build)


def test_fetch_or_build_pypi_with_pyodide(tmp_path, runtime):
    if runtime != "node":
        pytest.xfail("node only")
    test_fetch_or_build_pypi(tmp_path)


def test_fetch_or_build_pypi(tmp_path):
    if "dev" in pyodide_build_version:
        if "EMSDK" not in os.environ or "PYODIDE_ROOT" not in os.environ:
            pytest.skip(
                reason="Can't build recipe in dev mode without building pyodide first. Skipping test"
            )
    output_dir = tmp_path / "dist"
    # one pure-python package (doesn't need building) and one sdist package (needs building)
    pkgs = ["pytest-pyodide", "pycryptodome==3.15.0"]

    os.chdir(tmp_path)
    for p in pkgs:
        result = runner.invoke(
            build.app,
            ["main", p],
        )
        assert result.exit_code == 0, result.stdout

    built_wheels = set(output_dir.glob("*.whl"))
    assert len(built_wheels) == len(pkgs)
@@ -31,6 +31,10 @@ install_requires =
    pydantic>=1.10.2
    pyodide-cli>=0.2.0
    cmake
    unearth~=0.6
    requests
    types-requests
    typer
    auditwheel-emscripten==0.0.8

[options.entry_points]
@@ -38,7 +42,8 @@ console_scripts =
    pyodide-build = pyodide_build.__main__:main
    _pywasmcross = pyodide_build.pywasmcross:compiler_main
pyodide.cli =
    build = pyodide_build.cli.build_oot:main
    build = pyodide_build.cli.build:main
    build-recipes = pyodide_build.cli.build:recipe
    venv = pyodide_build.cli.venv:main
    skeleton = pyodide_build.cli.skeleton:app
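The `pyodide.cli` entry points above are consumed by the separate `pyodide-cli` package to assemble the `pyodide` command. A minimal sketch of the underlying standard-library mechanism, assuming Python 3.10+; this is not the actual pyodide-cli registration code:

```python
# Hedged sketch of entry-point discovery; not taken from pyodide-cli itself.
from importlib.metadata import entry_points

for ep in entry_points(group="pyodide.cli"):
    # Expected entries after this change include, for example:
    #   build         -> pyodide_build.cli.build:main
    #   build-recipes -> pyodide_build.cli.build:recipe
    #   venv          -> pyodide_build.cli.venv:main
    #   skeleton      -> pyodide_build.cli.skeleton:app
    obj = ep.load()  # a typer app or a plain command function
    print(f"{ep.name} -> {ep.value}")
```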
@@ -137,6 +137,7 @@ CONTAINER=$(\
--groups sudo \
$USER_NAME \
; \
echo 'export PATH=\$PATH:$USER_HOME/.local/bin' >> /etc/profile; \
echo '%sudo ALL=(ALL:ALL) NOPASSWD:ALL' >> /etc/sudoers ; \
echo '$HEALTHCHECK_MESSAGE'; \
tail -f /dev/null \
@@ -154,7 +155,7 @@ docker exec \
"$DOCKER_INTERACTIVE" --tty \
"${USER_FLAG[@]}" \
"$CONTAINER" \
/bin/bash -c "${DOCKER_COMMAND}" || EXIT_STATUS=$?
/bin/bash -lc "${DOCKER_COMMAND}" || EXIT_STATUS=$?

docker kill "$CONTAINER" > /dev/null
exit $EXIT_STATUS