mirror of https://github.com/pyodide/pyodide.git
chore: Update pre-commit (#4440)
I also reordered prettier to come last since it's the slowest. I switched from using black to ruff-format, which documents its defaults as nearly identical to black's.
This commit is contained in:
parent 5da4725830
commit b8287e42d9
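As a quick orientation before the diff: the sketch below condenses the hook entries this commit changes. It is assembled from the diff that follows, so the file name (presumably .pre-commit-config.yaml) and the omitted surrounding hooks are assumptions, not part of the commit itself. ruff-format takes over from the separate psf/black hook, and the prettier mirror moves to the end of the repos list.

# Sketch of the updated hook configuration, condensed from the diff below;
# file name and omitted hooks are assumed for illustration.
repos:
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    rev: "v0.1.14"
    hooks:
      - id: ruff
        args: [--fix]
      - id: ruff-format # replaces the separate psf/black hook

  # ... other hooks (clang-format, pygrep, shellcheck, codespell, mypy) ...

  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: "v4.0.0-alpha.8"
    hooks:
      - id: prettier # now last, since it is the slowest hook

After such an update, running `pre-commit run --all-files` re-checks the whole tree with the new hook versions, which is where any formatting-only churn from the black-to-ruff-format switch would surface.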
@@ -13,7 +13,7 @@
     // For config options, see https://github.com/devcontainers/features/tree/main/src/conda
     "ghcr.io/devcontainers/features/conda": {
       "version": "latest",
-      "addCondaForge": "true"
-    }
-  }
+      "addCondaForge": "true",
+    },
+  },
 }
@@ -3,5 +3,5 @@
   // keep in sync with "run_docker"
   "image": "pyodide/pyodide-env:20230506-chrome112-firefox112-py311",
   "remoteUser": "root",
-  "onCreateCommand": ".devcontainer/onCreate-docker.sh"
+  "onCreateCommand": ".devcontainer/onCreate-docker.sh",
 }
@@ -3,7 +3,7 @@ default_language_version:
   python: "3.11"
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: "v4.4.0"
+    rev: "v4.5.0"
     hooks:
       - id: check-added-large-files
       - id: check-case-conflict
@@ -17,27 +17,18 @@ repos:
       - id: trailing-whitespace

   - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: "v0.0.254"
+    rev: "v0.1.14"
     hooks:
       - id: ruff
         args: [--fix]
-
-  - repo: https://github.com/psf/black
-    rev: "23.1.0"
-    hooks:
-      - id: black
+      - id: ruff-format

   - repo: https://github.com/pre-commit/mirrors-clang-format
-    rev: "v15.0.7"
+    rev: "v17.0.6"
     hooks:
       - id: clang-format
         types_or: [c++, c, cuda]

-  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: "v3.0.0-alpha.6"
-    hooks:
-      - id: prettier
-
   - repo: https://github.com/pre-commit/pygrep-hooks
     rev: "v1.10.0"
     hooks:
@@ -48,24 +39,24 @@ repos:
       - id: rst-inline-touching-normal

   - repo: https://github.com/shellcheck-py/shellcheck-py
-    rev: "v0.9.0.2"
+    rev: "v0.9.0.6"
     hooks:
       - id: shellcheck
         exclude: ^src/templates/python$

   - repo: https://github.com/codespell-project/codespell
-    rev: "v2.2.4"
+    rev: "v2.2.6"
     hooks:
       - id: codespell
         args:
           [
             "--ignore-words-list",
-            "ags,aray,asend,ba,classs,crate,falsy,feld,inflight,lits,nd,slowy,te",
+            "ags,aray,asend,ba,classs,crate,falsy,feld,inflight,lits,nd,slowy,te,oint",
           ]
         exclude: ^(benchmark/benchmarks/pystone_benchmarks/pystone\.py|src/js/package-lock\.json)$

   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: "v1.1.1"
+    rev: "v1.8.0"
     hooks:
       - id: mypy
         files: ^(packages/.*/src|src|pyodide-build/pyodide_build)
@@ -94,5 +85,10 @@ repos:
         exclude: (^packages/.*/setup.py|/src|^packages/aiohttp/aiohttp_patch.py$)
         additional_dependencies: *mypy-deps

+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: "v4.0.0-alpha.8"
+    hooks:
+      - id: prettier
+
 ci:
   autoupdate_schedule: "quarterly"
@ -1,6 +1,7 @@
|
||||||
"""
|
"""
|
||||||
Various common utilities for testing.
|
Various common utilities for testing.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import pathlib
|
import pathlib
|
||||||
import re
|
import re
|
||||||
|
|
|
@ -53,8 +53,11 @@ code.literal {
|
||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
vertical-align: baseline;
|
vertical-align: baseline;
|
||||||
border-radius: 0.25rem;
|
border-radius: 0.25rem;
|
||||||
transition: color 0.15s ease-in-out, background-color 0.15s ease-in-out,
|
transition:
|
||||||
border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out;
|
color 0.15s ease-in-out,
|
||||||
|
background-color 0.15s ease-in-out,
|
||||||
|
border-color 0.15s ease-in-out,
|
||||||
|
box-shadow 0.15s ease-in-out;
|
||||||
}
|
}
|
||||||
@media (prefers-reduced-motion: reduce) {
|
@media (prefers-reduced-motion: reduce) {
|
||||||
.badge {
|
.badge {
|
||||||
|
|
|
@ -60,9 +60,10 @@ versionwarning_message = (
|
||||||
autosummary_generate = True
|
autosummary_generate = True
|
||||||
autodoc_default_flags = ["members", "inherited-members"]
|
autodoc_default_flags = ["members", "inherited-members"]
|
||||||
|
|
||||||
|
micropip_version = micropip.__version__
|
||||||
intersphinx_mapping = {
|
intersphinx_mapping = {
|
||||||
"python": ("https://docs.python.org/3.11", None),
|
"python": ("https://docs.python.org/3.11", None),
|
||||||
"micropip": (f"https://micropip.pyodide.org/en/v{micropip.__version__}/", None),
|
"micropip": (f"https://micropip.pyodide.org/en/v{micropip_version}/", None),
|
||||||
"numpy": ("https://numpy.org/doc/stable/", None),
|
"numpy": ("https://numpy.org/doc/stable/", None),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -60,7 +60,7 @@ See {issue}`scipy/scipy#15290`.

 Some of the challenges that Pyodide faces, such as maintaining a collection of
 build recipes, dependency resolution from PyPI, etc are already solved in either
-Python or JavaScript ecosystems. We should therefore strive to better re-use
+Python or JavaScript ecosystems. We should therefore strive to better reuse
 existing tooling, and seeking synergies with existing initiatives in this space,
 such as conda-forge.
@ -16,5 +16,5 @@ def test_robotraconteur_exceptions(selenium):
|
||||||
RRN = RR.RobotRaconteurNode.s
|
RRN = RR.RobotRaconteurNode.s
|
||||||
RRN.SetNodeName("test_node")
|
RRN.SetNodeName("test_node")
|
||||||
assert RRN.NodeName == "test_node"
|
assert RRN.NodeName == "test_node"
|
||||||
with pytest.raises(Exception):
|
with pytest.raises(Exception): # noqa: B017
|
||||||
RRN.SetNodeName("test_node")
|
RRN.SetNodeName("test_node")
|
||||||
|
|
|
@ -3,9 +3,8 @@ import pytest
|
||||||
|
|
||||||
@pytest.mark.requires_dynamic_linking
|
@pytest.mark.requires_dynamic_linking
|
||||||
def test_uncaught_cpp_exceptions(selenium):
|
def test_uncaught_cpp_exceptions(selenium):
|
||||||
assert (
|
assert selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
await pyodide.loadPackage("cpp-exceptions-test");
|
await pyodide.loadPackage("cpp-exceptions-test");
|
||||||
const Tests = pyodide._api.tests;
|
const Tests = pyodide._api.tests;
|
||||||
const throwlib = pyodide._module.LDSO.loadedLibsByName["/usr/lib/cpp-exceptions-test-throw.so"].exports;
|
const throwlib = pyodide._module.LDSO.loadedLibsByName["/usr/lib/cpp-exceptions-test-throw.so"].exports;
|
||||||
|
@ -20,25 +19,22 @@ def test_uncaught_cpp_exceptions(selenium):
|
||||||
}
|
}
|
||||||
return [t(1), t(2), t(3), t(4), t(5)];
|
return [t(1), t(2), t(3), t(4), t(5)];
|
||||||
"""
|
"""
|
||||||
)
|
) == [
|
||||||
== [
|
"CppException int: The exception is an object of type int at address xxx "
|
||||||
"CppException int: The exception is an object of type int at address xxx "
|
"which does not inherit from std::exception",
|
||||||
"which does not inherit from std::exception",
|
"CppException char: The exception is an object of type char at address xxx "
|
||||||
"CppException char: The exception is an object of type char at address xxx "
|
"which does not inherit from std::exception",
|
||||||
"which does not inherit from std::exception",
|
"CppException std::runtime_error: abc",
|
||||||
"CppException std::runtime_error: abc",
|
"CppException myexception: My exception happened",
|
||||||
"CppException myexception: My exception happened",
|
"CppException char const*: The exception is an object of type char const* at "
|
||||||
"CppException char const*: The exception is an object of type char const* at "
|
"address xxx which does not inherit from std::exception",
|
||||||
"address xxx which does not inherit from std::exception",
|
]
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.requires_dynamic_linking
|
@pytest.mark.requires_dynamic_linking
|
||||||
def test_cpp_exception_catching(selenium):
|
def test_cpp_exception_catching(selenium):
|
||||||
assert (
|
assert selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
await pyodide.loadPackage("cpp-exceptions-test");
|
await pyodide.loadPackage("cpp-exceptions-test");
|
||||||
const Module = pyodide._module;
|
const Module = pyodide._module;
|
||||||
const catchlib = pyodide._module.LDSO.loadedLibsByName["/usr/lib/cpp-exceptions-test-catch.so"].exports;
|
const catchlib = pyodide._module.LDSO.loadedLibsByName["/usr/lib/cpp-exceptions-test-catch.so"].exports;
|
||||||
|
@ -51,28 +47,23 @@ def test_cpp_exception_catching(selenium):
|
||||||
|
|
||||||
return [t(1), t(2), t(3), t(5)];
|
return [t(1), t(2), t(3), t(5)];
|
||||||
"""
|
"""
|
||||||
)
|
) == [
|
||||||
== [
|
"caught int 1000",
|
||||||
"caught int 1000",
|
"caught char 99",
|
||||||
"caught char 99",
|
"caught runtime_error abc",
|
||||||
"caught runtime_error abc",
|
"caught ????",
|
||||||
"caught ????",
|
]
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.requires_dynamic_linking
|
@pytest.mark.requires_dynamic_linking
|
||||||
def test_sjlj(selenium):
|
def test_sjlj(selenium):
|
||||||
assert (
|
assert (
|
||||||
(
|
selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
await pyodide.loadPackage("cpp-exceptions-test");
|
await pyodide.loadPackage("cpp-exceptions-test");
|
||||||
const Module = pyodide._module;
|
const Module = pyodide._module;
|
||||||
const catchlib = pyodide._module.LDSO.loadedLibsByName["/usr/lib/cpp-exceptions-test-catch.so"].exports;
|
const catchlib = pyodide._module.LDSO.loadedLibsByName["/usr/lib/cpp-exceptions-test-catch.so"].exports;
|
||||||
return catchlib.set_jmp_func();
|
return catchlib.set_jmp_func();
|
||||||
"""
|
"""
|
||||||
)
|
|
||||||
)
|
)
|
||||||
== 5
|
) == 5
|
||||||
)
|
|
||||||
|
|
|
@ -27,6 +27,6 @@ setup(
|
||||||
"Operating System :: OS Independent",
|
"Operating System :: OS Independent",
|
||||||
],
|
],
|
||||||
# packages=["fpcast_test"],
|
# packages=["fpcast_test"],
|
||||||
ext_modules=[Extension("fpcast_test", ["fpcast-test.c"])]
|
ext_modules=[Extension("fpcast_test", ["fpcast-test.c"])],
|
||||||
# python_requires='>=3.6',
|
# python_requires='>=3.6',
|
||||||
)
|
)
|
||||||
|
|
|
@ -43,6 +43,6 @@ def test_fpcasts(selenium):
|
||||||
t.kwargs2()
|
t.kwargs2()
|
||||||
t.kwargs3()
|
t.kwargs3()
|
||||||
|
|
||||||
t.getset0
|
t.getset0 # noqa: B018
|
||||||
t.getset1
|
t.getset1 # noqa: B018
|
||||||
t.getset1 = 5
|
t.getset1 = 5
|
||||||
|
|
|
@ -408,7 +408,7 @@ def test_dict_key(selenium):
|
||||||
|
|
||||||
_list = FrozenList([1, 2])
|
_list = FrozenList([1, 2])
|
||||||
with pytest.raises(RuntimeError):
|
with pytest.raises(RuntimeError):
|
||||||
{_list: "hello"}
|
{_list: "hello"} # noqa: B018
|
||||||
_list.freeze()
|
_list.freeze()
|
||||||
{_list: "hello"} # noqa: B018
|
{_list: "hello"} # noqa: B018
|
||||||
|
|
||||||
|
|
|
@ -69,4 +69,4 @@ def test_hashlib_algorithms(selenium):
|
||||||
assert openssl_algorithm in algorithms_available
|
assert openssl_algorithm in algorithms_available
|
||||||
|
|
||||||
for algorithm in algorithms_available:
|
for algorithm in algorithms_available:
|
||||||
hashlib.new(algorithm).digest_size
|
hashlib.new(algorithm).digest_size # noqa: B018
|
||||||
|
|
|
@@ -1060,7 +1060,7 @@ done:
 /****************************************/
 /* ALIGNMENT and signal-handling status */
 /****************************************/
-/* ALIGNAMENT test is not available */
+/* ALIGNMENT test is not available */
 /* Signal handlers verify test is not available */
 /* Signal() support: no */
 /* setjmp() support: yes */
@ -48,9 +48,8 @@ SNOWBALL_WHEEL = "snowballstemmer-2.0.0-py2.py3-none-any.whl"
|
||||||
|
|
||||||
def test_install_simple(selenium_standalone_micropip):
|
def test_install_simple(selenium_standalone_micropip):
|
||||||
selenium = selenium_standalone_micropip
|
selenium = selenium_standalone_micropip
|
||||||
assert (
|
assert selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
return await pyodide.runPythonAsync(`
|
return await pyodide.runPythonAsync(`
|
||||||
import os
|
import os
|
||||||
import micropip
|
import micropip
|
||||||
|
@ -63,9 +62,7 @@ def test_install_simple(selenium_standalone_micropip):
|
||||||
to_js(stemmer.stemWords('go going goes gone'.split()))
|
to_js(stemmer.stemWords('go going goes gone'.split()))
|
||||||
`);
|
`);
|
||||||
"""
|
"""
|
||||||
)
|
) == ["go", "go", "goe", "gone"]
|
||||||
== ["go", "go", "goe", "gone"]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("base_url", ["'{base_url}'", "'.'"])
|
@pytest.mark.parametrize("base_url", ["'{base_url}'", "'.'"])
|
||||||
|
|
|
@ -123,12 +123,8 @@ def test_blosc(selenium_standalone):
|
||||||
np.random.randint(-(2**63), -(2**63) + 20, size=1000, dtype="i8").view(
|
np.random.randint(-(2**63), -(2**63) + 20, size=1000, dtype="i8").view(
|
||||||
"m8[ns]"
|
"m8[ns]"
|
||||||
),
|
),
|
||||||
np.random.randint(-(2**63), -(2**63) + 20, size=1000, dtype="i8").view(
|
np.random.randint(-(2**63), -(2**63) + 20, size=1000, dtype="i8").view("M8[m]"),
|
||||||
"M8[m]"
|
np.random.randint(-(2**63), -(2**63) + 20, size=1000, dtype="i8").view("m8[m]"),
|
||||||
),
|
|
||||||
np.random.randint(-(2**63), -(2**63) + 20, size=1000, dtype="i8").view(
|
|
||||||
"m8[m]"
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
codecs = [
|
codecs = [
|
||||||
|
|
|
@ -10,7 +10,7 @@ from contextlib import nullcontext, redirect_stdout
|
||||||
from io import StringIO
|
from io import StringIO
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
if sys.version_info < (3, 11, 0):
|
if sys.version_info < (3, 11, 0): # noqa: UP036
|
||||||
import tomli as tomllib
|
import tomli as tomllib
|
||||||
else:
|
else:
|
||||||
import tomllib
|
import tomllib
|
||||||
|
|
|
@ -851,6 +851,8 @@ def set_default_build_args(build_args: BuildArgs) -> BuildArgs:
|
||||||
if args.host_install_dir is None:
|
if args.host_install_dir is None:
|
||||||
args.host_install_dir = build_env.get_build_flag("HOSTINSTALLDIR") # type: ignore[unreachable]
|
args.host_install_dir = build_env.get_build_flag("HOSTINSTALLDIR") # type: ignore[unreachable]
|
||||||
if args.compression_level is None:
|
if args.compression_level is None:
|
||||||
args.compression_level = int(build_env.get_build_flag("PYODIDE_ZIP_COMPRESSION_LEVEL")) # type: ignore[unreachable]
|
args.compression_level = int( # type: ignore[unreachable]
|
||||||
|
build_env.get_build_flag("PYODIDE_ZIP_COMPRESSION_LEVEL")
|
||||||
|
)
|
||||||
|
|
||||||
return args
|
return args
|
||||||
|
|
|
@ -53,7 +53,10 @@ def _make_whlfile(
|
||||||
|
|
||||||
shutil.register_archive_format("whl", _make_whlfile, description="Wheel file")
|
shutil.register_archive_format("whl", _make_whlfile, description="Wheel file")
|
||||||
shutil.register_unpack_format(
|
shutil.register_unpack_format(
|
||||||
"whl", [".whl", ".wheel"], shutil._unpack_zipfile, description="Wheel file" # type: ignore[attr-defined]
|
"whl",
|
||||||
|
[".whl", ".wheel"],
|
||||||
|
shutil._unpack_zipfile, # type: ignore[attr-defined]
|
||||||
|
description="Wheel file",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -118,9 +118,7 @@ def source(
|
||||||
|
|
||||||
# simple 'pyodide build' command
|
# simple 'pyodide build' command
|
||||||
def main(
|
def main(
|
||||||
source_location: Optional[ # noqa: typer does not accept list[str] | None yet.
|
source_location: Optional[str] = typer.Argument( # noqa: UP007 typer does not accept list[str] | None yet.
|
||||||
str
|
|
||||||
] = typer.Argument(
|
|
||||||
"",
|
"",
|
||||||
help="Build source, can be source folder, pypi version specification, "
|
help="Build source, can be source folder, pypi version specification, "
|
||||||
"or url to a source dist archive or wheel file. If this is blank, it "
|
"or url to a source dist archive or wheel file. If this is blank, it "
|
||||||
|
@ -162,9 +160,7 @@ def main(
|
||||||
compression_level: int = typer.Option(
|
compression_level: int = typer.Option(
|
||||||
6, help="Compression level to use for the created zip file"
|
6, help="Compression level to use for the created zip file"
|
||||||
),
|
),
|
||||||
config_setting: Optional[ # noqa: typer does not accept list[str] | None yet.
|
config_setting: Optional[list[str]] = typer.Option( # noqa: UP007 typer does not accept list[str] | None yet.
|
||||||
list[str]
|
|
||||||
] = typer.Option(
|
|
||||||
None,
|
None,
|
||||||
"--config-setting",
|
"--config-setting",
|
||||||
"-C",
|
"-C",
|
||||||
|
|
|
@ -320,7 +320,7 @@ def extract_wheel_metadata_file(wheel_path: Path, output_path: Path) -> None:
|
||||||
"""Extracts the METADATA file from the given wheel and writes it to the
|
"""Extracts the METADATA file from the given wheel and writes it to the
|
||||||
output path.
|
output path.
|
||||||
|
|
||||||
Raises an exception if the METADATA file does not exist.
|
Raises a RuntimeError if the METADATA file does not exist.
|
||||||
|
|
||||||
For a wheel called "NAME-VERSION-...", the METADATA file is expected to be
|
For a wheel called "NAME-VERSION-...", the METADATA file is expected to be
|
||||||
found in a directory inside the wheel archive, whose name starts with NAME
|
found in a directory inside the wheel archive, whose name starts with NAME
|
||||||
|
@ -335,13 +335,13 @@ def extract_wheel_metadata_file(wheel_path: Path, output_path: Path) -> None:
|
||||||
wheel.getinfo(metadata_path).filename = output_path.name
|
wheel.getinfo(metadata_path).filename = output_path.name
|
||||||
wheel.extract(metadata_path, output_path.parent)
|
wheel.extract(metadata_path, output_path.parent)
|
||||||
except KeyError as err:
|
except KeyError as err:
|
||||||
raise Exception(f"METADATA file not found for {pkg_name}") from err
|
raise RuntimeError(f"METADATA file not found for {pkg_name}") from err
|
||||||
|
|
||||||
|
|
||||||
def get_wheel_dist_info_dir(wheel: ZipFile, pkg_name: str) -> str:
|
def get_wheel_dist_info_dir(wheel: ZipFile, pkg_name: str) -> str:
|
||||||
"""Returns the path of the contained .dist-info directory.
|
"""Returns the path of the contained .dist-info directory.
|
||||||
|
|
||||||
Raises an Exception if the directory is not found, more than
|
Raises a RuntimeError if the directory is not found, more than
|
||||||
one is found, or it does not match the provided `pkg_name`.
|
one is found, or it does not match the provided `pkg_name`.
|
||||||
|
|
||||||
Adapted from:
|
Adapted from:
|
||||||
|
@ -353,10 +353,10 @@ def get_wheel_dist_info_dir(wheel: ZipFile, pkg_name: str) -> str:
|
||||||
info_dirs = [subdir for subdir in subdirs if subdir.endswith(".dist-info")]
|
info_dirs = [subdir for subdir in subdirs if subdir.endswith(".dist-info")]
|
||||||
|
|
||||||
if len(info_dirs) == 0:
|
if len(info_dirs) == 0:
|
||||||
raise Exception(f".dist-info directory not found for {pkg_name}")
|
raise RuntimeError(f".dist-info directory not found for {pkg_name}")
|
||||||
|
|
||||||
if len(info_dirs) > 1:
|
if len(info_dirs) > 1:
|
||||||
raise Exception(
|
raise RuntimeError(
|
||||||
f"multiple .dist-info directories found for {pkg_name}: {', '.join(info_dirs)}"
|
f"multiple .dist-info directories found for {pkg_name}: {', '.join(info_dirs)}"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -366,7 +366,7 @@ def get_wheel_dist_info_dir(wheel: ZipFile, pkg_name: str) -> str:
|
||||||
canonical_name = canonicalize_package_name(pkg_name)
|
canonical_name = canonicalize_package_name(pkg_name)
|
||||||
|
|
||||||
if not info_dir_name.startswith(canonical_name):
|
if not info_dir_name.startswith(canonical_name):
|
||||||
raise Exception(
|
raise RuntimeError(
|
||||||
f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"
|
f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -207,7 +207,9 @@ def make_package(
|
||||||
try:
|
try:
|
||||||
run_prettier(meta_path)
|
run_prettier(meta_path)
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
warnings.warn("'npx' executable missing, output has not been prettified.")
|
warnings.warn(
|
||||||
|
"'npx' executable missing, output has not been prettified.", stacklevel=1
|
||||||
|
)
|
||||||
|
|
||||||
logger.success(f"Output written to {meta_path}")
|
logger.success(f"Output written to {meta_path}")
|
||||||
|
|
||||||
|
|
|
@ -55,7 +55,7 @@ def stream_redirected(to=os.devnull, stream=None):
|
||||||
# e.g. in pytest
|
# e.g. in pytest
|
||||||
yield
|
yield
|
||||||
return
|
return
|
||||||
if type(to) == str:
|
if isinstance(to, str):
|
||||||
to = open(to, "w")
|
to = open(to, "w")
|
||||||
with os.fdopen(os.dup(stream_fd), "wb") as copied:
|
with os.fdopen(os.dup(stream_fd), "wb") as copied:
|
||||||
stream.flush()
|
stream.flush()
|
||||||
|
|
|
@@ -264,7 +264,7 @@ def replay_genargs_handle_linker_opts(arg: str) -> str | None:
         # breaks emscripten see https://github.com/emscripten-core/emscripten/issues/14460
         "--strip-all",
         "-strip-all",
-        # wasm-ld does not regconize some link flags
+        # wasm-ld does not recognize some link flags
         "--sort-common",
         "--as-needed",
     ]:
@ -151,7 +151,7 @@ def test_extract_wheel_metadata_file(tmp_path):
|
||||||
|
|
||||||
output_path_empty = tmp_path / f"{input_path_empty.name}.metadata"
|
output_path_empty = tmp_path / f"{input_path_empty.name}.metadata"
|
||||||
|
|
||||||
with pytest.raises(Exception):
|
with pytest.raises(RuntimeError):
|
||||||
extract_wheel_metadata_file(input_path_empty, output_path_empty)
|
extract_wheel_metadata_file(input_path_empty, output_path_empty)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -70,6 +70,6 @@ def test_is_rust_package_2(reqs):
|
||||||
pkg = MetaConfig(
|
pkg = MetaConfig(
|
||||||
package={"name": "a", "version": "0.2"},
|
package={"name": "a", "version": "0.2"},
|
||||||
source={"url": "test.tar", "sha256": ""},
|
source={"url": "test.tar", "sha256": ""},
|
||||||
**reqs
|
**reqs,
|
||||||
)
|
)
|
||||||
assert not pkg.is_rust_package()
|
assert not pkg.is_rust_package()
|
||||||
|
|
|
@ -28,7 +28,7 @@
|
||||||
|
|
||||||
// No BigUint64Array, have to manually split / join lower and upper byte
|
// No BigUint64Array, have to manually split / join lower and upper byte
|
||||||
//
|
//
|
||||||
#define BIGINT_LOWER(x) (Number((x)&BigInt(0xffffffff)) | 0)
|
#define BIGINT_LOWER(x) (Number((x) & BigInt(0xffffffff)) | 0)
|
||||||
#define BIGINT_UPPER(x) (Number((x) >> BigInt(32)) | 0)
|
#define BIGINT_UPPER(x) (Number((x) >> BigInt(32)) | 0)
|
||||||
#define UBIGINT_FROM_PAIR(lower, upper) \
|
#define UBIGINT_FROM_PAIR(lower, upper) \
|
||||||
(BigInt(lower) | (BigInt(upper) << BigInt(32)))
|
(BigInt(lower) | (BigInt(upper) << BigInt(32)))
|
||||||
|
|
|
@@ -2,7 +2,7 @@
  * JsProxy Class
  *
  * The root JsProxy class is a simple class that wraps a JsRef. We define
- * overloads for getattr, setattr, delattr, repr, bool, and comparison opertaors
+ * overloads for getattr, setattr, delattr, repr, bool, and comparison operators
  * on the base class.
  *
  * We define a wide variety of subclasses on the fly with different operator
@ -6,7 +6,7 @@
|
||||||
"module": "ES2022",
|
"module": "ES2022",
|
||||||
"moduleResolution": "node",
|
"moduleResolution": "node",
|
||||||
"paths": {
|
"paths": {
|
||||||
"*": ["*", "js/*"]
|
"*": ["*", "js/*"],
|
||||||
}
|
},
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,9 +18,9 @@
|
||||||
"baseUrl": "..",
|
"baseUrl": "..",
|
||||||
"paths": {
|
"paths": {
|
||||||
"generated/*": ["js/generated/*", "core/*"],
|
"generated/*": ["js/generated/*", "core/*"],
|
||||||
"*": ["*", "js/*"]
|
"*": ["*", "js/*"],
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
"include": ["../**/*.ts", "*.json"],
|
"include": ["../**/*.ts", "*.json"],
|
||||||
"exclude": ["../**/*test*/**/*"]
|
"exclude": ["../**/*test*/**/*"],
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
"""
|
"""
|
||||||
A library of helper utilities for connecting Python to the browser environment.
|
A library of helper utilities for connecting Python to the browser environment.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Added by C:
|
# Added by C:
|
||||||
# JsException (from jsproxy.c)
|
# JsException (from jsproxy.c)
|
||||||
|
|
||||||
|
|
|
@ -37,10 +37,10 @@ T = TypeVar("T")
|
||||||
S = TypeVar("S")
|
S = TypeVar("S")
|
||||||
KT = TypeVar("KT") # Key type.
|
KT = TypeVar("KT") # Key type.
|
||||||
VT = TypeVar("VT") # Value type.
|
VT = TypeVar("VT") # Value type.
|
||||||
Tco = TypeVar("Tco", covariant=True) # Any type covariant containers.
|
T_co = TypeVar("T_co", covariant=True) # Any type covariant containers.
|
||||||
Vco = TypeVar("Vco", covariant=True) # Any type covariant containers.
|
V_co = TypeVar("V_co", covariant=True) # Any type covariant containers.
|
||||||
VTco = TypeVar("VTco", covariant=True) # Value type covariant containers.
|
VT_co = TypeVar("VT_co", covariant=True) # Value type covariant containers.
|
||||||
Tcontra = TypeVar("Tcontra", contravariant=True) # Ditto contravariant.
|
T_contra = TypeVar("T_contra", contravariant=True) # Ditto contravariant.
|
||||||
|
|
||||||
if "IN_PYTEST" in os.environ:
|
if "IN_PYTEST" in os.environ:
|
||||||
__name__ = _save_name
|
__name__ = _save_name
|
||||||
|
@@ -131,7 +131,7 @@ class JsProxy(metaclass=_JsProxyMetaClass):
         """Returns the JavaScript type of the ``JsProxy``.

         Corresponds to `typeof obj;` in JavaScript. You may also be interested
-        in the `constuctor` attribute which returns the type as an object.
+        in the `constructor` attribute which returns the type as an object.
         """
         return "object"
@ -238,11 +238,17 @@ class JsProxy(metaclass=_JsProxyMetaClass):
|
||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
depth: int = -1,
|
depth: int = -1,
|
||||||
default_converter: Callable[
|
default_converter: (
|
||||||
["JsProxy", Callable[["JsProxy"], Any], Callable[["JsProxy", Any], None]],
|
Callable[
|
||||||
Any,
|
[
|
||||||
]
|
"JsProxy",
|
||||||
| None = None,
|
Callable[["JsProxy"], Any],
|
||||||
|
Callable[["JsProxy", Any], None],
|
||||||
|
],
|
||||||
|
Any,
|
||||||
|
]
|
||||||
|
| None
|
||||||
|
) = None,
|
||||||
) -> Any:
|
) -> Any:
|
||||||
"""Convert the :class:`JsProxy` to a native Python object as best as
|
"""Convert the :class:`JsProxy` to a native Python object as best as
|
||||||
possible.
|
possible.
|
||||||
|
@ -572,7 +578,7 @@ class JsBuffer(JsProxy):
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
class JsIterator(JsProxy, Generic[Tco]):
|
class JsIterator(JsProxy, Generic[T_co]):
|
||||||
"""A JsProxy of a JavaScript iterator.
|
"""A JsProxy of a JavaScript iterator.
|
||||||
|
|
||||||
An object is a :py:class:`JsAsyncIterator` if it has a :js:meth:`~Iterator.next` method and either has a
|
An object is a :py:class:`JsAsyncIterator` if it has a :js:meth:`~Iterator.next` method and either has a
|
||||||
|
@ -581,14 +587,14 @@ class JsIterator(JsProxy, Generic[Tco]):
|
||||||
|
|
||||||
_js_type_flags = ["IS_ITERATOR"]
|
_js_type_flags = ["IS_ITERATOR"]
|
||||||
|
|
||||||
def __next__(self) -> Tco:
|
def __next__(self) -> T_co:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def __iter__(self) -> Iterator[Tco]:
|
def __iter__(self) -> Iterator[T_co]:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
class JsAsyncIterator(JsProxy, Generic[Tco]):
|
class JsAsyncIterator(JsProxy, Generic[T_co]):
|
||||||
"""A JsProxy of a JavaScript async iterator.
|
"""A JsProxy of a JavaScript async iterator.
|
||||||
|
|
||||||
An object is a :py:class:`JsAsyncIterator` if it has a
|
An object is a :py:class:`JsAsyncIterator` if it has a
|
||||||
|
@ -598,14 +604,14 @@ class JsAsyncIterator(JsProxy, Generic[Tco]):
|
||||||
|
|
||||||
_js_type_flags = ["IS_ASYNC_ITERATOR"]
|
_js_type_flags = ["IS_ASYNC_ITERATOR"]
|
||||||
|
|
||||||
def __anext__(self) -> Awaitable[Tco]:
|
def __anext__(self) -> Awaitable[T_co]:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def __aiter__(self) -> AsyncIterator[Tco]:
|
def __aiter__(self) -> AsyncIterator[T_co]:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
class JsIterable(JsProxy, Generic[Tco]):
|
class JsIterable(JsProxy, Generic[T_co]):
|
||||||
"""A JavaScript iterable object
|
"""A JavaScript iterable object
|
||||||
|
|
||||||
A JavaScript object is iterable if it has a :js:data:`Symbol.iterator` method.
|
A JavaScript object is iterable if it has a :js:data:`Symbol.iterator` method.
|
||||||
|
@ -613,11 +619,11 @@ class JsIterable(JsProxy, Generic[Tco]):
|
||||||
|
|
||||||
_js_type_flags = ["IS_ITERABLE"]
|
_js_type_flags = ["IS_ITERABLE"]
|
||||||
|
|
||||||
def __iter__(self) -> Iterator[Tco]:
|
def __iter__(self) -> Iterator[T_co]:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
class JsAsyncIterable(JsProxy, Generic[Tco]):
|
class JsAsyncIterable(JsProxy, Generic[T_co]):
|
||||||
"""A JavaScript async iterable object
|
"""A JavaScript async iterable object
|
||||||
|
|
||||||
A JavaScript object is async iterable if it has a :js:data:`Symbol.asyncIterator`
|
A JavaScript object is async iterable if it has a :js:data:`Symbol.asyncIterator`
|
||||||
|
@ -626,11 +632,11 @@ class JsAsyncIterable(JsProxy, Generic[Tco]):
|
||||||
|
|
||||||
_js_type_flags = ["IS_ASYNC_ITERABLE"]
|
_js_type_flags = ["IS_ASYNC_ITERABLE"]
|
||||||
|
|
||||||
def __aiter__(self) -> AsyncIterator[Tco]:
|
def __aiter__(self) -> AsyncIterator[T_co]:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
class JsGenerator(JsIterable[Tco], Generic[Tco, Tcontra, Vco]):
|
class JsGenerator(JsIterable[T_co], Generic[T_co, T_contra, V_co]):
|
||||||
"""A JavaScript generator
|
"""A JavaScript generator
|
||||||
|
|
||||||
A JavaScript object is treated as a generator if its
|
A JavaScript object is treated as a generator if its
|
||||||
|
@ -643,7 +649,7 @@ class JsGenerator(JsIterable[Tco], Generic[Tco, Tcontra, Vco]):
|
||||||
|
|
||||||
_js_type_flags = ["IS_GENERATOR"]
|
_js_type_flags = ["IS_GENERATOR"]
|
||||||
|
|
||||||
def send(self, value: Tcontra) -> Tco:
|
def send(self, value: T_contra) -> T_co:
|
||||||
"""
|
"""
|
||||||
Resumes the execution and "sends" a value into the generator function.
|
Resumes the execution and "sends" a value into the generator function.
|
||||||
|
|
||||||
|
@ -664,7 +670,7 @@ class JsGenerator(JsIterable[Tco], Generic[Tco, Tcontra, Vco]):
|
||||||
val: BaseException | object = ...,
|
val: BaseException | object = ...,
|
||||||
tb: TracebackType | None = ...,
|
tb: TracebackType | None = ...,
|
||||||
/,
|
/,
|
||||||
) -> Tco:
|
) -> T_co:
|
||||||
...
|
...
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
|
@ -674,14 +680,14 @@ class JsGenerator(JsIterable[Tco], Generic[Tco, Tcontra, Vco]):
|
||||||
val: None = ...,
|
val: None = ...,
|
||||||
tb: TracebackType | None = ...,
|
tb: TracebackType | None = ...,
|
||||||
/,
|
/,
|
||||||
) -> Tco:
|
) -> T_co:
|
||||||
...
|
...
|
||||||
|
|
||||||
@docs_argspec("(self, error: BaseException, /) -> Tco")
|
@docs_argspec("(self, error: BaseException, /) -> T_co")
|
||||||
def throw(
|
def throw(
|
||||||
self,
|
self,
|
||||||
*args: Any,
|
*args: Any,
|
||||||
) -> Tco:
|
) -> T_co:
|
||||||
"""
|
"""
|
||||||
Raises an exception at the point where the generator was paused, and
|
Raises an exception at the point where the generator was paused, and
|
||||||
returns the next value yielded by the generator function.
|
returns the next value yielded by the generator function.
|
||||||
|
@ -716,10 +722,10 @@ class JsGenerator(JsIterable[Tco], Generic[Tco, Tcontra, Vco]):
|
||||||
the generator has already exited due to an exception or normal exit.
|
the generator has already exited due to an exception or normal exit.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __next__(self) -> Tco:
|
def __next__(self) -> T_co:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def __iter__(self) -> "JsGenerator[Tco, Tcontra, Vco]":
|
def __iter__(self) -> "JsGenerator[T_co, T_contra, V_co]":
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
@ -750,7 +756,7 @@ class JsFetchResponse(JsProxy):
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
class JsAsyncGenerator(JsAsyncIterable[Tco], Generic[Tco, Tcontra, Vco]):
|
class JsAsyncGenerator(JsAsyncIterable[T_co], Generic[T_co, T_contra, V_co]):
|
||||||
"""A JavaScript :js:class:`AsyncGenerator`
|
"""A JavaScript :js:class:`AsyncGenerator`
|
||||||
|
|
||||||
A JavaScript object is treated as an async generator if it's
|
A JavaScript object is treated as an async generator if it's
|
||||||
|
@ -764,13 +770,13 @@ class JsAsyncGenerator(JsAsyncIterable[Tco], Generic[Tco, Tcontra, Vco]):
|
||||||
|
|
||||||
_js_type_flags = ["IS_ASYNC_GENERATOR"]
|
_js_type_flags = ["IS_ASYNC_GENERATOR"]
|
||||||
|
|
||||||
def __anext__(self) -> Awaitable[Tco]:
|
def __anext__(self) -> Awaitable[T_co]:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def __aiter__(self) -> "JsAsyncGenerator[Tco, Tcontra, Vco]":
|
def __aiter__(self) -> "JsAsyncGenerator[T_co, T_contra, V_co]":
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def asend(self, value: Tcontra, /) -> Awaitable[Tco]:
|
def asend(self, value: T_contra, /) -> Awaitable[T_co]:
|
||||||
"""Resumes the execution and "sends" a value into the async generator
|
"""Resumes the execution and "sends" a value into the async generator
|
||||||
function.
|
function.
|
||||||
|
|
||||||
|
@ -795,7 +801,7 @@ class JsAsyncGenerator(JsAsyncIterable[Tco], Generic[Tco, Tcontra, Vco]):
|
||||||
val: BaseException | object = ...,
|
val: BaseException | object = ...,
|
||||||
tb: TracebackType | None = ...,
|
tb: TracebackType | None = ...,
|
||||||
/,
|
/,
|
||||||
) -> Awaitable[Tco]:
|
) -> Awaitable[T_co]:
|
||||||
...
|
...
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
|
@ -805,11 +811,11 @@ class JsAsyncGenerator(JsAsyncIterable[Tco], Generic[Tco, Tcontra, Vco]):
|
||||||
val: None = ...,
|
val: None = ...,
|
||||||
tb: TracebackType | None = ...,
|
tb: TracebackType | None = ...,
|
||||||
/,
|
/,
|
||||||
) -> Awaitable[Tco]:
|
) -> Awaitable[T_co]:
|
||||||
...
|
...
|
||||||
|
|
||||||
@docs_argspec("(self, error: BaseException, /) -> Tco")
|
@docs_argspec("(self, error: BaseException, /) -> T_co")
|
||||||
def athrow(self, value: Any, *args: Any) -> Awaitable[Tco]:
|
def athrow(self, value: Any, *args: Any) -> Awaitable[T_co]:
|
||||||
"""Resumes the execution and raises an exception at the point where the
|
"""Resumes the execution and raises an exception at the point where the
|
||||||
generator was paused.
|
generator was paused.
|
||||||
|
|
||||||
|
@ -931,11 +937,17 @@ class JsArray(JsIterable[T], Generic[T], MutableSequence[T], metaclass=_ABCMeta)
|
||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
depth: int = -1,
|
depth: int = -1,
|
||||||
default_converter: Callable[
|
default_converter: (
|
||||||
["JsProxy", Callable[["JsProxy"], Any], Callable[["JsProxy", Any], None]],
|
Callable[
|
||||||
Any,
|
[
|
||||||
]
|
"JsProxy",
|
||||||
| None = None,
|
Callable[["JsProxy"], Any],
|
||||||
|
Callable[["JsProxy", Any], None],
|
||||||
|
],
|
||||||
|
Any,
|
||||||
|
]
|
||||||
|
| None
|
||||||
|
) = None,
|
||||||
) -> list[Any]:
|
) -> list[Any]:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@ -955,7 +967,7 @@ class JsTypedArray(JsBuffer, JsArray[int]):
|
||||||
buffer: JsBuffer
|
buffer: JsBuffer
|
||||||
|
|
||||||
|
|
||||||
class JsMap(JsIterable[KT], Generic[KT, VTco], Mapping[KT, VTco], metaclass=_ABCMeta):
|
class JsMap(JsIterable[KT], Generic[KT, VT_co], Mapping[KT, VT_co], metaclass=_ABCMeta):
|
||||||
"""A JavaScript Map
|
"""A JavaScript Map
|
||||||
|
|
||||||
To be considered a map, a JavaScript object must have a ``get`` method, it
|
To be considered a map, a JavaScript object must have a ``get`` method, it
|
||||||
|
@ -965,7 +977,7 @@ class JsMap(JsIterable[KT], Generic[KT, VTco], Mapping[KT, VTco], metaclass=_ABC
|
||||||
|
|
||||||
_js_type_flags = ["HAS_GET | HAS_LENGTH | IS_ITERABLE", "IS_OBJECT_MAP"]
|
_js_type_flags = ["HAS_GET | HAS_LENGTH | IS_ITERABLE", "IS_OBJECT_MAP"]
|
||||||
|
|
||||||
def __getitem__(self, idx: KT) -> VTco:
|
def __getitem__(self, idx: KT) -> VT_co:
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def __len__(self) -> int:
|
def __len__(self) -> int:
|
||||||
|
@ -978,33 +990,33 @@ class JsMap(JsIterable[KT], Generic[KT, VTco], Mapping[KT, VTco], metaclass=_ABC
|
||||||
"""Return a :py:class:`~collections.abc.KeysView` for the map."""
|
"""Return a :py:class:`~collections.abc.KeysView` for the map."""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def items(self) -> ItemsView[KT, VTco]:
|
def items(self) -> ItemsView[KT, VT_co]:
|
||||||
"""Return a :py:class:`~collections.abc.ItemsView` for the map."""
|
"""Return a :py:class:`~collections.abc.ItemsView` for the map."""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def values(self) -> ValuesView[VTco]:
|
def values(self) -> ValuesView[VT_co]:
|
||||||
"""Return a :py:class:`~collections.abc.ValuesView` for the map."""
|
"""Return a :py:class:`~collections.abc.ValuesView` for the map."""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get(self, key: KT, /) -> VTco | None:
|
def get(self, key: KT, /) -> VT_co | None:
|
||||||
...
|
...
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get(self, key: KT, default: VTco | T, /) -> VTco | T:
|
def get(self, key: KT, default: VT_co | T, /) -> VT_co | T:
|
||||||
...
|
...
|
||||||
|
|
||||||
@docs_argspec("(self, key: KT, default: VTco | None, /) -> VTco")
|
@docs_argspec("(self, key: KT, default: VT_co | None, /) -> VT_co")
|
||||||
def get(self, key: KT, default: Any = None, /) -> VTco:
|
def get(self, key: KT, default: Any = None, /) -> VT_co:
|
||||||
r"""If ``key in self``, returns ``self[key]``. Otherwise returns ``default``."""
|
r"""If ``key in self``, returns ``self[key]``. Otherwise returns ``default``."""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
class _SupportsKeysAndGetItem(Protocol[KT, VTco]):
|
class _SupportsKeysAndGetItem(Protocol[KT, VT_co]):
|
||||||
def keys(self) -> Iterable[KT]:
|
def keys(self) -> Iterable[KT]:
|
||||||
...
|
...
|
||||||
|
|
||||||
def __getitem__(self, __key: KT) -> VTco:
|
def __getitem__(self, __key: KT) -> VT_co:
|
||||||
...
|
...
|
||||||
|
|
||||||
|
|
||||||
|
@ -1257,10 +1269,12 @@ def to_js(
|
||||||
pyproxies: JsProxy | None = None,
|
pyproxies: JsProxy | None = None,
|
||||||
create_pyproxies: bool = True,
|
create_pyproxies: bool = True,
|
||||||
dict_converter: Callable[[Iterable[JsArray[Any]]], JsProxy] | None = None,
|
dict_converter: Callable[[Iterable[JsArray[Any]]], JsProxy] | None = None,
|
||||||
default_converter: Callable[
|
default_converter: (
|
||||||
[Any, Callable[[Any], JsProxy], Callable[[Any, JsProxy], None]], JsProxy
|
Callable[
|
||||||
]
|
[Any, Callable[[Any], JsProxy], Callable[[Any, JsProxy], None]], JsProxy
|
||||||
| None = None,
|
]
|
||||||
|
| None
|
||||||
|
) = None,
|
||||||
) -> JsArray[Any]:
|
) -> JsArray[Any]:
|
||||||
...
|
...
|
||||||
|
|
||||||
|
@ -1274,10 +1288,12 @@ def to_js(
|
||||||
pyproxies: JsProxy | None,
|
pyproxies: JsProxy | None,
|
||||||
create_pyproxies: bool,
|
create_pyproxies: bool,
|
||||||
dict_converter: None,
|
dict_converter: None,
|
||||||
default_converter: Callable[
|
default_converter: (
|
||||||
[Any, Callable[[Any], JsProxy], Callable[[Any, JsProxy], None]], JsProxy
|
Callable[
|
||||||
]
|
[Any, Callable[[Any], JsProxy], Callable[[Any, JsProxy], None]], JsProxy
|
||||||
| None = None,
|
]
|
||||||
|
| None
|
||||||
|
) = None,
|
||||||
) -> JsMap[Any, Any]:
|
) -> JsMap[Any, Any]:
|
||||||
...
|
...
|
||||||
|
|
||||||
|
@ -1291,10 +1307,12 @@ def to_js(
|
||||||
pyproxies: JsProxy | None = None,
|
pyproxies: JsProxy | None = None,
|
||||||
create_pyproxies: bool = True,
|
create_pyproxies: bool = True,
|
||||||
dict_converter: Callable[[Iterable[JsArray[Any]]], JsProxy] | None = None,
|
dict_converter: Callable[[Iterable[JsArray[Any]]], JsProxy] | None = None,
|
||||||
default_converter: Callable[
|
default_converter: (
|
||||||
[Any, Callable[[Any], JsProxy], Callable[[Any, JsProxy], None]], JsProxy
|
Callable[
|
||||||
]
|
[Any, Callable[[Any], JsProxy], Callable[[Any, JsProxy], None]], JsProxy
|
||||||
| None = None,
|
]
|
||||||
|
| None
|
||||||
|
) = None,
|
||||||
) -> Any:
|
) -> Any:
|
||||||
...
|
...
|
||||||
|
|
||||||
|
@ -1307,10 +1325,12 @@ def to_js(
|
||||||
pyproxies: JsProxy | None = None,
|
pyproxies: JsProxy | None = None,
|
||||||
create_pyproxies: bool = True,
|
create_pyproxies: bool = True,
|
||||||
dict_converter: Callable[[Iterable[JsArray[Any]]], JsProxy] | None = None,
|
dict_converter: Callable[[Iterable[JsArray[Any]]], JsProxy] | None = None,
|
||||||
default_converter: Callable[
|
default_converter: (
|
||||||
[Any, Callable[[Any], JsProxy], Callable[[Any, JsProxy], None]], JsProxy
|
Callable[
|
||||||
]
|
[Any, Callable[[Any], JsProxy], Callable[[Any, JsProxy], None]], JsProxy
|
||||||
| None = None,
|
]
|
||||||
|
| None
|
||||||
|
) = None,
|
||||||
) -> Any:
|
) -> Any:
|
||||||
"""Convert the object to JavaScript.
|
"""Convert the object to JavaScript.
|
||||||
|
|
||||||
|
|
|
@ -108,7 +108,7 @@ class JsCanvasElement(JsDomElement):
|
||||||
antialias: bool = False,
|
antialias: bool = False,
|
||||||
alpha: bool = False,
|
alpha: bool = False,
|
||||||
depth: bool = False,
|
depth: bool = False,
|
||||||
stencil: bool = False
|
stencil: bool = False,
|
||||||
) -> Any: ...
|
) -> Any: ...
|
||||||
|
|
||||||
class ArrayBuffer(_JsObject):
|
class ArrayBuffer(_JsObject):
|
||||||
|
|
|
@ -35,7 +35,14 @@ TARGETS = {"site": SITE_PACKAGES, "stdlib": STD_LIB, "dynlib": DSO_DIR}
|
||||||
|
|
||||||
|
|
||||||
ZIP_TYPES = {".whl", ".zip"}
|
ZIP_TYPES = {".whl", ".zip"}
|
||||||
TAR_TYPES = {".tar", ".gz", ".bz", ".gz", ".tgz", ".bz2", ".tbz2"}
|
TAR_TYPES = {
|
||||||
|
".bz",
|
||||||
|
".bz2",
|
||||||
|
".tbz2",
|
||||||
|
".gz",
|
||||||
|
".tgz",
|
||||||
|
".tar",
|
||||||
|
}
|
||||||
EXTENSION_TAGS = [suffix.removesuffix(".so") for suffix in EXTENSION_SUFFIXES]
|
EXTENSION_TAGS = [suffix.removesuffix(".so") for suffix in EXTENSION_SUFFIXES]
|
||||||
# See PEP 3149. I think the situation has since been updated since PEP 3149 does
|
# See PEP 3149. I think the situation has since been updated since PEP 3149 does
|
||||||
# not talk about platform triples. But I could not find any newer pep discussing
|
# not talk about platform triples. But I could not find any newer pep discussing
|
||||||
|
@ -100,9 +107,7 @@ def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
|
||||||
canonical_name = canonicalize_name(name)
|
canonical_name = canonicalize_name(name)
|
||||||
if not info_dir_name.startswith(canonical_name):
|
if not info_dir_name.startswith(canonical_name):
|
||||||
raise UnsupportedWheel(
|
raise UnsupportedWheel(
|
||||||
".dist-info directory {!r} does not start with {!r}".format(
|
f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"
|
||||||
info_dir, canonical_name
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
return info_dir
|
return info_dir
|
||||||
|
@ -117,7 +122,10 @@ def make_whlfile(
|
||||||
if IN_BROWSER:
|
if IN_BROWSER:
|
||||||
shutil.register_archive_format("whl", make_whlfile, description="Wheel file")
|
shutil.register_archive_format("whl", make_whlfile, description="Wheel file")
|
||||||
shutil.register_unpack_format(
|
shutil.register_unpack_format(
|
||||||
"whl", [".whl", ".wheel"], shutil._unpack_zipfile, description="Wheel file" # type: ignore[attr-defined]
|
"whl",
|
||||||
|
[".whl", ".wheel"],
|
||||||
|
shutil._unpack_zipfile, # type: ignore[attr-defined]
|
||||||
|
description="Wheel file",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -3,7 +3,6 @@ import sys
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
import __main__
|
import __main__
|
||||||
|
|
||||||
from _pyodide._importhook import jsfinder
|
from _pyodide._importhook import jsfinder
|
||||||
|
|
||||||
from .ffi import JsProxy
|
from .ffi import JsProxy
|
||||||
|
|
|
@ -117,7 +117,7 @@ class PyodideFuture(Future[T]):
|
||||||
except Exception as result_exception:
|
except Exception as result_exception:
|
||||||
result.set_exception(result_exception)
|
result.set_exception(result_exception)
|
||||||
return
|
return
|
||||||
result.set_result(r) # type:ignore[arg-type]
|
result.set_result(r)
|
||||||
|
|
||||||
def wrapper(fut: Future[T]) -> None:
|
def wrapper(fut: Future[T]) -> None:
|
||||||
asyncio.ensure_future(callback(fut))
|
asyncio.ensure_future(callback(fut))
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
{
|
{
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"module": "NodeNext"
|
"module": "NodeNext",
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@ -360,22 +360,18 @@ def test_await_pyproxy_eval_async(selenium):
|
||||||
== 2
|
== 2
|
||||||
)
|
)
|
||||||
|
|
||||||
assert (
|
assert selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
let finally_occurred = false;
|
let finally_occurred = false;
|
||||||
let c = pyodide._api.pyodide_code.eval_code_async("1+1");
|
let c = pyodide._api.pyodide_code.eval_code_async("1+1");
|
||||||
let result = await c.finally(() => { finally_occurred = true; });
|
let result = await c.finally(() => { finally_occurred = true; });
|
||||||
c.destroy();
|
c.destroy();
|
||||||
return [result, finally_occurred];
|
return [result, finally_occurred];
|
||||||
"""
|
"""
|
||||||
)
|
) == [2, True]
|
||||||
== [2, True]
|
|
||||||
)
|
|
||||||
|
|
||||||
assert (
|
assert selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
let finally_occurred = false;
|
let finally_occurred = false;
|
||||||
let err_occurred = false;
|
let err_occurred = false;
|
||||||
let c = pyodide._api.pyodide_code.eval_code_async("raise ValueError('hi')");
|
let c = pyodide._api.pyodide_code.eval_code_async("raise ValueError('hi')");
|
||||||
|
@ -387,9 +383,7 @@ def test_await_pyproxy_eval_async(selenium):
|
||||||
c.destroy();
|
c.destroy();
|
||||||
return [finally_occurred, err_occurred];
|
return [finally_occurred, err_occurred];
|
||||||
"""
|
"""
|
||||||
)
|
) == [True, True]
|
||||||
== [True, True]
|
|
||||||
)
|
|
||||||
|
|
||||||
assert selenium.run_js(
|
assert selenium.run_js(
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
|
|
||||||
for a basic nodejs-based test, see src/js/test/filesystem.test.js
|
for a basic nodejs-based test, see src/js/test/filesystem.test.js
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -67,9 +67,8 @@ def test_jsproxy_dir(selenium):
|
||||||
|
|
||||||
|
|
||||||
def test_jsproxy_getattr(selenium):
|
def test_jsproxy_getattr(selenium):
|
||||||
assert (
|
assert selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
self.a = { x : 2, y : "9", typeof : 7 };
|
self.a = { x : 2, y : "9", typeof : 7 };
|
||||||
let pyresult = pyodide.runPython(`
|
let pyresult = pyodide.runPython(`
|
||||||
from js import a
|
from js import a
|
||||||
|
@ -79,9 +78,7 @@ def test_jsproxy_getattr(selenium):
|
||||||
pyresult.destroy();
|
pyresult.destroy();
|
||||||
return result;
|
return result;
|
||||||
"""
|
"""
|
||||||
)
|
) == [2, "9", "object"]
|
||||||
== [2, "9", "object"]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@run_in_pyodide
|
@run_in_pyodide
|
||||||
|
@ -93,9 +90,9 @@ def test_jsproxy_getattr_errors(selenium):
|
||||||
|
|
||||||
o = run_js("({get a() { throw new Error('oops'); } })")
|
o = run_js("({get a() { throw new Error('oops'); } })")
|
||||||
with pytest.raises(AttributeError):
|
with pytest.raises(AttributeError):
|
||||||
o.x
|
o.x # noqa: B018
|
||||||
with pytest.raises(JsException):
|
with pytest.raises(JsException):
|
||||||
o.a
|
o.a # noqa: B018
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.xfail_browsers(node="No document in node")
|
@pytest.mark.xfail_browsers(node="No document in node")
|
||||||
|
@ -267,9 +264,8 @@ def test_jsproxy_implicit_iter(selenium):
|
||||||
|
|
||||||
|
|
||||||
def test_jsproxy_call1(selenium):
|
def test_jsproxy_call1(selenium):
|
||||||
assert (
|
assert selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
self.f = function(){ return arguments.length; };
|
self.f = function(){ return arguments.length; };
|
||||||
let pyresult = pyodide.runPython(
|
let pyresult = pyodide.runPython(
|
||||||
`
|
`
|
||||||
|
@ -281,9 +277,7 @@ def test_jsproxy_call1(selenium):
|
||||||
pyresult.destroy();
|
pyresult.destroy();
|
||||||
return result;
|
return result;
|
||||||
"""
|
"""
|
||||||
)
|
) == list(range(10))
|
||||||
== list(range(10))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@run_in_pyodide
|
@run_in_pyodide
|
||||||
|
@ -295,9 +289,8 @@ def test_jsproxy_call2(selenium):
|
||||||
|
|
||||||
|
|
||||||
def test_jsproxy_call_kwargs(selenium):
|
def test_jsproxy_call_kwargs(selenium):
|
||||||
assert (
|
assert selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
self.kwarg_function = ({ a = 1, b = 1 }) => {
|
self.kwarg_function = ({ a = 1, b = 1 }) => {
|
||||||
return [a, b];
|
return [a, b];
|
||||||
};
|
};
|
||||||
|
@ -308,9 +301,7 @@ def test_jsproxy_call_kwargs(selenium):
|
||||||
`
|
`
|
||||||
);
|
);
|
||||||
"""
|
"""
|
||||||
)
|
) == [10, 2]
|
||||||
== [10, 2]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.xfail
|
@pytest.mark.xfail
|
||||||
|
@ -2483,7 +2474,7 @@ def test_python_reserved_keywords(selenium):
|
||||||
)
|
)
|
||||||
assert o.match == 222
|
assert o.match == 222
|
||||||
with pytest.raises(AttributeError):
|
with pytest.raises(AttributeError):
|
||||||
o.match_
|
o.match_ # noqa: B018
|
||||||
assert eval("o.match") == 222
|
assert eval("o.match") == 222
|
||||||
keys = ["async", "await", "False", "nonlocal", "yield", "try", "assert"]
|
keys = ["async", "await", "False", "nonlocal", "yield", "try", "assert"]
|
||||||
for k in keys:
|
for k in keys:
|
||||||
|
@ -2565,4 +2556,4 @@ def test_js_proxy_attribute(selenium):
|
||||||
assert x.b == 7 # Previously this raised AttributeError
|
assert x.b == 7 # Previously this raised AttributeError
|
||||||
assert x.c is None
|
assert x.c is None
|
||||||
with pytest.raises(AttributeError):
|
with pytest.raises(AttributeError):
|
||||||
x.d
|
x.d # noqa: B018
|
||||||
|
|
|
@ -1468,9 +1468,8 @@ def test_module_not_found_hook(selenium_standalone):
|
||||||
|
|
||||||
def test_args(selenium_standalone_noload):
|
def test_args(selenium_standalone_noload):
|
||||||
selenium = selenium_standalone_noload
|
selenium = selenium_standalone_noload
|
||||||
assert (
|
assert selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
self.stdoutStrings = [];
|
self.stdoutStrings = [];
|
||||||
self.stderrStrings = [];
|
self.stderrStrings = [];
|
||||||
function stdout(s){
|
function stdout(s){
|
||||||
|
@ -1491,9 +1490,7 @@ def test_args(selenium_standalone_noload):
|
||||||
pyodide._module._run_main();
|
pyodide._module._run_main();
|
||||||
return stdoutStrings.pop()
|
return stdoutStrings.pop()
|
||||||
"""
|
"""
|
||||||
)
|
) == repr([x * x + 1 for x in range(10)])
|
||||||
== repr([x * x + 1 for x in range(10)])
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_args_OO(selenium_standalone_noload):
|
def test_args_OO(selenium_standalone_noload):
|
||||||
|
|
|
@ -101,9 +101,8 @@ def test_del_builtin(selenium):
|
||||||
|
|
||||||
def test_in_globals(selenium):
|
def test_in_globals(selenium):
|
||||||
selenium.run("yyyyy = 7")
|
selenium.run("yyyyy = 7")
|
||||||
assert (
|
assert selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
let result = [];
|
let result = [];
|
||||||
result.push(pyodide.globals.has("xxxxx"));
|
result.push(pyodide.globals.has("xxxxx"));
|
||||||
result.push(pyodide.globals.has("yyyyy"));
|
result.push(pyodide.globals.has("yyyyy"));
|
||||||
|
@ -111,9 +110,7 @@ def test_in_globals(selenium):
|
||||||
result.push(pyodide.globals.has("open"));
|
result.push(pyodide.globals.has("open"));
|
||||||
return result;
|
return result;
|
||||||
"""
|
"""
|
||||||
)
|
) == [False, True, True, True]
|
||||||
== [False, True, True, True]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_pyproxy_copy(selenium):
|
def test_pyproxy_copy(selenium):
|
||||||
|
@ -604,8 +601,7 @@ def test_pyproxy_mixins32(selenium, configurable, writable):
|
||||||
assertThrows(() => delete d.x, "TypeError", "%s");
|
assertThrows(() => delete d.x, "TypeError", "%s");
|
||||||
}
|
}
|
||||||
d.destroy();
|
d.destroy();
|
||||||
"""
|
""" % (setText, deleteText)
|
||||||
% (setText, deleteText)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -164,7 +164,7 @@ def test_number_conversions(selenium_module_scope, n):
|
||||||
x_js = run_js("(s) => self.x_js = eval(s)")(json.dumps(n))
|
x_js = run_js("(s) => self.x_js = eval(s)")(json.dumps(n))
|
||||||
run_js("(x_py) => Number(x_py) === x_js")(n)
|
run_js("(x_py) => Number(x_py) === x_js")(n)
|
||||||
|
|
||||||
if type(x_js) is float:
|
if isinstance(x_js, float):
|
||||||
assert x_js == float(n)
|
assert x_js == float(n)
|
||||||
else:
|
else:
|
||||||
assert x_js == n
|
assert x_js == n
|
||||||
|
@ -352,7 +352,6 @@ def test_big_int_conversions3(selenium_module_scope, n, exp):
|
||||||
@run_in_pyodide
|
@run_in_pyodide
|
||||||
def test_hyp_py2js2py(selenium, obj):
|
def test_hyp_py2js2py(selenium, obj):
|
||||||
import __main__
|
import __main__
|
||||||
|
|
||||||
from pyodide.code import run_js
|
from pyodide.code import run_js
|
||||||
|
|
||||||
__main__.obj = obj
|
__main__.obj = obj
|
||||||
|
@ -379,7 +378,6 @@ def test_hyp_py2js2py(selenium, obj):
|
||||||
@run_in_pyodide
|
@run_in_pyodide
|
||||||
def test_hyp_py2js2py_2(selenium, obj):
|
def test_hyp_py2js2py_2(selenium, obj):
|
||||||
import __main__
|
import __main__
|
||||||
|
|
||||||
from pyodide.code import run_js
|
from pyodide.code import run_js
|
||||||
|
|
||||||
__main__.o = obj
|
__main__.o = obj
|
||||||
|
@ -393,7 +391,6 @@ def test_hyp_py2js2py_2(selenium, obj):
|
||||||
@run_in_pyodide
|
@run_in_pyodide
|
||||||
def test_big_integer_py2js2py(selenium, a):
|
def test_big_integer_py2js2py(selenium, a):
|
||||||
import __main__
|
import __main__
|
||||||
|
|
||||||
from pyodide.code import run_js
|
from pyodide.code import run_js
|
||||||
|
|
||||||
__main__.a = a
|
__main__.a = a
|
||||||
|
@ -412,7 +409,6 @@ def test_big_integer_py2js2py(selenium, a):
|
||||||
@run_in_pyodide
|
@run_in_pyodide
|
||||||
def test_hyp_tojs_no_crash(selenium, obj):
|
def test_hyp_tojs_no_crash(selenium, obj):
|
||||||
import __main__
|
import __main__
|
||||||
|
|
||||||
from pyodide.code import run_js
|
from pyodide.code import run_js
|
||||||
|
|
||||||
__main__.x = obj
|
__main__.x = obj
|
||||||
|
@ -475,19 +471,16 @@ def test_python2js1(selenium, py, js):
|
||||||
def test_python2js2(selenium):
|
def test_python2js2(selenium):
|
||||||
from pyodide.code import run_js
|
from pyodide.code import run_js
|
||||||
|
|
||||||
assert (
|
assert list(
|
||||||
list(
|
run_js(
|
||||||
run_js(
|
"""
|
||||||
"""
|
|
||||||
(x) => {
|
(x) => {
|
||||||
x = x.toJs();
|
x = x.toJs();
|
||||||
return [x.constructor.name, x.length, x[0]];
|
return [x.constructor.name, x.length, x[0]];
|
||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
)(b"bytes")
|
)(b"bytes")
|
||||||
)
|
) == ["Uint8Array", 5, 98]
|
||||||
== ["Uint8Array", 5, 98]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@run_in_pyodide
|
@run_in_pyodide
|
||||||
|
@ -510,20 +503,17 @@ def test_python2js3(selenium):
|
||||||
def test_python2js4(selenium):
|
def test_python2js4(selenium):
|
||||||
from pyodide.code import run_js
|
from pyodide.code import run_js
|
||||||
|
|
||||||
assert (
|
assert list(
|
||||||
list(
|
run_js(
|
||||||
run_js(
|
"""
|
||||||
"""
|
|
||||||
(proxy) => {
|
(proxy) => {
|
||||||
let typename = proxy.type;
|
let typename = proxy.type;
|
||||||
let x = proxy.toJs();
|
let x = proxy.toJs();
|
||||||
return [proxy.type, x.constructor.name, x.get(42)];
|
return [proxy.type, x.constructor.name, x.get(42)];
|
||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
)({42: 64})
|
)({42: 64})
|
||||||
)
|
) == ["dict", "Map", 64]
|
||||||
== ["dict", "Map", 64]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@run_in_pyodide
|
@run_in_pyodide
|
||||||
|
@ -1018,11 +1008,11 @@ def test_jsproxy_attribute_error(selenium):
|
||||||
assert point.y == 43
|
assert point.y == 43
|
||||||
|
|
||||||
with pytest.raises(AttributeError, match="z"):
|
with pytest.raises(AttributeError, match="z"):
|
||||||
point.z
|
point.z # noqa: B018
|
||||||
|
|
||||||
del point.y
|
del point.y
|
||||||
with pytest.raises(AttributeError, match="y"):
|
with pytest.raises(AttributeError, match="y"):
|
||||||
point.y
|
point.y # noqa: B018
|
||||||
|
|
||||||
assert run_js("(point) => point.y;")(point) is None
|
assert run_js("(point) => point.y;")(point) is None
|
||||||
|
|
||||||
|
@ -1228,33 +1218,27 @@ def test_tojs8(selenium):
|
||||||
|
|
||||||
|
|
||||||
def test_tojs9(selenium):
|
def test_tojs9(selenium):
|
||||||
assert (
|
assert set(
|
||||||
set(
|
selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
return Array.from(pyodide.runPython(`
|
return Array.from(pyodide.runPython(`
|
||||||
from pyodide.ffi import to_js
|
from pyodide.ffi import to_js
|
||||||
to_js({ 1, "1" })
|
to_js({ 1, "1" })
|
||||||
`).values())
|
`).values())
|
||||||
"""
|
"""
|
||||||
)
|
|
||||||
)
|
)
|
||||||
== {1, "1"}
|
) == {1, "1"}
|
||||||
)
|
|
||||||
|
|
||||||
assert (
|
assert dict(
|
||||||
dict(
|
selenium.run_js(
|
||||||
selenium.run_js(
|
"""
|
||||||
"""
|
|
||||||
return Array.from(pyodide.runPython(`
|
return Array.from(pyodide.runPython(`
|
||||||
from pyodide.ffi import to_js
|
from pyodide.ffi import to_js
|
||||||
to_js({ 1 : 7, "1" : 9 })
|
to_js({ 1 : 7, "1" : 9 })
|
||||||
`).entries())
|
`).entries())
|
||||||
"""
|
"""
|
||||||
)
|
|
||||||
)
|
)
|
||||||
== {1: 7, "1": 9}
|
) == {1: 7, "1": 9}
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@run_in_pyodide
|
@run_in_pyodide
|
||||||
|
|
|
@ -54,7 +54,7 @@ def test_deploy_to_s3_overwrite(tmp_path, capsys):
|
||||||
assert get_object_list() == {"dev/full/a.whl", "dev/full/b.tar", "dev/full/c.zip"}
|
assert get_object_list() == {"dev/full/a.whl", "dev/full/b.tar", "dev/full/c.zip"}
|
||||||
|
|
||||||
# Writing a second time to the same prefix with overwrite=False should fail
|
# Writing a second time to the same prefix with overwrite=False should fail
|
||||||
with pytest.raises(Exception):
|
with pytest.raises(Exception): # noqa: B017
|
||||||
deploy_to_s3_main(
|
deploy_to_s3_main(
|
||||||
tmp_path,
|
tmp_path,
|
||||||
remote_prefix=PurePosixPath("dev/full/"),
|
remote_prefix=PurePosixPath("dev/full/"),
|
||||||
|
|