chore: pyupgrade 3.9+ (#2195)

Henry Schreiner 2022-02-20 17:13:37 -05:00 committed by GitHub
parent 1a8514d4a8
commit ed22c06e07
38 changed files with 208 additions and 217 deletions
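
The recurring rewrites below are the ones pyupgrade applies for a Python 3.9 floor: PEP 585 builtin generics (List[str] -> list[str]), set literals, f-strings and str.format auto-numbering, functools.cache, the default open() mode, and the IOError -> OSError alias. A condensed before/after sketch of the target style (an editorial summary, not part of the diff):

def names() -> list[str]:              # was: -> List[str]  (PEP 585, 3.9+)
    return sorted({"fft", "hyantes"})  # was: sorted(set(["fft", "hyantes"]))

name, dt = "fft", 0.25
print(f"{name}: {dt:.6f} ", end="")    # was: "{}: {:.6f} ".format(name, dt)

with open(__file__) as f:              # was: open(__file__, "r")
    f.readline()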


@ -10,6 +10,20 @@ repos:
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/asottile/pyupgrade
rev: "v2.31.0"
hooks:
- id: pyupgrade
args: ["--py39-plus"]
exclude: ^packages/micropip/src/micropip/externals
- repo: https://github.com/hadialqattan/pycln
rev: "v1.1.0"
hooks:
- id: pycln
args: [--config=pyproject.toml]
stages: [manual]
- repo: https://github.com/psf/black
rev: "22.1.0"
hooks:
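
Outside of CI, the same rewrite can be reproduced by running pyupgrade directly; a sketch, assuming pyupgrade v2.31.0 is installed and with example.py as a placeholder path (the exclude entry above keeps the vendored micropip externals untouched):

import subprocess

# pyupgrade exits nonzero when it rewrites a file, so don't treat that as an error.
subprocess.run(["pyupgrade", "--py39-plus", "example.py"], check=False)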


@ -5,20 +5,20 @@ import subprocess
import sys
from time import time
sys.path.insert(0, str((Path(__file__).resolve().parents[1] / "test")))
sys.path.insert(0, str((Path(__file__).resolve().parents[1])))
sys.path.insert(0, str(Path(__file__).resolve().parents[1] / "test"))
sys.path.insert(0, str(Path(__file__).resolve().parents[1]))
import conftest # noqa: E402
SKIP = set(["fft", "hyantes", "README"])
SKIP = {"fft", "hyantes", "README"}
def print_entry(name, res):
print(" - ", name)
print(" " * 4, end="")
for name, dt in res.items():
print("{}: {:.6f} ".format(name, dt), end="")
print(f"{name}: {dt:.6f} ", end="")
print("")
@ -75,7 +75,7 @@ def parse_numpy_benchmark(filename):
for line in fp:
m = re.match(r"^#\s*(setup|run): (.*)$", line)
if m:
line = "{} = {!r}\n".format(m.group(1), m.group(2))
line = f"{m.group(1)} = {m.group(2)!r}\n"
lines.append(line)
return "".join(lines)


@ -17,7 +17,6 @@ import shutil
import functools
import pytest
from typing import List
ROOT_PATH = pathlib.Path(__file__).parents[0].resolve()
TEST_PATH = ROOT_PATH / "src" / "tests"
@ -75,7 +74,7 @@ def pytest_collection_modifyitems(config, items):
@functools.cache
def built_packages() -> List[str]:
def built_packages() -> list[str]:
"""Returns the list of built package names from packages.json"""
packages_json_path = BUILD_PATH / "packages.json"
if not packages_json_path.exists():
@ -328,7 +327,7 @@ class SeleniumWrapper:
)
def load_package(self, packages):
self.run_js("await pyodide.loadPackage({!r})".format(packages))
self.run_js(f"await pyodide.loadPackage({packages!r})")
@property
def urls(self):
@ -418,10 +417,10 @@ class NodeWrapper(SeleniumWrapper):
def run_js_inner(self, code, check_code):
check_code = ""
wrapped = """
let result = await (async () => { %s })();
%s
let result = await (async () => {{ {} }})();
{}
return result;
""" % (
""".format(
code,
check_code,
)
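
The one cost of moving run_js_inner from %-interpolation to str.format is that the literal JavaScript braces must be doubled ({{ }}), while a bare {} remains a substitution slot. A minimal check (illustrative):

wrapped = """
let result = await (async () => {{ {} }})();
{}
return result;
""".format("return 1;", "")
# Doubled braces emit literal { }; the single {} slots receive code and check_code.
assert "(async () => { return 1; })()" in wrapped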


@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Configuration file for the Sphinx documentation builder.
# -- Path setup --------------------------------------------------------------
@ -10,7 +9,7 @@ import shutil
import subprocess
from pathlib import Path
from typing import Dict, Any
from typing import Any
# -- Project information -----------------------------------------------------
@ -84,7 +83,7 @@ html_theme = "sphinx_book_theme"
html_logo = "_static/img/pyodide-logo.png"
# theme-specific options
html_theme_options: Dict[str, Any] = {}
html_theme_options: dict[str, Any] = {}
# paths that contain custom static files (such as style sheets)
html_static_path = ["_static"]


@ -3,14 +3,14 @@ Monkey patch autodoc to recursively include submodules as well. We have to
import the submodules for it to find them.
"""
from typing import Any, Dict, List, Tuple
from typing import Any, Dict, Tuple
from sphinx.util.inspect import safe_getattr
from sphinx.ext.autodoc import ModuleDocumenter, ObjectMember # type: ignore
__all__ = ["monkeypatch_module_documenter"]
def get_module_members(module: Any) -> List[Tuple[str, Any]]:
def get_module_members(module: Any) -> list[tuple[str, Any]]:
members = {} # type: Dict[str, Tuple[str, Any]]
for name in dir(module):
try:


@ -21,13 +21,13 @@ from sphinx_js.renderers import (
AutoClassRenderer,
)
from typing import Any, Dict, List
from typing import Any
_orig_convert_node = TsAnalyzer._convert_node
_orig_type_name = TsAnalyzer._type_name
def destructure_param(param: Dict[str, Any]) -> List[Dict[str, Any]]:
def destructure_param(param: dict[str, Any]) -> list[dict[str, Any]]:
"""We want to document a destructured argument as if it were several
separate arguments. This finds complex inline object types in the arguments
list of a function and "destructures" them into separately documented arguments.
@ -67,7 +67,7 @@ def destructure_param(param: Dict[str, Any]) -> List[Dict[str, Any]]:
return result
def fix_up_inline_object_signature(self: TsAnalyzer, node: Dict[str, Any]):
def fix_up_inline_object_signature(self: TsAnalyzer, node: dict[str, Any]):
"""Calls get_destructured_children on inline object types"""
kind = node.get("kindString")
if kind not in ["Call signature", "Constructor signature"]:
@ -86,7 +86,7 @@ def fix_up_inline_object_signature(self: TsAnalyzer, node: Dict[str, Any]):
node["parameters"] = new_params
def _convert_node(self: TsAnalyzer, node: Dict[str, Any]):
def _convert_node(self: TsAnalyzer, node: dict[str, Any]):
"""Monkey patch for TsAnalyzer._convert_node.
Fixes two crashes and separates documentation for destructured object
@ -508,7 +508,7 @@ def get_jsdoc_summary_directive(app):
for prefix, name, sig, summary, real_name in items:
qualifier = "any" # <== Only thing changed from autosummary version
if "nosignatures" not in self.options:
col1 = "%s:%s:`%s <%s>`\\ %s" % (
col1 = "{}:{}:`{} <{}>`\\ {}".format(
prefix,
qualifier,
name,
@ -516,7 +516,7 @@ def get_jsdoc_summary_directive(app):
rst.escape(sig),
)
else:
col1 = "%s:%s:`%s <%s>`" % (prefix, qualifier, name, real_name)
col1 = f"{prefix}:{qualifier}:`{name} <{real_name}>`"
col2 = summary
append_row(col1, col2)


@ -2,7 +2,7 @@ from docutils import nodes
from docutils.parsers.rst import Directive
import sys
import pathlib
from typing import Dict, Any, Tuple, List
from typing import Any
from sphinx import addnodes
@ -43,7 +43,7 @@ def get_packages_summary_directive(app):
return result
def parse_package_info(self, config: pathlib.Path) -> Tuple[str, str, bool]:
def parse_package_info(self, config: pathlib.Path) -> tuple[str, str, bool]:
yaml_data = parse_package_config(config)
name = yaml_data["package"]["name"]
@ -54,7 +54,7 @@ def get_packages_summary_directive(app):
def get_package_metadata_list(
self, directory: pathlib.Path
) -> List[pathlib.Path]:
) -> list[pathlib.Path]:
"""Return metadata files of packages in alphabetical order (case insensitive)"""
return sorted(
directory.glob("**/meta.yaml"),
@ -62,8 +62,8 @@ def get_packages_summary_directive(app):
)
def format_packages_table(
self, packages: Dict[str, Any], columns: Tuple[str]
) -> List[Any]:
self, packages: dict[str, Any], columns: tuple[str]
) -> list[Any]:
table_spec = addnodes.tabular_col_spec()
table_spec["spec"] = r"\X{1}{2}\X{1}{2}"


@ -111,8 +111,7 @@ class FigureCanvasWasm(FigureCanvasBase):
div.addEventListener("contextmenu", ignore)
div.setAttribute(
"style",
"margin: 0 auto; text-align: center;"
+ "width: {}px".format(width / self._ratio),
"margin: 0 auto; text-align: center;" + f"width: {width / self._ratio}px",
)
div.id = self._id


@ -163,7 +163,7 @@ class NavigationToolbar2HTMLCanvas(NavigationToolbar2Wasm):
mimetype, base64.b64encode(data.getvalue()).decode("ascii")
),
)
element.setAttribute("download", "plot.{}".format(format))
element.setAttribute("download", f"plot.{format}")
element.style.display = "none"
document.body.appendChild(element)
@ -185,7 +185,7 @@ class GraphicsContextHTMLCanvas(GraphicsContextBase):
self._capstyle = cs
self.renderer.ctx.lineCap = _capstyle_d[cs]
else:
raise ValueError("Unrecognized cap style. Found {0}".format(cs))
raise ValueError(f"Unrecognized cap style. Found {cs}")
def set_clip_rectangle(self, rectangle):
self.renderer.ctx.save()
@ -223,7 +223,7 @@ class GraphicsContextHTMLCanvas(GraphicsContextBase):
self._joinstyle = js
self.renderer.ctx.lineJoin = js
else:
raise ValueError("Unrecognized join style. Found {0}".format(js))
raise ValueError(f"Unrecognized join style. Found {js}")
def set_linewidth(self, w):
self.stroke = w != 0
@ -266,11 +266,9 @@ class RendererHTMLCanvas(RendererBase):
G = int(color[1] * 255)
B = int(color[2] * 255)
if len(color) == 3 or alpha_overrides:
CSS_color = """rgba({0:d}, {1:d}, {2:d}, {3:.3g})""".format(
R, G, B, alpha
)
CSS_color = f"""rgba({R:d}, {G:d}, {B:d}, {alpha:.3g})"""
else:
CSS_color = """rgba({0:d}, {1:d}, {2:d}, {3:.3g})""".format(
CSS_color = """rgba({:d}, {:d}, {:d}, {:.3g})""".format(
R, G, B, color[3]
)
@ -409,7 +407,7 @@ class RendererHTMLCanvas(RendererBase):
font_face_arguments = (
prop.get_name(),
"url({0})".format(_base_fonts_url + font_file_name),
f"url({_base_fonts_url + font_file_name})",
)
# The following snippet loads a font into the browser's
@ -423,7 +421,7 @@ class RendererHTMLCanvas(RendererBase):
f = FontFace.new(*font_face_arguments)
f.load().then(_load_font_into_web)
font_property_string = "{0} {1} {2:.3g}px {3}, {4}".format(
font_property_string = "{} {} {:.3g}px {}, {}".format(
prop.get_style(),
prop.get_weight(),
font_size,
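
Both changes in this backend lean on the same str.format rules: explicit indices ({0:d}) can drop to auto-numbering ({:d}), and format specs carry into f-strings unchanged. Equivalence check (illustrative):

R, G, B, alpha = 255, 128, 0, 0.5
assert (
    f"rgba({R:d}, {G:d}, {B:d}, {alpha:.3g})"
    == "rgba({:d}, {:d}, {:d}, {:.3g})".format(R, G, B, alpha)
    == "rgba(255, 128, 0, 0.5)"
)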


@ -82,7 +82,7 @@ class NavigationToolbar2AggWasm(NavigationToolbar2Wasm):
mimetype, base64.b64encode(data.getvalue()).decode("ascii")
),
)
element.setAttribute("download", "plot.{}".format(format))
element.setAttribute("download", f"plot.{format}")
element.style.display = "none"
document.body.appendChild(element)
element.click()


@ -27,9 +27,7 @@ def get_canvas_data(selenium, prefix):
img_script = "return arguments[0].toDataURL('image/png').substring(21)"
canvas_base64 = selenium.driver.execute_script(img_script, canvas_element)
canvas_png = base64.b64decode(canvas_base64)
with open(
r"{0}/{1}-{2}.png".format(TEST_PATH, prefix, selenium.browser), "wb"
) as f:
with open(rf"{TEST_PATH}/{prefix}-{selenium.browser}.png", "wb") as f:
f.write(canvas_png)
@ -38,9 +36,7 @@ def check_comparison(selenium, prefix, num_fonts):
font_wait.until(FontsLoaded(num_fonts))
# If we don't have a reference image, write one to disk
if not os.path.isfile(
"{0}/{1}-{2}.png".format(TEST_PATH, prefix, selenium.browser)
):
if not os.path.isfile(f"{TEST_PATH}/{prefix}-{selenium.browser}.png"):
get_canvas_data(selenium, prefix)
selenium.run(


@ -12,7 +12,7 @@ from packaging.version import Version
from packaging.markers import default_environment
from pathlib import Path
from typing import Dict, Any, Union, List, Tuple, Optional
from typing import Any, Union, Optional
from zipfile import ZipFile
from .externals.pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
@ -91,7 +91,7 @@ def _is_pure_python_wheel(filename: str):
return filename.endswith("py3-none-any.whl")
def _parse_wheel_url(url: str) -> Tuple[str, Dict[str, Any], str]:
def _parse_wheel_url(url: str) -> tuple[str, dict[str, Any], str]:
"""Parse wheels URL and extract available metadata
See https://www.python.org/dev/peps/pep-0427/#file-name-convention
@ -149,7 +149,7 @@ class _PackageManager:
async def gather_requirements(
self,
requirements: Union[str, List[str]],
requirements: Union[str, list[str]],
ctx=None,
keep_going: bool = False,
):
@ -158,7 +158,7 @@ class _PackageManager:
if isinstance(requirements, str):
requirements = [requirements]
transaction: Dict[str, Any] = {
transaction: dict[str, Any] = {
"wheels": [],
"pyodide_packages": [],
"locked": copy.deepcopy(self.installed_packages),
@ -175,7 +175,7 @@ class _PackageManager:
return transaction
async def install(
self, requirements: Union[str, List[str]], ctx=None, keep_going: bool = False
self, requirements: Union[str, list[str]], ctx=None, keep_going: bool = False
):
async def _install(install_func, done_callback):
await install_func
@ -319,8 +319,8 @@ class _PackageManager:
transaction["wheels"].append((name, wheel, version))
def find_wheel(
self, metadata: Dict[str, Any], req: Requirement
) -> Tuple[Any, Optional[Version]]:
self, metadata: dict[str, Any], req: Requirement
) -> tuple[Any, Optional[Version]]:
"""Parse metadata to find the latest version of pure python wheel.
Parameters
@ -356,7 +356,7 @@ PACKAGE_MANAGER = _PackageManager()
del _PackageManager
def install(requirements: Union[str, List[str]], keep_going: bool = False):
def install(requirements: Union[str, list[str]], keep_going: bool = False):
"""Install the given package and all of its dependencies.
See :ref:`loading packages <loading_packages>` for more information.
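
Note that --py39-plus converts the container generics but deliberately leaves Union and Optional in place: the X | Y spelling is PEP 604 and needs Python 3.10 (pyupgrade's --py310-plus). Hence the mixed style above; a small sketch of the result:

from typing import Optional, Union

def install(requirements: Union[str, list[str]], keep_going: bool = False):
    ...  # "str | list[str]" (PEP 604) would require the 3.10 floor

def newest(versions: list[str]) -> Optional[str]:
    return max(versions, default=None)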


@ -1,11 +1,11 @@
from collections import UserDict
from dataclasses import dataclass, astuple
from typing import List, Iterable
from typing import Iterable
__all__ = ["PackageDict"]
def _format_table(headers: List[str], table: List[Iterable]) -> str:
def _format_table(headers: list[str], table: list[Iterable]) -> str:
"""
Returns a minimal formatted table


@ -26,7 +26,7 @@ def mock_get_pypi_json(pkg_map):
A mock function of ``_get_pypi_json`` which returns dummy JSON data of PyPI API.
"""
class Wildcard(object):
class Wildcard:
def __eq__(self, other):
return True
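
Wildcard here is an always-equal sentinel: it lets the mocked PyPI JSON be compared field by field while ignoring values the test does not care about. Usage sketch (the key names below are illustrative):

class Wildcard:
    def __eq__(self, other):
        return True

ANY = Wildcard()
# Dict equality compares values pairwise, so ANY matches whatever is there.
assert {"filename": "pkg-1.0-py3-none-any.whl", "digests": ANY} == {
    "filename": "pkg-1.0-py3-none-any.whl",
    "digests": {"sha256": "..."},
}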


@ -1,10 +1,9 @@
from typing import Dict
import random
import pytest
def generate_largish_json(n_rows: int = 91746) -> Dict:
def generate_largish_json(n_rows: int = 91746) -> dict:
# with n_rows = 91746, the output JSON size will be ~15 MB/10k rows
# Note: we don't fix the random seed here, but the actual values


@ -1,6 +1,5 @@
import pytest
import os
from typing import List, Dict
import functools
from pyodide_build.io import parse_package_config
@ -10,7 +9,7 @@ PKG_DIR = ROOT_PATH / "packages"
@functools.cache
def registered_packages() -> List[str]:
def registered_packages() -> list[str]:
"""Returns a list of registered package names"""
packages = []
for name in os.listdir(PKG_DIR):
@ -29,7 +28,7 @@ def registered_packages_meta():
}
UNSUPPORTED_PACKAGES: Dict[str, List[str]] = {
UNSUPPORTED_PACKAGES: dict[str, list[str]] = {
"chrome": [],
"firefox": [],
"node": [],


@ -1,7 +1,7 @@
import re
import subprocess
from textwrap import dedent # for doctests
from typing import List, Iterable, Iterator, Tuple
from typing import Iterable, Iterator
from pathlib import Path
@ -24,7 +24,7 @@ def fix_f2c_output(f2c_output_path: str):
]
)
with open(f2c_output, "r") as f:
with open(f2c_output) as f:
lines = f.readlines()
if "id_dist" in f2c_output_path:
# Fix implicit casts in id_dist.
@ -61,7 +61,7 @@ def prepare_doctest(x):
return dedent(x).strip().split("\n")
def remove_ftnlen_args(lines: List[str]) -> List[str]:
def remove_ftnlen_args(lines: list[str]) -> list[str]:
"""
Functions with "character" arguments have these extra ftnlen arguments at
the end (which are never used). Other places declare these arguments as
@ -89,7 +89,7 @@ def remove_ftnlen_args(lines: List[str]) -> List[str]:
return new_lines
def add_externs_to_structs(lines: List[str]):
def add_externs_to_structs(lines: list[str]):
"""
The fortran "common" keyword is supposed to share variables between a bunch
of files. f2c doesn't handle this correctly (it isn't possible for it to
@ -167,7 +167,7 @@ def regroup_lines(lines: Iterable[str]) -> Iterator[str]:
yield from (x + ";" for x in joined_line.split(";")[:-1])
def fix_inconsistent_decls(lines: List[str]) -> List[str]:
def fix_inconsistent_decls(lines: list[str]) -> list[str]:
"""
Fortran functions in id_dist use implicit casting of function args which f2c
doesn't support.
@ -238,7 +238,7 @@ def fix_inconsistent_decls(lines: List[str]) -> List[str]:
return lines
def get_subroutine_decl(sub: str) -> Tuple[str, List[str]]:
def get_subroutine_decl(sub: str) -> tuple[str, list[str]]:
"""
>>> get_subroutine_decl(
... "extern /* Subroutine */ int dqelg_(integer *, doublereal *, doublereal *, doublereal *, doublereal *, integer *);"


@ -14,7 +14,7 @@ import subprocess
import sys
from threading import Thread, Lock
from time import sleep, perf_counter
from typing import Dict, Set, Optional, List, Any
from typing import Optional, Any
import os
from . import common
@ -30,9 +30,9 @@ class BasePackage:
meta: dict
library: bool
shared_library: bool
dependencies: List[str]
unbuilt_dependencies: Set[str]
dependents: Set[str]
dependencies: list[str]
unbuilt_dependencies: set[str]
dependents: set[str]
unvendored_tests: Optional[Path] = None
file_name: Optional[str] = None
install_dir: str = "site"
@ -143,7 +143,7 @@ class Package(BasePackage):
# Don't overwrite build log if we didn't build the file.
# If the file didn't need to be rebuilt, the log will have exactly two lines.
rebuilt = True
with open(self.pkgdir / "build.log.tmp", "r") as f:
with open(self.pkgdir / "build.log.tmp") as f:
try:
next(f)
next(f)
@ -165,7 +165,7 @@ class Package(BasePackage):
except subprocess.CalledProcessError:
print(f"Error building {self.name}. Printing build logs.")
with open(self.pkgdir / "build.log", "r") as f:
with open(self.pkgdir / "build.log") as f:
shutil.copyfileobj(f, sys.stdout)
raise
@ -185,8 +185,8 @@ class Package(BasePackage):
def generate_dependency_graph(
packages_dir: Path, packages: Set[str]
) -> Dict[str, BasePackage]:
packages_dir: Path, packages: set[str]
) -> dict[str, BasePackage]:
"""This generates a dependency graph for listed packages.
A node in the graph is a BasePackage object defined above, which maintains
@ -207,7 +207,7 @@ def generate_dependency_graph(
- pkg_map: dictionary mapping package names to BasePackage objects
"""
pkg_map: Dict[str, BasePackage] = {}
pkg_map: dict[str, BasePackage] = {}
if "*" in packages:
packages.discard("*")
@ -267,7 +267,7 @@ def get_progress_line(package_set):
return f"In progress: " + ", ".join(package_set.keys())
def format_name_list(l: List[str]) -> str:
def format_name_list(l: list[str]) -> str:
"""
>>> format_name_list(["regex"])
'regex'
@ -285,7 +285,7 @@ def format_name_list(l: List[str]) -> str:
def mark_package_needs_build(
pkg_map: Dict[str, BasePackage], pkg: BasePackage, needs_build: Set[str]
pkg_map: dict[str, BasePackage], pkg: BasePackage, needs_build: set[str]
):
"""
Helper for generate_needs_build_set. Modifies needs_build in place.
@ -300,7 +300,7 @@ def mark_package_needs_build(
mark_package_needs_build(pkg_map, pkg_map[dep], needs_build)
def generate_needs_build_set(pkg_map: Dict[str, BasePackage]) -> Set[str]:
def generate_needs_build_set(pkg_map: dict[str, BasePackage]) -> set[str]:
"""
Generate the set of packages that need to be rebuilt.
@ -309,7 +309,7 @@ def generate_needs_build_set(pkg_map: Dict[str, BasePackage]) -> Set[str]:
according to needs_rebuild, and
2. packages which depend on case 1 packages.
"""
needs_build: Set[str] = set()
needs_build: set[str] = set()
for pkg in pkg_map.values():
# Otherwise, rebuild packages that have been updated and their dependents.
if pkg.needs_rebuild():
@ -317,7 +317,7 @@ def generate_needs_build_set(pkg_map: Dict[str, BasePackage]) -> Set[str]:
return needs_build
def build_from_graph(pkg_map: Dict[str, BasePackage], outputdir: Path, args) -> None:
def build_from_graph(pkg_map: dict[str, BasePackage], outputdir: Path, args) -> None:
"""
This builds packages in pkg_map in parallel, building at most args.n_jobs
packages at once.
@ -426,10 +426,10 @@ def build_from_graph(pkg_map: Dict[str, BasePackage], outputdir: Path, args) ->
)
def generate_packages_json(pkg_map: Dict[str, BasePackage]) -> Dict:
def generate_packages_json(pkg_map: dict[str, BasePackage]) -> dict:
"""Generate the package.json file"""
# Build package.json data.
package_data: Dict[str, Dict[str, Any]] = {
package_data: dict[str, dict[str, Any]] = {
"info": {"arch": "wasm32", "platform": "Emscripten-1.0"},
"packages": {},
}


@ -19,7 +19,7 @@ from contextlib import contextmanager
from datetime import datetime
from pathlib import Path
from textwrap import dedent
from typing import Any, Dict
from typing import Any
from urllib import request
from . import pywasmcross
@ -59,7 +59,7 @@ class BashRunnerWithSharedEnvironment:
def __init__(self, env=None):
if env is None:
env = dict(os.environ)
self.env: Dict[str, str] = env
self.env: dict[str, str] = env
self._fd_read, self._fd_write = os.pipe()
self._reader = os.fdopen(self._fd_read, "r")
@ -113,7 +113,7 @@ def get_bash_runner():
b.close()
def check_checksum(archive: Path, source_metadata: Dict[str, Any]):
def check_checksum(archive: Path, source_metadata: dict[str, Any]):
"""
Checks that an archive matches the checksum in the package metadata.
@ -144,7 +144,7 @@ def check_checksum(archive: Path, source_metadata: Dict[str, Any]):
if len(chunk) < CHUNK_SIZE:
break
if h.hexdigest() != checksum:
raise ValueError("Invalid {} checksum".format(checksum_algorithm))
raise ValueError(f"Invalid {checksum_algorithm} checksum")
def trim_archive_extension(tarballname):
@ -164,7 +164,7 @@ def trim_archive_extension(tarballname):
return tarballname
def download_and_extract(buildpath: Path, srcpath: Path, src_metadata: Dict[str, Any]):
def download_and_extract(buildpath: Path, srcpath: Path, src_metadata: dict[str, Any]):
"""
Download the source from specified in the meta data, then checksum it, then
extract the archive into srcpath.
@ -217,7 +217,7 @@ def download_and_extract(buildpath: Path, srcpath: Path, src_metadata: Dict[str,
def prepare_source(
pkg_root: Path, buildpath: Path, srcpath: Path, src_metadata: Dict[str, Any]
pkg_root: Path, buildpath: Path, srcpath: Path, src_metadata: dict[str, Any]
):
"""
Figure out from the "source" key in the package metadata where to get the source
@ -266,7 +266,7 @@ def prepare_source(
shutil.copytree(srcdir, srcpath)
def patch(pkg_root: Path, srcpath: Path, src_metadata: Dict[str, Any]):
def patch(pkg_root: Path, srcpath: Path, src_metadata: dict[str, Any]):
"""
Apply patches to the source.
@ -331,7 +331,7 @@ def install_for_distribution():
def compile(
srcpath: Path,
build_metadata: Dict[str, Any],
build_metadata: dict[str, Any],
bash_runner: BashRunnerWithSharedEnvironment,
*,
target_install_dir: str,
@ -408,7 +408,7 @@ def package_wheel(
pkg_name: str,
pkg_root: Path,
srcpath: Path,
build_metadata: Dict[str, Any],
build_metadata: dict[str, Any],
bash_runner: BashRunnerWithSharedEnvironment,
):
"""Package a wheel
@ -522,7 +522,7 @@ def create_packaged_token(buildpath: Path):
def run_script(
buildpath: Path,
srcpath: Path,
build_metadata: Dict[str, Any],
build_metadata: dict[str, Any],
bash_runner: BashRunnerWithSharedEnvironment,
):
"""
@ -553,7 +553,7 @@ def run_script(
def needs_rebuild(
pkg_root: Path, buildpath: Path, source_metadata: Dict[str, Any]
pkg_root: Path, buildpath: Path, source_metadata: dict[str, Any]
) -> bool:
"""
Determines if a package needs a rebuild because its meta.yaml, patches, or
@ -593,7 +593,7 @@ def needs_rebuild(
def build_package(
pkg_root: Path,
pkg: Dict[str, Any],
pkg: dict[str, Any],
*,
target_install_dir: str,
host_install_dir: str,
@ -633,7 +633,7 @@ def build_package(
return
if not should_prepare_source and not srcpath.exists():
raise IOError(
raise OSError(
"Cannot find source for rebuild. Expected to find the source "
f"directory at the path {srcpath}, but that path does not exist."
)
@ -759,7 +759,7 @@ def make_parser(parser: argparse.ArgumentParser):
return parser
def parse_continue_arg(continue_from: str) -> Dict[str, Any]:
def parse_continue_arg(continue_from: str) -> dict[str, Any]:
from itertools import accumulate
is_none = continue_from == "None"
@ -777,11 +777,11 @@ def parse_continue_arg(continue_from: str) -> Dict[str, Any]:
] = accumulate([is_none, is_script, is_capture, is_replay], lambda a, b: a or b)
if not should_replay_compile:
raise IOError(
raise OSError(
f"Unexpected --continue argument '{continue_from}', should have been 'script', 'capture', 'replay', or 'replay:##'"
)
result: Dict[str, Any] = {}
result: dict[str, Any] = {}
result["should_prepare_source"] = should_prepare_source
result["should_run_script"] = should_run_script
result["should_capture_compile"] = should_capture_compile
@ -833,7 +833,7 @@ def main(args):
finally:
t1 = datetime.now()
datestamp = "[{}]".format(t1.strftime("%Y-%m-%d %H:%M:%S"))
total_seconds = "{:.1f}".format((t1 - t0).total_seconds())
total_seconds = f"{(t1 - t0).total_seconds():.1f}"
status = "Succeeded" if success else "Failed"
print(
f"{datestamp} {status} building package {name} in {total_seconds} seconds."


@ -1,5 +1,5 @@
from pathlib import Path
from typing import Optional, Set
from typing import Optional
import functools
import subprocess
@ -7,7 +7,7 @@ import subprocess
UNVENDORED_STDLIB_MODULES = ["test", "distutils"]
def _parse_package_subset(query: Optional[str]) -> Set[str]:
def _parse_package_subset(query: Optional[str]) -> set[str]:
"""Parse the list of packages specified with PYODIDE_PACKAGES env var.
Also add the list of mandatory packages: ["pyparsing", "packaging",
@ -127,7 +127,7 @@ def get_make_flag(name):
return get_make_environment_vars()[name]
@functools.lru_cache(maxsize=None)
@functools.cache
def get_make_environment_vars():
"""Load environment variables from Makefile.envs


@ -1,10 +1,10 @@
from pathlib import Path
from typing import Dict, Any, List, Optional
from typing import Any, Optional
# TODO: support more complex types for validation
PACKAGE_CONFIG_SPEC: Dict[str, Dict[str, Any]] = {
PACKAGE_CONFIG_SPEC: dict[str, dict[str, Any]] = {
"package": {
"name": str,
"version": str,
@ -47,8 +47,8 @@ PACKAGE_CONFIG_SPEC: Dict[str, Dict[str, Any]] = {
def check_package_config(
config: Dict[str, Any], raise_errors: bool = True, file_path: Optional[Path] = None
) -> List[str]:
config: dict[str, Any], raise_errors: bool = True, file_path: Optional[Path] = None
) -> list[str]:
"""Check the validity of a loaded meta.yaml file
Currently the following checks are applied:
@ -119,7 +119,7 @@ def check_package_config(
return errors_msg
def parse_package_config(path: Path, check: bool = True) -> Dict[str, Any]:
def parse_package_config(path: Path, check: bool = True) -> dict[str, Any]:
"""Load a meta.yaml file
Parameters


@ -9,7 +9,7 @@ import urllib.error
import subprocess
import sys
from pathlib import Path
from typing import Dict, Any, Optional, List, Literal
from typing import Any, Optional, Literal
import warnings
PACKAGES_ROOT = Path(__file__).parents[2] / "packages"
@ -26,7 +26,7 @@ SDIST_EXTENSIONS = tuple(
)
def _find_sdist(pypi_metadata: Dict[str, Any]) -> Optional[Dict[str, Any]]:
def _find_sdist(pypi_metadata: dict[str, Any]) -> Optional[dict[str, Any]]:
"""Get sdist file path from the metadata"""
# The first one we can use. Usually a .tar.gz
for entry in pypi_metadata["urls"]:
@ -37,7 +37,7 @@ def _find_sdist(pypi_metadata: Dict[str, Any]) -> Optional[Dict[str, Any]]:
return None
def _find_wheel(pypi_metadata: Dict[str, Any]) -> Optional[Dict[str, Any]]:
def _find_wheel(pypi_metadata: dict[str, Any]) -> Optional[dict[str, Any]]:
"""Get wheel file path from the metadata"""
for entry in pypi_metadata["urls"]:
if entry["packagetype"] == "bdist_wheel" and entry["filename"].endswith(
@ -48,8 +48,8 @@ def _find_wheel(pypi_metadata: Dict[str, Any]) -> Optional[Dict[str, Any]]:
def _find_dist(
pypi_metadata: Dict[str, Any], source_types=List[Literal["wheel", "sdist"]]
) -> Dict[str, Any]:
pypi_metadata: dict[str, Any], source_types=list[Literal["wheel", "sdist"]]
) -> dict[str, Any]:
"""Find a wheel or sdist, as appropriate.
source_types controls which types (wheel and/or sdist) are accepted and also
@ -72,7 +72,7 @@ def _find_dist(
raise MkpkgFailedException(f"No {types_str} found for package {name} ({url})")
def _get_metadata(package: str, version: Optional[str] = None) -> Dict:
def _get_metadata(package: str, version: Optional[str] = None) -> dict:
"""Download metadata for a package from PyPI"""
version = ("/" + version) if version is not None else ""
url = f"https://pypi.org/pypi/{package}{version}/json"


@ -34,7 +34,7 @@ import shutil
import sys
from typing import List, Dict, Set, Optional, overload
from typing import Optional, overload
# absolute import is necessary as this file will be symlinked
# under tools
@ -42,7 +42,7 @@ from pyodide_build import common
from pyodide_build._f2c_fixes import fix_f2c_output
symlinks = set(["cc", "c++", "ld", "ar", "gcc", "gfortran"])
symlinks = {"cc", "c++", "ld", "ar", "gcc", "gfortran"}
ReplayArgs = namedtuple(
"ReplayArgs",
@ -57,7 +57,7 @@ ReplayArgs = namedtuple(
)
def capture_command(command: str, args: List[str]) -> int:
def capture_command(command: str, args: list[str]) -> int:
"""
This is called when this script is called through a symlink that looks like
a compiler or linker.
@ -118,7 +118,7 @@ def capture_command(command: str, args: List[str]) -> int:
return subprocess.run(compiler_command + args, env=env).returncode
def capture_make_command_wrapper_symlinks(env: Dict[str, str]):
def capture_make_command_wrapper_symlinks(env: dict[str, str]):
"""
Makes sure all the symlinks that make this script look like a compiler
exist.
@ -141,7 +141,7 @@ def capture_make_command_wrapper_symlinks(env: Dict[str, str]):
env[var] = symlink
def capture_compile(*, host_install_dir: str, skip_host: bool, env: Dict[str, str]):
def capture_compile(*, host_install_dir: str, skip_host: bool, env: dict[str, str]):
TOOLSDIR = Path(common.get_make_flag("TOOLSDIR"))
env = dict(env)
env["PATH"] = str(TOOLSDIR) + ":" + env["PATH"]
@ -164,7 +164,7 @@ def capture_compile(*, host_install_dir: str, skip_host: bool, env: Dict[str, st
clean_out_native_artifacts()
def replay_f2c(args: List[str], dryrun: bool = False) -> Optional[List[str]]:
def replay_f2c(args: list[str], dryrun: bool = False) -> Optional[list[str]]:
"""Apply f2c to compilation arguments
Parameters
@ -211,7 +211,7 @@ def replay_f2c(args: List[str], dryrun: bool = False) -> Optional[List[str]]:
return new_args
def get_library_output(line: List[str]) -> Optional[str]:
def get_library_output(line: list[str]) -> Optional[str]:
"""
Check if the command is a linker invocation. If so, return the name of the
output file.
@ -222,7 +222,7 @@ def get_library_output(line: List[str]) -> Optional[str]:
return None
def parse_replace_libs(replace_libs: str) -> Dict[str, str]:
def parse_replace_libs(replace_libs: str) -> dict[str, str]:
"""
Parameters
----------
@ -249,7 +249,7 @@ def parse_replace_libs(replace_libs: str) -> Dict[str, str]:
def replay_genargs_handle_dashl(
arg: str, replace_libs: Dict[str, str], used_libs: Set[str]
arg: str, replace_libs: dict[str, str], used_libs: set[str]
) -> Optional[str]:
"""
Figure out how to replace a `-lsomelib` argument.
@ -397,8 +397,8 @@ def replay_genargs_handle_argument(arg: str) -> Optional[str]:
def replay_command_generate_args(
line: List[str], args: ReplayArgs, is_link_command: bool
) -> List[str]:
line: list[str], args: ReplayArgs, is_link_command: bool
) -> list[str]:
"""
A helper command for `replay_command` that generates the new arguments for
the compilation.
@ -456,7 +456,7 @@ def replay_command_generate_args(
debugflag = arg
break
used_libs: Set[str] = set()
used_libs: set[str] = set()
# Go through and adjust arguments
for arg in line[1:]:
# The native build is possibly multithreaded, but the emscripten one
@ -496,8 +496,8 @@ def replay_command_generate_args(
def replay_command(
line: List[str], args: ReplayArgs, dryrun: bool = False
) -> Optional[List[str]]:
line: list[str], args: ReplayArgs, dryrun: bool = False
) -> Optional[list[str]]:
"""Handle a compilation command
Parameters
@ -577,8 +577,8 @@ def replay_command(
def environment_substitute_args(
args: Dict[str, str], env: Dict[str, str] = None
) -> Dict[str, str]:
args: dict[str, str], env: dict[str, str] = None
) -> dict[str, str]:
if env is None:
env = dict(os.environ)
subbed_args = {}
@ -622,7 +622,7 @@ def replay_compile(replay_from: int = 1, **kwargs):
.split(" ")[0]
)
num_lines = str(lines_str)
with open(build_log_path, "r") as fd:
with open(build_log_path) as fd:
num_lines = sum(1 for _1 in fd) # type: ignore
fd.seek(0)
for idx, line_str in enumerate(fd):


@ -42,7 +42,7 @@ def main(args):
port = args.port
httpd = server(port)
os.chdir(build_dir)
print("serving from {0} at localhost:".format(build_dir) + str(port))
print(f"serving from {build_dir} at localhost:" + str(port))
try:
httpd.serve_forever()
except KeyboardInterrupt:


@ -1,6 +1,6 @@
import pytest
import inspect
from typing import Callable, Dict, List, Optional, Union
from typing import Callable, Optional, Union
import contextlib
from base64 import b64encode
@ -30,8 +30,8 @@ def run_in_pyodide(
*,
standalone: bool = False,
module_scope: bool = False,
packages: List[str] = [],
xfail_browsers: Dict[str, str] = {},
packages: list[str] = [],
xfail_browsers: dict[str, str] = {},
driver_timeout: Optional[Union[str, int]] = None,
) -> Callable:
"""


@ -8,8 +8,7 @@ def test_run_docker_script():
res = subprocess.run(
["bash", str(BASE_DIR / "run_docker"), "--help"],
check=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
capture_output=True,
)
assert "Usage: run_docker" in res.stdout.decode("utf-8")
@ -17,8 +16,7 @@ def test_run_docker_script():
res = subprocess.run(
["bash", str(BASE_DIR / "run_docker"), "--invalid-param"],
check=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
capture_output=True,
)
assert res.returncode > 0
assert "Unknown option --invalid-param" in res.stderr.decode("utf-8")


@ -13,3 +13,6 @@ module = [
"ruamel.yaml",
]
ignore_missing_imports = true
[tool.pycln]
all = true


@ -11,7 +11,7 @@ from io import StringIO
from textwrap import dedent
import tokenize
from types import CodeType
from typing import Any, Dict, Generator, List, Optional
from typing import Any, Generator, Optional
def should_quiet(source: str) -> bool:
@ -254,7 +254,7 @@ class CodeRunner:
assert False
return self
def run(self, globals: Dict[str, Any] = None, locals: Dict[str, Any] = None):
def run(self, globals: dict[str, Any] = None, locals: dict[str, Any] = None):
"""Executes ``self.code``.
Can only be used after calling compile. The code may not use top level
@ -303,7 +303,7 @@ class CodeRunner:
return e.value
async def run_async(
self, globals: Dict[str, Any] = None, locals: Dict[str, Any] = None
self, globals: dict[str, Any] = None, locals: dict[str, Any] = None
):
"""Runs ``self.code`` which may use top level await.
@ -351,8 +351,8 @@ class CodeRunner:
def eval_code(
source: str,
globals: Optional[Dict[str, Any]] = None,
locals: Optional[Dict[str, Any]] = None,
globals: Optional[dict[str, Any]] = None,
locals: Optional[dict[str, Any]] = None,
*,
return_mode: str = "last_expr",
quiet_trailing_semicolon: bool = True,
@ -426,8 +426,8 @@ def eval_code(
async def eval_code_async(
source: str,
globals: Optional[Dict[str, Any]] = None,
locals: Optional[Dict[str, Any]] = None,
globals: Optional[dict[str, Any]] = None,
locals: Optional[dict[str, Any]] = None,
*,
return_mode: str = "last_expr",
quiet_trailing_semicolon: bool = True,
@ -503,7 +503,7 @@ async def eval_code_async(
)
def find_imports(source: str) -> List[str]:
def find_imports(source: str) -> list[str]:
"""
Finds the imports in a Python source code string


@ -19,10 +19,7 @@ from typing import (
Optional,
Callable,
Any,
List,
Tuple,
Union,
Tuple,
)
from _pyodide._base import should_quiet, CodeRunner
@ -254,7 +251,7 @@ class Console:
self.stdout_callback = stdout_callback
self.stderr_callback = stderr_callback
self.filename = filename
self.buffer: List[str] = []
self.buffer: list[str] = []
self._lock = asyncio.Lock()
self._streams_redirected = False
self._stream_generator = None # track persistent stream redirection
@ -427,7 +424,7 @@ class Console:
self.buffer = []
return result
def complete(self, source: str) -> Tuple[List[str], int]:
def complete(self, source: str) -> tuple[list[str], int]:
"""Use Python's rlcompleter to complete the source string using the :any:`globals <Console.globals>` namespace.
Finds last "word" in the source string and completes it with rlcompleter. Word
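
Console.complete delegates to the stdlib rlcompleter, which hands back matches one state index at a time until it returns None. A standalone sketch of that underlying protocol (illustrative):

import rlcompleter

completer = rlcompleter.Completer({"my_variable": 1})
matches = []
state = 0
while (match := completer.complete("my_var", state)) is not None:
    matches.append(match)
    state += 1
assert "my_variable" in matches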


@ -1,5 +1,4 @@
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# flake8: noqa
"""
@ -44,7 +43,6 @@ Version History:
"""
from __future__ import print_function
LOOPS = 50000


@ -7,7 +7,7 @@ import tarfile
from pathlib import Path
from site import getsitepackages
from tempfile import NamedTemporaryFile
from typing import List, IO, Iterable
from typing import IO, Iterable
from zipfile import ZipFile
SITE_PACKAGES = Path(getsitepackages()[0])
@ -51,7 +51,7 @@ def unpack_buffer(filename: str, buffer: JsProxy, target: str = "site") -> JsPro
return to_js(get_dynlibs(f, target_dir))
def get_dynlibs(archive: IO[bytes], target_dir: Path) -> List[str]:
def get_dynlibs(archive: IO[bytes], target_dir: Path) -> list[str]:
"""List out the paths to .so files in a zip or tar archive.
Parameters


@ -208,7 +208,7 @@ class FetchResponse:
async def pyfetch(url: str, **kwargs) -> FetchResponse:
"""Fetch the url and return the response.
r"""Fetch the url and return the response.
This functions provides a similar API to the JavaScript `fetch function
<https://developer.mozilla.org/en-US/docs/Web/API/fetch>`_ however it is


@ -429,7 +429,7 @@ def test_console_html(console_html_fixture):
>>> Test()
[[;;;terminal-error]Traceback (most recent call last):
File \"/lib/python3.9/site-packages/_pyodide/console.py\", line 478, in repr_shorten
File \"/lib/python3.9/site-packages/_pyodide/console.py\", line 475, in repr_shorten
text = repr(value)
File \"<console>\", line 3, in __repr__
TypeError: hi]


@ -18,21 +18,19 @@ def test_jsproxy_dir(selenium):
return result;
"""
)
jsproxy_items = set(
[
"__bool__",
"__class__",
"__defineGetter__",
"__defineSetter__",
"__delattr__",
"constructor",
"toString",
"typeof",
"valueOf",
]
)
a_items = set(["x", "y"])
callable_items = set(["__call__", "new"])
jsproxy_items = {
"__bool__",
"__class__",
"__defineGetter__",
"__defineSetter__",
"__delattr__",
"constructor",
"toString",
"typeof",
"valueOf",
}
a_items = {"x", "y"}
callable_items = {"__call__", "new"}
set0 = set(result[0])
set1 = set(result[1])
assert set0.issuperset(jsproxy_items)
@ -606,8 +604,8 @@ def test_mount_object(selenium_standalone):
"""
)
assert result[:3] == ["x1", "x2", 3]
assert set([x for x in result[3] if len(x) == 1]) == set(["x", "y", "s", "t"])
assert set([x for x in result[4] if len(x) == 1]) == set(["x", "y", "u", "t"])
assert {x for x in result[3] if len(x) == 1} == {"x", "y", "s", "t"}
assert {x for x in result[4] if len(x) == 1} == {"x", "y", "u", "t"}
selenium.run_js(
"""
pyodide.unregisterJsModule("a");


@ -120,8 +120,8 @@ def test_load_packages_multiple(selenium_standalone, packages):
)
def test_load_packages_sequential(selenium_standalone, packages):
selenium = selenium_standalone
promises = ",".join('pyodide.loadPackage("{}")'.format(x) for x in packages)
selenium.run_js("return Promise.all([{}])".format(promises))
promises = ",".join(f'pyodide.loadPackage("{x}")' for x in packages)
selenium.run_js(f"return Promise.all([{promises}])")
selenium.run(f"import {packages[0]}")
selenium.run(f"import {packages[1]}")
# The log must show that each package is loaded exactly once,


@ -957,7 +957,7 @@ def test_custom_stdin_stdout(selenium_standalone_noload):
globalThis.pyodide = pyodide;
"""
)
outstrings = sum([s.removesuffix("\n").split("\n") for s in strings], [])
outstrings = sum((s.removesuffix("\n").split("\n") for s in strings), [])
print(outstrings)
assert (
selenium.run_js(
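
The comprehension-to-generator switch inside sum() is cosmetic, but str.removesuffix itself is a 3.9 addition, consistent with the new version floor. Behavior check (illustrative):

strings = ["hello\nworld\n", "!\n"]
outstrings = sum((s.removesuffix("\n").split("\n") for s in strings), [])
assert outstrings == ["hello", "world", "!"]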


@ -30,42 +30,37 @@ def test_pyproxy_class(selenium):
f.destroy();
"""
)
assert (
set(
[
"__class__",
"__delattr__",
"__dict__",
"__dir__",
"__doc__",
"__eq__",
"__format__",
"__ge__",
"__getattribute__",
"__gt__",
"__hash__",
"__init__",
"__init_subclass__",
"__le__",
"__lt__",
"__module__",
"__ne__",
"__new__",
"__reduce__",
"__reduce_ex__",
"__repr__",
"__setattr__",
"__sizeof__",
"__str__",
"__subclasshook__",
"__weakref__",
"bar",
"baz",
"get_value",
]
).difference(selenium.run_js("return f_props"))
== set()
)
assert {
"__class__",
"__delattr__",
"__dict__",
"__dir__",
"__doc__",
"__eq__",
"__format__",
"__ge__",
"__getattribute__",
"__gt__",
"__hash__",
"__init__",
"__init_subclass__",
"__le__",
"__lt__",
"__module__",
"__ne__",
"__new__",
"__reduce__",
"__reduce_ex__",
"__repr__",
"__setattr__",
"__sizeof__",
"__str__",
"__subclasshook__",
"__weakref__",
"bar",
"baz",
"get_value",
}.difference(selenium.run_js("return f_props")) == set()
def test_del_builtin(selenium):
@ -205,7 +200,7 @@ def test_pyproxy_iter(selenium):
"""
)
assert ty == "ChainMap"
assert set(l) == set(["a", "b"])
assert set(l) == {"a", "b"}
[result, result2] = selenium.run_js(
"""


@ -1,13 +1,12 @@
#!/usr/bin/env python3
import subprocess
from typing import List
import sys
args = sys.argv[1:]
def remove_num_threads_option(args: List[str]) -> None:
def remove_num_threads_option(args: list[str]) -> None:
"""Remove -n <n> from argument list"""
for i in range(0, len(args)):
if args[i] == "-n":