mirror of https://github.com/pyodide/pyodide.git
Toward a conda compatible packages.json (#1700)
parent 7c6b88bc36
commit b455d6a6fb
@@ -62,6 +62,9 @@ substitutions:
   allowing for custom persistence strategies depending on execution environment
   {pr}`1596`
 
+- {{ API }} The `packages.json` schema for Pyodide was re-designed for better compatibility
+  with conda. {pr}`1700`
+
 ## Standard library
 
 - The following standard library modules are now available as standalone packages
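For orientation, the redesigned file now resembles conda's repodata layout: a top-level "info" block describing the platform, and a "packages" map keyed by lowercased package name. A minimal sketch as a Python literal — the micropip entry matches the test added further down in this commit; the distutils version is the synthetic "1.0" assigned to unvendored stdlib modules:

# Sketch of the redesigned packages.json, written as a Python literal.
packages_json = {
    "info": {"arch": "wasm32", "platform": "Emscripten-1.0"},
    "packages": {
        # Regular packages keep their original casing in "name".
        "micropip": {
            "name": "micropip",
            "version": "0.1",
            "depends": ["pyparsing", "packaging", "distutils"],
            "imports": ["micropip"],
        },
        # Unvendored stdlib modules get a synthetic "1.0" version.
        "distutils": {
            "name": "distutils",
            "version": "1.0",
            "depends": [],
            "imports": ["distutils"],
        },
    },
}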
@@ -129,7 +129,7 @@ async def _install_wheel(name, fileinfo):
 class _PackageManager:
     def __init__(self):
         if IN_BROWSER:
-            self.builtin_packages = pyodide_js._module.packages.versions.to_py()
+            self.builtin_packages = pyodide_js._module.packages.to_py()
         else:
             self.builtin_packages = {}
         self.installed_packages = {}
@@ -197,9 +197,9 @@ class _PackageManager:
         # the Pyodide package instead of the one on PyPI
         if (
             req.name in self.builtin_packages
-            and self.builtin_packages[req.name] in req.specifier
+            and self.builtin_packages[req.name]["version"] in req.specifier
         ):
-            version = self.builtin_packages[req.name]
+            version = self.builtin_packages[req.name]["version"]
             transaction["pyodide_packages"].append((req.name, version))
             return
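The specifier check above comes from the `packaging` library: with the new schema, each builtin entry is a dict, so the version has to be pulled out explicitly before matching. A standalone sketch of the same logic, with a hypothetical builtin index in the new name-to-entry shape:

from packaging.requirements import Requirement

# Hypothetical builtin index in the new shape: name -> entry dict.
builtin_packages = {"regex": {"name": "regex", "version": "2021.6.4"}}

req = Requirement("regex>=2021.0")
# The entry is a dict now, so read entry["version"] before testing
# membership in the requirement's specifier set.
if (
    req.name in builtin_packages
    and builtin_packages[req.name]["version"] in req.specifier
):
    print("satisfied by the builtin Pyodide package")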
@@ -14,7 +14,7 @@ import subprocess
 import sys
 from threading import Thread
 from time import sleep, perf_counter
-from typing import Dict, Set, Optional, List
+from typing import Dict, Set, Optional, List, Any
 
 from . import common
 from .io import parse_package_config
@@ -263,39 +263,54 @@ def build_from_graph(pkg_map: Dict[str, BasePackage], outputdir: Path, args) ->
             build_queue.put(dependent)
 
 
+def generate_packages_json(pkg_map: Dict[str, BasePackage]) -> Dict:
+    """Generate the package.json file"""
+    # Build package.json data.
+    package_data: Dict[str, Dict[str, Any]] = {
+        "info": {"arch": "wasm32", "platform": "Emscripten-1.0"},
+        "packages": {},
+    }
+
+    libraries = [pkg.name for pkg in pkg_map.values() if pkg.library]
+
+    # unvendored stdlib modules
+    for name in UNVENDORED_STDLIB_MODULES:
+        pkg_entry: Dict[str, Any] = {
+            "name": name,
+            "version": "1.0",
+            "depends": [],
+            "imports": [name],
+        }
+        package_data["packages"][name.lower()] = pkg_entry
+
+    for name, pkg in pkg_map.items():
+        if pkg.library:
+            continue
+        pkg_entry = {"name": name, "version": pkg.version}
+        if pkg.shared_library:
+            pkg_entry["shared_library"] = True
+        pkg_entry["depends"] = [
+            x.lower() for x in pkg.dependencies if x not in libraries
+        ]
+        pkg_entry["imports"] = pkg.meta.get("test", {}).get("imports", [name])
+
+        package_data["packages"][name.lower()] = pkg_entry
+
+    # Workaround for circular dependency between soupsieve and beautifulsoup4
+    # TODO: FIXME!!
+    if "soupsieve" in package_data["packages"]:
+        package_data["packages"]["soupsieve"]["depends"].append("beautifulsoup4")
+
+    return package_data
+
+
 def build_packages(packages_dir: Path, outputdir: Path, args) -> None:
     pkg_map = generate_dependency_graph(packages_dir, args.only)
 
     build_from_graph(pkg_map, outputdir, args)
 
-    # Build package.json data.
-    package_data: dict = {
-        "dependencies": {key: [] for key in UNVENDORED_STDLIB_MODULES},
-        "import_name_to_package_name": {},
-        "shared_library": {},
-        "versions": {},
-        "orig_case": {},
-    }
+    package_data = generate_packages_json(pkg_map)
 
-    libraries = [pkg.name for pkg in pkg_map.values() if pkg.library]
-
-    for name, pkg in pkg_map.items():
-        if pkg.library:
-            continue
-        if pkg.shared_library:
-            package_data["shared_library"][name.lower()] = True
-        package_data["dependencies"][name.lower()] = [
-            x.lower() for x in pkg.dependencies if x not in libraries
-        ]
-        package_data["versions"][name.lower()] = pkg.version
-        for imp in pkg.meta.get("test", {}).get("imports", [name]):
-            package_data["import_name_to_package_name"][imp] = name.lower()
-        package_data["orig_case"][name.lower()] = name
-
-    # Hack for 0.17.0 release
-    # TODO: FIXME!!
-    if "soupsieve" in pkg_map:
-        package_data["dependencies"]["soupsieve"].append("beautifulsoup4")
     with open(outputdir / "packages.json", "w") as fd:
         json.dump(package_data, fd)
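Once build_packages has written the file, its shape can be sanity-checked from Python; a sketch assuming a hypothetical local build output directory:

import json
from pathlib import Path

# Hypothetical location of the build output; adjust to your tree.
outputdir = Path("build")

with open(outputdir / "packages.json") as fd:
    data = json.load(fd)

assert set(data) == {"info", "packages"}
for key, entry in data["packages"].items():
    # Keys are lowercased; the original casing lives in entry["name"].
    assert key == entry["name"].lower()
    assert {"name", "version", "depends", "imports"} <= set(entry)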
@@ -26,6 +26,29 @@ def test_generate_dependency_graph():
     assert pkg_map["beautifulsoup4"].dependents == set()
 
 
+def test_generate_packages_json():
+    pkg_map = buildall.generate_dependency_graph(PACKAGES_DIR, "beautifulsoup4")
+
+    package_data = buildall.generate_packages_json(pkg_map)
+    assert set(package_data.keys()) == {"info", "packages"}
+    assert package_data["info"] == {"arch": "wasm32", "platform": "Emscripten-1.0"}
+    assert set(package_data["packages"]) == {
+        "test",
+        "distutils",
+        "pyparsing",
+        "packaging",
+        "soupsieve",
+        "beautifulsoup4",
+        "micropip",
+    }
+    assert package_data["packages"]["micropip"] == {
+        "name": "micropip",
+        "version": "0.1",
+        "depends": ["pyparsing", "packaging", "distutils"],
+        "imports": ["micropip"],
+    }
+
+
 @pytest.mark.parametrize("n_jobs", [1, 4])
 def test_build_dependencies(n_jobs, monkeypatch):
     build_list = []
@@ -137,19 +137,16 @@ export async function loadPackagesFromImports(
   if (imports.length === 0) {
     return;
   }
-  let packageNames = Module.packages.import_name_to_package_name;
-
+  let packageNames = Module._import_name_to_package_name;
   let packages = new Set();
   for (let name of imports) {
-    if (name in packageNames) {
-      packages.add(packageNames[name]);
+    if (packageNames.has(name)) {
+      packages.add(packageNames.get(name));
     }
   }
   if (packages.size) {
-    await loadPackage(
-      Array.from(packages.keys()),
-      messageCallback,
-      errorCallback
-    );
+    await loadPackage(Array.from(packages), messageCallback, errorCallback);
   }
 }
@@ -182,7 +179,7 @@ export function pyimport(name) {
  *    response = await fetch("./packages.json")
  *    packages = await response.json()
  *    # If final statement is an expression, its value is returned to Javascript
- *    len(packages.dependencies.object_keys())
+ *    len(packages['packages'].object_keys())
  * `);
  * console.log(result); // 72
  *
@@ -1,5 +1,8 @@
 import { Module } from "./module.js";
 
+const IN_NODE =
+  typeof process !== "undefined" && process.release.name !== "undefined";
+
 /** @typedef {import('./pyproxy.js').PyProxy} PyProxy */
 /** @private */
 let baseURL;
@@ -9,16 +12,30 @@ let baseURL;
  */
 export async function initializePackageIndex(indexURL) {
   baseURL = indexURL;
-  if (typeof process !== "undefined" && process.release.name !== "undefined") {
-    const fs = await import("fs");
-    fs.readFile(`${indexURL}packages.json`, (err, data) => {
-      if (err) throw err;
-      let response = JSON.parse(data);
-      Module.packages = response;
-    });
+  let package_json;
+  if (IN_NODE) {
+    const fsPromises = await import("fs/promises");
+    const package_string = await fsPromises.readFile(
+      `${indexURL}packages.json`
+    );
+    package_json = JSON.parse(package_string);
   } else {
     let response = await fetch(`${indexURL}packages.json`);
-    Module.packages = await response.json();
+    package_json = await response.json();
   }
+  if (!package_json.packages) {
+    throw new Error(
+      "Loaded packages.json does not contain the expected key 'packages'."
+    );
+  }
+  Module.packages = package_json.packages;
+
+  // compute the inverted index for imports to package names
+  Module._import_name_to_package_name = new Map();
+  for (let name of Object.keys(Module.packages)) {
+    for (let import_name of Module.packages[name].imports) {
+      Module._import_name_to_package_name.set(import_name, name);
+    }
+  }
 }
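The inverted index maps each declared import name back to the package that provides it. The same construction, sketched in Python for illustration:

def invert_imports(packages: dict) -> dict:
    """Build an import-name -> package-name lookup, mirroring the Map
    constructed in initializePackageIndex above."""
    index = {}
    for name, entry in packages.items():
        for import_name in entry["imports"]:
            index[import_name] = name
    return index

# e.g. invert_imports({"beautifulsoup4": {"imports": ["bs4"]}})
# returns {"bs4": "beautifulsoup4"}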
@@ -78,24 +95,22 @@ function recursiveDependencies(
   errorCallback,
   sharedLibsOnly
 ) {
-  const packages = Module.packages.dependencies;
-  const sharedLibraries = Module.packages.shared_library;
   const toLoad = new Map();
 
-  const addPackage = (pkg) => {
-    pkg = pkg.toLowerCase();
-    if (toLoad.has(pkg)) {
+  const addPackage = (name) => {
+    name = name.toLowerCase();
+    if (toLoad.has(name)) {
       return;
     }
-    toLoad.set(pkg, DEFAULT_CHANNEL);
+    toLoad.set(name, DEFAULT_CHANNEL);
     // If the package is already loaded, we don't add dependencies, but warn
     // the user later. This is especially important if the loaded package is
     // from a custom url, in which case adding dependencies is wrong.
-    if (loadedPackages[pkg] !== undefined) {
+    if (loadedPackages[name] !== undefined) {
       return;
     }
-    for (let dep of packages[pkg]) {
-      addPackage(dep);
+    for (let dep_name of Module.packages[name].depends) {
+      addPackage(dep_name);
     }
   };
   for (let name of names) {
@@ -113,7 +128,7 @@ function recursiveDependencies(
       continue;
     }
     name = name.toLowerCase();
-    if (name in packages) {
+    if (name in Module.packages) {
       addPackage(name);
       continue;
     }
@@ -122,8 +137,9 @@ function recursiveDependencies(
   if (sharedLibsOnly) {
     let onlySharedLibs = new Map();
     for (let c of toLoad) {
-      if (c[0] in sharedLibraries) {
-        onlySharedLibs.set(c[0], toLoad.get(c[0]));
+      name = c[0];
+      if (Module.packages[name].shared_library) {
+        onlySharedLibs.set(name, toLoad.get(name));
       }
     }
     return onlySharedLibs;
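Dependency resolution now reads the "depends" list straight off each package entry rather than consulting a separate dependencies map. A compact Python rendering of the recursive walk, with channels and the shared-library filter omitted:

def recursive_dependencies(packages: dict, names: list) -> set:
    """Collect the requested names plus all transitive "depends" entries."""
    to_load: set = set()

    def add_package(name: str) -> None:
        name = name.lower()
        if name in to_load:
            return
        to_load.add(name)
        for dep_name in packages[name]["depends"]:
            add_package(dep_name)

    for name in names:
        if name.lower() in packages:
            add_package(name)
    return to_load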
@@ -177,7 +193,7 @@ async function _loadPackage(names, messageCallback, errorCallback) {
         continue;
       }
     }
-    let pkgname = Module.packages.orig_case[pkg] || pkg;
+    let pkgname = (Module.packages[pkg] && Module.packages[pkg].name) || pkg;
     let scriptSrc = uri === DEFAULT_CHANNEL ? `${baseURL}${pkgname}.js` : uri;
     messageCallback(`Loading ${pkg} from ${scriptSrc}`);
     scriptPromises.push(
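With the separate orig_case map gone, the display name falls back to the entry's own "name" field; in Python terms:

def display_name(packages: dict, pkg: str) -> str:
    # entry["name"] preserves the package's original casing, while the
    # lookup key (and `pkg` here) is lowercased; packages not in the index
    # (for example, ones loaded from a custom URL) fall back to the raw string.
    entry = packages.get(pkg)
    return entry["name"] if entry else pkg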
@@ -412,9 +412,9 @@ def test_await_pyproxy_eval_async(selenium):
             from js import fetch
             await (await fetch('packages.json')).json()
         `);
-        let packages = await c;
+        let result = await c;
         c.destroy();
-        return (!!packages.dependencies) && (!!packages.import_name_to_package_name);
+        return (!!result) && ("packages" in result);
         """
     )
@@ -444,6 +444,6 @@ def test_await_pyproxy_async_def(selenium):
                 return await (await fetch('packages.json')).json()
             await temp()
         `);
-        return (!!packages.dependencies) && (!!packages.import_name_to_package_name);
+        return (!!packages.packages) && (!!packages.info);
         """
     )
@@ -258,8 +258,8 @@ def test_run_python_async_toplevel_await(selenium):
         await pyodide.runPythonAsync(`
             from js import fetch
             resp = await fetch("packages.json")
-            json = await resp.json()
-            assert hasattr(json, "dependencies")
+            json = (await resp.json()).to_py()["packages"]
+            assert "micropip" in json
         `);
         """
     )