2018-06-20 18:54:47 +00:00
|
|
|
#!/usr/bin/env python3
|
2018-06-22 14:22:00 +00:00
|
|
|
"""Helper for cross-compiling distutils-based Python extensions.
|
|
|
|
|
|
|
|
distutils has never had a proper cross-compilation story. This is a hack, which
|
|
|
|
miraculously works, to get around that.
|
|
|
|
|
2022-03-22 05:05:30 +00:00
|
|
|
The gist is we compile the package replacing calls to the compiler and linker
|
|
|
|
with wrappers that adjust include paths and flags as necessary for
|
|
|
|
cross-compiling and then pass the command along to emscripten.
|
2018-06-22 14:22:00 +00:00
|
|
|
"""
|
2022-03-22 05:05:30 +00:00
|
|
|
import json
|
|
|
|
import os
|
|
|
|
import sys
|
2022-07-02 13:53:16 +00:00
|
|
|
from pathlib import Path
|
2022-03-22 05:05:30 +00:00
|
|
|
|
2022-07-02 05:00:27 +00:00
|
|
|
from __main__ import __file__ as INVOKED_PATH_STR
|
|
|
|
|
|
|
|
INVOKED_PATH = Path(INVOKED_PATH_STR)
|
|
|
|
|
2022-09-22 01:42:26 +00:00
|
|
|
# Names under which this script may be invoked (as symlinks), so that build
# systems calling e.g. `cc`, `gfortran` or `cmake` end up running pywasmcross
# instead of the host toolchain.
SYMLINKS = {
    "cc",
    "c++",
    "ld",
    "ar",
    "gcc",
    "ranlib",
    "strip",
    "gfortran",
    "cargo",
    "cmake",
}

# True when this file was executed via one of the SYMLINKS above (i.e. we are
# standing in for a compiler/linker tool) rather than run or imported directly.
IS_COMPILER_INVOCATION = INVOKED_PATH.name in SYMLINKS
|
|
|
|
|
|
|
|
if IS_COMPILER_INVOCATION:
    # If possible load from environment variable, if necessary load from disk.
    if "PYWASMCROSS_ARGS" in os.environ:
        PYWASMCROSS_ARGS = json.loads(os.environ["PYWASMCROSS_ARGS"])
    else:
        try:
            # The build arguments are expected to be serialized next to the
            # compiler symlinks.
            with open(INVOKED_PATH.parent / "pywasmcross_env.json") as f:
                PYWASMCROSS_ARGS = json.load(f)
        except FileNotFoundError:
            raise RuntimeError(
                "Invalid invocation: can't find PYWASMCROSS_ARGS."
                f" Invoked from {INVOKED_PATH}."
            ) from None

    # Restore the interpreter environment recorded at build-setup time so the
    # imports below resolve against the original Python, not the symlink dir.
    sys.path = PYWASMCROSS_ARGS.pop("PYTHONPATH")
    os.environ["PATH"] = PYWASMCROSS_ARGS.pop("PATH")
    # restore __name__ so that relative imports work as we expect
    __name__ = PYWASMCROSS_ARGS.pop("orig__name__")
|
|
|
|
|
2018-06-22 14:22:00 +00:00
|
|
|
|
2023-01-30 04:35:18 +00:00
|
|
|
import dataclasses
|
2023-05-16 16:21:50 +00:00
|
|
|
import re
|
2023-05-09 00:34:00 +00:00
|
|
|
import shutil
|
2022-02-21 22:27:03 +00:00
|
|
|
import subprocess
|
2023-01-17 04:31:45 +00:00
|
|
|
from collections.abc import Iterable, Iterator
|
|
|
|
from typing import Literal, NoReturn
|
2018-06-22 14:22:00 +00:00
|
|
|
|
2023-01-30 04:35:18 +00:00
|
|
|
|
|
|
|
@dataclasses.dataclass(eq=False, order=False, kw_only=True)
class BuildArgs:
    """
    Common arguments for building a package.
    """

    pkgname: str = ""  # Name of the package being built
    cflags: str = ""  # Extra C compiler flags
    cxxflags: str = ""  # Extra C++ compiler flags
    ldflags: str = ""  # Extra linker flags
    target_install_dir: str = ""  # The path to the target Python installation
    host_install_dir: str = ""  # Directory for installing built host packages.
    builddir: str = ""  # The path to run pypa/build
    pythoninclude: str = ""  # presumably the target Python's include dir — confirm
    # Which symbols to export when linking: one of the preset policies or an
    # explicit list of symbol names (see get_export_flags below).
    exports: Literal["whole_archive", "requested", "pyinit"] | list[str] = "pyinit"
    compression_level: int = 6  # NOTE(review): looks like a zip level — confirm
|
2021-02-04 11:34:40 +00:00
|
|
|
|
|
|
|
|
2022-04-01 22:36:55 +00:00
|
|
|
def replay_f2c(args: list[str], dryrun: bool = False) -> list[str] | None:
    """Apply f2c to compilation arguments

    Replaces a `gfortran` invocation with a `gcc` one, translating each
    Fortran source argument (`.f`/`.F`) to the corresponding `.c` file
    (running the f2c pipeline unless ``dryrun`` is set).

    Parameters
    ----------
    args
        input compiler arguments
    dryrun
        if False run f2c on detected fortran files

    Returns
    -------
    new_args
        output compiler arguments, or None when there is nothing to do


    Examples
    --------

    >>> replay_f2c(['gfortran', 'test.f'], dryrun=True)
    ['gcc', 'test.c']
    """

    # Project-local helpers that patch f2c's input/output for known issues.
    from pyodide_build._f2c_fixes import fix_f2c_input, fix_f2c_output

    new_args = ["gcc"]
    found_source = False
    for arg in args[1:]:
        if arg.endswith(".f") or arg.endswith(".F"):
            filepath = Path(arg).resolve()
            if not dryrun:
                fix_f2c_input(arg)
                if arg.endswith(".F"):
                    # .F files apparently expect to be run through the C
                    # preprocessor (they have #ifdef's in them)
                    # Use gfortran frontend, as gcc frontend might not be
                    # present on osx
                    # The file-system might be not case-sensitive,
                    # so take care to handle this by renaming.
                    # For preprocessing and further operation the
                    # expected file-name and extension needs to be preserved.
                    subprocess.check_call(
                        [
                            "gfortran",
                            "-E",
                            "-C",
                            "-P",
                            filepath,
                            "-o",
                            filepath.with_suffix(".f77"),
                        ]
                    )
                    filepath = filepath.with_suffix(".f77")
                # -R flag is important, it means that Fortran functions that
                # return real e.g. sdot will be transformed into C functions
                # that return float. For historic reasons, by default f2c
                # transform them into functions that return a double. Using -R
                # allows to match what OpenBLAS has done when they f2ced their
                # Fortran files, see
                # https://github.com/xianyi/OpenBLAS/pull/3539#issuecomment-1493897254
                # for more details
                with (
                    open(filepath) as input_pipe,
                    open(filepath.with_suffix(".c"), "w") as output_pipe,
                ):
                    subprocess.check_call(
                        ["f2c", "-R"],
                        stdin=input_pipe,
                        stdout=output_pipe,
                        cwd=filepath.parent,
                    )
                fix_f2c_output(arg[:-2] + ".c")
            # Swap the Fortran source for the generated C file in the command.
            new_args.append(arg[:-2] + ".c")
            found_source = True
        else:
            new_args.append(arg)

    # NOTE: this joins the *original* args (not new_args); it is used only for
    # the heuristic below and for the skip message.
    new_args_str = " ".join(args)
    # Linking a shared object (other than libgfortran itself) still needs to
    # go through the replacement gcc command even without Fortran sources.
    if ".so" in new_args_str and "libgfortran.so" not in new_args_str:
        found_source = True

    if not found_source:
        print(f"f2c: source not found, skipping: {new_args_str}")
        return None
    return new_args
|
|
|
|
|
|
|
|
|
2022-04-01 22:36:55 +00:00
|
|
|
def get_library_output(line: list[str]) -> str | None:
|
2021-12-22 01:40:13 +00:00
|
|
|
"""
|
|
|
|
Check if the command is a linker invocation. If so, return the name of the
|
|
|
|
output file.
|
|
|
|
"""
|
2022-09-30 02:06:41 +00:00
|
|
|
SHAREDLIB_REGEX = re.compile(r"\.so(.\d+)*$")
|
2021-12-22 01:40:13 +00:00
|
|
|
for arg in line:
|
2022-09-30 02:06:41 +00:00
|
|
|
if not arg.startswith("-") and SHAREDLIB_REGEX.search(arg):
|
2021-12-22 01:40:13 +00:00
|
|
|
return arg
|
|
|
|
return None
|
2018-10-23 10:11:25 +00:00
|
|
|
|
2021-12-22 01:40:13 +00:00
|
|
|
|
2022-07-02 13:53:16 +00:00
|
|
|
def replay_genargs_handle_dashl(arg: str, used_libs: set[str]) -> str | None:
|
2018-10-23 10:11:25 +00:00
|
|
|
"""
|
2021-12-22 01:40:13 +00:00
|
|
|
Figure out how to replace a `-lsomelib` argument.
|
2021-01-03 15:37:44 +00:00
|
|
|
|
2021-12-22 01:40:13 +00:00
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
arg
|
|
|
|
The argument we are replacing. Must start with `-l`.
|
2018-06-20 18:54:47 +00:00
|
|
|
|
2021-12-22 01:40:13 +00:00
|
|
|
used_libs
|
|
|
|
The libraries we've used so far in this command. emcc fails out if `-lsomelib`
|
|
|
|
occurs twice, so we have to track this.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
The new argument, or None to delete the argument.
|
|
|
|
"""
|
|
|
|
assert arg.startswith("-l")
|
|
|
|
|
|
|
|
if arg == "-lffi":
|
|
|
|
return None
|
|
|
|
|
2022-07-04 07:11:55 +00:00
|
|
|
if arg == "-lgfortran":
|
2021-12-22 01:40:13 +00:00
|
|
|
return None
|
|
|
|
|
|
|
|
# WASM link doesn't like libraries being included twice
|
|
|
|
# skip second one
|
|
|
|
if arg in used_libs:
|
|
|
|
return None
|
|
|
|
used_libs.add(arg)
|
|
|
|
return arg
|
|
|
|
|
|
|
|
|
2022-04-01 22:36:55 +00:00
|
|
|
def replay_genargs_handle_dashI(arg: str, target_install_dir: str) -> str | None:
|
2021-12-22 01:40:13 +00:00
|
|
|
"""
|
|
|
|
Figure out how to replace a `-Iincludepath` argument.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
arg
|
|
|
|
The argument we are replacing. Must start with `-I`.
|
|
|
|
|
|
|
|
target_install_dir
|
|
|
|
The target_install_dir argument.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
The new argument, or None to delete the argument.
|
|
|
|
"""
|
|
|
|
assert arg.startswith("-I")
|
|
|
|
if (
|
|
|
|
str(Path(arg[2:]).resolve()).startswith(sys.prefix + "/include/python")
|
|
|
|
and "site-packages" not in arg
|
|
|
|
):
|
|
|
|
return arg.replace("-I" + sys.prefix, "-I" + target_install_dir)
|
|
|
|
# Don't include any system directories
|
|
|
|
if arg[2:].startswith("/usr"):
|
|
|
|
return None
|
|
|
|
return arg
|
|
|
|
|
|
|
|
|
2022-06-30 06:08:46 +00:00
|
|
|
def replay_genargs_handle_linker_opts(arg: str) -> str | None:
|
2021-12-25 20:08:43 +00:00
|
|
|
"""
|
|
|
|
ignore some link flags
|
|
|
|
it should not check if `arg == "-Wl,-xxx"` and ignore directly here,
|
|
|
|
because arg may be something like "-Wl,-xxx,-yyy" where we only want
|
|
|
|
to ignore "-xxx" but not "-yyy".
|
|
|
|
"""
|
|
|
|
|
|
|
|
assert arg.startswith("-Wl")
|
|
|
|
link_opts = arg.split(",")[1:]
|
|
|
|
new_link_opts = ["-Wl"]
|
|
|
|
for opt in link_opts:
|
|
|
|
if opt in [
|
|
|
|
"-Bsymbolic-functions",
|
|
|
|
# breaks emscripten see https://github.com/emscripten-core/emscripten/issues/14460
|
|
|
|
"--strip-all",
|
2021-12-31 17:29:36 +00:00
|
|
|
"-strip-all",
|
2021-12-25 20:08:43 +00:00
|
|
|
# wasm-ld does not regconize some link flags
|
|
|
|
"--sort-common",
|
|
|
|
"--as-needed",
|
|
|
|
]:
|
|
|
|
continue
|
2022-07-10 10:11:03 +00:00
|
|
|
|
|
|
|
if opt.startswith(
|
|
|
|
(
|
|
|
|
"--sysroot=", # ignore unsupported --sysroot compile argument used in conda
|
|
|
|
"--version-script=",
|
|
|
|
"-R/", # wasm-ld does not accept -R (runtime libraries)
|
2022-08-08 12:02:43 +00:00
|
|
|
"-R.", # wasm-ld does not accept -R (runtime libraries)
|
2023-01-08 23:21:37 +00:00
|
|
|
"--exclude-libs=",
|
2022-07-10 10:11:03 +00:00
|
|
|
)
|
|
|
|
):
|
2021-12-31 17:29:36 +00:00
|
|
|
continue
|
2022-07-10 10:11:03 +00:00
|
|
|
|
2021-12-25 20:08:43 +00:00
|
|
|
new_link_opts.append(opt)
|
|
|
|
if len(new_link_opts) > 1:
|
|
|
|
return ",".join(new_link_opts)
|
|
|
|
else:
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
2022-04-01 22:36:55 +00:00
|
|
|
def replay_genargs_handle_argument(arg: str) -> str | None:
|
2021-12-22 01:40:13 +00:00
|
|
|
"""
|
|
|
|
Figure out how to replace a general argument.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
arg
|
|
|
|
The argument we are replacing. Must not start with `-I` or `-l`.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
The new argument, or None to delete the argument.
|
|
|
|
"""
|
|
|
|
assert not arg.startswith("-I") # should be handled by other functions
|
|
|
|
assert not arg.startswith("-l")
|
2021-12-25 20:08:43 +00:00
|
|
|
assert not arg.startswith("-Wl,")
|
2021-12-22 01:40:13 +00:00
|
|
|
|
|
|
|
# Don't include any system directories
|
|
|
|
if arg.startswith("-L/usr"):
|
|
|
|
return None
|
|
|
|
|
|
|
|
# fmt: off
|
|
|
|
if arg in [
|
|
|
|
# threading is disabled for now
|
|
|
|
"-pthread",
|
|
|
|
# this only applies to compiling fortran code, but we already f2c'd
|
|
|
|
"-ffixed-form",
|
2023-01-03 02:20:26 +00:00
|
|
|
"-fallow-argument-mismatch",
|
2021-12-22 01:40:13 +00:00
|
|
|
# On Mac, we need to omit some darwin-specific arguments
|
|
|
|
"-bundle", "-undefined", "dynamic_lookup",
|
|
|
|
# This flag is needed to build numpy with SIMD optimization which we currently disable
|
|
|
|
"-mpopcnt",
|
|
|
|
# gcc flag that clang does not support
|
|
|
|
"-Bsymbolic-functions",
|
2021-12-31 17:29:36 +00:00
|
|
|
'-fno-second-underscore',
|
2023-01-28 00:36:46 +00:00
|
|
|
'-fstack-protector', # doesn't work?
|
|
|
|
'-fno-strict-overflow', # warning: argument unused during compilation
|
2021-12-22 01:40:13 +00:00
|
|
|
]:
|
|
|
|
return None
|
|
|
|
# fmt: on
|
|
|
|
return arg
|
|
|
|
|
|
|
|
|
2022-09-22 01:42:26 +00:00
|
|
|
def get_cmake_compiler_flags() -> list[str]:
    """
    Generate cmake compiler flags.
    emcmake will set these values to emcc, em++, ...
    but we need to set them to cc, c++, in order to make them pass to pywasmcross.

    Returns
    -------
    The commandline flags to pass to cmake.
    """
    # Map each cmake cache variable to the pywasmcross symlink it should use.
    compiler_flags = {
        "CMAKE_C_COMPILER": "cc",
        "CMAKE_CXX_COMPILER": "c++",
        "CMAKE_AR": "ar",
        "CMAKE_C_COMPILER_AR": "ar",
        "CMAKE_CXX_COMPILER_AR": "ar",
    }

    flags = []
    # sys.argv[0] is one of our compiler symlinks, so its parent directory
    # contains all of the pywasmcross symlinks.
    symlinks_dir = Path(sys.argv[0]).parent
    for key, value in compiler_flags.items():
        # Sanity check: only point cmake at names we actually provide.
        assert value in SYMLINKS

        flags.append(f"-D{key}={symlinks_dir / value}")

    return flags
|
|
|
|
|
|
|
|
|
2022-07-04 18:32:36 +00:00
|
|
|
def _calculate_object_exports_readobj_parse(output: str) -> list[str]:
|
2022-06-23 14:15:21 +00:00
|
|
|
"""
|
2022-07-04 18:32:36 +00:00
|
|
|
>>> _calculate_object_exports_readobj_parse(
|
|
|
|
... '''
|
|
|
|
... Format: WASM \\n Arch: wasm32 \\n AddressSize: 32bit
|
|
|
|
... Sections [
|
|
|
|
... Section { \\n Type: TYPE (0x1) \\n Size: 5 \\n Offset: 8 \\n }
|
|
|
|
... Section { \\n Type: IMPORT (0x2) \\n Size: 32 \\n Offset: 19 \\n }
|
|
|
|
... ]
|
|
|
|
... Symbol {
|
|
|
|
... Name: g2 \\n Type: FUNCTION (0x0) \\n
|
|
|
|
... Flags [ (0x0) \\n ]
|
|
|
|
... ElementIndex: 0x2
|
|
|
|
... }
|
|
|
|
... Symbol {
|
|
|
|
... Name: f2 \\n Type: FUNCTION (0x0) \\n
|
|
|
|
... Flags [ (0x4) \\n VISIBILITY_HIDDEN (0x4) \\n ]
|
|
|
|
... ElementIndex: 0x1
|
|
|
|
... }
|
|
|
|
... Symbol {
|
|
|
|
... Name: l \\n Type: FUNCTION (0x0)
|
|
|
|
... Flags [ (0x10)\\n UNDEFINED (0x10) \\n ]
|
|
|
|
... ImportModule: env
|
|
|
|
... ElementIndex: 0x0
|
|
|
|
... }
|
|
|
|
... '''
|
|
|
|
... )
|
|
|
|
['g2']
|
2022-06-23 14:15:21 +00:00
|
|
|
"""
|
2022-07-04 18:32:36 +00:00
|
|
|
result = []
|
|
|
|
insymbol = False
|
|
|
|
for line in output.split("\n"):
|
|
|
|
line = line.strip()
|
|
|
|
if line == "Symbol {":
|
|
|
|
insymbol = True
|
|
|
|
export = True
|
|
|
|
name = None
|
|
|
|
symbol_lines = [line]
|
|
|
|
continue
|
|
|
|
if not insymbol:
|
|
|
|
continue
|
|
|
|
symbol_lines.append(line)
|
|
|
|
if line.startswith("Name:"):
|
|
|
|
name = line.removeprefix("Name:").strip()
|
|
|
|
if line.startswith(("BINDING_LOCAL", "UNDEFINED", "VISIBILITY_HIDDEN")):
|
|
|
|
export = False
|
|
|
|
if line == "}":
|
|
|
|
insymbol = False
|
|
|
|
if export:
|
|
|
|
if not name:
|
|
|
|
raise RuntimeError(
|
|
|
|
"Didn't find symbol's name:\n" + "\n".join(symbol_lines)
|
|
|
|
)
|
|
|
|
result.append(name)
|
|
|
|
return result
|
|
|
|
|
|
|
|
|
|
|
|
def calculate_object_exports_readobj(objects: list[str]) -> list[str] | None:
    """Run llvm-readobj on *objects* and return their public symbol names.

    Returns None when llvm-readobj reports it cannot handle bitcode files, so
    the caller can fall back to the emnm-based path. Exits the process if the
    tool fails for any other reason.
    """
    readobj_path = shutil.which("llvm-readobj")
    if not readobj_path:
        # Not on PATH; fall back to the copy shipped next to emcc
        # (emcc lives in <root>/emscripten/emcc, readobj in <root>/bin).
        which_emcc = shutil.which("emcc")
        assert which_emcc
        emcc = Path(which_emcc)
        readobj_path = str((emcc / "../../bin/llvm-readobj").resolve())
    args = [
        readobj_path,
        "--section-details",
        "-st",
    ] + objects
    completedprocess = subprocess.run(
        args, encoding="utf8", capture_output=True, env={"PATH": os.environ["PATH"]}
    )
    if completedprocess.returncode:
        print(f"Command '{' '.join(args)}' failed. Output to stderr was:")
        print(completedprocess.stderr)
        sys.exit(completedprocess.returncode)

    # llvm-readobj cannot parse bitcode; signal the caller to use emnm instead.
    if "bitcode files are not supported" in completedprocess.stderr:
        return None

    return _calculate_object_exports_readobj_parse(completedprocess.stdout)
|
|
|
|
|
|
|
|
|
|
|
|
def calculate_object_exports_nm(objects: list[str]) -> list[str]:
    """Return exported symbols of *objects* by invoking ``emnm``.

    Exits the process with emnm's return code if the command fails.
    """
    args = ["emnm", "-j", "--export-symbols", *objects]
    proc = subprocess.run(
        args,
        encoding="utf8",
        capture_output=True,
        env={"PATH": os.environ["PATH"]},
    )
    if proc.returncode:
        print(f"Command '{' '.join(args)}' failed. Output to stderr was:")
        print(proc.stderr)
        sys.exit(proc.returncode)
    # emnm emits one symbol per line.
    return proc.stdout.splitlines()
|
|
|
|
|
2022-06-23 14:15:21 +00:00
|
|
|
|
2023-03-17 00:00:51 +00:00
|
|
|
def filter_objects(line: list[str]) -> list[str]:
    """
    Collect up all the object files and archive files being linked.
    """
    result = []
    for arg in line:
        # Keep archives/objects, plus response files
        # (https://gcc.gnu.org/wiki/Response_Files) which may list more of them.
        is_object_or_archive = arg.endswith((".a", ".o"))
        is_response_file = arg.startswith("@")
        if is_object_or_archive or is_response_file:
            result.append(arg)
    return result
|
|
|
|
|
|
|
|
|
2022-07-04 18:32:36 +00:00
|
|
|
def calculate_exports(line: list[str], export_all: bool) -> Iterable[str]:
    """
    List out symbols from object files and archive files that are marked as public.
    If ``export_all`` is ``True``, then return all public symbols.
    If not, return only the public symbols that begin with `PyInit`.
    """
    objects = filter_objects(line)
    exports = None
    # Using emnm is simpler but it cannot handle bitcode. If we're only
    # exporting the PyInit symbols, save effort by using nm.
    if export_all:
        exports = calculate_object_exports_readobj(objects)
    if exports is None:
        # Either export_all is false or we are linking at least one bitcode
        # object. Fall back to a more conservative estimate of the symbols
        # exported. This can export things with `__visibility__("hidden")`
        exports = calculate_object_exports_nm(objects)
    if export_all:
        return exports
    # Lazily keep only the module-init symbols CPython looks for.
    return (x for x in exports if x.startswith("PyInit"))
|
2022-06-23 14:15:21 +00:00
|
|
|
|
|
|
|
|
2022-06-30 06:08:46 +00:00
|
|
|
def get_export_flags(
|
|
|
|
line: list[str],
|
|
|
|
exports: Literal["whole_archive", "requested", "pyinit"] | list[str],
|
|
|
|
) -> Iterator[str]:
|
2022-06-23 14:15:21 +00:00
|
|
|
"""
|
|
|
|
If "whole_archive" was requested, no action is needed. Otherwise, add
|
|
|
|
`-sSIDE_MODULE=2` and the appropriate export list.
|
|
|
|
"""
|
|
|
|
if exports == "whole_archive":
|
|
|
|
return
|
|
|
|
yield "-sSIDE_MODULE=2"
|
|
|
|
if isinstance(exports, str):
|
|
|
|
export_list = calculate_exports(line, exports == "requested")
|
|
|
|
else:
|
2022-07-04 18:32:36 +00:00
|
|
|
export_list = exports
|
2022-06-23 14:15:21 +00:00
|
|
|
prefixed_exports = ["_" + x for x in export_list]
|
|
|
|
yield f"-sEXPORTED_FUNCTIONS={prefixed_exports!r}"
|
|
|
|
|
|
|
|
|
2023-05-09 10:50:38 +00:00
|
|
|
def handle_command_generate_args(  # noqa: C901
    line: list[str], build_args: BuildArgs, is_link_command: bool
) -> list[str]:
    """
    A helper command for `handle_command` that generates the new arguments for
    the compilation.

    Unlike `handle_command` this avoids I/O: it doesn't sys.exit, it doesn't run
    subprocesses, it doesn't create any files, and it doesn't write to stdout.

    Parameters
    ----------
    line
        The original compilation command as a list e.g., ["gcc", "-c",
        "input.c", "-o", "output.c"]. May be mutated in place (e.g. for
        ``ar``/``ranlib``/``strip``/``cmake`` invocations).

    build_args
        The arguments that pywasmcross was invoked with.

    is_link_command
        Is this a linker invocation?

    Returns
    -------
        An updated argument list suitable for use with emscripten.

    Examples
    --------

    >>> from collections import namedtuple
    >>> Args = namedtuple('args', ['cflags', 'cxxflags', 'ldflags', 'target_install_dir'])
    >>> args = Args(cflags='', cxxflags='', ldflags='', target_install_dir='')
    >>> handle_command_generate_args(['gcc', 'test.c'], args, False)
    ['emcc', 'test.c', '-Werror=implicit-function-declaration', '-Werror=mismatched-parameter-types', '-Werror=return-type']
    """
    # Compiler-introspection queries: answer them directly instead of
    # translating the command.
    if "-print-multiarch" in line:
        return ["echo", "wasm32-emscripten"]
    for arg in line:
        if arg.startswith("-print-file-name"):
            return line
    if len(line) == 2 and line[1] == "-v":
        return ["emcc", "-v"]

    # Map the invoked tool onto its emscripten counterpart.
    cmd = line[0]
    if cmd == "ar":
        line[0] = "emar"
        return line
    elif cmd == "c++" or cmd == "g++":
        new_args = ["em++"]
    elif cmd == "cc" or cmd == "gcc" or cmd == "ld":
        new_args = ["emcc"]
        # distutils doesn't use the c++ compiler when compiling c++ <sigh>
        if any(arg.endswith((".cpp", ".cc")) for arg in line):
            new_args = ["em++"]
    elif cmd == "cmake":
        # If it is a build/install command, or running a script, we don't do anything.
        if "--build" in line or "--install" in line or "-P" in line:
            return line

        flags = get_cmake_compiler_flags()
        line[:1] = [
            "emcmake",
            "cmake",
            *flags,
            # Since we create a temporary directory and install compiler symlinks every time,
            # CMakeCache.txt will contain invalid paths to the compiler when re-running,
            # so we need to tell CMake to ignore the existing cache and build from scratch.
            "--fresh",
        ]
        return line
    elif cmd == "ranlib":
        line[0] = "emranlib"
        return line
    elif cmd == "strip":
        line[0] = "emstrip"
        return line
    else:
        # Unrecognized tool: pass the command through untouched.
        return line

    # Tracks libraries already linked so duplicate -l flags can be dropped
    # by replay_genargs_handle_dashl.
    used_libs: set[str] = set()
    # Go through and adjust arguments
    for arg in line[1:]:
        if new_args[-1].startswith("-B") and "compiler_compat" in arg:
            # conda uses custom compiler search paths with the compiler_compat folder.
            # Ignore it.
            del new_args[-1]
            continue

        if arg.startswith("-l"):
            result = replay_genargs_handle_dashl(arg, used_libs)
        elif arg.startswith("-I"):
            result = replay_genargs_handle_dashI(arg, build_args.target_install_dir)
        elif arg.startswith("-Wl"):
            result = replay_genargs_handle_linker_opts(arg)
        else:
            result = replay_genargs_handle_argument(arg)

        # Handlers return None (falsy) for arguments that must be dropped.
        if result:
            new_args.append(result)

    new_args.extend(
        [
            "-Werror=implicit-function-declaration",
            "-Werror=mismatched-parameter-types",
            "-Werror=return-type",
        ]
    )

    # set linker and C flags to error on anything to do with function declarations being wrong.
    # Better to fail at compile or link time.
    if is_link_command:
        new_args.append("-Wl,--fatal-warnings")
        new_args.extend(build_args.ldflags.split())
        new_args.extend(get_export_flags(line, build_args.exports))

    # Compile-only invocations additionally get the configured C/C++ flags
    # and the target Python's include directory.
    if "-c" in line:
        if new_args[0] == "emcc":
            new_args.extend(build_args.cflags.split())
        elif new_args[0] == "em++":
            new_args.extend(build_args.cflags.split() + build_args.cxxflags.split())

        if build_args.pythoninclude:
            new_args.extend(["-I", build_args.pythoninclude])

    return new_args
|
|
|
|
|
|
|
|
|
No replay (#2256)
Our package build process currently has a significant flaw: we first run setup.py, recording all compilation commands, then we rewrite these compilation commands to invoke emcc and replay them, and then we pray that the cross compiled executables ended up in the right place to go into the wheel. This is not a good strategy because the build script is allowed to implement arbitrary logic, and if it moves, renames, etc any of the output files then we lose track of them. This has repeatedly caused difficulty for us.
However, we also make no particularly significant use of the two pass approach. We can just do the simpler thing: capture the compiler commands as they occur, modify them as needed, and then run the fixed command.
I also added a patch to fix the numpy feature detection for wasm so that we don't have to include _npyconfig.h and config.h, numpy can generate them in the way it would for a native build. I opened a numpy PR that would fix the detection for us upstream:
numpy/numpy#21154
This clears the way for us to switch to using pypa/build (as @henryiii has suggested) by removing our dependence on specific setuptools behavior.
This is on top of #2238.
2022-03-13 18:39:06 +00:00
|
|
|
def handle_command(
    line: list[str],
    build_args: BuildArgs,
) -> NoReturn:
    """Handle a compilation command. Exit with an appropriate exit code when done.

    Rewrites ``line`` into the equivalent emscripten invocation (via
    ``handle_command_generate_args``), runs it as a subprocess, and exits
    the process with the subprocess's return code.

    Parameters
    ----------
    line : iterable
        an iterable with the compilation arguments
    build_args : BuildArgs
        a container with additional compilation options
    """
    # NOTE(review): a command is treated as a link step when it produces a
    # library output; some libraries have different names on wasm
    # (e.g. png16 = png).
    is_link_cmd = get_library_output(line) is not None

    if line[0] == "gfortran":
        # Version queries are answered by the real gfortran.
        if "-dumpversion" in line:
            sys.exit(subprocess.run(line).returncode)
        # Translate the Fortran sources to C with f2c; None means there was
        # nothing to compile, so report success.
        tmp = replay_f2c(line)
        if tmp is None:
            sys.exit(0)
        line = tmp

    new_args = handle_command_generate_args(line, build_args, is_link_cmd)

    if build_args.pkgname == "scipy":
        # Imported lazily: only scipy builds need (or have) this module.
        from pyodide_build._f2c_fixes import scipy_fixes

        scipy_fixes(new_args)

    returncode = subprocess.run(new_args).returncode

    # Propagate the wrapped compiler's exit status to our caller.
    sys.exit(returncode)
|
2018-06-20 18:54:47 +00:00
|
|
|
|
|
|
|
|
2022-07-02 05:00:27 +00:00
|
|
|
def compiler_main():
    """Entry point used when pywasmcross is invoked through a compiler symlink.

    Rebuilds the command line with the bare tool name (e.g. ``gcc``) as
    argv[0] and hands it to ``handle_command``, which exits the process.
    """
    argv = list(sys.argv)
    argv[0] = Path(argv[0]).name
    sys.exit(handle_command(argv, BuildArgs(**PYWASMCROSS_ARGS)))
|
2022-07-02 05:00:27 +00:00
|
|
|
|
|
|
|
|
|
|
|
# When this file is reached via one of the compiler symlinks (cc, c++, ld,
# ...) rather than imported as a module, act as the compiler wrapper.
if IS_COMPILER_INVOCATION:
    compiler_main()
|