* Refactor setup

Matthew Honnibal 2015-01-05 05:30:56 +11:00
parent e0711d7024
commit 83fa1850e2
1 changed file with 63 additions and 75 deletions

setup.py

@@ -1,14 +1,15 @@
 #!/usr/bin/env python
+import subprocess
+from setuptools import setup
+from glob import glob
 import sys
 import os
 from os import path
-from glob import glob
-import sys
-from setuptools import setup
-from distutils.core import Extension
-import shutil
+from os.path import splitext
+from setuptools import Extension
 
 
 def clean(ext):
@@ -22,85 +23,72 @@ def clean(ext):
                 os.unlink(html)
 
 
-compile_args = []
-link_args = []
-libs = []
-
-includes = ['.']
-virtual_env = os.environ.get('VIRTUAL_ENV', '')
-includes = ['.']
-if virtual_env:
-    includes += glob(os.path.join(virtual_env, 'include', 'site'))
-    includes += glob(os.path.join(virtual_env, 'include', 'site', '*'))
-else:
-    pass
-
-ext_args = {'language': "c++", "include_dirs": includes}
-
-exts = [
-    Extension("spacy.typedefs", ["spacy/typedefs.cpp"], **ext_args),
-    Extension("spacy.strings", ["spacy/strings.cpp"], **ext_args),
-    Extension("spacy.lexeme", ["spacy/lexeme.cpp"], **ext_args),
-    Extension("spacy.vocab", ["spacy/vocab.cpp"], **ext_args),
-    Extension("spacy.tokens", ["spacy/tokens.cpp"], **ext_args),
-    Extension("spacy.morphology", ["spacy/morphology.cpp"], **ext_args),
-    Extension("spacy._ml", ["spacy/_ml.cpp"], **ext_args),
-    Extension("spacy.tokenizer", ["spacy/tokenizer.cpp"], **ext_args),
-    Extension("spacy.en.attrs", ["spacy/en/attrs.cpp"], **ext_args),
-    Extension("spacy.en.pos", ["spacy/en/pos.cpp"], **ext_args),
-    Extension("spacy.syntax.parser", ["spacy/syntax/parser.cpp"], **ext_args),
-    Extension("spacy.syntax._state", ["spacy/syntax/_state.cpp"], **ext_args),
-    Extension("spacy.syntax.arc_eager", ["spacy/syntax/arc_eager.cpp"], **ext_args),
-    Extension("spacy.syntax._parse_features", ["spacy/syntax/_parse_features.cpp"],
-              **ext_args)
-    #Extension("spacy.pos_feats", ["spacy/pos_feats.pyx"], language="c++", include_dirs=includes),
-    #Extension("spacy.ner._state", ["spacy/ner/_state.pyx"], language="c++", include_dirs=includes),
-    #Extension("spacy.ner.bilou_moves", ["spacy/ner/bilou_moves.pyx"], language="c++", include_dirs=includes),
-    #Extension("spacy.ner.io_moves", ["spacy/ner/io_moves.pyx"], language="c++", include_dirs=includes),
-    #Extension("spacy.ner.greedy_parser", ["spacy/ner/greedy_parser.pyx"], language="c++", include_dirs=includes),
-    #Extension("spacy.ner.pystate", ["spacy/ner/pystate.pyx"], language="c++", include_dirs=includes),
-    #Extension("spacy.ner.context", ["spacy/ner/context.pyx"], language="c++", include_dirs=includes),
-    #Extension("spacy.ner.feats", ["spacy/ner/feats.pyx"], language="c++", include_dirs=includes),
-    #Extension("spacy.ner.annot", ["spacy/ner/annot.pyx"], language="c++", include_dirs=includes),
-]
-
-
-if sys.argv[1] == 'clean':
-    print >> sys.stderr, "cleaning .c, .c++ and .so files matching sources"
-    map(clean, exts)
-
-
-setup(
-    name='spacy',
-    packages=['spacy', 'spacy.en', 'spacy.syntax'],
-    description="Industrial-strength NLP",
-    author='Matthew Honnibal',
-    author_email='honnibal@gmail.com',
-    version='0.15',
-    url="http://honnibal.github.io/spaCy/",
-    package_data={"spacy": ["*.pxd"],
-                  "spacy.en": ["*.pxd", "data/pos/*",
-                               "data/wordnet/*", "data/tokenizer/*",
-                               "data/vocab/*"],
-                  "spacy.syntax": ["*.pxd"]},
-    ext_modules=exts,
-    license="Dual: Commercial or AGPL",
-    install_requires=['murmurhash', 'numpy', 'cymem', 'preshed', 'thinc', "unidecode",
-                      "ujson"],
-    setup_requires=["headers_workaround"],
-)
-
-import headers_workaround
-
-include_dir = path.join(sys.prefix, 'include', 'site')
-if not path.exists(include_dir):
-    os.mkdir(include_dir)
-headers_workaround.install_headers(include_dir, 'murmurhash')
-headers_workaround.install_headers(include_dir, 'numpy')
+def name_to_path(mod_name, ext):
+    return '%s.%s' % (mod_name.replace('.', '/'), ext)
+
+
+def c_ext(mod_name, language, includes, compile_args):
+    mod_path = name_to_path(mod_name, language)
+    return Extension(mod_name, [mod_path], include_dirs=includes,
+                     extra_compile_args=compile_args, extra_link_args=compile_args)
+
+
+def cython_ext(mod_name, language, includes, compile_args):
+    import Cython.Distutils
+    import Cython.Build
+    mod_path = mod_name.replace('.', '/') + '.pyx'
+    if language == 'cpp':
+        language = 'c++'
+    ext = Extension(mod_name, [mod_path], language=language, include_dirs=includes,
+                    extra_compile_args=compile_args)
+    return Cython.Build.cythonize([ext])[0]
+
+
+def run_setup(exts):
+    setup(
+        name='spacy',
+        packages=['spacy', 'spacy.en', 'spacy.syntax'],
+        description="Industrial-strength NLP",
+        author='Matthew Honnibal',
+        author_email='honnibal@gmail.com',
+        version='0.15',
+        url="http://honnibal.github.io/spaCy/",
+        package_data={"spacy": ["*.pxd"],
+                      "spacy.en": ["*.pxd", "data/pos/*",
                                   "data/wordnet/*", "data/tokenizer/*",
+                                   "data/vocab/*"],
+                      "spacy.syntax": ["*.pxd"]},
+        ext_modules=exts,
+        license="Dual: Commercial or AGPL",
+        install_requires=['murmurhash', 'numpy', 'cymem', 'preshed', 'thinc',
+                          "unidecode", "ujson"],
+        setup_requires=["headers_workaround"],
+    )
+
+    import headers_workaround
+
+    headers_workaround.fix_venv_pypy_include()
+    headers_workaround.install_headers('murmurhash')
+    headers_workaround.install_headers('numpy')
+
+
+def main(modules, is_pypy):
+    language = "cpp"
+    ext_func = cython_ext if use_cython else c_ext
+    includes = ['.', path.join(sys.prefix, 'include')]
+    compile_args = ['-O3']
+    exts = [ext_func(mn, language, includes, compile_args) for mn in modules]
+    run_setup(exts)
+
+
+MOD_NAMES = ['spacy.typedefs', 'spacy.strings', 'spacy.lexeme',
+             'spacy.vocab', 'spacy.tokens', 'spacy.morphology',
+             'spacy._ml', 'spacy.tokenizer', 'spacy.en.attrs',
+             'spacy.en.pos', 'spacy.syntax.parser', 'spacy.syntax._state',
+             'spacy.syntax.arc_eager', 'spacy.syntax._parse_features']
+
+
+if __name__ == '__main__':
+    use_cython = sys.argv[1] == 'build_ext'
+    main(MOD_NAMES, use_cython)
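
Note for readers skimming the diff: the refactor replaces the hand-written exts = [...] list with a MOD_NAMES list plus two factory functions. c_ext wraps the checked-in .cpp sources, while cython_ext re-runs Cython over the .pyx sources when the command is build_ext. The sketch below is not part of the commit; it exercises only the c_ext path, reusing the names from the new code above, and the three-module subset and the final print are illustrative.

# Illustrative sketch of the c_ext build path from the refactored setup.py.
# Only setuptools is needed here; the cython_ext path would also require Cython.
import sys
from os import path
from setuptools import Extension


def name_to_path(mod_name, ext):
    # 'spacy.strings', 'cpp' -> 'spacy/strings.cpp'
    return '%s.%s' % (mod_name.replace('.', '/'), ext)


def c_ext(mod_name, language, includes, compile_args):
    # Wrap a pre-generated C++ source file in a setuptools Extension.
    mod_path = name_to_path(mod_name, language)
    return Extension(mod_name, [mod_path], include_dirs=includes,
                     extra_compile_args=compile_args, extra_link_args=compile_args)


# Illustrative subset of MOD_NAMES; includes and compile_args mirror main() above.
modules = ['spacy.typedefs', 'spacy.strings', 'spacy.lexeme']
includes = ['.', path.join(sys.prefix, 'include')]
exts = [c_ext(mod_name, 'cpp', includes, ['-O3']) for mod_name in modules]
print([ext.name for ext in exts])

Running python setup.py build_ext would instead take the cython_ext branch (use_cython is True for that command), provided Cython is installed.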