mirror of https://github.com/explosion/spaCy.git
Merge branch 'master' of https://github.com/explosion/spaCy
Commit: 58a19518cf
requirements.txt
@@ -3,7 +3,7 @@ pathlib
 numpy>=1.7
 cymem>=1.30,<1.32
 preshed>=1.0.0,<2.0.0
-thinc>=6.10.1,<6.11.0
+thinc==6.10.2.dev1
 murmurhash>=0.28,<0.29
 plac<1.0.0,>=0.9.6
 six
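The previous range pin (thinc>=6.10.1,<6.11.0) cannot resolve to a development build: under PEP 440, dev and pre-releases are excluded from range specifiers by default, so depending on 6.10.2.dev1 requires an exact == pin. A small illustration of that rule, using the third-party packaging library (an assumption for the example; it is not part of this diff):

# Why the exact "==" pin: dev releases do not satisfy a plain version range.
# Illustration only, using the third-party "packaging" library; pip applies
# the same PEP 440 matching rules.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=6.10.1,<6.11.0")
print(spec.contains("6.10.2.dev1"))                    # False: dev builds excluded by default
print(spec.contains("6.10.2.dev1", prereleases=True))  # True only when pre-releases are opted in
print(SpecifierSet("==6.10.2.dev1").contains("6.10.2.dev1"))  # True: the exact pin opts in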
spacy/about.py
@@ -3,13 +3,13 @@
 # https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py

 __title__ = 'spacy'
-__version__ = '2.0.3'
+__version__ = '2.0.4.dev0'
 __summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
 __uri__ = 'https://spacy.io'
 __author__ = 'Explosion AI'
 __email__ = 'contact@explosion.ai'
 __license__ = 'MIT'
-__release__ = True
+__release__ = False

 __docs_models__ = 'https://spacy.io/usage/models'
 __download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
@@ -146,7 +146,7 @@ def list_files(data_dir):

 def list_requirements(meta):
     parent_package = meta.get('parent_package', 'spacy')
-    requirements = [parent_package + meta['spacy_version']]
+    requirements = [parent_package + ">=" + meta['spacy_version']]
     if 'setup_requires' in meta:
         requirements += meta['setup_requires']
     return requirements
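The old line concatenated the package name directly with the version taken from the model's meta, yielding an invalid requirement string such as spacy2.0.4; inserting ">=" produces a proper pip specifier. A minimal sketch of the fixed helper with an invented meta dict (the field values below are assumptions, not taken from a real model):

# Sketch of the fixed helper with a hypothetical meta dict; the values are
# invented for illustration.
def list_requirements(meta):
    parent_package = meta.get('parent_package', 'spacy')
    # Join name and version with ">=" so the result is a valid pip requirement,
    # e.g. "spacy>=2.0.4" instead of the previous, broken "spacy2.0.4".
    requirements = [parent_package + ">=" + meta['spacy_version']]
    if 'setup_requires' in meta:
        requirements += meta['setup_requires']
    return requirements

meta = {'spacy_version': '2.0.4', 'setup_requires': ['regex']}
print(list_requirements(meta))  # ['spacy>=2.0.4', 'regex']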
spacy/vectors.pyx
@@ -85,7 +85,7 @@ cdef class Vectors:
         return len(self.key2row)

     def __reduce__(self):
-        keys_and_rows = self.key2row.items()
+        keys_and_rows = tuple(self.key2row.items())
         return (unpickle_vectors, (keys_and_rows, self.data))

     def __getitem__(self, key):
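key2row.items() returns a dict view that references the live dictionary and, on Python 3, cannot be pickled; converting it to a tuple makes everything __reduce__ hands back picklable. A toy illustration of the same pattern, deliberately not spaCy's real Vectors class:

import pickle

# __reduce__ must return a callable plus a tuple of picklable arguments; a
# dict items view is not picklable on Python 3, so it is materialised as a
# plain tuple first.
class Table(object):
    def __init__(self, data):
        self.data = data
        self.key2row = {}

    def __reduce__(self):
        keys_and_rows = tuple(self.key2row.items())  # a plain tuple pickles cleanly
        return (unpickle_table, (keys_and_rows, self.data))


def unpickle_table(keys_and_rows, data):
    table = Table(data)
    table.key2row = dict(keys_and_rows)
    return table


table = Table([0.1, 0.2])
table.key2row = {'cat': 0, 'dog': 1}
restored = pickle.loads(pickle.dumps(table))
assert restored.key2row == {'cat': 0, 'dog': 1}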
spacy/vocab.pyx
@@ -477,26 +477,27 @@ cdef class Vocab:

 def pickle_vocab(vocab):
     sstore = vocab.strings
     vectors = vocab.vectors
     morph = vocab.morphology
     length = vocab.length
     data_dir = vocab.data_dir
     lex_attr_getters = dill.dumps(vocab.lex_attr_getters)
     lexemes_data = vocab.lexemes_to_bytes()
     return (unpickle_vocab,
-        (sstore, morph, data_dir, lex_attr_getters, lexemes_data, length))
+        (sstore, vectors, morph, data_dir, lex_attr_getters, lexemes_data, length))


-def unpickle_vocab(sstore, morphology, data_dir,
+def unpickle_vocab(sstore, vectors, morphology, data_dir,
                     lex_attr_getters, bytes lexemes_data, int length):
     cdef Vocab vocab = Vocab()
     vocab.length = length
+    vocab.vectors = vectors
     vocab.strings = sstore
     vocab.morphology = morphology
     vocab.data_dir = data_dir
     vocab.lex_attr_getters = dill.loads(lex_attr_getters)
     vocab.lexemes_from_bytes(lexemes_data)
     vocab.length = length
     link_vectors_to_models(vocab)
     return vocab
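Before this change, pickle_vocab read vocab.vectors but never passed it on, so a Vocab restored from a pickle lost its vectors table; the vectors object is now threaded through both pickle_vocab and unpickle_vocab. A hedged usage sketch against the spaCy 2.x API (the word 'apple' and the 4-dimensional vector are invented example values):

import pickle

import numpy
import spacy

# Usage sketch (spaCy 2.x API): after this change a pickled Vocab should keep
# its vectors table. "apple" and the 4-d vector are arbitrary example values.
nlp = spacy.blank('en')
nlp.vocab.set_vector('apple', numpy.ones((4,), dtype='f'))

restored = pickle.loads(pickle.dumps(nlp.vocab))
assert restored.has_vector('apple')  # the vector survives the round trip
assert restored.vectors.shape == nlp.vocab.vectors.shape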
website docs (Jade)
@@ -52,7 +52,7 @@ p
 +accordion("English", "dependency-parsing-english")
     p
         |  The English dependency labels use the
-        |  #[+a("http://www.mathcs.emory.edu/~choi/doc/clear-dependency-2012.pdf") CLEAR Style]
+        |  #[+a("http://www.mathcs.emory.edu/~choi/doc/cu-2012-choi.pdf") CLEAR Style]
         |  by #[+a("http://www.clearnlp.com") ClearNLP].

     +table(["Label", "Description"])