mirror of https://github.com/explosion/spaCy.git

commit 0e2498da00
parent c5902f2b4b

Replace from_package with load() classmethod in Vocab
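The diff below (apparently from spacy/vocab.pyx, judging by the "cdef class Vocab" hunk context) renames Vocab.from_package to Vocab.load and widens the accepted argument: instead of requiring a pre-built package object, load() takes a package, a path string, or a file, and normalises it through MockPackage.create_or_return. A minimal usage sketch under that reading; the data directory path is illustrative, not taken from this commit:

    from spacy.vocab import Vocab

    # Before this commit, callers had to construct a package object first:
    #   vocab = Vocab.from_package(package)
    # After it, a plain directory path (or an open file, or a package) works,
    # because load() normalises its argument internally:
    vocab = Vocab.load('/path/to/model/data')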
@@ -19,6 +19,7 @@ from .orth cimport word_shape
 from .typedefs cimport attr_t
 from .cfile cimport CFile
 from .lemmatizer import Lemmatizer
+from .util import MockPackage
 
 from . import attrs
 from . import symbols
@@ -47,11 +48,12 @@ cdef class Vocab:
     '''A map container for a language's LexemeC structs.
     '''
     @classmethod
-    def from_package(cls, package, get_lex_attr=None):
+    def load(cls, pkg_or_str_or_file, get_lex_attr=None):
+        package = MockPackage.create_or_return(pkg_or_str_or_file)
         tag_map = package.load_utf8(json.load,
                                     'vocab', 'tag_map.json')
 
-        lemmatizer = Lemmatizer.from_package(package)
+        lemmatizer = Lemmatizer.load(package)
 
         serializer_freqs = package.load_utf8(json.load,
                                              'vocab', 'serializer.json',
@@ -67,7 +69,6 @@ cdef class Vocab:
         if package.has_file('vocab', 'vec.bin'):   # TODO: really optional?
             self.vectors_length = self.load_vectors_from_bin_loc(
                 package.file_path('vocab', 'vec.bin'))
-
         return self
 
     def __init__(self, get_lex_attr=None, tag_map=None, lemmatizer=None, serializer_freqs=None):
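The new load() leans on MockPackage.create_or_return from spacy.util, which this commit's diff does not show. A plausible sketch of the semantics its name and call site imply; the real helper in util.py may differ in details such as the attribute names:

    class MockPackage(object):
        # Hypothetical sketch: a thin package-like wrapper around a data dir.
        def __init__(self, data_path=None):
            self.data_path = data_path

        @classmethod
        def create_or_return(cls, me_or_arg):
            # Pass an existing MockPackage through unchanged; wrap anything
            # else (e.g. a path string) so Vocab.load() can treat a package,
            # a string, and a file uniformly.
            return me_or_arg if isinstance(me_or_arg, cls) else cls(me_or_arg)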