mirror of https://github.com/explosion/spaCy.git
Add dummy serialization methods for Japanese and missing lang getter (resolves #1557)
This commit is contained in:
parent
40c4e8fc09
commit
c9d72de0fb
|
@ -21,8 +21,25 @@ class JapaneseTokenizer(object):
|
|||
words = [x.surface for x in self.tokenizer.tokenize(text)]
|
||||
return Doc(self.vocab, words=words, spaces=[False]*len(words))
|
||||
|
||||
# Dummy serializers: the Japanese tokenizer keeps no learned state, so the
# serialization hooks required by the pipeline are no-ops (see #1557).
def to_bytes(self, **exclude):
    """Serialize the tokenizer to a bytestring. Stateless, so this is a
    placeholder that always yields an empty byte sequence."""
    return b""
|
||||
|
||||
def from_bytes(self, bytes_data, **exclude):
    """Load from a bytestring. Nothing is stored, so the payload is
    ignored and the tokenizer itself is handed back unchanged."""
    return self
|
||||
|
||||
def to_disk(self, path, **exclude):
    """Save to disk. There is no state to persist, so nothing is written
    and the call falls through (returning None implicitly)."""
|
||||
|
||||
def from_disk(self, path, **exclude):
    """Load from disk. The given path is ignored — nothing was written by
    to_disk — and the unchanged tokenizer is returned."""
    return self
|
||||
|
||||
|
||||
class JapaneseDefaults(Language.Defaults):
    """Language defaults for Japanese: shared lexical attribute getters
    with the language code overridden, plus a tokenizer factory."""

    # Copy the shared getters and pin the LANG attribute to 'ja' in one
    # expression, leaving the base class's mapping untouched.
    lex_attr_getters = {
        **Language.Defaults.lex_attr_getters,
        LANG: lambda text: 'ja',
    }

    @classmethod
    def create_tokenizer(cls, nlp=None):
        """Build the custom Japanese tokenizer for this language class."""
        return JapaneseTokenizer(cls, nlp)
|
||||
|
|
Loading…
Reference in New Issue