diff --git a/spacy/lang/ja/__init__.py b/spacy/lang/ja/__init__.py
index 04cc013a4..3b67c5489 100644
--- a/spacy/lang/ja/__init__.py
+++ b/spacy/lang/ja/__init__.py
@@ -21,8 +21,25 @@ class JapaneseTokenizer(object):
         words = [x.surface for x in self.tokenizer.tokenize(text)]
         return Doc(self.vocab, words=words, spaces=[False]*len(words))
 
+    # add dummy methods for to_bytes, from_bytes, to_disk and from_disk to
+    # allow serialization (see #1557)
+    def to_bytes(self, **exclude):
+        return b''
+
+    def from_bytes(self, bytes_data, **exclude):
+        return self
+
+    def to_disk(self, path, **exclude):
+        return None
+
+    def from_disk(self, path, **exclude):
+        return self
+
 
 class JapaneseDefaults(Language.Defaults):
+    lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
+    lex_attr_getters[LANG] = lambda text: 'ja'
+
     @classmethod
     def create_tokenizer(cls, nlp=None):
         return JapaneseTokenizer(cls, nlp)
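
A minimal sketch of the serialization round trip these dummy methods are meant to unblock (see #1557). It assumes Janome is installed and that the pipeline is created via spacy.blank('ja'); the exact call that failed in the original issue may differ. Because the Janome tokenizer holds no trainable state, returning b'' / self is enough for Language.to_bytes and from_bytes to succeed.

    # usage sketch (hypothetical), not part of the patch
    import spacy

    nlp = spacy.blank('ja')                     # uses JapaneseDefaults / JapaneseTokenizer
    doc = nlp('これは日本語のテキストです')          # tokenization via Janome works as before

    data = nlp.to_bytes()                       # no longer raises: tokenizer contributes b''
    nlp2 = spacy.blank('ja').from_bytes(data)   # tokenizer is simply re-created on load
    print([t.text for t in nlp2('テスト')])      # pipeline remains usable after the round trip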