# coding: utf8
from __future__ import unicode_literals

from ...attrs import LANG
from ...language import Language
from ...tokens import Doc
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from ...util import update_exc
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS


class ChineseDefaults(Language.Defaults):
    lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
    lex_attr_getters[LANG] = lambda text: 'zh'  # for pickling
    # Segment with jieba by default; set to False to fall back to
    # character-level tokenization in Chinese.make_doc.
    use_jieba = True
    tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
    tag_map = TAG_MAP
    stop_words = STOP_WORDS


class Chinese(Language):
    lang = 'zh'
    Defaults = ChineseDefaults  # override defaults

    def make_doc(self, text):
        if self.Defaults.use_jieba:
            try:
                import jieba
            except ImportError:
                msg = ("Jieba not installed. Either set Chinese.use_jieba = False, "
                       "or install it from https://github.com/fxsjy/jieba")
                raise ImportError(msg)
            # Segment with jieba's accurate mode and drop any empty segments.
            words = list(jieba.cut(text, cut_all=False))
            words = [x for x in words if x]
            return Doc(self.vocab, words=words, spaces=[False] * len(words))
        else:
            # Fall back to the default tokenizer and split each token into
            # single characters, keeping trailing whitespace on the last one.
            words = []
            spaces = []
            doc = self.tokenizer(text)
            for token in doc:
                words.extend(list(token.text))
                spaces.extend([False] * len(token.text))
                spaces[-1] = bool(token.whitespace_)
            return Doc(self.vocab, words=words, spaces=spaces)
|
2017-05-03 09:01:42 +00:00
|
|
|
|
|
|
|
|
2017-05-08 13:54:36 +00:00
|
|
|
__all__ = ['Chinese']
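
# Example usage (a minimal sketch, assuming this module is importable as
# spacy.lang.zh and that the optional jieba package is installed):
#
#     from spacy.lang.zh import Chinese
#     nlp = Chinese()
#     doc = nlp.make_doc('我爱自然语言处理')
#     print([token.text for token in doc])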