from __future__ import unicode_literals

from os import path

from .. import orth
from ..vocab import Vocab
from ..tokenizer import Tokenizer
from ..syntax.parser import GreedyParser
from ..tokens import Tokens
from .pos import EnPosTagger
from .pos import POS_TAGS
from .attrs import get_flags


def get_lex_props(string):
    # Default lexeme properties the Vocab stores for each string: its flag
    # features plus a constant 'dense' value.
    return {'flags': get_flags(string), 'dense': 1}


class English(object):
    """The English NLP pipeline: vocabulary, tokenizer, and an optional
    part-of-speech tagger and greedy syntactic parser."""

    def __init__(self, data_dir=None, tag=True, parse=False):
        if data_dir is None:
            data_dir = path.join(path.dirname(__file__), 'data')
        self.vocab = Vocab(data_dir=data_dir, get_lex_props=get_lex_props)
        self.tokenizer = Tokenizer.from_dir(self.vocab, data_dir)
        self.tagger = EnPosTagger(self.vocab.strings, data_dir) if tag else None
        self.parser = GreedyParser(data_dir) if parse else None

    def __call__(self, text, tag=True, parse=True):
        # Tokenize, then apply whichever annotators were loaded and requested.
        tokens = self.tokenizer.tokenize(text)
        if self.tagger and tag:
            self.tagger(tokens)
        if self.parser and parse:
            self.parser.parse(tokens)
        return tokens
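

if __name__ == '__main__':
    # Minimal usage sketch: assumes the bundled 'data' directory is installed
    # alongside this package, and relies only on the __init__ and __call__
    # signatures defined above.
    nlp = English(tag=True, parse=False)
    tokens = nlp('The quick brown fox jumped over the lazy dog.')
    print(tokens)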