# encoding: utf8
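# Language data module: gathers the stop word list and tokenizer
# exception helpers from sibling modules and re-exports them.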
from __future__ import unicode_literals
from .stop_words import STOP_WORDS
from .tokenizer_exceptions import get_tokenizer_exceptions, TOKEN_MATCH
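# Rebuild STOP_WORDS as a set so duplicates are dropped and membership
# checks are constant time on average.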
STOP_WORDS = set(STOP_WORDS)
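# Names exported when this module is imported with a star import.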
__all__ = ["STOP_WORDS", "get_tokenizer_exceptions", "TOKEN_MATCH"]