mirror of https://github.com/explosion/spaCy.git
Add LANG attribute to English and German
parent 05e2a589a4
commit 8c8f5c62c6
spacy/de/__init__.py
@@ -3,6 +3,7 @@ from __future__ import unicode_literals, print_function
 from os import path
 
 from ..language import Language
+from ..attrs import LANG
 from . import language_data
 
 
@@ -11,6 +12,8 @@ class German(Language):
 
     class Defaults(Language.Defaults):
         tokenizer_exceptions = dict(language_data.TOKENIZER_EXCEPTIONS)
+        lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
+        lex_attr_getters[LANG] = lambda text: 'de'
 
         prefixes = tuple(language_data.TOKENIZER_PREFIXES)
 
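The two German hunks amount to the pattern below: copy the shared lex_attr_getters dict, then override its LANG entry for the language at hand. This is a minimal sketch assuming the spaCy v1-era Language.Defaults API; the SampleLanguage class and the 'xx' code are hypothetical placeholders, not part of the commit.

    # Sketch of the pattern this commit applies (spaCy v1-era API assumed).
    # SampleLanguage and 'xx' are hypothetical placeholders.
    from spacy.attrs import LANG
    from spacy.language import Language

    class SampleLanguage(Language):
        class Defaults(Language.Defaults):
            # Copy the shared getters so the override stays local to this
            # subclass instead of mutating Language.Defaults in place.
            lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
            # Every lexeme created through this language's vocab now
            # reports 'xx' as its LANG attribute.
            lex_attr_getters[LANG] = lambda text: 'xx'

The dict(...) copy is the important step: assigning into Language.Defaults.lex_attr_getters directly would leak the override into every other language's defaults.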
spacy/en/__init__.py
@@ -8,6 +8,7 @@ from .. import util
 from ..lemmatizer import Lemmatizer
 from ..vocab import Vocab
 from ..tokenizer import Tokenizer
+from ..attrs import LANG
 
 
 class English(Language):
@@ -15,6 +16,7 @@ class English(Language):
 
     class Defaults(Language.Defaults):
         lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
+        lex_attr_getters[LANG] = lambda text: 'en'
 
         tokenizer_exceptions = dict(language_data.TOKENIZER_EXCEPTIONS)
 
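A quick way to see the effect of the English hunk is the hypothetical smoke test below. It assumes the v1-era spacy.en entry point with model data installed, and that Lexeme exposes the string form of LANG as lang_, as in later releases.

    # Hypothetical check, not part of the commit. Assumes the v1-era
    # spacy.en entry point and the Lexeme.lang_ string attribute.
    from spacy.en import English

    nlp = English()
    lex = nlp.vocab[u'apple']
    print(lex.lang_)   # expected: 'en', supplied by lex_attr_getters[LANG]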