mirror of https://github.com/explosion/spaCy.git
Adding unit test for tokenization in French (with title)
This commit is contained in:
parent
8ff4682255
commit
13b6957c8e
|
@ -10,12 +10,10 @@ class Lemmatizer(Lemmatizer):
|
||||||
return cls(lookup or {})
|
return cls(lookup or {})
|
||||||
|
|
||||||
def __init__(self, lookup):
    """Create the lemmatizer from a lookup table.

    lookup -- dict mapping a surface form (string) to its lemma (string).
    """
    # NOTE(review): the original printed "Mon lemmatizer" here — a leftover
    # debug statement that pollutes stdout on every construction; removed.
    self.lookup = lookup
|
||||||
|
|
||||||
def __call__(self, string, univ_pos, morphology=None):
    """Look up the lemma(s) for *string*.

    string -- the surface form to lemmatize.
    univ_pos -- universal POS tag (unused by this purely lookup-based
        lemmatizer, kept for interface compatibility).
    morphology -- optional morphological features dict (unused).

    Returns a one-element set containing the lemma from the lookup table,
    or the input string itself when the table has no entry for it.
    """
    # The original used a bare `except:`, which also swallows
    # KeyboardInterrupt/SystemExit and hides unrelated bugs; a missing
    # table entry (KeyError) is the only expected failure here.
    # Debug `print("call")` removed.
    try:
        return {self.lookup[string]}
    except KeyError:
        return {string}
|
Loading…
Reference in New Issue