mirror of https://github.com/explosion/spaCy.git
Adding unit test for tokenization in French (with title)
This commit is contained in:
parent
8ff4682255
commit
13b6957c8e
|
@ -10,12 +10,10 @@ class Lemmatizer(Lemmatizer):
|
|||
return cls(lookup or {})
|
||||
|
||||
def __init__(self, lookup):
    """Initialize the lemmatizer with a string-to-lemma *lookup* table."""
    # Debug trace left in by the author; runtime output preserved verbatim.
    print("Mon lemmatizer")
    self.lookup = lookup
|
||||
|
||||
def __call__(self, string, univ_pos, morphology=None):
    """Return the set of lemma candidates for *string*.

    Looks *string* up in ``self.lookup``; when the table has no entry,
    falls back to the surface form itself, so a one-element set is
    always returned.

    ``univ_pos`` and ``morphology`` are accepted for API compatibility
    with the caller's lemmatizer interface but are not consulted by
    this lookup-based implementation.
    """
    # Debug trace left in by the author; runtime output preserved verbatim.
    print("call")
    try:
        return {self.lookup[string]}
    except KeyError:
        # Only KeyError signals a missing table entry.  The original bare
        # ``except:`` also swallowed KeyboardInterrupt/SystemExit and
        # masked unrelated bugs in ``self.lookup``.
        return {string}
|
Loading…
Reference in New Issue