From a36353df470a3ada700e2758c28cd38f5fbb83e0 Mon Sep 17 00:00:00 2001
From: Matthew Honnibal
Date: Fri, 4 Nov 2016 19:18:07 +0100
Subject: [PATCH] Temporarily put back the tokenize_from_strings method, while
 tests aren't updated yet.

---
 spacy/tokenizer.pyx | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/spacy/tokenizer.pyx b/spacy/tokenizer.pyx
index e6975abc5..66c93528b 100644
--- a/spacy/tokenizer.pyx
+++ b/spacy/tokenizer.pyx
@@ -107,10 +107,11 @@ cdef class Tokenizer:
         return (self.__class__, args, None, None)
 
     cpdef Doc tokens_from_list(self, list strings):
-        raise NotImplementedError(
-            "Method deprecated in 1.0.\n"
-            "Old: tokenizer.tokens_from_list(strings)\n"
-            "New: Doc(tokenizer.vocab, words=strings)")
+        return Doc(self.vocab, words=strings)
+        #raise NotImplementedError(
+        #    "Method deprecated in 1.0.\n"
+        #    "Old: tokenizer.tokens_from_list(strings)\n"
+        #    "New: Doc(tokenizer.vocab, words=strings)")
 
     @cython.boundscheck(False)
     def __call__(self, unicode string):
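
For context, a minimal sketch of the two call paths the deprecation message refers to, showing that the temporarily restored method and the recommended replacement should yield the same pre-tokenized Doc. This assumes spaCy 1.x with the English model data installed; the model name, the input words, and the comparison step are illustrative assumptions, not part of this patch.

    import spacy
    from spacy.tokens import Doc

    # Assumes the English data is installed for spaCy 1.x.
    nlp = spacy.load('en')
    words = [u'Hello', u'world', u'!']

    # Old path, temporarily restored by this patch: the tokenizer builds the Doc.
    doc_old = nlp.tokenizer.tokens_from_list(words)

    # New path, as recommended by the (now commented-out) deprecation message.
    doc_new = Doc(nlp.vocab, words=words)

    # Both should produce a Doc over the same pre-split tokens.
    assert [t.orth_ for t in doc_old] == [t.orth_ for t in doc_new]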