diff --git a/spacy/tests/tokenizer/test_tokens_from_list.py b/spacy/tests/tokenizer/test_tokens_from_list.py
deleted file mode 100644
index c30326c54..000000000
--- a/spacy/tests/tokenizer/test_tokens_from_list.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from __future__ import unicode_literals
-import pytest
-
-
-def test1(en_tokenizer):
-    words = ['JAPAN', 'GET', 'LUCKY']
-    tokens = en_tokenizer.tokens_from_list(words)
-    assert len(tokens) == 3
-    assert tokens[0].orth_ == 'JAPAN'
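
For reference only (not part of this diff): the deleted test exercised `Tokenizer.tokens_from_list` on a pre-tokenized word list. A minimal sketch of an equivalent check using the `Doc` constructor, which accepts pre-tokenized words, is below; the `en_vocab` fixture name is an assumption, not something taken from this patch.

```python
# Hedged sketch, not part of the diff: assumes a spaCy Vocab is available
# (e.g. via a hypothetical `en_vocab` fixture). Builds a Doc directly from
# a pre-tokenized word list instead of calling tokens_from_list.
from spacy.tokens import Doc


def test_doc_from_word_list(en_vocab):
    words = ['JAPAN', 'GET', 'LUCKY']
    doc = Doc(en_vocab, words=words)
    assert len(doc) == 3
    assert doc[0].orth_ == 'JAPAN'
```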