From b6b01d46801372ad0d81e3e5ca1d1ba8c4fa8b28 Mon Sep 17 00:00:00 2001
From: Matthew Honnibal
Date: Wed, 2 Nov 2016 23:47:21 +0100
Subject: [PATCH] Remove deprecated tokens_from_list test.

---
 spacy/tests/tokenizer/test_tokens_from_list.py | 9 ---------
 1 file changed, 9 deletions(-)
 delete mode 100644 spacy/tests/tokenizer/test_tokens_from_list.py

diff --git a/spacy/tests/tokenizer/test_tokens_from_list.py b/spacy/tests/tokenizer/test_tokens_from_list.py
deleted file mode 100644
index c30326c54..000000000
--- a/spacy/tests/tokenizer/test_tokens_from_list.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from __future__ import unicode_literals
-import pytest
-
-
-def test1(en_tokenizer):
-    words = ['JAPAN', 'GET', 'LUCKY']
-    tokens = en_tokenizer.tokens_from_list(words)
-    assert len(tokens) == 3
-    assert tokens[0].orth_ == 'JAPAN'
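
Note (not part of the patch itself): the deleted test exercised tokenizer.tokens_from_list,
which built a Doc from a pre-tokenized word list. A minimal sketch of the equivalent check
using the Doc constructor, assuming the later spaCy API where pre-tokenized input is passed
via Doc(vocab, words=...) and that a bare Vocab is sufficient for this illustration:

    from spacy.vocab import Vocab
    from spacy.tokens import Doc

    def test_doc_from_word_list():
        # Build a Doc directly from already-tokenized words; no tokenizer involved.
        vocab = Vocab()  # illustrative: a bare Vocab, not the en_tokenizer fixture
        words = ['JAPAN', 'GET', 'LUCKY']
        doc = Doc(vocab, words=words)
        assert len(doc) == 3
        assert doc[0].orth_ == 'JAPAN'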