From d1c1d3f9cdabc33df93e534da7e55d2e9aaf159e Mon Sep 17 00:00:00 2001
From: Ines Montani
Date: Sun, 18 Dec 2016 16:55:32 +0100
Subject: [PATCH] Fix tokenizer test

---
 spacy/tests/tokenizer/test_tokenizer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/spacy/tests/tokenizer/test_tokenizer.py b/spacy/tests/tokenizer/test_tokenizer.py
index d4330e3ce..091561ae3 100644
--- a/spacy/tests/tokenizer/test_tokenizer.py
+++ b/spacy/tests/tokenizer/test_tokenizer.py
@@ -8,9 +8,9 @@ import cloudpickle
 import tempfile
 
 from ... import util
-from ...en.language_data import TOKENIZER_PREFIXES as EN_TOKENIZER_PREFIXES
+from ...language_data import TOKENIZER_PREFIXES
 
-en_search_prefixes = util.compile_prefix_regex(EN_TOKENIZER_PREFIXES).search
+en_search_prefixes = util.compile_prefix_regex(TOKENIZER_PREFIXES).search
 
 # @pytest.mark.xfail
 # def test_pickle(en_tokenizer):
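
Below is a minimal usage sketch of the import this patch switches to. It assumes the spaCy 1.x layout in which spacy.language_data exposes the shared TOKENIZER_PREFIXES list and spacy.util.compile_prefix_regex builds a single regex from those prefix patterns; the name search_prefix and the sample string are illustrative only, not part of the patch:

    from spacy import util
    from spacy.language_data import TOKENIZER_PREFIXES

    # Compile the shared prefix patterns into one regex and keep a bound
    # reference to its .search method, mirroring the test setup in the patch.
    search_prefix = util.compile_prefix_regex(TOKENIZER_PREFIXES).search

    # If the text starts with a prefix character (e.g. an opening quote),
    # a match is returned and match.end() gives the prefix length.
    match = search_prefix(u'"Hello')
    if match is not None:
        print(match.end())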