From e1c702e498e1a10b92bdc6f7947934bc8fe312bc Mon Sep 17 00:00:00 2001
From: Matthew Honnibal
Date: Tue, 14 Jul 2015 00:08:50 +0200
Subject: [PATCH] * Upd tests after refactor

---
 tests/spans/conftest.py              |  6 ++++--
 tests/spans/test_span.py             |  2 +-
 tests/tokenizer/test_contractions.py |  1 -
 tests/tokens/test_token.py           | 10 ++--------
 4 files changed, 7 insertions(+), 12 deletions(-)

diff --git a/tests/spans/conftest.py b/tests/spans/conftest.py
index 710c47cf6..4e82af8b2 100644
--- a/tests/spans/conftest.py
+++ b/tests/spans/conftest.py
@@ -1,7 +1,9 @@
 import pytest
-from spacy.en import English
+from spacy.en import English, LOCAL_DATA_DIR
+import os
 
 
 @pytest.fixture(scope="session")
 def en_nlp():
-    return English(load_vectors=False)
+    data_dir = os.environ.get('SPACY_DATA', LOCAL_DATA_DIR)
+    return English(load_vectors=False, data_dir=data_dir)
diff --git a/tests/spans/test_span.py b/tests/spans/test_span.py
index 9ef5fb147..d7f48e080 100644
--- a/tests/spans/test_span.py
+++ b/tests/spans/test_span.py
@@ -22,4 +22,4 @@ def test_root(doc):
     assert len(np) == 2
     assert np.orth_ == 'a sentence'
     assert np.root.orth_ == 'sentence'
-    assert nlp.root.head.orth_ == 'is'
+    assert np.root.head.orth_ == 'is'
diff --git a/tests/tokenizer/test_contractions.py b/tests/tokenizer/test_contractions.py
index 1d12e3d22..ea93ff8b4 100644
--- a/tests/tokenizer/test_contractions.py
+++ b/tests/tokenizer/test_contractions.py
@@ -32,7 +32,6 @@ def test_aint(en_tokenizer):
     assert tokens[1].orth_ == "n't"
     assert tokens[1].lemma_ == "not"
 
-
 def test_capitalized(en_tokenizer):
     tokens = en_tokenizer("can't")
     assert len(tokens) == 2
diff --git a/tests/tokens/test_token.py b/tests/tokens/test_token.py
index 88ea6a5a5..14dfb31b6 100644
--- a/tests/tokens/test_token.py
+++ b/tests/tokens/test_token.py
@@ -1,16 +1,10 @@
 from __future__ import unicode_literals
 import pytest
 
-from spacy.en import English
 from spacy.parts_of_speech import ADV
 
 
-@pytest.fixture
-def nlp():
-    return English()
-
-
-def test_prob(nlp):
-    tokens = nlp(u'Give it back')
+def test_prob(EN):
+    tokens = EN(u'Give it back', parse=False)
     give = tokens[0]
     assert give.prob != 0