2022-11-21 07:12:03 +00:00
|
|
|
import os
|
|
|
|
|
2017-01-12 14:27:46 +00:00
|
|
|
import pytest
|
2023-06-14 15:48:41 +00:00
|
|
|
|
2021-12-04 19:34:48 +00:00
|
|
|
from spacy.attrs import IS_ALPHA, LEMMA, ORTH
|
2022-11-21 07:12:03 +00:00
|
|
|
from spacy.lang.en import English
|
2018-07-24 21:38:44 +00:00
|
|
|
from spacy.parts_of_speech import NOUN, VERB
|
2021-12-04 19:34:48 +00:00
|
|
|
from spacy.vocab import Vocab
|
|
|
|
|
2022-11-21 07:12:03 +00:00
|
|
|
from ..util import make_tempdir
|
|
|
|
|
2021-12-04 19:34:48 +00:00
|
|
|
|
|
|
|
@pytest.mark.issue(1868)
def test_issue1868():
    """Vocab.__contains__ should accept both string and integer (hash) keys."""
    vocab = Vocab()
    lexeme = vocab["hello"]
    # Once a lexeme exists, both its integer ID and its string form are found.
    assert lexeme.orth in vocab
    assert lexeme.orth_ in vocab
    # A string never turned into a lexeme is absent ...
    assert "some string" not in vocab
    # ... and merely interning it in the StringStore does not add it either.
    unseen_id = vocab.strings.add("some string")
    assert unseen_id not in vocab
|
2015-10-26 01:31:05 +00:00
|
|
|
|
|
|
|
|
2018-11-27 00:09:36 +00:00
|
|
|
@pytest.mark.parametrize(
    "text1,text2", [("Hello", "bye"), ("Hello", "hello"), ("Hello", "Hello,")]
)
def test_vocab_api_neq(en_vocab, text1, text2):
    """Distinct surface strings must resolve to distinct orth IDs."""
    orth1 = en_vocab[text1].orth
    orth2 = en_vocab[text2].orth
    assert orth1 != orth2
|
2015-10-26 01:31:05 +00:00
|
|
|
|
|
|
|
|
2018-11-27 00:09:36 +00:00
|
|
|
# Fix: argvalues was the bare string "Hello", which pytest iterates
# character by character, running the test five times on 'H', 'e', 'l',
# 'l', 'o' (including a duplicate). The list form matches the sibling
# tests (e.g. test_vocab_api_shape_attr) and tests the intended word.
@pytest.mark.parametrize("text", ["Hello"])
def test_vocab_api_eq(en_vocab, text):
    """Looking up the same string twice yields lexemes with equal orth IDs."""
    lex = en_vocab[text]
    assert en_vocab[text].orth == lex.orth
|
2015-10-26 01:31:05 +00:00
|
|
|
|
|
|
|
|
2018-11-27 00:09:36 +00:00
|
|
|
@pytest.mark.parametrize("text", ["example"])
def test_vocab_api_shape_attr(en_vocab, text):
    """A lexeme's shape attribute is a value distinct from its orth attribute."""
    lexeme = en_vocab[text]
    assert lexeme.shape != lexeme.orth
|
2015-10-26 01:31:05 +00:00
|
|
|
|
|
|
|
|
2018-11-27 00:09:36 +00:00
|
|
|
@pytest.mark.parametrize(
    "string,symbol",
    [
        ("IS_ALPHA", IS_ALPHA),
        ("NOUN", NOUN),
        ("VERB", VERB),
        ("LEMMA", LEMMA),
        ("ORTH", ORTH),
    ],
)
def test_vocab_api_symbols(en_vocab, string, symbol):
    """The StringStore resolves attribute/POS names to their symbol IDs."""
    resolved = en_vocab.strings[string]
    assert resolved == symbol
|
2015-10-26 01:31:05 +00:00
|
|
|
|
|
|
|
|
2018-11-27 00:09:36 +00:00
|
|
|
# Fix: argvalues was the bare string "Hello", which pytest iterates
# character by character, running the test five times on 'H', 'e', 'l',
# 'l', 'o' (including a duplicate). The list form tests the intended
# word, consistent with the other parametrized tests in this file.
@pytest.mark.parametrize("text", ["Hello"])
def test_vocab_api_contains(en_vocab, text):
    """A looked-up string is in the vocab; an arbitrary unseen string is not."""
    _ = en_vocab[text]  # noqa: F841
    assert text in en_vocab
    assert "LKsdjvlsakdvlaksdvlkasjdvljasdlkfvm" not in en_vocab
|
2019-03-11 14:23:20 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_vocab_writing_system(en_vocab):
    """The English vocab reports a left-to-right, cased writing system."""
    writing_system = en_vocab.writing_system
    assert writing_system["direction"] == "ltr"
    assert writing_system["has_case"] is True
|
2022-11-21 07:12:03 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_to_disk():
    """Vocab.to_disk writes the vectors and lookups files by default."""
    nlp = English()
    with make_tempdir() as tmp_dir:
        nlp.vocab.to_disk(tmp_dir)
        written = os.listdir(tmp_dir)
        assert "vectors" in written
        assert "lookups.bin" in written
|
|
|
|
|
|
|
|
|
|
|
|
def test_to_disk_exclude():
    """Vocab.to_disk honors the exclude parameter for vectors and lookups."""
    nlp = English()
    with make_tempdir() as tmp_dir:
        nlp.vocab.to_disk(tmp_dir, exclude=("vectors", "lookups"))
        written = os.listdir(tmp_dir)
        assert "vectors" not in written
        assert "lookups.bin" not in written
|