import pytest
def test_eu_tokenizer_handles_long_text(eu_tokenizer):
    """A multi-word Basque phrase should yield one token per whitespace-separated word."""
    sample = "ta nere guitarra estrenatu ondoren"
    doc = eu_tokenizer(sample)
    assert len(doc) == 5
@pytest.mark.parametrize(
    "text,length",
    [
        ("milesker ederra joan zen hitzaldia plazer hutsa", 7),
        ("astelehen guztia sofan pasau biot", 5),
    ],
)
def test_eu_tokenizer_handles_cnts(eu_tokenizer, text, length):
    """Each parametrized Basque sample should split into the expected number of tokens."""
    tokens = eu_tokenizer(text)
    assert len(tokens) == length