spaCy/tests/tokens/test_tokens_api.py

from __future__ import unicode_literals
from spacy.tokens import Doc
import pytest
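

# Note: the `EN` fixture is assumed to be provided by the test suite's
# conftest.py as a loaded English pipeline; `@pytest.mark.models` presumably
# marks tests that require the installed models.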
@pytest.mark.models
def test_getitem(EN):
    tokens = EN(u'Give it back! He pleaded.')
    assert tokens[0].orth_ == 'Give'
    assert tokens[-1].orth_ == '.'
    with pytest.raises(IndexError):
        tokens[len(tokens)]


@pytest.mark.models
def test_serialize(EN):
    tokens = EN(u'Give it back! He pleaded.')
    # Round-trip the Doc through its byte representation and check that the
    # text and token attributes survive unchanged.
    packed = tokens.to_bytes()
    new_tokens = Doc(EN.vocab).from_bytes(packed)
    assert tokens.string == new_tokens.string
    assert [t.orth_ for t in tokens] == [t.orth_ for t in new_tokens]
    assert [t.orth for t in tokens] == [t.orth for t in new_tokens]


@pytest.mark.models
def test_serialize_whitespace(EN):
    tokens = EN(u' Give it back! He pleaded. ')
    # Leading and trailing whitespace should also survive the round-trip.
    packed = tokens.to_bytes()
    new_tokens = Doc(EN.vocab).from_bytes(packed)
    assert tokens.string == new_tokens.string
    assert [t.orth_ for t in tokens] == [t.orth_ for t in new_tokens]
    assert [t.orth for t in tokens] == [t.orth for t in new_tokens]