# coding: utf8
from __future__ import unicode_literals

from spacy.matcher import Matcher
from spacy.tokens import Doc


def test_issue4120(en_vocab):
    """Test that matches without a final {OP: ?} token are returned."""
    matcher = Matcher(en_vocab)
    matcher.add("TEST", [[{"ORTH": "a"}, {"OP": "?"}]])
    doc1 = Doc(en_vocab, words=["a"])
    assert len(matcher(doc1)) == 1  # works
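
    # On ["a", "b", "c"] both spans should come back: "a" on its own and
    # "a b" with the optional wildcard consumed.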
|
2019-08-21 20:46:56 +00:00
|
|
|
|
2019-08-20 14:33:09 +00:00
|
|
|
doc2 = Doc(en_vocab, words=["a", "b", "c"])
|
2019-08-21 20:46:56 +00:00
|
|
|
assert len(matcher(doc2)) == 2 # fixed
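
    # Same pattern with a required "b" after the optional token; this case
    # already worked and should yield "a b" and "a b b".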
|
2019-08-20 14:33:09 +00:00
|
|
|
|
|
|
|
matcher = Matcher(en_vocab)
|
2019-10-25 20:21:08 +00:00
|
|
|
matcher.add("TEST", [[{"ORTH": "a"}, {"OP": "?"}, {"ORTH": "b"}]])
|
2019-08-20 14:33:09 +00:00
|
|
|
doc3 = Doc(en_vocab, words=["a", "b", "b", "c"])
|
|
|
|
assert len(matcher(doc3)) == 2 # works
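
    # Pattern ending in an optional {"ORTH": "b"} token: "a", "a b" and
    # "a b b" should all be returned.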
    matcher = Matcher(en_vocab)
    matcher.add("TEST", [[{"ORTH": "a"}, {"OP": "?"}, {"ORTH": "b", "OP": "?"}]])
    doc4 = Doc(en_vocab, words=["a", "b", "b", "c"])
    assert len(matcher(doc4)) == 3  # fixed