mirror of https://github.com/explosion/spaCy.git
Update docstrings and API docs for Matcher
This commit is contained in:
parent
c00ff257be
commit
39f36539f6
|
@ -199,7 +199,9 @@ cdef class Matcher:
|
||||||
return (self.__class__, (self.vocab, self._patterns), None, None)
|
return (self.__class__, (self.vocab, self._patterns), None, None)
|
||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
"""Get the number of rules added to the matcher.
|
"""Get the number of rules added to the matcher. Note that this only
|
||||||
|
returns the number of rules (identical with the number of IDs), not the
|
||||||
|
number of individual patterns.
|
||||||
|
|
||||||
RETURNS (int): The number of rules.
|
RETURNS (int): The number of rules.
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -121,15 +121,15 @@ p Match a stream of documents, yielding them in turn.
|
||||||
+h(2, "len") Matcher.__len__
|
+h(2, "len") Matcher.__len__
|
||||||
+tag method
|
+tag method
|
||||||
|
|
||||||
p Get the number of rules added to the matcher.
|
p
|
||||||
|
| Get the number of rules added to the matcher. Note that this only returns
|
||||||
|
| the number of rules (identical with the number of IDs), not the number
|
||||||
|
| of individual patterns.
|
||||||
|
|
||||||
+aside-code("Example").
|
+aside-code("Example").
|
||||||
from spacy.matcher import Matcher
|
|
||||||
from spacy.attrs import ORTH
|
|
||||||
|
|
||||||
matcher = Matcher(nlp.vocab)
|
matcher = Matcher(nlp.vocab)
|
||||||
assert len(matcher) == 0
|
assert len(matcher) == 0
|
||||||
matcher.add('rule', None, [{ORTH: 'rule'}])
|
matcher.add('Rule', None, [{ORTH: 'test'}])
|
||||||
assert len(matcher) == 1
|
assert len(matcher) == 1
|
||||||
|
|
||||||
+table(["Name", "Type", "Description"])
|
+table(["Name", "Type", "Description"])
|
||||||
|
@ -144,13 +144,10 @@ p Get the number of rules added to the matcher.
|
||||||
p Check whether the matcher contains rules for a match ID.
|
p Check whether the matcher contains rules for a match ID.
|
||||||
|
|
||||||
+aside-code("Example").
|
+aside-code("Example").
|
||||||
from spacy.matcher import Matcher
|
|
||||||
from spacy.attrs import ORTH
|
|
||||||
|
|
||||||
matcher = Matcher(nlp.vocab)
|
matcher = Matcher(nlp.vocab)
|
||||||
assert 'rule' in matcher == False
|
assert 'Rule' not in matcher
|
||||||
matcher.add('rule', None, [{ORTH: 'rule'}])
|
matcher.add('Rule', None, [{ORTH: 'test'}])
|
||||||
assert 'rule' in matcher == True
|
assert 'Rule' in matcher
|
||||||
|
|
||||||
+table(["Name", "Type", "Description"])
|
+table(["Name", "Type", "Description"])
|
||||||
+row
|
+row
|
||||||
|
@ -174,9 +171,6 @@ p
|
||||||
| overwritten.
|
| overwritten.
|
||||||
|
|
||||||
+aside-code("Example").
|
+aside-code("Example").
|
||||||
from spacy.matcher import Matcher
|
|
||||||
from spacy.attrs import LOWER, ORTH
|
|
||||||
|
|
||||||
def on_match(matcher, doc, id, matches):
|
def on_match(matcher, doc, id, matches):
|
||||||
print('Matched!', matches)
|
print('Matched!', matches)
|
||||||
|
|
||||||
|
@ -214,14 +208,10 @@ p
|
||||||
| ID does not exist.
|
| ID does not exist.
|
||||||
|
|
||||||
+aside-code("Example").
|
+aside-code("Example").
|
||||||
from spacy.matcher import Matcher
|
matcher.add('Rule', None, [{ORTH: 'test'}])
|
||||||
from spacy.attrs import ORTH
|
assert 'Rule' in matcher
|
||||||
|
matcher.remove('Rule')
|
||||||
matcher = Matcher(nlp.vocab)
|
assert 'Rule' not in matcher
|
||||||
matcher.add('rule', None, [{ORTH: 'rule'}])
|
|
||||||
assert 'rule' in matcher == True
|
|
||||||
matcher.remove('rule')
|
|
||||||
assert 'rule' in matcher == False
|
|
||||||
|
|
||||||
+table(["Name", "Type", "Description"])
|
+table(["Name", "Type", "Description"])
|
||||||
+row
|
+row
|
||||||
|
|
|
@ -198,7 +198,7 @@ p
|
||||||
|
|
||||||
+row
|
+row
|
||||||
+cell #[code Matcher.has_entity]
|
+cell #[code Matcher.has_entity]
|
||||||
+cell -
|
+cell #[+api("matcher#contains") #[code Matcher.__contains__]]
|
||||||
|
|
||||||
+row
|
+row
|
||||||
+cell #[code Matcher.get_entity]
|
+cell #[code Matcher.get_entity]
|
||||||
|
|
Loading…
Reference in New Issue