Bug fix to tagger: wasn't backpropagating to token vectors

This commit is contained in:
Matthew Honnibal 2017-05-17 13:13:14 +02:00
parent 877f83807f
commit 692bd2a186
1 changed files with 4 additions and 1 deletions

View File

@ -48,7 +48,7 @@ class TokenVectorEncoder(object):
self.vocab = vocab
self.doc2feats = doc2feats()
self.model = self.Model() if model is True else model
def __call__(self, docs, state=None):
if isinstance(docs, Doc):
docs = [docs]
@ -137,9 +137,12 @@ class NeuralTagger(object):
self.model.nI = tokvecs.shape[1]
tag_scores, bp_tag_scores = self.model.begin_update(tokvecs, drop=drop)
loss, d_tag_scores = self.get_loss(docs, golds, tag_scores)
d_tokvecs = bp_tag_scores(d_tag_scores, sgd)
bp_tokvecs(d_tokvecs, sgd=sgd)
state['tag_scores'] = tag_scores
state['bp_tag_scores'] = bp_tag_scores
state['d_tag_scores'] = d_tag_scores