Don't assume pretrained_vectors cfg is set in build_tagger

This commit is contained in:
Matthew Honnibal 2018-03-28 20:12:45 +02:00
parent f8dd905a24
commit 4555e3e251
1 changed file with 1 addition and 1 deletion


@@ -438,7 +438,7 @@ def build_tagger_model(nr_class, **cfg):
         token_vector_width = cfg['token_vector_width']
     else:
         token_vector_width = util.env_opt('token_vector_width', 128)
-    pretrained_vectors = cfg['pretrained_vectors']
+    pretrained_vectors = cfg.get('pretrained_vectors')
     with Model.define_operators({'>>': chain, '+': add}):
         if 'tok2vec' in cfg:
             tok2vec = cfg['tok2vec']
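
The one-line change swaps a direct key lookup for dict.get(), so build_tagger_model no longer raises a KeyError when the caller leaves 'pretrained_vectors' out of the config; the value simply falls back to None. A minimal sketch of the behaviour difference (the cfg contents below are hypothetical, not taken from the commit):

    # Hypothetical config that does not set 'pretrained_vectors'.
    cfg = {'token_vector_width': 128}

    # cfg['pretrained_vectors']                         # direct lookup: raises KeyError
    pretrained_vectors = cfg.get('pretrained_vectors')  # .get(): returns None instead
    assert pretrained_vectors is None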