From 4bd6a12b1f6c70b4ebdcc06f65e5846ba942b5c4 Mon Sep 17 00:00:00 2001
From: Matthew Honnibal
Date: Sat, 23 Sep 2017 02:58:54 +0200
Subject: [PATCH] Fix Tok2Vec

---
 spacy/_ml.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/spacy/_ml.py b/spacy/_ml.py
index 37bf6335b..74757f502 100644
--- a/spacy/_ml.py
+++ b/spacy/_ml.py
@@ -475,14 +475,16 @@ def getitem(i):
     return layerize(getitem_fwd)
 
 
-def build_tagger_model(nr_class, token_vector_width, pretrained_dims=0, **cfg):
+def build_tagger_model(nr_class, pretrained_dims=0, **cfg):
     embed_size = util.env_opt('embed_size', 4000)
+    token_vector_width = cfg.get('token_vector_width',
+                                 util.env_opt('token_vector_width', 128))
     with Model.define_operators({'>>': chain, '+': add}):
         tok2vec = Tok2Vec(token_vector_width, embed_size,
                           pretrained_dims=pretrained_dims)
-        model = with_flatten(
+        model = (
             tok2vec
-            >> Softmax(nr_class, token_vector_width)
+            >> with_flatten(Softmax(nr_class, token_vector_width))
         )
     model.nI = None
     model.tok2vec = tok2vec