Disable batch size compounding in ud-train

This commit is contained in:
Matthew Honnibal 2018-04-01 08:45:00 +00:00
parent 98165e43a7
commit 8a120fb455
1 changed file with 1 addition and 1 deletion

View File

@ -338,7 +338,7 @@ def main(ud_dir, parses_dir, config, corpus, limit=0, use_gpu=-1):
optimizer = initialize_pipeline(nlp, docs, golds, config, use_gpu)
batch_sizes = compounding(config.batch_size//10, config.batch_size, 1.001)
batch_sizes = compounding(config.batch_size, config.batch_size, 1.001)
for i in range(config.nr_epoch):
docs = [nlp.make_doc(doc.text) for doc in docs]
Xs = list(zip(docs, golds))