Move dropout and batch sizes out of global scope in train cmd

Matthew Honnibal 2018-12-07 20:54:35 +01:00
parent f8c4ee34fe
commit b2bfd1e1c8
1 changed file with 15 additions and 16 deletions

spacy/cli/train.py

@@ -18,22 +18,6 @@ from .. import util
 from .. import about
-
-
-# Take dropout and batch size as generators of values -- dropout
-# starts high and decays sharply, to force the optimizer to explore.
-# Batch size starts at 1 and grows, so that we make updates quickly
-# at the beginning of training.
-dropout_rates = util.decaying(
-    util.env_opt("dropout_from", 0.1),
-    util.env_opt("dropout_to", 0.1),
-    util.env_opt("dropout_decay", 0.0),
-)
-batch_sizes = util.compounding(
-    util.env_opt("batch_from", 750),
-    util.env_opt("batch_to", 750),
-    util.env_opt("batch_compound", 1.001),
-)
 
 
 @plac.annotations(
     lang=("Model language", "positional", None, str),
     output_path=("Output directory to store model in", "positional", None, Path),
@@ -120,6 +104,21 @@ def train(
 
     if not output_path.exists():
         output_path.mkdir()
+    # Take dropout and batch size as generators of values -- dropout
+    # starts high and decays sharply, to force the optimizer to explore.
+    # Batch size starts at 1 and grows, so that we make updates quickly
+    # at the beginning of training.
+    dropout_rates = util.decaying(
+        util.env_opt("dropout_from", 0.1),
+        util.env_opt("dropout_to", 0.1),
+        util.env_opt("dropout_decay", 0.0),
+    )
+    batch_sizes = util.compounding(
+        util.env_opt("batch_from", 100.0),
+        util.env_opt("batch_to", 1000.0),
+        util.env_opt("batch_compound", 1.001),
+    )
+
     # Set up the base model and pipeline. If a base model is specified, load
     # the model and make sure the pipeline matches the pipeline setting. If
     # training starts from a blank model, initialize the language class.
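
For context: util.env_opt reads a hyperparameter override from an environment variable, falling back to the given default, and util.decaying / util.compounding return infinite generators from which the training loop draws one value per batch with next(). The sketch below is a minimal illustration of that idea, not spaCy's exact implementation; the function bodies and the example values are assumptions for demonstration only.

    import itertools

    def decaying(start, stop, decay):
        # Sketch: yield an infinite series that decays linearly from
        # `start` toward `stop` by `decay` per step (assumes start >= stop).
        curr = float(start)
        while True:
            yield max(curr, stop)
            curr -= decay

    def compounding(start, stop, compound):
        # Sketch: yield an infinite series that grows from `start` toward
        # `stop`, multiplying by `compound` each step (assumes start <= stop).
        curr = float(start)
        while True:
            yield min(curr, stop)
            curr *= compound

    # With the defaults from the diff above:
    dropout_rates = decaying(0.1, 0.1, 0.0)          # constant 0.1
    batch_sizes = compounding(100.0, 1000.0, 1.001)
    print(list(itertools.islice(batch_sizes, 3)))    # [100.0, 100.1, 100.2001]
    drop = next(dropout_rates)                       # value for the current batch

The point of the commit follows from this: because the generators call util.env_opt when they are built, constructing them at module scope fixed the hyperparameters at import time. Building them inside train() defers the environment lookups until training actually starts, so importing the module has no side effects and overrides set after import still take effect.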