diff --git a/spacy/cli/train.py b/spacy/cli/train.py
index b25cdcbd5..ed146cb24 100644
--- a/spacy/cli/train.py
+++ b/spacy/cli/train.py
@@ -70,12 +70,12 @@ def train(_, lang, output_dir, train_data, dev_data, n_iter=20, n_sents=0,
     optimizer = nlp.begin_training(lambda: corpus.train_tuples, use_gpu=use_gpu)
-    print("Itn.\tDep. Loss\tUAS\tNER P.\tNER R.\tNER F.\tTag %\tToken %")
+    print("Itn.\tLoss\tUAS\tNER P.\tNER R.\tNER F.\tTag %\tToken %")
     try:
         for i in range(n_iter):
             with tqdm.tqdm(total=corpus.count_train(), leave=False) as pbar:
                 train_docs = corpus.train_docs(nlp, projectivize=True,
-                                               gold_preproc=False, max_length=1000)
+                                               gold_preproc=False, max_length=0)
                 losses = {}
                 for batch in minibatch(train_docs, size=batch_sizes):
                     docs, golds = zip(*batch)
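
Note on the change above (not part of the diff): the header drops "Dep. " presumably because the Loss column now reports a combined loss rather than only the parser's, and max_length=0 presumably disables the sentence-length cutoff in corpus.train_docs so no training examples are skipped for being long. The fragment below is a rough sketch of how these batches are consumed inside the same loop; nlp, corpus, optimizer and n_iter are the names visible in the context lines, while the batch_sizes and dropout_rates schedules and the nlp.update call are assumptions based on the spaCy v2 util API, not part of this change.

# Sketch only, assuming the spaCy v2 API; nlp, corpus, optimizer and n_iter
# are defined earlier in train(). batch_sizes appears in the diff's context,
# but its definition here (like dropout_rates) is an assumed schedule.
import tqdm
from spacy import util

batch_sizes = util.compounding(1., 16., 1.001)   # assumed batch-size schedule
dropout_rates = util.decaying(0.2, 0.0, 1e-4)    # assumed dropout schedule

for i in range(n_iter):
    with tqdm.tqdm(total=corpus.count_train(), leave=False) as pbar:
        # max_length=0: no length filter, so every projectivized doc is used.
        train_docs = corpus.train_docs(nlp, projectivize=True,
                                       gold_preproc=False, max_length=0)
        losses = {}
        for batch in util.minibatch(train_docs, size=batch_sizes):
            docs, golds = zip(*batch)
            # One optimizer step per minibatch; losses accumulates per-component totals.
            nlp.update(docs, golds, sgd=optimizer,
                       drop=next(dropout_rates), losses=losses)
            pbar.update(len(docs))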