Prepend `almond_` to seq2seq and paraphrase task names

This commit is contained in:
Sina 2021-04-06 21:21:39 -07:00
parent fe3f8b7824
commit 29473aca3a
2 changed files with 8 additions and 8 deletions

View File

@@ -469,12 +469,12 @@ class Almond(BaseAlmondTask):
preprocess=self.preprocess_field, lower=False) preprocess=self.preprocess_field, lower=False)
@register_task('natural_seq2seq') @register_task('almond_natural_seq2seq')
class NaturalSeq2Seq(BaseAlmondTask): class NaturalSeq2Seq(BaseAlmondTask):
""" """
The Almond sequence to sequence task where both sequences are natural language. The Almond sequence to sequence task where both sequences are natural language.
Paraphrasing and translation are examples of this task. In this task entities (see ENTITY_REGEX) are not preprocessed in contrast to paraphrasing and translation tasks.
In this task entities (see ENTITY_REGEX) are not preprocessed in contrast to paraphrasing and translation tasks Paraphrasing and translation inherit from this class.
""" """
@property @property
@@ -509,7 +509,7 @@ class NaturalSeq2Seq(BaseAlmondTask):
return AlmondDataset.return_splits(path=os.path.join(root, 'almond'), make_example=self._make_example, **kwargs) return AlmondDataset.return_splits(path=os.path.join(root, 'almond'), make_example=self._make_example, **kwargs)
@register_task('paraphrase') @register_task('almond_paraphrase')
class Paraphrase(NaturalSeq2Seq): class Paraphrase(NaturalSeq2Seq):
"""The Almond paraphrasing task. Applies the necessary preprocessing for special tokens and case changes. """The Almond paraphrasing task. Applies the necessary preprocessing for special tokens and case changes.
Can be used at prediction and training time. Training is still experimental. Can be used at prediction and training time. Training is still experimental.

View File

@@ -4,18 +4,18 @@
i=0 i=0
# test natural_seq2seq and paraphrase tasks # test almond_natural_seq2seq and almond_paraphrase tasks
for hparams in \ for hparams in \
"--model TransformerSeq2Seq --pretrained_model sshleifer/bart-tiny-random"; do "--model TransformerSeq2Seq --pretrained_model sshleifer/bart-tiny-random"; do
# train # train
genienlp train --train_tasks natural_seq2seq --train_batch_tokens 50 --val_batch_size 50 --train_iterations 6 --preserve_case --save_every 2 --log_every 2 --val_every 2 --save $workdir/model_$i --data $SRCDIR/dataset/ $hparams --exist_ok --skip_cache --embeddings $EMBEDDING_DIR --no_commit genienlp train --train_tasks almond_natural_seq2seq --train_batch_tokens 50 --val_batch_size 50 --train_iterations 6 --preserve_case --save_every 2 --log_every 2 --val_every 2 --save $workdir/model_$i --data $SRCDIR/dataset/ $hparams --exist_ok --skip_cache --embeddings $EMBEDDING_DIR --no_commit
# greedy prediction # greedy prediction
genienlp predict --tasks paraphrase --evaluate test --path $workdir/model_$i --overwrite --eval_dir $workdir/model_$i/eval_results/ --data $SRCDIR/dataset/ --embeddings $EMBEDDING_DIR --skip_cache genienlp predict --tasks almond_paraphrase --evaluate test --path $workdir/model_$i --overwrite --eval_dir $workdir/model_$i/eval_results/ --data $SRCDIR/dataset/ --embeddings $EMBEDDING_DIR --skip_cache
# check if result file exists # check if result file exists
if test ! -f $workdir/model_$i/eval_results/test/paraphrase.tsv || test ! -f $workdir/model_$i/eval_results/test/paraphrase.results.json; then if test ! -f $workdir/model_$i/eval_results/test/almond_paraphrase.tsv || test ! -f $workdir/model_$i/eval_results/test/almond_paraphrase.results.json; then
echo "File not found!" echo "File not found!"
exit 1 exit 1
fi fi