"""
Runs a model on a single node across N-gpus.
"""
import os
from argparse import ArgumentParser

import numpy as np
import torch

import pytorch_lightning as pl
from pl_examples.basic_examples.lightning_module_template import LightningTemplateModel

SEED = 2334
torch.manual_seed(SEED)
np.random.seed(SEED)


def main(hparams):
    """
    Main training routine specific for this project
    :param hparams:
    """
    # ------------------------
    # 1 INIT LIGHTNING MODEL
    # ------------------------
    model = LightningTemplateModel(hparams)

    # ------------------------
    # 2 INIT TRAINER
    # ------------------------
    trainer = pl.Trainer(
        gpus=hparams.gpus,
        distributed_backend=hparams.distributed_backend,
        use_amp=hparams.use_16bit
    )

    # ------------------------
    # 3 START TRAINING
    # ------------------------
    trainer.fit(model)


if __name__ == '__main__':
    # ------------------------
    # TRAINING ARGUMENTS
    # ------------------------
    # these are project-wide arguments

    root_dir = os.path.dirname(os.path.realpath(__file__))
    parent_parser = ArgumentParser(add_help=False)

    # gpu args
    parent_parser.add_argument(
        '--gpus',
        type=int,
        default=2,
        help='how many gpus'
    )
    parent_parser.add_argument(
        '--distributed_backend',
        type=str,
        default='dp',
        help='supports three options dp, ddp, ddp2'
    )
    parent_parser.add_argument(
        '--use_16bit',
        dest='use_16bit',
        action='store_true',
        help='if true uses 16 bit precision'
    )

    # each LightningModule defines arguments relevant to it
    parser = LightningTemplateModel.add_model_specific_args(parent_parser, root_dir)
    hyperparams = parser.parse_args()

    # ---------------------
    # RUN TRAINING
    # ---------------------
    main(hyperparams)