2019-07-24 14:28:44 +00:00
|
|
|
import pytest
|
|
|
|
from pytorch_lightning import Trainer
|
|
|
|
from pytorch_lightning.examples.new_project_templates.lightning_module_template import LightningTemplateModel
|
|
|
|
from argparse import Namespace
|
|
|
|
from test_tube import Experiment
|
2019-07-24 15:09:50 +00:00
|
|
|
from pytorch_lightning.callbacks import ModelCheckpoint
|
2019-07-24 14:28:44 +00:00
|
|
|
import numpy as np
|
|
|
|
import warnings
|
|
|
|
import torch
|
|
|
|
import os
|
|
|
|
import shutil
|
2019-07-24 14:32:21 +00:00
|
|
|
import pdb
|
|
|
|
|
2019-07-24 14:28:44 +00:00
|
|
|
|
|
|
|
def get_model():
    """Build a LightningTemplateModel configured with the standard test hyperparameters."""
    here = os.path.dirname(os.path.realpath(__file__))
    config = {
        'drop_prob': 0.2,
        'batch_size': 32,
        'in_features': 28 * 28,
        'learning_rate': 0.001 * 8,
        'optimizer_name': 'adam',
        # MNIST data is expected to live next to this file
        'data_root': os.path.join(here, 'mnist'),
        'out_features': 10,
        'hidden_dim': 1000,
    }
    return LightningTemplateModel(Namespace(**config))
|
|
|
|
|
|
|
|
|
2019-07-24 15:09:50 +00:00
|
|
|
def get_exp(debug=True):
    """Create a test_tube Experiment rooted next to this file.

    With debug=True (the default) the Experiment does not actually write logs.
    """
    save_root = os.path.dirname(os.path.realpath(__file__))
    return Experiment(debug=debug, save_dir=save_root, name='tests_tt_dir')
|
|
|
|
|
|
|
|
|
2019-07-24 15:09:50 +00:00
|
|
|
def init_save_dir():
    """Create and return an empty 'save_dir' directory next to this test file.

    Any directory left over from a previous run is deleted first, so callers
    always start from a clean slate.
    """
    save_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'save_dir')
    if os.path.exists(save_dir):
        # wipe stale checkpoints/logs from an earlier run
        shutil.rmtree(save_dir)
    os.makedirs(save_dir, exist_ok=True)
    return save_dir
|
|
|
|
|
|
|
|
|
|
|
|
def clear_save_dir():
    """Delete the test 'save_dir' directory next to this file, if it exists."""
    target = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'save_dir')
    if not os.path.exists(target):
        # nothing to clean up
        return
    shutil.rmtree(target)
|
2019-07-24 14:28:44 +00:00
|
|
|
|
|
|
|
|
|
|
|
def main():
    """Smoke-test mixed-precision (amp) + DistributedDataParallel training.

    Trains the template model for one truncated epoch on two GPUs with the
    ddp backend, asserts the run completed, then verifies the trained model
    can be reloaded from the saved meta tags and checkpoint weights.
    Requires a machine with at least 2 CUDA devices.
    """
    save_dir = init_save_dir()
    model = get_model()

    # exp file to get meta (debug=False so meta_tags.csv is actually written)
    exp = get_exp(False)
    exp.save()

    # exp file to get weights
    checkpoint = ModelCheckpoint(save_dir)

    trainer = Trainer(
        checkpoint_callback=checkpoint,
        progress_bar=True,
        experiment=exp,
        max_nb_epochs=1,
        # keep the run fast: only 10% of train/val batches
        train_percent_check=0.1,
        val_percent_check=0.1,
        gpus=[0, 1],
        distributed_backend='ddp',
        use_amp=True,
    )

    result = trainer.fit(model)

    # correct result and ok accuracy
    assert result == 1, 'amp + ddp model failed to complete'

    # BUGFIX: removed leftover `pdb.set_trace()` debugging breakpoint that
    # unconditionally halted the test here.

    # load trained model back from the saved meta tags + checkpoint weights
    tags_path = exp.get_data_path(exp.name, exp.version)
    tags_path = os.path.join(tags_path, 'meta_tags.csv')
    LightningTemplateModel.load_from_metrics(weights_path=save_dir, tags_csv=tags_path)

    clear_save_dir()
|
2019-07-24 14:28:44 +00:00
|
|
|
|
2019-07-24 14:44:35 +00:00
|
|
|
|
2019-07-24 14:28:44 +00:00
|
|
|
# Run the amp + ddp smoke test only when executed as a script.
if __name__ == '__main__':
    main()
|