# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Generic, TypeVar

import torch
import torch.nn as nn
import torch.nn.functional as F

from pytorch_lightning.core.lightning import LightningModule
from tests.base.model_optimizers import ConfigureOptimizersPool
from tests.base.model_test_dataloaders import TestDataloaderVariations
from tests.base.model_test_epoch_ends import TestEpochEndVariations
from tests.base.model_test_steps import TestStepVariations
from tests.base.model_train_dataloaders import TrainDataloaderVariations
from tests.base.model_train_steps import TrainingStepVariations
from tests.base.model_utilities import ModelTemplateData, ModelTemplateUtils
from tests.base.model_valid_dataloaders import ValDataloaderVariations
from tests.base.model_valid_epoch_ends import ValidationEpochEndVariations
from tests.base.model_valid_steps import ValidationStepVariations
from tests.helpers.datasets import PATH_DATASETS, TrialMNIST


class EvalModelTemplate(
    ModelTemplateData,
    ModelTemplateUtils,
    TrainingStepVariations,
    ValidationStepVariations,
    ValidationEpochEndVariations,
    TestStepVariations,
    TestEpochEndVariations,
    TrainDataloaderVariations,
    ValDataloaderVariations,
    TestDataloaderVariations,
    ConfigureOptimizersPool,
    LightningModule,
):
    """
    This template houses all combinations of model configurations we want to test

    >>> model = EvalModelTemplate()
    """

    def __init__(
        self,
        drop_prob: float = 0.2,
        batch_size: int = 32,
        in_features: int = 28 * 28,
        learning_rate: float = 0.001 * 8,
        optimizer_name: str = 'adam',
        data_root: str = PATH_DATASETS,
        out_features: int = 10,
        hidden_dim: int = 1000,
        b1: float = 0.5,
        b2: float = 0.999,
    ):
        # init superclass
        super().__init__()
        self.save_hyperparameters()
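        # ``save_hyperparameters()`` records the ``__init__`` arguments in ``self.hparams`` and
        # stores them in checkpoints, so the model can be restored from a checkpoint alone.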

        self.drop_prob = drop_prob
        self.batch_size = batch_size
        self.in_features = in_features
        self.learning_rate = learning_rate
        self.optimizer_name = optimizer_name
        self.data_root = data_root
        self.out_features = out_features
        self.hidden_dim = hidden_dim
        self.b1 = b1
        self.b2 = b2

        # flags set to True by the step/epoch-end variations so tests can assert which hooks ran
        self.training_step_called = False
        self.training_step_end_called = False
        self.training_epoch_end_called = False
        self.validation_step_called = False
        self.validation_step_end_called = False
        self.validation_epoch_end_called = False
        self.test_step_called = False
        self.test_step_end_called = False
        self.test_epoch_end_called = False

        self.example_input_array = torch.rand(5, 28 * 28)
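        # ``example_input_array`` lets Lightning infer input/output sizes for the model summary
        # and trace the graph without needing a real batch.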

        # build model
        self.__build_model()

    def __build_model(self):
        """
        Simple model for testing
        """
        self.c_d1 = nn.Linear(in_features=self.in_features, out_features=self.hidden_dim)
        self.c_d1_bn = nn.BatchNorm1d(self.hidden_dim)
        self.c_d1_drop = nn.Dropout(self.drop_prob)

        self.c_d2 = nn.Linear(in_features=self.hidden_dim, out_features=self.out_features)
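        # Together these layers form a small MLP: Linear -> tanh -> BatchNorm1d -> Dropout -> Linear,
        # mapping ``in_features`` to ``out_features`` (by default 28 * 28 pixels to 10 classes).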

    def forward(self, x):
        x = self.c_d1(x)
        x = torch.tanh(x)
        x = self.c_d1_bn(x)
        x = self.c_d1_drop(x)

        x = self.c_d2(x)
        logits = F.log_softmax(x, dim=1)

        return logits
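
    # NOTE: ``forward`` returns log-probabilities (``log_softmax``), so ``loss`` below uses
    # ``F.nll_loss``; the combination is equivalent to a cross-entropy loss on the raw logits.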

    def loss(self, labels, logits):
        nll = F.nll_loss(logits, labels)
        return nll

    def prepare_data(self):
        TrialMNIST(root=self.data_root, train=True, download=True)
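        # ``prepare_data`` is the hook for one-time work such as dataset downloads; Lightning
        # calls it from a single process (per node by default), so no model state is assigned here.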

    @staticmethod
    def get_default_hparams(continue_training: bool = False, hpc_exp_number: int = 0) -> dict:
        args = dict(
            drop_prob=0.2,
            batch_size=32,
            in_features=28 * 28,
            learning_rate=0.001 * 8,
            optimizer_name='adam',
            data_root=PATH_DATASETS,
            out_features=10,
            hidden_dim=1000,
            b1=0.5,
            b2=0.999,
        )

        if continue_training:
            args.update(
                test_tube_do_checkpoint_load=True,
                hpc_exp_number=hpc_exp_number,
            )

        return args
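
    # Typical test usage (illustrative): the defaults above are passed back into the constructor,
    # e.g. ``EvalModelTemplate(**EvalModelTemplate.get_default_hparams())``.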


T = TypeVar('T')
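

# The generic variants below mirror ``EvalModelTemplate`` but inherit from a ``Generic[T]``
# parent, presumably so tests can check that Lightning features such as hyperparameter saving
# also work for generically-typed modules.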
class GenericParentEvalModelTemplate(Generic[T], EvalModelTemplate):

    def __init__(
        self,
        drop_prob: float,
        batch_size: int,
        in_features: int,
        learning_rate: float,
        optimizer_name: str,
        data_root: str,
        out_features: int,
        hidden_dim: int,
        b1: float,
        b2: float,
    ):
        super().__init__(
            drop_prob=drop_prob,
            batch_size=batch_size,
            in_features=in_features,
            learning_rate=learning_rate,
            optimizer_name=optimizer_name,
            data_root=data_root,
            out_features=out_features,
            hidden_dim=hidden_dim,
            b1=b1,
            b2=b2,
        )


class GenericEvalModelTemplate(GenericParentEvalModelTemplate[int]):
    pass
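

# Minimal usage sketch (illustrative, not executed on import; assumes the standard Trainer API):
#
#   from pytorch_lightning import Trainer
#
#   model = EvalModelTemplate(**EvalModelTemplate.get_default_hparams())
#   trainer = Trainer(fast_dev_run=True)
#   trainer.fit(model)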