from abc import ABC

from torch import optim


class ConfigureOptimizersPool(ABC):

    def configure_optimizers(self):
        """
        Return whatever optimizers we want here.

        :return: a single Adam optimizer
        """
        optimizer = optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
        return optimizer
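
    # NOTE (assumption, based on the Lightning versions these tests target):
    # `configure_optimizers` may return a single optimizer, several optimizers,
    # or a two-element tuple of (optimizers, lr schedulers). The variants below
    # exercise those return formats one by one.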

    def configure_optimizers__empty(self):
        return None
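
    # Presumably exercises the case where no optimizer is configured at all;
    # how the trainer reacts to a `None` return is version dependent.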

    def configure_optimizers__lbfgs(self):
        """
        Return whatever optimizers we want here.

        :return: a single LBFGS optimizer
        """
        optimizer = optim.LBFGS(self.parameters(), lr=self.hparams.learning_rate)
        return optimizer
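
    # NOTE (assumption): LBFGS is a closure-based optimizer, so this variant is
    # useful for checking that the training loop passes a closure to
    # `optimizer.step()` rather than calling it with no arguments.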

    def configure_optimizers__multiple_optimizers(self):
        """
        Return whatever optimizers we want here.

        :return: two separate Adam optimizers
        """
        # try no scheduler for this model (testing purposes)
        optimizer1 = optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
        optimizer2 = optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
        return optimizer1, optimizer2
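
    # NOTE (assumption): with more than one optimizer, Lightning calls
    # `training_step` once per optimizer and passes an `optimizer_idx`
    # argument so the step can be routed to the right part of the model.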

    def configure_optimizers__single_scheduler(self):
        optimizer = optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
        lr_scheduler = optim.lr_scheduler.StepLR(optimizer, 1, gamma=0.1)
        return [optimizer], [lr_scheduler]
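
    # NOTE (assumption): the (optimizers, schedulers) two-list format attaches
    # each scheduler to the training loop; plain scheduler objects like the
    # StepLR above are stepped once per epoch by default.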

    def configure_optimizers__multiple_schedulers(self):
        optimizer1 = optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
        optimizer2 = optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
        lr_scheduler1 = optim.lr_scheduler.StepLR(optimizer1, 1, gamma=0.1)
        lr_scheduler2 = optim.lr_scheduler.StepLR(optimizer2, 1, gamma=0.1)

        return [optimizer1, optimizer2], [lr_scheduler1, lr_scheduler2]
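
    # The schedulers list is independent of the optimizers list: each StepLR
    # above already holds a reference to the optimizer it steps.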

    def configure_optimizers__mixed_scheduling(self):
        optimizer1 = optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
        optimizer2 = optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
        lr_scheduler1 = optim.lr_scheduler.StepLR(optimizer1, 4, gamma=0.1)
        lr_scheduler2 = optim.lr_scheduler.StepLR(optimizer2, 1, gamma=0.1)

        return [optimizer1, optimizer2], \
            [{'scheduler': lr_scheduler1, 'interval': 'step'}, lr_scheduler2]
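
    # NOTE (assumption): wrapping a scheduler in a dict such as
    # `{'scheduler': lr_scheduler1, 'interval': 'step'}` asks Lightning to step
    # it every training batch, while the bare `lr_scheduler2` keeps the default
    # once-per-epoch behaviour, so this variant mixes both scheduling intervals.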

    def configure_optimizers__reduce_lr_on_plateau(self):
        optimizer = optim.Adam(self.parameters(), lr=self.hparams.learning_rate)
        lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer)
        return [optimizer], [lr_scheduler]
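
    # NOTE (assumption): `ReduceLROnPlateau` needs a monitored metric when it is
    # stepped; depending on the Lightning version this is either a default
    # validation metric or an explicit entry such as
    # `{'scheduler': lr_scheduler, 'monitor': 'val_loss'}`.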

    def configure_optimizers__param_groups(self):
        param_groups = [
            {'params': list(self.parameters())[:2], 'lr': self.hparams.learning_rate * 0.1},
            {'params': list(self.parameters())[2:], 'lr': self.hparams.learning_rate}
        ]

        optimizer = optim.Adam(param_groups)
        lr_scheduler = optim.lr_scheduler.StepLR(optimizer, 1, gamma=0.1)
        return [optimizer], [lr_scheduler]
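
    # The two parameter groups above give the first two parameter tensors a
    # learning rate of 0.1 * learning_rate and leave the rest at the base rate,
    # which lets tests check that per-group settings survive the training loop.


# Usage sketch (assumption: a test model, e.g. `EvalModelTemplate`, mixes this
# class into a `LightningModule`); a test picks a variant by rebinding the hook:
#
#     model = EvalModelTemplate()
#     model.configure_optimizers = model.configure_optimizers__multiple_schedulers
#     trainer.fit(model)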