fix lr scheduler docs (#1446)
Co-authored-by: Nicki Skafte <nugginea@gmail.com>
This commit is contained in:
parent 3f09b32df3
commit 3f1e4b953f
@@ -13,23 +13,29 @@ Every optimizer you use can be paired with any `LearningRateScheduler <https://p

     # Adam + LR scheduler
     def configure_optimizers(self):
-        return [Adam(...)], [ReduceLROnPlateau()]
+        optimizer = Adam(...)
+        scheduler = ReduceLROnPlateau(optimizer, ...)
+        return [optimizer], [scheduler]

     # Two optimizers, each with a scheduler
     def configure_optimizers(self):
-        return [Adam(...), SGD(...)], [ReduceLROnPlateau(), LambdaLR()]
+        optimizer1 = Adam(...)
+        optimizer2 = SGD(...)
+        scheduler1 = ReduceLROnPlateau(optimizer1, ...)
+        scheduler2 = LambdaLR(optimizer2, ...)
+        return [optimizer1, optimizer2], [scheduler1, scheduler2]

     # Same as above with additional params passed to the first scheduler
     def configure_optimizers(self):
         optimizers = [Adam(...), SGD(...)]
         schedulers = [
             {
-                'scheduler': ReduceLROnPlateau(mode='max', patience=7),
+                'scheduler': ReduceLROnPlateau(optimizers[0], ...),
                 'monitor': 'val_recall', # Default: val_loss
                 'interval': 'epoch',
                 'frequency': 1
             },
-            LambdaLR()
+            LambdaLR(optimizers[1], ...)
         ]
         return optimizers, schedulers
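For reference, a minimal sketch of how the corrected pattern might look in a full LightningModule. The model layout, learning rates, the lr_lambda, and the `val_recall` metric are illustrative placeholders and not part of this commit; the point is only that each scheduler is constructed with the optimizer it steps, and that a scheduler can be wrapped in a dict to pass `monitor`, `interval`, and `frequency`:

    # Hypothetical sketch of the documented pattern; names and values are assumptions.
    import torch
    from torch import nn
    from torch.optim import Adam, SGD
    from torch.optim.lr_scheduler import ReduceLROnPlateau, LambdaLR
    import pytorch_lightning as pl


    class LitModel(pl.LightningModule):
        def __init__(self):
            super().__init__()
            self.encoder = nn.Linear(28 * 28, 64)
            self.decoder = nn.Linear(64, 10)

        def configure_optimizers(self):
            optimizers = [
                Adam(self.encoder.parameters(), lr=1e-3),
                SGD(self.decoder.parameters(), lr=1e-2),
            ]
            schedulers = [
                {
                    # ReduceLROnPlateau is built around the optimizer it adjusts,
                    # plus whatever keyword arguments you need.
                    'scheduler': ReduceLROnPlateau(optimizers[0], mode='max', patience=7),
                    'monitor': 'val_recall',   # metric watched by the scheduler (default: val_loss)
                    'interval': 'epoch',
                    'frequency': 1,
                },
                # Plain scheduler entries also take their optimizer explicitly.
                LambdaLR(optimizers[1], lr_lambda=lambda epoch: 0.95 ** epoch),
            ]
            return optimizers, schedulers

Note that the metric named in `monitor` (here the assumed `val_recall`) has to be produced during validation for the `ReduceLROnPlateau` scheduler to have something to watch; otherwise the default `val_loss` is used.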