fix lr scheduler docs (#1446)
Co-authored-by: Nicki Skafte <nugginea@gmail.com>
parent 3f09b32df3
commit 3f1e4b953f
@@ -13,23 +13,29 @@ Every optimizer you use can be paired with any `LearningRateScheduler <https://p

     # Adam + LR scheduler
     def configure_optimizers(self):
-        return [Adam(...)], [ReduceLROnPlateau()]
+        optimizer = Adam(...)
+        scheduler = ReduceLROnPlateau(optimizer, ...)
+        return [optimizer], [scheduler]

     # Two optimizers each with a scheduler
     def configure_optimizers(self):
-        return [Adam(...), SGD(...)], [ReduceLROnPlateau(), LambdaLR()]
+        optimizer1 = Adam(...)
+        optimizer2 = SGD(...)
+        scheduler1 = ReduceLROnPlateau(optimizer1, ...)
+        scheduler2 = LambdaLR(optimizer2, ...)
+        return [optimizer1, optimizer2], [scheduler1, scheduler2]

     # Same as above with additional params passed to the first scheduler
     def configure_optimizers(self):
         optimizers = [Adam(...), SGD(...)]
         schedulers = [
             {
-                'scheduler': ReduceLROnPlateau(mode='max', patience=7),
+                'scheduler': ReduceLROnPlateau(optimizers[0], ...),
                 'monitor': 'val_recall', # Default: val_loss
                 'interval': 'epoch',
                 'frequency': 1
             },
-            LambdaLR()
+            LambdaLR(optimizers[1], ...)
         ]
         return optimizers, schedulers
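For reference, the sketch below fills in the pattern the updated docs describe with concrete torch.optim objects, since the docs themselves use `...` placeholders. It is not part of the commit: the LitModel class, layer sizes, learning rate, and the assumption that a `val_recall` metric is logged are all illustrative.

import torch.nn as nn
import pytorch_lightning as pl
from torch.optim import Adam
from torch.optim.lr_scheduler import ReduceLROnPlateau


class LitModel(pl.LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = nn.Linear(32, 2)  # hypothetical model body

    def configure_optimizers(self):
        # The scheduler is constructed with its optimizer -- the point of this doc fix.
        optimizer = Adam(self.parameters(), lr=1e-3)
        scheduler = {
            'scheduler': ReduceLROnPlateau(optimizer, mode='max', patience=7),
            'monitor': 'val_recall',  # logged metric the scheduler reacts to
            'interval': 'epoch',      # step the scheduler once per epoch
            'frequency': 1,
        }
        return [optimizer], [scheduler]

Returning the scheduler as a dict (rather than bare) is what lets Lightning know which logged metric ReduceLROnPlateau should monitor and how often to step it, matching the 'monitor'/'interval'/'frequency' keys shown in the diff.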