fix lr scheduler docs (#1446)

Co-authored-by: Nicki Skafte <nugginea@gmail.com>
Nicki Skafte 2020-04-10 22:14:51 +02:00 committed by GitHub
parent 3f09b32df3
commit 3f1e4b953f
1 changed file with 10 additions and 4 deletions


@@ -13,23 +13,29 @@ Every optimizer you use can be paired with any `LearningRateScheduler <https://p
 # Adam + LR scheduler
 def configure_optimizers(self):
-    return [Adam(...)], [ReduceLROnPlateau()]
+    optimizer = Adam(...)
+    scheduler = ReduceLROnPlateau(optimizer, ...)
+    return [optimizer], [scheduler]
 # Two optimizers each with a scheduler
 def configure_optimizers(self):
-    return [Adam(...), SGD(...)], [ReduceLROnPlateau(), LambdaLR()]
+    optimizer1 = Adam(...)
+    optimizer2 = SGD(...)
+    scheduler1 = ReduceLROnPlateau(optimizer1, ...)
+    scheduler2 = LambdaLR(optimizer2, ...)
+    return [optimizer1, optimizer2], [scheduler1, scheduler2]
 # Same as above with additional params passed to the first scheduler
 def configure_optimizers(self):
     optimizers = [Adam(...), SGD(...)]
     schedulers = [
         {
-            'scheduler': ReduceLROnPlateau(mode='max', patience=7),
+            'scheduler': ReduceLROnPlateau(optimizers[0], ...),
             'monitor': 'val_recall', # Default: val_loss
             'interval': 'epoch',
             'frequency': 1
         },
-        LambdaLR()
+        LambdaLR(optimizers[1], ...)
     ]
     return optimizers, schedulers
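
For reference (not part of the commit itself), below is a minimal runnable sketch of the corrected pattern from the last example in the hunk. The LitModel body, learning rates, and the lr_lambda are illustrative placeholders; only configure_optimizers mirrors the documented pattern of constructing each scheduler with the optimizer it should step.

    # Sketch only: model architecture, learning rates, and lr_lambda are
    # placeholders, not values from the docs or this commit.
    from torch import nn
    from torch.optim import Adam, SGD
    from torch.optim.lr_scheduler import ReduceLROnPlateau, LambdaLR
    import pytorch_lightning as pl


    class LitModel(pl.LightningModule):
        def __init__(self):
            super().__init__()
            self.layer = nn.Linear(32, 2)

        def forward(self, x):
            return self.layer(x)

        def configure_optimizers(self):
            # Each scheduler is constructed with the optimizer it controls.
            optimizers = [Adam(self.parameters(), lr=1e-3),
                          SGD(self.parameters(), lr=1e-2)]
            schedulers = [
                {
                    'scheduler': ReduceLROnPlateau(optimizers[0], mode='max', patience=7),
                    'monitor': 'val_recall',  # Default: val_loss
                    'interval': 'epoch',
                    'frequency': 1,
                },
                LambdaLR(optimizers[1], lr_lambda=lambda epoch: 0.95 ** epoch),
            ]
            return optimizers, schedulers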