Fix configure_optimizers example (#20420)

Co-authored-by: Alan Chu <alanchu@Alans-Air.lan>
This commit is contained in:
Alan Chu 2024-11-14 14:41:44 -08:00 committed by GitHub
parent 20d19d2f57
commit cd2bd3ce53
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed file with 2 additions and 1 deletion

View File

@@ -41,7 +41,8 @@ class MNISTModule(L.LightningModule):
     def configure_optimizers(self):
         optim = torch.optim.Adam(self.parameters(), lr=1e-4)
-        return optim, {
+        return {
+            "optimizer": optim,
             "scheduler": torch.optim.lr_scheduler.ReduceLROnPlateau(optim, mode="max", verbose=True),
             "monitor": "val_accuracy",
             "interval": "epoch",