Fix configure_optimizers example (#20420)
Co-authored-by: Alan Chu <alanchu@Alans-Air.lan>
This commit is contained in:
parent
20d19d2f57
commit
cd2bd3ce53
|
@@ -41,7 +41,8 @@ class MNISTModule(L.LightningModule):
     def configure_optimizers(self):
         optim = torch.optim.Adam(self.parameters(), lr=1e-4)
-        return optim, {
+        return {
+            "optimizer": optim,
             "scheduler": torch.optim.lr_scheduler.ReduceLROnPlateau(optim, mode="max", verbose=True),
             "monitor": "val_accuracy",
             "interval": "epoch",
|
Loading…
Reference in New Issue