diff --git a/README.md b/README.md
index 6a616b7c38..87c39d0b03 100644
--- a/README.md
+++ b/README.md
@@ -150,6 +150,10 @@ class LitAutoEncoder(pl.LightningModule):
     def configure_optimizers(self):
         optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
         return optimizer
+
+    # def forward(self, x):
+    #     in lightning this is optional and mostly used to say
+    #     how your LightningModule should work for inference/predictions
 ```

 #### Step 2: Train!
diff --git a/docs/source/new-project.rst b/docs/source/new-project.rst
index ef86da30d1..6769152824 100644
--- a/docs/source/new-project.rst
+++ b/docs/source/new-project.rst
@@ -111,6 +111,10 @@ Step 1: Define LightningModule
         optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
         return optimizer

+    # def forward(self, x):
+    #     in lightning this is optional and mostly used to say
+    #     how your LightningModule should work for inference/predictions
+
 A :class:`~pytorch_lightning.core.LightningModule` defines a *system* such as:

 - `Autoencoder `_
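
For context, the commented-out `forward` hinted at in this diff can be uncommented and filled in when the `LightningModule` should also define inference behaviour. Below is a minimal sketch of what that might look like for the `LitAutoEncoder` from the surrounding README example; the encoder/decoder layer sizes are assumptions based on that example, not part of this diff:

```python
import torch
from torch import nn
import pytorch_lightning as pl


class LitAutoEncoder(pl.LightningModule):
    def __init__(self):
        super().__init__()
        # assumed architecture, mirroring the README's MNIST autoencoder example
        self.encoder = nn.Sequential(nn.Linear(28 * 28, 64), nn.ReLU(), nn.Linear(64, 3))
        self.decoder = nn.Sequential(nn.Linear(3, 64), nn.ReLU(), nn.Linear(64, 28 * 28))

    def forward(self, x):
        # optional in Lightning: defines what the module does for inference/predictions,
        # here returning the embedding produced by the encoder
        return self.encoder(x)

    def configure_optimizers(self):
        optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
        return optimizer


# usage sketch: calling the model directly invokes forward for prediction
model = LitAutoEncoder()
model.eval()
with torch.no_grad():
    embedding = model(torch.rand(1, 28 * 28))
```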