diff --git a/README.md b/README.md
index e2054ed623..e9d5666c17 100644
--- a/README.md
+++ b/README.md
@@ -146,7 +146,7 @@ class LitAutoEncoder(L.LightningModule):
 
     def training_step(self, batch, batch_idx):
         # training_step defines the train loop. It is independent of forward
-        x, y = batch
+        x, _ = batch
         x = x.view(x.size(0), -1)
         z = self.encoder(x)
         x_hat = self.decoder(z)
diff --git a/docs/source-app/code_samples/convert_pl_to_app/train.py b/docs/source-app/code_samples/convert_pl_to_app/train.py
index e9f9de3a6a..d0b7919b75 100644
--- a/docs/source-app/code_samples/convert_pl_to_app/train.py
+++ b/docs/source-app/code_samples/convert_pl_to_app/train.py
@@ -25,7 +25,7 @@ class LitAutoEncoder(pl.LightningModule):
     def training_step(self, batch, batch_idx):
         # training_step defines the train loop.
         # It is independent of forward
-        x, y = batch
+        x, _ = batch
         x = x.view(x.size(0), -1)
         z = self.encoder(x)
         x_hat = self.decoder(z)
diff --git a/docs/source-pytorch/common/evaluation_basic.rst b/docs/source-pytorch/common/evaluation_basic.rst
index 3dc7867de1..9eb1b9400c 100644
--- a/docs/source-pytorch/common/evaluation_basic.rst
+++ b/docs/source-pytorch/common/evaluation_basic.rst
@@ -45,7 +45,7 @@ To add a test loop, implement the **test_step** method of the LightningModule
 
     def test_step(self, batch, batch_idx):
         # this is the test loop
-        x, y = batch
+        x, _ = batch
         x = x.view(x.size(0), -1)
         z = self.encoder(x)
         x_hat = self.decoder(z)
@@ -105,7 +105,7 @@ To add a validation loop, implement the **validation_step** method of the Lightn
 
     def validation_step(self, batch, batch_idx):
         # this is the validation loop
-        x, y = batch
+        x, _ = batch
         x = x.view(x.size(0), -1)
         z = self.encoder(x)
         x_hat = self.decoder(z)
diff --git a/docs/source-pytorch/common/notebooks.rst b/docs/source-pytorch/common/notebooks.rst
index 1652d9dcb4..d4777f675b 100644
--- a/docs/source-pytorch/common/notebooks.rst
+++ b/docs/source-pytorch/common/notebooks.rst
@@ -57,7 +57,7 @@ Paste the following code block into a notebook cell:
         self.decoder = decoder
 
     def training_step(self, batch, batch_idx):
-        x, y = batch
+        x, _ = batch
         x = x.view(x.size(0), -1)
         z = self.encoder(x)
         x_hat = self.decoder(z)
diff --git a/docs/source-pytorch/model/train_model_basic.rst b/docs/source-pytorch/model/train_model_basic.rst
index 68111fea5d..f5aeb8b689 100644
--- a/docs/source-pytorch/model/train_model_basic.rst
+++ b/docs/source-pytorch/model/train_model_basic.rst
@@ -68,7 +68,7 @@ The LightningModule is the full **recipe** that defines how your nn.Modules inte
 
     def training_step(self, batch, batch_idx):
         # training_step defines the train loop.
-        x, y = batch
+        x, _ = batch
         x = x.view(x.size(0), -1)
         z = self.encoder(x)
         x_hat = self.decoder(z)
diff --git a/docs/source-pytorch/starter/introduction.rst b/docs/source-pytorch/starter/introduction.rst
index 3cfe872613..8e55afb907 100644
--- a/docs/source-pytorch/starter/introduction.rst
+++ b/docs/source-pytorch/starter/introduction.rst
@@ -129,7 +129,7 @@ A LightningModule enables your PyTorch nn.Module to play together in complex way
     def training_step(self, batch, batch_idx):
         # training_step defines the train loop.
         # it is independent of forward
-        x, y = batch
+        x, _ = batch
         x = x.view(x.size(0), -1)
         z = self.encoder(x)
         x_hat = self.decoder(z)
diff --git a/src/pytorch_lightning/README.md b/src/pytorch_lightning/README.md
index f1e1d5dfc7..a9200baa27 100644
--- a/src/pytorch_lightning/README.md
+++ b/src/pytorch_lightning/README.md
@@ -174,7 +174,7 @@ class LitAutoEncoder(pl.LightningModule):
 
     def training_step(self, batch, batch_idx):
         # training_step defines the train loop. It is independent of forward
-        x, y = batch
+        x, _ = batch
         x = x.view(x.size(0), -1)
         z = self.encoder(x)
         x_hat = self.decoder(z)
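Note: every hunk above makes the same one-line change. The target tensor returned by the dataloader is never read in the autoencoder's training/validation/test steps (the model reconstructs x, not y), so the docs now unpack it as `_` instead of `y`. For context, a minimal sketch of the full step these hunks sit inside; the `F.mse_loss` and `self.log` lines are assumed from the standard LitAutoEncoder example and are not part of this patch.

import torch.nn.functional as F
import lightning as L


class LitAutoEncoder(L.LightningModule):
    def __init__(self, encoder, decoder):
        super().__init__()
        self.encoder = encoder
        self.decoder = decoder

    def training_step(self, batch, batch_idx):
        # the label is unused by the autoencoder, hence `_`
        x, _ = batch
        x = x.view(x.size(0), -1)
        z = self.encoder(x)
        x_hat = self.decoder(z)
        # reconstruction loss and logging: assumed from the standard example,
        # not shown in the hunks above
        loss = F.mse_loss(x_hat, x)
        self.log("train_loss", loss)
        return loss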