Remove opt from manual_backward in docs (#6267)

Akihiro Nitta 2021-03-02 03:15:43 +09:00 committed by GitHub
parent ed67490d93
commit 412a7d812e
2 changed files with 6 additions and 6 deletions


@@ -946,7 +946,7 @@ When set to ``False``, Lightning does not automate the optimization process. Thi
     opt = self.optimizers(use_pl_optimizer=True)
     loss = ...
-    self.manual_backward(loss, opt)
+    self.manual_backward(loss)
     opt.step()
     opt.zero_grad()
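For context beyond the fragment above, a complete single-optimizer module using the updated call could look like the sketch below. The network body, loss, and optimizer choices are illustrative placeholders, and the way manual optimization is enabled (overriding automatic_optimization) has shifted slightly across Lightning versions, so treat this as a sketch rather than the documented example.

import torch
import torch.nn.functional as F
import pytorch_lightning as pl


class ManualOptimModule(pl.LightningModule):
    # Hypothetical example module, not part of the Lightning docs.

    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)  # placeholder network body

    @property
    def automatic_optimization(self) -> bool:
        # Opt out of Lightning's automated backward/step handling.
        return False

    def training_step(self, batch, batch_idx):
        opt = self.optimizers(use_pl_optimizer=True)
        x, y = batch
        loss = F.cross_entropy(self.layer(x), y)
        opt.zero_grad()
        self.manual_backward(loss)  # no optimizer argument, per this commit
        opt.step()

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)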
@@ -961,16 +961,16 @@ In the multi-optimizer case, ignore the ``optimizer_idx`` argument and use the o
 def training_step(self, batch, batch_idx, optimizer_idx):
     # access your optimizers with use_pl_optimizer=False. Default is True
-    (opt_a, opt_b) = self.optimizers(use_pl_optimizer=True)
+    opt_a, opt_b = self.optimizers(use_pl_optimizer=True)
     gen_loss = ...
     opt_a.zero_grad()
-    self.manual_backward(gen_loss, opt_a)
+    self.manual_backward(gen_loss)
     opt_a.step()
     disc_loss = ...
     opt_b.zero_grad()
-    self.manual_backward(disc_loss, opt_b)
+    self.manual_backward(disc_loss)
     opt_b.step()
 --------------
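Along the same lines, a self-contained two-optimizer sketch matching the multi-optimizer snippet above might look like the following. The generator/discriminator bodies and loss terms are placeholders rather than anything taken from the Lightning docs, and the training_step signature mirrors the snippet above; later Lightning versions may drop the optimizer_idx argument in manual optimization.

import torch
import pytorch_lightning as pl


class TwoOptimizerModule(pl.LightningModule):
    # Hypothetical GAN-style module with two optimizers.

    def __init__(self):
        super().__init__()
        self.gen = torch.nn.Linear(16, 16)   # placeholder "generator"
        self.disc = torch.nn.Linear(16, 1)   # placeholder "discriminator"

    @property
    def automatic_optimization(self) -> bool:
        return False

    def training_step(self, batch, batch_idx, optimizer_idx):
        # Ignore optimizer_idx and fetch both optimizers explicitly.
        opt_a, opt_b = self.optimizers(use_pl_optimizer=True)

        gen_loss = self.disc(self.gen(batch)).mean()   # placeholder loss
        opt_a.zero_grad()
        self.manual_backward(gen_loss)
        opt_a.step()

        disc_loss = self.disc(batch).mean()            # placeholder loss
        opt_b.zero_grad()
        self.manual_backward(disc_loss)
        opt_b.step()

    def configure_optimizers(self):
        opt_a = torch.optim.Adam(self.gen.parameters(), lr=1e-3)
        opt_b = torch.optim.Adam(self.disc.parameters(), lr=1e-3)
        return opt_a, opt_b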


@@ -1211,10 +1211,10 @@ class LightningModule(
 Example::
     def training_step(...):
-        (opt_a, opt_b) = self.optimizers()
+        opt_a, opt_b = self.optimizers()
         loss = ...
         # automatically applies scaling, etc...
-        self.manual_backward(loss, opt_a)
+        self.manual_backward(loss)
         opt_a.step()
 """
 if optimizer is not None:
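One more hedged sketch: assuming manual_backward keeps the usual manual_backward(loss, *args, **kwargs) shape where extra arguments are forwarded to the underlying backward call, plain PyTorch options such as retain_graph can still be passed through when two losses share part of the graph. The encoder and head attributes here are hypothetical.

    # Hypothetical fragment of a manual-optimization training_step.
    def training_step(self, batch, batch_idx):
        opt = self.optimizers()
        shared = self.encoder(batch)           # hypothetical shared encoder
        loss_a = self.head_a(shared).mean()    # hypothetical heads
        loss_b = self.head_b(shared).mean()
        opt.zero_grad()
        # retain_graph keeps the shared graph alive for the second backward.
        self.manual_backward(loss_a, retain_graph=True)
        self.manual_backward(loss_b)
        opt.step()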