Remove opt from manual_backward in docs (#6267)
parent ed67490d93
commit 412a7d812e
@@ -946,7 +946,7 @@ When set to ``False``, Lightning does not automate the optimization process. Thi
         opt = self.optimizers(use_pl_optimizer=True)
 
         loss = ...
-        self.manual_backward(loss, opt)
+        self.manual_backward(loss)
         opt.step()
         opt.zero_grad()
 
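For reference, a minimal runnable sketch of the single-optimizer pattern the updated docs describe; the module, loss, and learning rate below are illustrative and not part of this commit, and manual optimization is enabled here by overriding the ``automatic_optimization`` property as in Lightning releases of this era::

    import torch
    import pytorch_lightning as pl

    class ManualOptimModule(pl.LightningModule):
        # illustrative module; layer, loss, and optimizer are placeholders

        def __init__(self):
            super().__init__()
            self.layer = torch.nn.Linear(32, 2)

        @property
        def automatic_optimization(self) -> bool:
            # hand the optimization loop over to training_step
            return False

        def training_step(self, batch, batch_idx):
            opt = self.optimizers(use_pl_optimizer=True)
            loss = self.layer(batch).sum()  # placeholder loss
            # after this change, manual_backward takes only the loss
            self.manual_backward(loss)
            opt.step()
            opt.zero_grad()

        def configure_optimizers(self):
            return torch.optim.SGD(self.parameters(), lr=0.1)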
@@ -961,16 +961,16 @@ In the multi-optimizer case, ignore the ``optimizer_idx`` argument and use the o
 
     def training_step(self, batch, batch_idx, optimizer_idx):
         # access your optimizers with use_pl_optimizer=False. Default is True
-        (opt_a, opt_b) = self.optimizers(use_pl_optimizer=True)
+        opt_a, opt_b = self.optimizers(use_pl_optimizer=True)
 
         gen_loss = ...
         opt_a.zero_grad()
-        self.manual_backward(gen_loss, opt_a)
+        self.manual_backward(gen_loss)
         opt_a.step()
 
         disc_loss = ...
         opt_b.zero_grad()
-        self.manual_backward(disc_loss, opt_b)
+        self.manual_backward(disc_loss)
         opt_b.step()
 
 --------------
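Likewise, a self-contained sketch of the two-optimizer pattern after the change; the GAN-style model, losses, and hyperparameters are illustrative placeholders, not from this commit::

    import torch
    import pytorch_lightning as pl

    class TwoOptimizerModule(pl.LightningModule):
        # illustrative GAN-style module with two optimizers under manual optimization

        def __init__(self):
            super().__init__()
            self.generator = torch.nn.Linear(8, 8)
            self.discriminator = torch.nn.Linear(8, 1)

        @property
        def automatic_optimization(self) -> bool:
            return False

        def training_step(self, batch, batch_idx, optimizer_idx):
            # ignore optimizer_idx in manual mode; fetch both optimizers at once
            opt_a, opt_b = self.optimizers(use_pl_optimizer=True)

            gen_loss = self.discriminator(self.generator(batch)).mean()  # placeholder
            opt_a.zero_grad()
            self.manual_backward(gen_loss)  # optimizer argument dropped per this change
            opt_a.step()

            disc_loss = -self.discriminator(batch.detach()).mean()  # placeholder
            opt_b.zero_grad()
            self.manual_backward(disc_loss)
            opt_b.step()

        def configure_optimizers(self):
            opt_g = torch.optim.Adam(self.generator.parameters(), lr=2e-4)
            opt_d = torch.optim.Adam(self.discriminator.parameters(), lr=2e-4)
            return opt_g, opt_d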
@@ -1211,10 +1211,10 @@ class LightningModule(
         Example::
 
             def training_step(...):
-                (opt_a, opt_b) = self.optimizers()
+                opt_a, opt_b = self.optimizers()
                 loss = ...
                 # automatically applies scaling, etc...
-                self.manual_backward(loss, opt_a)
+                self.manual_backward(loss)
                 opt_a.step()
         """
         if optimizer is not None:
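The ``if optimizer is not None:`` guard retained after the docstring suggests the argument is still accepted but deprecated rather than removed outright. A hypothetical sketch of such a guard, not the library's actual implementation (the warning text is invented)::

    import warnings

    def manual_backward(self, loss, optimizer=None, *args, **kwargs):
        # hypothetical deprecation guard: complain if the old optimizer argument is passed
        if optimizer is not None:
            warnings.warn(
                "the `optimizer` argument to `manual_backward` is deprecated; "
                "call `self.manual_backward(loss)` instead",
                DeprecationWarning,
            )
        # the actual backward dispatch (AMP scaling, etc.) follows here
        ...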