diff --git a/docs/source-pytorch/model/manual_optimization.rst b/docs/source-pytorch/model/manual_optimization.rst
index ee7c30aa99..96d24bbe04 100644
--- a/docs/source-pytorch/model/manual_optimization.rst
+++ b/docs/source-pytorch/model/manual_optimization.rst
@@ -98,6 +98,39 @@ after every ``N`` steps, you can do as such.
         opt.step()
         opt.zero_grad()
 
+Gradient Clipping
+=================
+
+You can clip optimizer gradients during manual optimization similarly to passing the ``gradient_clip_val`` and
+``gradient_clip_algorithm`` arguments to the :ref:`Trainer ` during automatic optimization.
+To perform gradient clipping with a single optimizer during manual optimization, you can do as such.
+
+.. testcode:: python
+
+    from pytorch_lightning import LightningModule
+
+
+    class SimpleModel(LightningModule):
+        def __init__(self):
+            super().__init__()
+            self.automatic_optimization = False
+
+        def training_step(self, batch, batch_idx):
+            opt = self.optimizers()
+
+            # compute loss
+            loss = self.compute_loss(batch)
+
+            opt.zero_grad()
+            self.manual_backward(loss)
+
+            # clip gradients
+            self.clip_gradients(opt, gradient_clip_val=0.5, gradient_clip_algorithm="norm")
+
+            opt.step()
+
+.. warning::
+    * Note that ``configure_gradient_clipping()`` won't be called in manual optimization. Instead, consider calling ``self.clip_gradients()`` manually, as in the example above.
 
 Use Multiple Optimizers (like GANs)
 ===================================
diff --git a/src/pytorch_lightning/core/module.py b/src/pytorch_lightning/core/module.py
index 3d488dc2ad..f1f1c2a03e 100644
--- a/src/pytorch_lightning/core/module.py
+++ b/src/pytorch_lightning/core/module.py
@@ -1471,8 +1471,12 @@ class LightningModule(
         """Handles gradient clipping internally.
 
         Note:
-            Do not override this method. If you want to customize gradient clipping, consider
-            using :meth:`configure_gradient_clipping` method.
+            - Do not override this method. If you want to customize gradient clipping, consider using the
+              :meth:`configure_gradient_clipping` method.
+            - For manual optimization (``self.automatic_optimization = False``), if you want to use
+              gradient clipping, consider calling
+              ``self.clip_gradients(opt, gradient_clip_val=0.5, gradient_clip_algorithm="norm")``
+              manually in the training step.
 
         Args:
             optimizer: Current optimizer being used.
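For reference, here is a rough sketch (not part of the patch) of how the new ``self.clip_gradients()`` call could be combined with the "step every ``N`` batches" accumulation pattern that sits just above the added docs section. The model, loss, and ``accumulate_every_n`` value are illustrative assumptions, not anything defined in this diff.

.. code-block:: python

    import torch
    from pytorch_lightning import LightningModule


    class AccumulateAndClipModel(LightningModule):
        def __init__(self, accumulate_every_n: int = 4):
            super().__init__()
            self.automatic_optimization = False
            self.accumulate_every_n = accumulate_every_n
            self.layer = torch.nn.Linear(32, 2)

        def training_step(self, batch, batch_idx):
            opt = self.optimizers()

            # accumulate gradients over several batches before stepping
            x, y = batch
            loss = torch.nn.functional.mse_loss(self.layer(x), y)
            self.manual_backward(loss)

            # every N batches: clip the accumulated gradients, then step and reset
            if (batch_idx + 1) % self.accumulate_every_n == 0:
                self.clip_gradients(opt, gradient_clip_val=0.5, gradient_clip_algorithm="norm")
                opt.step()
                opt.zero_grad()

        def configure_optimizers(self):
            return torch.optim.SGD(self.parameters(), lr=0.1)

Clipping right before ``opt.step()``, rather than after each ``manual_backward`` call, means the norm is computed over the fully accumulated gradients, which matches the usual recommendation for combining accumulation with clipping.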