From 4bcc4f1cf7c68a94d0bc600b0ec4f0882fec2762 Mon Sep 17 00:00:00 2001 From: awaelchli Date: Tue, 13 Feb 2024 11:32:47 +0100 Subject: [PATCH] Document the return value of `Fabric.clip_gradients()` (#19457) --- docs/source-fabric/api/fabric_methods.rst | 1 + src/lightning/fabric/fabric.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/docs/source-fabric/api/fabric_methods.rst b/docs/source-fabric/api/fabric_methods.rst index 315ba19576..568c92808a 100644 --- a/docs/source-fabric/api/fabric_methods.rst +++ b/docs/source-fabric/api/fabric_methods.rst @@ -108,6 +108,7 @@ This is useful if your model experiences *exploding gradients* during training. fabric.clip_gradients(model, optimizer, max_norm=2.0, norm_type="inf") The :meth:`~lightning.fabric.fabric.Fabric.clip_gradients` method is agnostic to the precision and strategy being used. +If you pass ``max_norm`` as the argument, ``clip_gradients`` will return the total norm of the gradients (before clipping was applied) as a scalar tensor. to_device diff --git a/src/lightning/fabric/fabric.py b/src/lightning/fabric/fabric.py index bc07e633a9..4b270c8dc1 100644 --- a/src/lightning/fabric/fabric.py +++ b/src/lightning/fabric/fabric.py @@ -468,6 +468,10 @@ class Fabric: Default is the 2-norm. error_if_nonfinite: An error is raised if the total norm of the gradients is NaN or infinite. + Return: + The total norm of the gradients (before clipping was applied) as a scalar tensor if ``max_norm`` was + passed, otherwise ``None``. + """ if clip_val is not None and max_norm is not None: raise ValueError(