diff --git a/CHANGELOG.md b/CHANGELOG.md
index e82adcbf35..bb9a9f1db8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -323,6 +323,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed incorrect main progress bar indicator when resuming training mid-epoch ([#9310](https://github.com/PyTorchLightning/pytorch-lightning/pull/9310))
 
 
+- Fixed logging of nan parameters ([#9364](https://github.com/PyTorchLightning/pytorch-lightning/pull/9364))
+
+
 - Fixed `replace_sampler` missing the batch size under specific conditions ([#9367](https://github.com/PyTorchLightning/pytorch-lightning/pull/9367))
 
 
diff --git a/pytorch_lightning/utilities/finite_checks.py b/pytorch_lightning/utilities/finite_checks.py
index 4dfc5843de..27ba78373f 100644
--- a/pytorch_lightning/utilities/finite_checks.py
+++ b/pytorch_lightning/utilities/finite_checks.py
@@ -25,7 +25,7 @@ def print_nan_gradients(model: nn.Module) -> None:
     """Iterates over model parameters and prints out parameter + gradient information if NaN."""
     for param in model.parameters():
         if (param.grad is not None) and torch.isnan(param.grad.float()).any():
-            log.info(param, param.grad)
+            log.info(f"{param}, {param.grad}")
 
 
 def detect_nan_parameters(model: nn.Module) -> None:
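
Note (not part of the diff): the old call `log.info(param, param.grad)` passes `param.grad` as a %-format argument for the message, and since `str(param)` contains no `%s` placeholder, rendering the record raises "TypeError: not all arguments converted during string formatting"; the logging handler then prints "--- Logging error ---" to stderr and the NaN report is lost. The f-string formats the message eagerly and sidesteps %-formatting entirely. Below is a minimal standalone sketch of the behavior, mirroring the loop in `print_nan_gradients`; the toy `model` and logger setup are illustrative assumptions, not code from the PR.

import logging

import torch
import torch.nn as nn

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

# Toy module with a NaN gradient so the check fires (illustrative only).
model = nn.Linear(2, 2)
model.weight.grad = torch.full_like(model.weight, float("nan"))

for param in model.parameters():
    if (param.grad is not None) and torch.isnan(param.grad.float()).any():
        # log.info(param, param.grad)       # old: "--- Logging error ---" on stderr, message lost
        log.info(f"{param}, {param.grad}")  # fixed: message formatted eagerly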