Fix logging of nan parameters (#9364)

* Fix logging of nan parameters
commit 41ba639859
parent a079d7fccc
Author: Artsiom
Date:   2021-09-09 02:39:23 +02:00
2 changed files with 4 additions and 1 deletion

CHANGELOG.md

@@ -323,6 +323,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed incorrect main progress bar indicator when resuming training mid-epoch ([#9310](https://github.com/PyTorchLightning/pytorch-lightning/pull/9310))
+- Fixed logging of nan parameters ([#9364](https://github.com/PyTorchLightning/pytorch-lightning/pull/9364))
 - Fixed `replace_sampler` missing the batch size under specific conditions ([#9367](https://github.com/PyTorchLightning/pytorch-lightning/pull/9367))

pytorch_lightning/utilities/finite_checks.py

@@ -25,7 +25,7 @@ def print_nan_gradients(model: nn.Module) -> None:
     """Iterates over model parameters and prints out parameter + gradient information if NaN."""
     for param in model.parameters():
         if (param.grad is not None) and torch.isnan(param.grad.float()).any():
-            log.info(param, param.grad)
+            log.info(f"{param}, {param.grad}")
 
 
 def detect_nan_parameters(model: nn.Module) -> None:
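
Why the one-line change matters: `logging.Logger.info(msg, *args)` treats extra positional arguments as lazy %-style format arguments. The stringified parameter tensor contains no `%s` placeholder, so passing `param.grad` as a second argument makes the handler fail with "TypeError: not all arguments converted during string formatting" when the record is rendered, and the NaN report never appears. Pre-formatting the message with an f-string avoids this. A minimal standalone sketch of the failure and the fix (the tensor values are hypothetical stand-ins for real model parameters):

import logging

import torch

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

# A leaf tensor whose gradient contains a NaN, like what
# print_nan_gradients would encounter after a bad backward pass.
param = torch.tensor([1.0, float("nan")], requires_grad=True)
param.grad = torch.tensor([float("nan"), 0.0])

# Before the fix: param.grad is passed as a %-style format argument, but
# str(param) has no placeholder, so the handler prints
# "--- Logging error ---" with a TypeError instead of the record.
log.info(param, param.grad)

# After the fix: the message is fully formatted up front, so the parameter
# and its gradient are logged as intended.
log.info(f"{param}, {param.grad}")

The same failure mode applies to any logger call that passes a non-format argument after a message without placeholders, which is why the fix formats the string before handing it to the logger.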