Remove the unused `utilities.finite_checks` (#16682)

Carlos Mocholí 2023-02-09 21:11:05 +01:00 committed by GitHub
parent 8386c704b4
commit 457cd76d1a
4 changed files with 3 additions and 65 deletions


@@ -254,7 +254,6 @@ utilities
data
deepspeed
distributed
finite_checks
memory
model_summary
parsing


@@ -184,6 +184,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Removed `Trainer.model` setter ([#16462](https://github.com/Lightning-AI/lightning/pull/16462))
- Removed the unused `lightning.pytorch.utilities.finite_checks.print_nan_gradients` function ([#16682](https://github.com/Lightning-AI/lightning/pull/16682))
- Removed the unused `lightning.pytorch.utilities.finite_checks.detect_nan_parameters` function ([#16682](https://github.com/Lightning-AI/lightning/pull/16682))
- Tuner removal
* Removed the deprecated `trainer.tuning` property ([#16379](https://github.com/Lightning-AI/lightning/pull/16379))
* Removed the deprecated `TrainerFn.TUNING` and `RunningStage.TUNING` enums ([#16379](https://github.com/Lightning-AI/lightning/pull/16379))


@@ -1,44 +0,0 @@
# Copyright The Lightning AI team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions to detect NaN/Inf values."""
import logging
import torch
import torch.nn as nn
log = logging.getLogger(__name__)
def print_nan_gradients(model: nn.Module) -> None:
"""Iterates over model parameters and prints out parameter + gradient information if NaN."""
for param in model.parameters():
if (param.grad is not None) and torch.isnan(param.grad.float()).any():
log.info(f"{param}, {param.grad}")
def detect_nan_parameters(model: nn.Module) -> None:
"""Iterates over model parameters and prints gradients if any parameter is not finite.
Raises:
ValueError:
If ``NaN`` or ``inf`` values are found
"""
for name, param in model.named_parameters():
if not torch.isfinite(param).all():
print_nan_gradients(model)
raise ValueError(
f"Detected nan and/or inf values in `{name}`."
" Check your forward pass for numerically unstable operations."
)
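Downstream code that still relies on this check can inline it with plain PyTorch. A minimal sketch of an equivalent check, assuming only public `torch` APIs (the `check_finite_parameters` name is hypothetical, not part of Lightning):

import torch
import torch.nn as nn


def check_finite_parameters(model: nn.Module) -> None:
    # Raise, as the removed helper did, if any parameter holds NaN or +/-inf.
    for name, param in model.named_parameters():
        if not torch.isfinite(param).all():
            raise ValueError(f"Detected nan and/or inf values in `{name}`.")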


@@ -1,20 +0,0 @@
import math

import pytest
import torch
import torch.nn as nn

from lightning.pytorch.utilities.finite_checks import detect_nan_parameters


@pytest.mark.parametrize("value", (math.nan, math.inf, -math.inf))
def test_detect_nan_parameters(value):
    model = nn.Linear(2, 3)

    detect_nan_parameters(model)

    nn.init.constant_(model.bias, value)
    assert not torch.isfinite(model.bias).all()
    with pytest.raises(ValueError, match=r".*Detected nan and/or inf values in `bias`.*"):
        detect_nan_parameters(model)
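
For catching non-finite values as they arise during the backward pass, PyTorch's built-in anomaly detection is one alternative to a hand-rolled parameter scan; a brief sketch under that assumption:

import torch
import torch.nn as nn

# detect_anomaly makes backward() raise on NaN gradients and point at the
# forward op that produced them, at the cost of slower execution.
with torch.autograd.detect_anomaly():
    model = nn.Linear(2, 3)
    loss = model(torch.randn(4, 2)).sum()
    loss.backward()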