Move metrics_to_scalars to a dedicated utilities file (#7180)
* rm-trainer-logging
* Update CHANGELOG.md
* Update metrics.py
* Update logging.py
* Update metrics.py
parent f58865aada
commit b3fe836656
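The net effect for user code: `metrics_to_scalars` is now a free function in `pytorch_lightning.utilities.metrics` rather than a `Trainer` method. A minimal migration sketch (the sample metric values are illustrative):

```python
import torch

from pytorch_lightning.utilities.metrics import metrics_to_scalars

# Before (deprecated in v1.3, removed in v1.5): the conversion lived on the Trainer mixin.
# scalars = trainer.metrics_to_scalars({"loss": torch.tensor(0.25)})

# After: call the standalone utility directly.
scalars = metrics_to_scalars({"loss": torch.tensor(0.25)})
print(scalars)  # {'loss': 0.25}
```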
CHANGELOG.md
@@ -12,6 +12,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Added a `teardown` hook to `ClusterEnvironment` ([#6942](https://github.com/PyTorchLightning/pytorch-lightning/pull/6942))
 
+- Added utils for metrics to scalar conversions ([#7180](https://github.com/PyTorchLightning/pytorch-lightning/pull/7180))
+
+
 - Added utils for NaN/Inf detection for gradients and parameters ([#6834](https://github.com/PyTorchLightning/pytorch-lightning/pull/6834/))
@@ -146,6 +149,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 ### Deprecated
 
+- Deprecated `TrainerLoggingMixin` in favor of a separate utilities module for metric handling ([#7180](https://github.com/PyTorchLightning/pytorch-lightning/pull/7180))
+
+
 - Deprecated `TrainerTrainingTricksMixin` in favor of a separate utilities module for NaN/Inf detection for gradients and parameters ([#6834](https://github.com/PyTorchLightning/pytorch-lightning/pull/6834/))
pytorch_lightning/trainer/connectors/logger_connector/logger_connector.py
@@ -26,6 +26,7 @@ from pytorch_lightning.trainer.connectors.logger_connector.epoch_result_store im
 from pytorch_lightning.trainer.connectors.logger_connector.metrics_holder import MetricsHolder
 from pytorch_lightning.trainer.states import RunningStage, TrainerState
 from pytorch_lightning.utilities import DeviceType
+from pytorch_lightning.utilities.metrics import metrics_to_scalars
 
 
 class LoggerConnector:
@@ -210,7 +211,7 @@
         metrics.update(grad_norm_dic)
 
         # turn all tensors to scalars
-        scalar_metrics = self.trainer.metrics_to_scalars(metrics)
+        scalar_metrics = metrics_to_scalars(metrics)
 
         if "step" in scalar_metrics and step is None:
             step = scalar_metrics.pop("step")
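For context, the connector converts the metrics dict to plain scalars before handing it to the logger, then pops a logged `step` if the caller did not supply one. A rough, self-contained sketch of that pattern (not the connector's actual surrounding code; the sample metrics are made up):

```python
import torch

from pytorch_lightning.utilities.metrics import metrics_to_scalars

metrics = {"train_loss": torch.tensor(0.5), "step": torch.tensor(7)}
step = None

# Turn all tensors to scalars, then extract a logged "step" if none was passed in.
scalar_metrics = metrics_to_scalars(metrics)
if "step" in scalar_metrics and step is None:
    step = scalar_metrics.pop("step")

print(step, scalar_metrics)  # 7 {'train_loss': 0.5}
```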
pytorch_lightning/trainer/logging.py
@@ -14,28 +14,21 @@
 
 from abc import ABC
 
-import torch
-
-from pytorch_lightning.utilities.exceptions import MisconfigurationException
+from pytorch_lightning.utilities.distributed import rank_zero_deprecation
+from pytorch_lightning.utilities.metrics import metrics_to_scalars as new_metrics_to_scalars
 
 
 class TrainerLoggingMixin(ABC):
+    """
+    TODO: Remove this class in v1.5.
 
-    def metrics_to_scalars(self, metrics):
-        new_metrics = {}
-        # TODO: this is duplicated in MetricsHolder. should be unified
-        for k, v in metrics.items():
-            if isinstance(v, torch.Tensor):
-                if v.numel() != 1:
-                    raise MisconfigurationException(
-                        f"The metric `{k}` does not contain a single element"
-                        f" thus it cannot be converted to float. Found `{v}`"
-                    )
-                v = v.item()
+    Use the utilities from ``pytorch_lightning.utilities.metrics`` instead.
+    """
 
-            if isinstance(v, dict):
-                v = self.metrics_to_scalars(v)
-
-            new_metrics[k] = v
-
-        return new_metrics
+    def metrics_to_scalars(self, metrics: dict) -> dict:
+        rank_zero_deprecation(
+            "Internal: TrainerLoggingMixin.metrics_to_scalars is deprecated in v1.3"
+            " and will be removed in v1.5."
+            " Use `pytorch_lightning.utilities.metrics.metrics_to_scalars` instead."
+        )
+        return new_metrics_to_scalars(metrics)
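The mixin keeps its public signature but becomes a thin shim: warn once via `rank_zero_deprecation`, then forward to the new utility. A sketch of what calling code observes, assuming `rank_zero_deprecation` emits a `DeprecationWarning` (as the test at the bottom of this commit expects):

```python
import warnings

import torch
from pytorch_lightning import Trainer

trainer = Trainer(logger=False, checkpoint_callback=False)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Still works, but goes through the deprecation shim.
    scalars = trainer.metrics_to_scalars({"acc": torch.tensor(0.5)})

print(scalars)             # {'acc': 0.5}
print(caught[0].category)  # <class 'DeprecationWarning'>
```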
pytorch_lightning/utilities/metrics.py (new file)
@@ -0,0 +1,40 @@
+# Copyright The PyTorch Lightning team.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Helper functions to operate on metric values. """
+
+import torch
+
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
+
+
+def metrics_to_scalars(metrics: dict) -> dict:
+    """ Recursively walk through a dictionary of metrics and convert single-item tensors to scalar values. """
+
+    # TODO: this is duplicated in MetricsHolder. should be unified
+    new_metrics = {}
+    for k, v in metrics.items():
+        if isinstance(v, torch.Tensor):
+            if v.numel() != 1:
+                raise MisconfigurationException(
+                    f"The metric `{k}` does not contain a single element"
+                    f" thus it cannot be converted to float. Found `{v}`"
+                )
+            v = v.item()
+
+        if isinstance(v, dict):
+            v = metrics_to_scalars(v)
+
+        new_metrics[k] = v
+
+    return new_metrics
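A quick usage sketch of the new utility as defined above (sample values are illustrative): nested dicts are walked recursively, and multi-element tensors raise `MisconfigurationException`.

```python
import torch

from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.metrics import metrics_to_scalars

# Single-element tensors become Python scalars; nested dicts are converted recursively.
out = metrics_to_scalars({"loss": torch.tensor(0.5), "val": {"acc": torch.tensor(1.0)}})
print(out)  # {'loss': 0.5, 'val': {'acc': 1.0}}

# Multi-element tensors cannot be reduced to a single float and raise.
try:
    metrics_to_scalars({"logits": torch.tensor([0.1, 0.9])})
except MisconfigurationException as err:
    print(err)  # The metric `logits` does not contain a single element ...
```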
tests/deprecated_api/test_remove_1-5.py
@@ -242,3 +242,9 @@ def test_v1_5_0_auto_move_data():
     @auto_move_data
     def bar(self):
         pass
+
+
+def test_v1_5_0_trainer_logging_mixin(tmpdir):
+    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, checkpoint_callback=False, logger=False)
+    with pytest.deprecated_call(match="is deprecated in v1.3 and will be removed in v1.5"):
+        trainer.metrics_to_scalars({})
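The regression test leans on `pytest.deprecated_call`, which fails unless the enclosed block emits a `DeprecationWarning` (or `PendingDeprecationWarning`) whose message matches `match`. A Lightning-independent illustration of that helper (the `old_api` function is hypothetical):

```python
import warnings

import pytest

def old_api():
    # Stand-in for any deprecated entry point.
    warnings.warn("old_api is deprecated in v1.3 and will be removed in v1.5", DeprecationWarning)

# Passes only because old_api() emits a matching DeprecationWarning.
with pytest.deprecated_call(match="is deprecated in v1.3 and will be removed in v1.5"):
    old_api()
```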