From 7a617ec90e1566c763be8ac7a200af1e4025412c Mon Sep 17 00:00:00 2001
From: Carlos Mocholí
Date: Mon, 22 Aug 2022 15:19:53 +0200
Subject: [PATCH] Add back support for logging in the gradient clipping hooks
 (#14298)

* Add back support for logging in the gradient clipping hooks

* Docs and CHANGELOG

* Fix tests
---
 docs/source-pytorch/visualize/logging_advanced.rst        | 2 +-
 src/pytorch_lightning/CHANGELOG.md                        | 3 +++
 .../trainer/connectors/logger_connector/fx_validator.py   | 9 +++++++--
 .../trainer/logging_/test_logger_connector.py             | 9 +--------
 .../tests_pytorch/trainer/logging_/test_loop_logging.py   | 2 ++
 5 files changed, 14 insertions(+), 11 deletions(-)

diff --git a/docs/source-pytorch/visualize/logging_advanced.rst b/docs/source-pytorch/visualize/logging_advanced.rst
index d820fb6c1f..788deffdb3 100644
--- a/docs/source-pytorch/visualize/logging_advanced.rst
+++ b/docs/source-pytorch/visualize/logging_advanced.rst
@@ -355,7 +355,7 @@ In LightningModule
    * - Method
      - on_step
      - on_epoch
-   * - on_after_backward, on_before_backward, on_before_optimizer_step, on_before_zero_grad, training_step, training_step_end
+   * - on_after_backward, on_before_backward, on_before_optimizer_step, optimizer_step, configure_gradient_clipping, on_before_zero_grad, training_step, training_step_end
      - True
      - False
    * - training_epoch_end, test_epoch_end, test_step, test_step_end, validation_epoch_end, validation_step, validation_step_end
diff --git a/src/pytorch_lightning/CHANGELOG.md b/src/pytorch_lightning/CHANGELOG.md
index 3214aa5976..07c34bbc0e 100644
--- a/src/pytorch_lightning/CHANGELOG.md
+++ b/src/pytorch_lightning/CHANGELOG.md
@@ -76,6 +76,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Fixed an `AttributeError` when accessing `LightningModule.logger` and the Trainer has multiple loggers ([#14234](https://github.com/Lightning-AI/lightning/pull/14234))
 
+- Added back support for `log`ging in the `configure_gradient_clipping` hook after unintended removal in v1.7.2 ([#14298](https://github.com/Lightning-AI/lightning/issues/14298))
+
+
 - Fixed wrong num padding for `RichProgressBar` ([#14296](https://github.com/Lightning-AI/lightning/pull/14296))
 
 
diff --git a/src/pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py b/src/pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py
index 56ad53ef4b..87ff742810 100644
--- a/src/pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py
+++ b/src/pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py
@@ -44,8 +44,13 @@ class _FxValidator:
             allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
         ),
         "lr_scheduler_step": None,
-        "configure_gradient_clipping": None,
-        "clip_gradients": None,
+        # should match `optimizer_step`
+        "configure_gradient_clipping": _LogOptions(
+            allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
+        ),
+        "clip_gradients": _LogOptions(
+            allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
+        ),
         "on_before_zero_grad": _LogOptions(
             allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
         ),
diff --git a/tests/tests_pytorch/trainer/logging_/test_logger_connector.py b/tests/tests_pytorch/trainer/logging_/test_logger_connector.py
index c2be22c612..1c346ac1e8 100644
--- a/tests/tests_pytorch/trainer/logging_/test_logger_connector.py
+++ b/tests/tests_pytorch/trainer/logging_/test_logger_connector.py
@@ -183,12 +183,7 @@ class HookedModel(BoringModel):
     def __init__(self, not_supported):
         super().__init__()
         pl_module_hooks = get_members(LightningModule)
-        pl_module_hooks.difference_update(
-            {
-                "log",
-                "log_dict",
-            }
-        )
+        pl_module_hooks.difference_update({"log", "log_dict"})
         # remove `nn.Module` hooks
         module_hooks = get_members(torch.nn.Module)
         pl_module_hooks.difference_update(module_hooks)
@@ -236,8 +231,6 @@ def test_fx_validator_integration(tmpdir):
         "on_validation_model_eval": "You can't",
         "on_validation_model_train": "You can't",
         "lr_scheduler_step": "You can't",
-        "configure_gradient_clipping": "You can't",
-        "clip_gradients": "You can't",
         "on_save_checkpoint": "You can't",
         "on_load_checkpoint": "You can't",
         "on_exception": "You can't",
diff --git a/tests/tests_pytorch/trainer/logging_/test_loop_logging.py b/tests/tests_pytorch/trainer/logging_/test_loop_logging.py
index c37087253e..66c7bdcd25 100644
--- a/tests/tests_pytorch/trainer/logging_/test_loop_logging.py
+++ b/tests/tests_pytorch/trainer/logging_/test_loop_logging.py
@@ -54,6 +54,8 @@ def test_default_level_for_hooks_that_support_logging():
         "on_after_backward",
         "on_before_optimizer_step",
         "optimizer_step",
+        "configure_gradient_clipping",
+        "clip_gradients",
         "on_before_zero_grad",
         "optimizer_zero_grad",
         "training_step",
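
For context, a minimal sketch of the user-facing pattern this patch re-enables: calling `self.log` from an overridden `configure_gradient_clipping`. The module name and the `grad_norm` metric key below are illustrative only, not part of the patch; the hook and `clip_gradients` signatures follow the 1.7.x `LightningModule` API.

```python
import torch
from pytorch_lightning import LightningModule


class GradClipLoggingModel(LightningModule):  # illustrative name, not from the patch
    def configure_gradient_clipping(
        self, optimizer, optimizer_idx, gradient_clip_val=None, gradient_clip_algorithm=None
    ):
        # Total 2-norm of all gradients before clipping, useful for monitoring.
        norms = [p.grad.detach().norm(2) for p in self.parameters() if p.grad is not None]
        total_norm = torch.stack(norms).norm(2) if norms else torch.tensor(0.0)
        # In v1.7.2 this call raised a MisconfigurationException; with this patch it
        # is allowed again, with the same defaults as `optimizer_step`
        # (on_step=True, on_epoch=False).
        self.log("grad_norm", total_norm)
        # Delegate the actual clipping to Lightning's built-in implementation.
        self.clip_gradients(
            optimizer, gradient_clip_val=gradient_clip_val, gradient_clip_algorithm=gradient_clip_algorithm
        )
```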