# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
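"""Tests for gradient-norm tracking through the Trainer's ``track_grad_norm`` option."""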
import os
from unittest import mock
from unittest.mock import patch

import numpy as np
import pytest

from pytorch_lightning import Trainer
from tests.helpers import BoringModel
from tests.helpers.utils import reset_seed


class ModelWithManualGradTracker(BoringModel):
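    """BoringModel variant that recomputes each parameter's gradient norm by hand after every
    backward pass, so the values logged via ``track_grad_norm`` can be checked against them."""
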
    def __init__(self, norm_type, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.stored_grad_norms, self.norm_type = [], float(norm_type)

    # validation spoils logger's metrics with `val_loss` records
    validation_step = None
    val_dataloader = None

    def training_step(self, batch, batch_idx, optimizer_idx=None):
        # just return a loss, no log or progress bar meta
        output = self(batch)
        loss = self.loss(batch, output)
        return {'loss': loss}

    def on_after_backward(self):
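        # called once the gradients for the current batch are populated; compute reference
        # norms with numpy and stash them for comparison against the logged metrics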
        out, norms = {}, []
        prefix = f'grad_{self.norm_type}_norm_'
        for name, p in self.named_parameters():
            if p.grad is None:
                continue

            # `np.linalg.norm` implementation likely uses fp64 intermediates
            flat = p.grad.data.cpu().numpy().ravel()
            norm = np.linalg.norm(flat, self.norm_type)
            norms.append(norm)

            out[prefix + name] = round(norm, 4)

        # handle total norm
        norm = np.linalg.norm(norms, self.norm_type)
        out[prefix + 'total'] = round(norm, 4)
        self.stored_grad_norms.append(out)


@mock.patch.dict(os.environ, {"PL_DEV_DEBUG": "1"})
@pytest.mark.parametrize("norm_type", [1., 1.25, 2, 3, 5, 10, 'inf'])
def test_grad_tracking(tmpdir, norm_type, rtol=5e-3):
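    """Ensure the norms logged through ``track_grad_norm`` match the manually tracked ones.

    ``PL_DEV_DEBUG=1`` turns on ``trainer.dev_debugger`` so the metrics sent to the logger can be
    inspected below.
    """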
    # rtol=5e-3 is loose enough to absorb the rounding applied in `on_after_backward` above
    # and in Lightning's own grad-norm tracking
    reset_seed()

    # use a custom grad tracking module and a list logger
    model = ModelWithManualGradTracker(norm_type)

    trainer = Trainer(
        default_root_dir=tmpdir,
        max_epochs=3,
        track_grad_norm=norm_type,
        log_every_n_steps=1,  # request grad_norms every batch
    )
    trainer.fit(model)

    assert trainer.state.finished, f"Training failed with {trainer.state}"
    logged_metrics = trainer.dev_debugger.logged_metrics
    assert len(logged_metrics) == len(model.stored_grad_norms)

    # compare the logged metrics against tracked norms on `.backward`
    for mod, log in zip(model.stored_grad_norms, logged_metrics):
        common = mod.keys() & log.keys()

        log, mod = [log[k] for k in common], [mod[k] for k in common]

        assert np.allclose(log, mod, rtol=rtol)


@pytest.mark.parametrize("log_every_n_steps", [1, 2, 3])
def test_grad_tracking_interval(tmpdir, log_every_n_steps):
    """Test that gradient norms are tracked at the configured interval and that the same keys are logged every time."""
    trainer = Trainer(
        default_root_dir=tmpdir,
        track_grad_norm=2,
        log_every_n_steps=log_every_n_steps,
        max_steps=10,
    )

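    # intercept the calls to the logger so the logged grad-norm entries can be inspected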
    with patch.object(trainer.logger, "log_metrics") as mocked:
        model = BoringModel()
        trainer.fit(model)
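        # grad norms are sent to the logger together with the other metrics, once every
        # `log_every_n_steps` steps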
        expected = trainer.global_step // log_every_n_steps
        grad_norm_dicts = []
        for _, kwargs in mocked.call_args_list:
            metrics = kwargs.get("metrics", {})
            grad_norm_dict = {k: v for k, v in metrics.items() if k.startswith("grad_")}
            if grad_norm_dict:
                grad_norm_dicts.append(grad_norm_dict)

        assert len(grad_norm_dicts) == expected
        assert all(grad_norm_dicts[0].keys() == g.keys() for g in grad_norm_dicts)