2019-03-31 01:45:16 +00:00
|
|
|
"""
|
|
|
|
Module to describe gradients
|
|
|
|
"""
|
|
|
|
|
2019-08-05 08:52:09 +00:00
|
|
|
from torch import nn
|
2019-03-31 01:45:16 +00:00
|
|
|
|
|
|
|
class GradInformation(nn.Module):
    """Mixin that reports the gradient norms of a module's parameters."""

    def grad_norm(self, norm_type):
        """Compute per-parameter and total gradient norms.

        Args:
            norm_type: order of the norm (e.g. ``2`` for the Euclidean norm).

        Returns:
            dict mapping ``'grad_{norm_type}_norm_{i}'`` to the i-th
            parameter's gradient norm (rounded to 3 decimals) and
            ``'grad_{norm_type}_norm_total'`` to the combined norm over
            all gradients.
        """
        results = {}
        # Accumulate as a plain float so the method also works when no
        # parameter has a gradient (previously `0 ** (1/p)` left an int,
        # and `.data.cpu()` on it raised AttributeError).
        total_norm = 0.0

        for i, p in enumerate(self.parameters()):
            # Skip frozen params and params that never received a gradient.
            # The original broad `except Exception: pass` existed only to
            # cover the no-grad case, but it also hid real errors.
            if not p.requires_grad or p.grad is None:
                continue

            # .item() replaces .data.cpu().numpy().flatten()[0]: same
            # scalar value, no numpy round-trip.
            param_norm = p.grad.data.norm(norm_type).item()
            total_norm += param_norm ** norm_type

            # Report the norm itself. The original applied an extra
            # ** (1 / norm_type) to a value that was already the norm,
            # logging its norm_type-th root instead.
            results['grad_{}_norm_{}'.format(norm_type, i)] = round(param_norm, 3)

        # ||g||_p over all params = (sum_i ||g_i||_p ** p) ** (1/p).
        total_norm = total_norm ** (1. / norm_type)
        results['grad_{}_norm_total'.format(norm_type)] = round(total_norm, 3)

        return results
|
|
|
|
|