From 0e71705a0a02c14cbca0d2b7d1e68c220df085c4 Mon Sep 17 00:00:00 2001
From: Ashwin Bharambe
Date: Wed, 13 May 2020 14:14:11 -0700
Subject: [PATCH] [checkpoint logic] Fix bug which doesn't account for
 NoneType for `model.hparams` (#1817)

The intention of the code is to output a warning message when `hparams`
is null or not set. Instead, the code now fatals when
`model.hparams = None`. Prevent that.
---
 pytorch_lightning/trainer/training_io.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/training_io.py b/pytorch_lightning/trainer/training_io.py
index 1aedbee1a2..947942ff6d 100644
--- a/pytorch_lightning/trainer/training_io.py
+++ b/pytorch_lightning/trainer/training_io.py
@@ -342,7 +342,7 @@ class TrainerIOMixin(ABC):
         if self.use_amp and self.use_native_amp:
             checkpoint['native_amp_scaling_state'] = self.scaler.state_dict()
 
-        if hasattr(model, "hparams"):
+        if hasattr(model, "hparams") and model.hparams is not None:
             parsing.clean_namespace(model.hparams)
             checkpoint['hparams_type'] = model.hparams.__class__.__name__
             if checkpoint['hparams_type'] == 'dict':
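
For readers outside the patch, below is a minimal, self-contained Python
sketch of the failure mode this change guards against: `hasattr()` returns
True even when the attribute was explicitly set to None, so the old guard
let None reach code that dereferences it. The `clean_namespace` function
here is a hypothetical stand-in for
`pytorch_lightning.utilities.parsing.clean_namespace` (assumed to inspect
the object's attributes), and `Model` is a placeholder class, not a
Lightning API.

    class Model:
        pass

    def clean_namespace(hparams):
        # Hypothetical stand-in: like the real helper, it needs an object
        # with attributes, so passing None raises TypeError.
        vars(hparams)

    model = Model()
    model.hparams = None  # the attribute exists, but is explicitly None

    # Pre-fix guard: hasattr() is True for an attribute set to None,
    # so the checkpoint path dereferenced None and crashed.
    if hasattr(model, "hparams"):
        try:
            clean_namespace(model.hparams)
        except TypeError as err:
            print(f"crash the patch prevents: {err}")

    # Post-fix guard: the explicit None check short-circuits, so the
    # hparams-serialization block is skipped (the intended warning path).
    if hasattr(model, "hparams") and model.hparams is not None:
        clean_namespace(model.hparams)
    else:
        print("warning: hparams is None or missing; skipping hparams save")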