From 94afe8236ccee4bce5d7bd429f943149949ee762 Mon Sep 17 00:00:00 2001
From: Nic Eggert
Date: Sat, 29 Feb 2020 10:39:24 -0600
Subject: [PATCH] Fix load_from_checkpoint docs (#978)

We don't (yet) support storing hparams as a dict. It *must* be an
`argparse.Namespace` for checkpoint saving and loading to work.
---
 pytorch_lightning/core/lightning.py | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index 00f48d4967..5358728930 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -1168,15 +1168,6 @@ class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
                 def __init__(self, hparams):
                     self.learning_rate = hparams.learning_rate
 
-            # --------------
-            # Case 2
-            # when using a dict
-            model = MyModel({'learning_rate': 0.1})
-
-            class MyModel(LightningModule):
-                def __init__(self, hparams):
-                    self.learning_rate = hparams['learning_rate']
-
         Args:
             checkpoint_path (str): Path to checkpoint.
             map_location (dict | str | torch.device | function):
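
For reference, a minimal sketch of the supported pattern this patch points users to: constructing `hparams` as an `argparse.Namespace` so that checkpoint saving and `load_from_checkpoint` can round-trip the hyperparameters. `MyModel` and the checkpoint path are illustrative placeholders taken from the docstring, not library API; the `self.hparams = hparams` assignment is an assumption about how the surrounding module would expose hparams to Lightning's checkpointing, since that setup is not shown in this diff.

```python
from argparse import Namespace

from pytorch_lightning import LightningModule


class MyModel(LightningModule):
    def __init__(self, hparams):
        super().__init__()
        # hparams must be an argparse.Namespace, not a plain dict,
        # for checkpoint saving and loading to work (per this patch).
        # Storing it on the module (assumed convention) lets Lightning
        # include it in saved checkpoints.
        self.hparams = hparams
        self.learning_rate = hparams.learning_rate


# Supported: a Namespace with attribute access,
# e.g. the output of argparse.ArgumentParser.parse_args().
hparams = Namespace(learning_rate=0.1)
model = MyModel(hparams)

# Not (yet) supported: passing a dict, as the removed docstring
# example suggested.
# model = MyModel({'learning_rate': 0.1})

# Restoring later re-creates hparams from the checkpoint
# ('path/to/checkpoint.ckpt' is a hypothetical path):
# model = MyModel.load_from_checkpoint('path/to/checkpoint.ckpt')
```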