Fix load_from_checkpoint docs (#978)

We don't (yet) support storing hparams as a dict. It *must*
be an `argparse.Namespace` for checkpoint saving and
loading to work.
Nic Eggert 2020-02-29 10:39:24 -06:00 committed by GitHub
parent 479a35d94e
commit 94afe8236c
1 changed file with 0 additions and 9 deletions


@@ -1168,15 +1168,6 @@ class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
                 def __init__(self, hparams):
                     self.learning_rate = hparams.learning_rate
 
-                # --------------
-                # Case 2
-                # when using a dict
-                model = MyModel({'learning_rate': 0.1})
-
-                class MyModel(LightningModule):
-                    def __init__(self, hparams):
-                        self.learning_rate = hparams['learning_rate']
-
         Args:
             checkpoint_path (str): Path to checkpoint.
             map_location (dict | str | torch.device | function):
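
For reference, below is a minimal sketch of the pattern the docstring keeps (Case 1): hyperparameters passed as an `argparse.Namespace`, which checkpoint saving and `load_from_checkpoint` can round-trip. The model class, layer size, and checkpoint path are hypothetical, and the sketch assumes the Lightning API of this era (circa v0.6), where `hparams` is assigned in `__init__`.

from argparse import Namespace

import torch
import pytorch_lightning as pl


class MyModel(pl.LightningModule):
    def __init__(self, hparams):
        super().__init__()
        # hparams must be an argparse.Namespace, not a dict, so that
        # checkpoint saving and loading can round-trip it
        self.hparams = hparams
        self.l1 = torch.nn.Linear(28 * 28, 10)
        self.learning_rate = hparams.learning_rate

    def forward(self, x):
        return self.l1(x)

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=self.learning_rate)


# Case 1: a Namespace, e.g. the output of ArgumentParser.parse_args()
hparams = Namespace(learning_rate=0.05)
model = MyModel(hparams)

# Restoring later: hparams are read back out of the checkpoint
# ('path/to/checkpoint.ckpt' is a placeholder)
model = MyModel.load_from_checkpoint('path/to/checkpoint.ckpt')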