Fix load_from_checkpoint docs (#978)
We don't (yet) support storing hparams as a dict. It *must* be an `argparse.Namespace` for checkpoint saving and loading to work.
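For context, a minimal sketch of the supported pattern (the `MyModel` name mirrors the docstring example below; the `Linear` layer and `forward` body are illustrative fillers):

```python
from argparse import Namespace

import torch
from pytorch_lightning import LightningModule


class MyModel(LightningModule):
    def __init__(self, hparams):
        super().__init__()
        # hparams must be an argparse.Namespace; a plain dict breaks
        # checkpoint saving/loading.
        self.hparams = hparams
        self.learning_rate = hparams.learning_rate
        self.layer = torch.nn.Linear(32, 2)

    def forward(self, x):
        return self.layer(x)


# e.g. the output of ArgumentParser.parse_args()
model = MyModel(Namespace(**{'learning_rate': 0.05}))
```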
commit 94afe8236c
parent 479a35d94e
@@ -1168,15 +1168,6 @@ class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
             def __init__(self, hparams):
                 self.learning_rate = hparams.learning_rate
 
-            # --------------
-            # Case 2
-            # when using a dict
-            model = MyModel({'learning_rate': 0.1})
-
-            class MyModel(LightningModule):
-                def __init__(self, hparams):
-                    self.learning_rate = hparams['learning_rate']
-
         Args:
             checkpoint_path (str): Path to checkpoint.
             map_location (dict | str | torch.device | function):
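A hedged usage sketch for the documented Args, reusing `MyModel` from the example above (the checkpoint path is illustrative):

```python
import torch

# load_from_checkpoint restores the weights and re-instantiates the
# module with the hparams stored in the checkpoint; map_location remaps
# storages, e.g. onto CPU when the checkpoint was written on GPU.
model = MyModel.load_from_checkpoint(
    '/path/to/checkpoint.ckpt',
    map_location=torch.device('cpu'),
)
```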