From 5c6cdc0f27c703328c07546a144945a18c238b5d Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Wed, 7 Aug 2019 16:01:51 -0400
Subject: [PATCH] updated docs

---
 docs/Trainer/Checkpointing.md | 35 ++++++++++++++++++++++++++++++++---
 1 file changed, 32 insertions(+), 3 deletions(-)

diff --git a/docs/Trainer/Checkpointing.md b/docs/Trainer/Checkpointing.md
index b0a5281d57..8c37764cdf 100644
--- a/docs/Trainer/Checkpointing.md
+++ b/docs/Trainer/Checkpointing.md
@@ -29,11 +29,40 @@ Lightning will restore the session if you pass an experiment with the same versi
 from test_tube import Experiment
 
 exp = Experiment(version=a_previous_version_with_a_saved_checkpoint)
-Trainer(experiment=exp)
+trainer = Trainer(experiment=exp)
 
-trainer = Trainer(checkpoint_callback=checkpoint_callback)
-# the trainer is now restored
+# this fit call loads the model weights and trainer state
+# the trainer continues seamlessly from where you left off,
+# without you having to do anything else.
+trainer.fit(model)
 ```
 
 
 
+The trainer restores:
+- global_step
+- current_epoch
+- all optimizers
+- all lr_schedulers
+- model weights
+You can even change the logic of your model, as long as the weights and the "architecture" of
+the system stay the same. If you add a layer, for instance, the checkpoint may fail to load.
+At a rough level, here's [what happens inside Trainer](https://github.com/williamFalcon/pytorch-lightning/blob/master/pytorch_lightning/root_module/model_saving.py#L63):
+```python
+
+self.global_step = checkpoint['global_step']
+self.current_epoch = checkpoint['epoch']
+
+# restore the optimizers
+optimizer_states = checkpoint['optimizer_states']
+for optimizer, opt_state in zip(self.optimizers, optimizer_states):
+    optimizer.load_state_dict(opt_state)
+
+# restore the lr schedulers
+lr_schedulers = checkpoint['lr_schedulers']
+for scheduler, lrs_state in zip(self.lr_schedulers, lr_schedulers):
+    scheduler.load_state_dict(lrs_state)
+
+# uses the model you passed into trainer
+model.load_state_dict(checkpoint['state_dict'])
+```
\ No newline at end of file
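
The restore flow documented in this patch maps directly onto plain PyTorch calls. Below is a minimal, self-contained sketch of the same round trip outside of Lightning: the checkpoint keys mirror the ones the Trainer snippet reads, but the model, optimizer, scheduler, and file name are made up for illustration and are not part of the library.

```python
import torch
import torch.nn as nn

# stand-in training objects (hypothetical, for illustration only)
model = nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10)

# save: use the same keys the Trainer restore logic expects
checkpoint = {
    'global_step': 500,
    'epoch': 4,
    'optimizer_states': [optimizer.state_dict()],
    'lr_schedulers': [scheduler.state_dict()],
    'state_dict': model.state_dict(),
}
torch.save(checkpoint, 'example.ckpt')

# restore: the same flow as the Trainer code in the patch above
checkpoint = torch.load('example.ckpt')
global_step = checkpoint['global_step']
current_epoch = checkpoint['epoch']
for opt, opt_state in zip([optimizer], checkpoint['optimizer_states']):
    opt.load_state_dict(opt_state)
for sched, sched_state in zip([scheduler], checkpoint['lr_schedulers']):
    sched.load_state_dict(sched_state)
model.load_state_dict(checkpoint['state_dict'])
```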
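The "architecture must match" caveat follows from how `load_state_dict` behaves in PyTorch: it is strict by default, so a checkpoint saved from one module layout cannot be loaded into a module with extra or missing layers. A small sketch of the failure mode (the `Sequential` models here are hypothetical stand-ins, not Lightning code):

```python
import torch.nn as nn

original = nn.Sequential(nn.Linear(10, 10), nn.Linear(10, 2))
state_dict = original.state_dict()

# same architecture: the weights load cleanly
nn.Sequential(nn.Linear(10, 10), nn.Linear(10, 2)).load_state_dict(state_dict)

# one extra layer: missing/mismatched keys raise a RuntimeError
bigger = nn.Sequential(nn.Linear(10, 10), nn.Linear(10, 10), nn.Linear(10, 2))
try:
    bigger.load_state_dict(state_dict)
except RuntimeError as err:
    print(f'restore failed: {err}')
```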