diff --git a/docs/LightningModule/properties.md b/docs/LightningModule/properties.md
new file mode 100644
index 0000000000..7502046b90
--- /dev/null
+++ b/docs/LightningModule/properties.md
@@ -0,0 +1,25 @@
+A LightningModule has the following properties, which you can access at any time.
+
+---
+#### current_epoch
+The current epoch.
+
+---
+#### dtype
+The current dtype.
+
+---
+#### global_step
+The total number of training batches seen across all epochs.
+
+---
+#### gradient_clip
+The current gradient clip value.
+
+---
+#### on_gpu
+True if your model is currently running on GPUs. Useful for setting flags in the LightningModule to switch between CPU and GPU behavior.
+
+---
+#### trainer
+Last-resort access to any state the trainer holds. Changing certain properties here can affect your training run.
diff --git a/docs/index.md b/docs/index.md
index 27fbec083c..dea92070bf 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -11,9 +11,9 @@
 
 - [GPU cluster Trainer](https://github.com/williamFalcon/pytorch-lightning/blob/master/examples/new_project_templates/trainer_gpu_cluster_template.py)
 
 ###### Quick start examples
-- [CPU example](https://williamfalcon.github.io/pytorch-lightning/Examples/#CPU-hyperparameter-search)
-- [Hyperparameter search on single GPU](https://williamfalcon.github.io/pytorch-lightning/Examples/#Hyperparameter-search-on-a-single-or-multiple-GPUs)
-- [Hyperparameter search on multiple GPUs on same node](https://williamfalcon.github.io/pytorch-lightning/Examples/#Hyperparameter-search-on-a-single-or-multiple-GPUs)
+- [CPU example](https://williamfalcon.github.io/pytorch-lightning/Examples/#cpu-hyperparameter-search)
+- [Hyperparameter search on single GPU](https://williamfalcon.github.io/pytorch-lightning/Examples/#hyperparameter-search-on-a-single-or-multiple-gpus)
+- [Hyperparameter search on multiple GPUs on same node](https://williamfalcon.github.io/pytorch-lightning/Examples/#hyperparameter-search-on-a-single-or-multiple-gpus)
 - [Hyperparameter search on a SLURM HPC cluster](https://williamfalcon.github.io/pytorch-lightning/Examples/#Hyperparameter search on a SLURM HPC cluster)
 
diff --git a/pytorch_lightning/root_module/root_module.py b/pytorch_lightning/root_module/root_module.py
index 1d1f8039aa..537ec27787 100644
--- a/pytorch_lightning/root_module/root_module.py
+++ b/pytorch_lightning/root_module/root_module.py
@@ -24,7 +24,6 @@ class LightningModule(GradInformation, ModelIO, OptimizerConfig, ModelHooks):
         self.fast_dev_run = hparams.fast_dev_run
         self.overfit = hparams.overfit
         self.gradient_clip = hparams.gradient_clip
-        self.num = 2
         self.trainer = None
         self.from_lightning = True
 
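A minimal, hypothetical sketch (not part of this PR) of how the properties documented in the new `docs/LightningModule/properties.md` page might be read from inside a `LightningModule` subclass. The `ExampleModel` class, its `training_step` signature, and the loss computation are illustrative assumptions; only the property names (`on_gpu`, `current_epoch`, `global_step`, `trainer`) and the import path come from this diff.

```python
# Hypothetical sketch -- not part of this PR -- exercising the documented
# LightningModule properties at runtime.
import torch.nn.functional as F

# Import path taken from pytorch_lightning/root_module/root_module.py above.
from pytorch_lightning.root_module.root_module import LightningModule


class ExampleModel(LightningModule):
    """Illustrative model; forward(), optimizers, and data loading omitted."""

    def training_step(self, batch, batch_nb):
        x, y = batch

        # on_gpu: True when running on GPUs, so tensors can be moved to match.
        if self.on_gpu:
            x = x.cuda()
            y = y.cuda()

        y_hat = self.forward(x)
        loss = F.cross_entropy(y_hat, y)

        # current_epoch / global_step: handy for warmup schedules and logging.
        if self.current_epoch == 0 and self.global_step < 10:
            print('warmup batch %d, loss %.4f' % (self.global_step, loss.item()))

        # trainer: last-resort access to trainer state. It is None until the
        # trainer attaches itself (see `self.trainer = None` in root_module.py);
        # mutating its properties can affect the training run.
        assert self.trainer is not None

        return {'loss': loss}
```

The early-epoch `print` and the `assert` are only there to show when these properties carry meaningful values; real code would typically read them for logging or CPU/GPU branching and leave `self.trainer` untouched.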