From 7d0c2c7db8c7cd38312dd1dd6973b44d3eab6a7a Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Wed, 8 Apr 2020 11:38:12 -0400
Subject: [PATCH] Loader docs (#1416)

* added multiple loader docs
* added multiple loader docs
* added multiple loader docs
* added multiple loader docs
* added multiple loader docs
* Apply suggestions from code review
* added multiple loader docs
* added build docs script
* typo
* added build docs script
* added build docs script
* added build docs script

Co-authored-by: Jirka Borovec
Co-authored-by: J. Borovec
---
 docs/.build_docs.sh                   |  1 +
 docs/source/index.rst                 |  1 +
 docs/source/introduction_guide.rst    | 17 +++++++
 docs/source/multiple_loaders.rst      | 75 ++++++++++++++++++++++++++++
 pytorch_lightning/trainer/__init__.py |  4 +-
 5 files changed, 96 insertions(+), 2 deletions(-)
 create mode 100644 docs/.build_docs.sh
 create mode 100644 docs/source/multiple_loaders.rst

diff --git a/docs/.build_docs.sh b/docs/.build_docs.sh
new file mode 100644
index 0000000000..691f7fc229
--- /dev/null
+++ b/docs/.build_docs.sh
@@ -0,0 +1 @@
+make clean ; make html --debug --jobs 2 SPHINXOPTS="-W"
\ No newline at end of file
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 1e11f7a0e9..0424bcfeb5 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -67,6 +67,7 @@ PyTorch Lightning Documentation
    hooks
    hyperparameters
    multi_gpu
+   multiple_loaders
    weights_loading
    optimizers
    profiler
diff --git a/docs/source/introduction_guide.rst b/docs/source/introduction_guide.rst
index 99f3c279af..a98631ecd6 100644
--- a/docs/source/introduction_guide.rst
+++ b/docs/source/introduction_guide.rst
@@ -28,6 +28,23 @@ to use inheritance to very quickly create an AutoEncoder.
 
 ---------
 
+Installing Lightning
+--------------------
+Lightning is trivial to install.
+
+.. code-block:: bash
+
+    conda activate my_env
+    pip install pytorch-lightning
+
+Or, without a conda environment, anywhere you can use pip:
+
+.. code-block:: bash
+
+    pip install pytorch-lightning
+
+---------
+
 Lightning Philosophy
 --------------------
 Lightning factors DL/ML code into three types:
diff --git a/docs/source/multiple_loaders.rst b/docs/source/multiple_loaders.rst
new file mode 100644
index 0000000000..e88b7b1cbe
--- /dev/null
+++ b/docs/source/multiple_loaders.rst
@@ -0,0 +1,75 @@
+Multiple Datasets
+=================
+Lightning supports multiple dataloaders in two ways:
+
+1. Create a dataloader that iterates both datasets under the hood.
+2. In the validation and test loops you also have the option to return multiple dataloaders,
+   which Lightning will call sequentially.
+
+Multiple training dataloaders
+-----------------------------
+For training, the best way to use multiple dataloaders is to create a ``Dataset`` class
+that wraps both your datasets. (This, of course, also works for validation and test
+dataloaders.)
+
+(`reference `_)
+
+.. code-block:: python
+
+    import torch
+    from torchvision import datasets
+    from pytorch_lightning import LightningModule
+
+    class ConcatDataset(torch.utils.data.Dataset):
+        def __init__(self, *datasets):
+            self.datasets = datasets
+
+        def __getitem__(self, i):
+            # return one sample from each wrapped dataset
+            return tuple(d[i] for d in self.datasets)
+
+        def __len__(self):
+            # the shortest dataset determines the epoch length
+            return min(len(d) for d in self.datasets)
+
+    class LitModel(LightningModule):
+        def train_dataloader(self):
+            # traindir_A, traindir_B and args are assumed to be defined by the user
+            concat_dataset = ConcatDataset(
+                datasets.ImageFolder(traindir_A),
+                datasets.ImageFolder(traindir_B)
+            )
+
+            loader = torch.utils.data.DataLoader(
+                concat_dataset,
+                batch_size=args.batch_size,
+                shuffle=True,
+                num_workers=args.workers,
+                pin_memory=True
+            )
+            return loader
+
+        def val_dataloader(self):
+            # same pattern as train_dataloader
+            ...
+
+        def test_dataloader(self):
+            # same pattern as train_dataloader
+            ...
+
+Test/Val dataloaders
+--------------------
+For validation and test dataloaders, Lightning also gives you the option
+of returning multiple dataloaders from each call.
+
+See the following for more details:
+
+- :meth:`~pytorch_lightning.core.LightningModule.val_dataloader`
+- :meth:`~pytorch_lightning.core.LightningModule.test_dataloader`
+
+.. code-block:: python
+
+    def val_dataloader(self):
+        loader_1 = DataLoader()
+        loader_2 = DataLoader()
+        return [loader_1, loader_2]
diff --git a/pytorch_lightning/trainer/__init__.py b/pytorch_lightning/trainer/__init__.py
index 554b34f838..b29ce40f34 100644
--- a/pytorch_lightning/trainer/__init__.py
+++ b/pytorch_lightning/trainer/__init__.py
@@ -58,7 +58,7 @@ So you can run it like so:distributed_backend
 
 .. code-block:: bash
 
-    $ python main.py --gpus 2
+    python main.py --gpus 2
 
 .. note::
 
@@ -550,7 +550,7 @@ submit this script using the xla_dist script.
 
 Example::
 
-    $ python -m torch_xla.distributed.xla_dist
+    python -m torch_xla.distributed.xla_dist
     --tpu=$TPU_POD_NAME
     --conda-env=torch-xla-nightly
     --env=XLA_USE_BF16=1
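
A note on the multiple-dataloader docs added above: when ``val_dataloader`` returns a list, Lightning passes an extra ``dataloader_idx`` argument to ``validation_step`` so you can tell the loaders apart. Below is a minimal sketch of that pattern; the random tensors, batch sizes, and ``LitModel`` name are illustrative placeholders, not part of this patch.

.. code-block:: python

    import torch
    from torch.utils.data import DataLoader, TensorDataset
    from pytorch_lightning import LightningModule

    class LitModel(LightningModule):
        def val_dataloader(self):
            # two hypothetical validation sets, evaluated sequentially by Lightning
            set_a = TensorDataset(torch.randn(64, 32))
            set_b = TensorDataset(torch.randn(64, 32))
            return [DataLoader(set_a, batch_size=8), DataLoader(set_b, batch_size=8)]

        def validation_step(self, batch, batch_idx, dataloader_idx):
            # dataloader_idx is 0 for batches from set_a and 1 for batches from set_b
            ...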