From 4605e8a4a563fc921b6aca82d6395399a8fe7b43 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Asl=C4=B1=20Sabanc=C4=B1?=
Date: Tue, 27 Jul 2021 04:22:05 -0700
Subject: [PATCH] Add missing highlighting for Python snippets (#8411)

Co-authored-by: Jirka Borovec
---
 docs/source/common/trainer.rst | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/docs/source/common/trainer.rst b/docs/source/common/trainer.rst
index 572ea5b4b4..0cc6e0822e 100644
--- a/docs/source/common/trainer.rst
+++ b/docs/source/common/trainer.rst
@@ -335,7 +335,7 @@ auto_scale_batch_size
 
 Automatically tries to find the largest batch size that fits into memory, before any training.
 
-.. code-block::
+.. code-block:: python
 
     # default used by the Trainer (no scaling of batch size)
     trainer = Trainer(auto_scale_batch_size=None)
@@ -1353,7 +1353,6 @@ By setting to False, you have to add your own distributed sampler:
 
 .. code-block:: python
 
-
     # in your LightningModule or LightningDataModule
     def train_dataloader(self):
         # default used by the Trainer
@@ -1575,7 +1574,7 @@ Can specify as float or int.
 
     trainer = Trainer(val_check_interval=1000)
 
-.. code-block::
+.. code-block:: python
 
     # Here is the computation to estimate the total number of batches seen within an epoch.
 