From 5a89be68736ad2ba1031fdd43148708a76d1c90a Mon Sep 17 00:00:00 2001
From: Kaushik B <45285388+kaushikb11@users.noreply.github.com>
Date: Wed, 5 Jan 2022 16:07:11 +0530
Subject: [PATCH] Update strategy registry docs (#11311)

---
 ...gins_registry.rst => strategy_registry.rst} | 18 +++++++++---------
 docs/source/index.rst                          |  2 +-
 2 files changed, 10 insertions(+), 10 deletions(-)
 rename docs/source/advanced/{plugins_registry.rst => strategy_registry.rst} (67%)

diff --git a/docs/source/advanced/plugins_registry.rst b/docs/source/advanced/strategy_registry.rst
similarity index 67%
rename from docs/source/advanced/plugins_registry.rst
rename to docs/source/advanced/strategy_registry.rst
index 0c8545a125..d92069a9fe 100644
--- a/docs/source/advanced/plugins_registry.rst
+++ b/docs/source/advanced/strategy_registry.rst
@@ -1,12 +1,12 @@
-Training Type Plugins Registry
-==============================
+Strategy Registry
+=================
 
-.. warning:: The Plugins Registry is experimental and subject to change.
+.. warning:: The Strategy Registry is experimental and subject to change.
 
-Lightning includes a registry that holds information about Training Type plugins and allows for the registration of new custom plugins.
+Lightning includes a registry that holds information about Training strategies and allows for the registration of new custom strategies.
 
-The Plugins are assigned strings that identify them, such as "ddp", "deepspeed_stage_2_offload", and so on.
-It also returns the optional description and parameters for initialising the Plugin that were defined during registration.
+The Strategies are assigned strings that identify them, such as "ddp", "deepspeed_stage_2_offload", and so on.
+It also returns the optional description and parameters for initialising the Strategy that were defined during registration.
 
 .. code-block:: python
 
@@ -21,11 +21,11 @@ It also returns the optional description and parameters for initialising the Plu
     trainer = Trainer(strategy="tpu_spawn_debug", accelerator="tpu", devices=8)
 
 
-Additionally, you can pass your custom registered training type plugins to the ``strategy`` argument.
+Additionally, you can pass your custom registered training strategies to the ``strategy`` argument.
 
 .. code-block:: python
 
-    from pytorch_lightning.strategies import DDPStrategy, TrainingTypePluginsRegistry, CheckpointIO
+    from pytorch_lightning.strategies import DDPStrategy, StrategyRegistry, CheckpointIO
 
 
     class CustomCheckpointIO(CheckpointIO):
@@ -39,7 +39,7 @@ Additionally, you can pass your custom registered training type plugins to the `
     custom_checkpoint_io = CustomCheckpointIO()
 
     # Register the DDP Strategy with your custom CheckpointIO plugin
-    TrainingTypePluginsRegistry.register(
+    StrategyRegistry.register(
         "ddp_custom_checkpoint_io",
         DDPStrategy,
         description="DDP Strategy with custom checkpoint io plugin",
diff --git a/docs/source/index.rst b/docs/source/index.rst
index bb442aa325..ac98456f7d 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -77,7 +77,7 @@ PyTorch Lightning
    advanced/fault_tolerant_training
    common/optimizers
    advanced/profiler
-   advanced/plugins_registry
+   advanced/strategy_registry
    common/remote_fs
    common/single_gpu
    advanced/training_tricks
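
A minimal sketch of the registration flow the updated page documents, for trying the change locally. It assumes PyTorch Lightning >= 1.6, where ``StrategyRegistry`` and ``DDPStrategy`` are importable from ``pytorch_lightning.strategies``; the alias ``ddp_no_unused_parameters`` and the ``find_unused_parameters`` keyword are illustrative choices, not part of this patch.

.. code-block:: python

    from pytorch_lightning import Trainer
    from pytorch_lightning.strategies import DDPStrategy, StrategyRegistry

    # Register a DDP variant under a custom string alias. Keyword arguments
    # given after `description` are stored as init parameters and used to
    # initialise DDPStrategy when the alias is resolved.
    StrategyRegistry.register(
        "ddp_no_unused_parameters",
        DDPStrategy,
        description="DDP with find_unused_parameters disabled",
        find_unused_parameters=False,
    )

    # The registry is dict-like and keyed by the registered names, so the new
    # alias can be checked and then passed straight to the Trainer.
    assert "ddp_no_unused_parameters" in StrategyRegistry
    trainer = Trainer(strategy="ddp_no_unused_parameters", accelerator="gpu", devices=2)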