From 5d6a09153167c778ff41def1cb7be1c42aea8499 Mon Sep 17 00:00:00 2001
From: Rohit Gupta
Date: Fri, 19 Feb 2021 01:32:50 +0530
Subject: [PATCH] default sched (#6062)

---
 .../plugins/training_type/deepspeed.py | 13 +++----------
 1 file changed, 3 insertions(+), 10 deletions(-)

diff --git a/pytorch_lightning/plugins/training_type/deepspeed.py b/pytorch_lightning/plugins/training_type/deepspeed.py
index b6545c9b40..0f9a837805 100644
--- a/pytorch_lightning/plugins/training_type/deepspeed.py
+++ b/pytorch_lightning/plugins/training_type/deepspeed.py
@@ -26,6 +26,7 @@ from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.overrides.base import _LightningModuleWrapperBase
 from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment
 from pytorch_lightning.plugins.training_type.ddp import DDPPlugin
+from pytorch_lightning.trainer.optimizers import _get_default_scheduler_config
 from pytorch_lightning.utilities import AMPType
 from pytorch_lightning.utilities.apply_func import apply_to_collection
 from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_only
@@ -240,16 +241,8 @@ class DeepSpeedPlugin(DDPPlugin):
         )
 
     def configure_scheduler(self, lr_scheduler):
-        # this duplicates the defaults from init_optimizers
-        scheduler = {
-            'scheduler': lr_scheduler,
-            'name': None,  # no custom name
-            'interval': 'epoch',  # after epoch is over
-            'frequency': 1,  # every epoch/batch
-            'reduce_on_plateau': False,  # most often not ReduceLROnPlateau scheduler
-            'monitor': None,  # value to monitor for ReduceLROnPlateau
-            'strict': True,  # enforce that the monitor exists for ReduceLROnPlateau
-        }
+        scheduler = _get_default_scheduler_config()
+        scheduler["scheduler"] = lr_scheduler
        return [scheduler]
 
     @property
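
Note: a minimal sketch of the refactor, not the library's actual source. The new import confirms that _get_default_scheduler_config lives in pytorch_lightning/trainer/optimizers.py; its return value is assumed here to mirror the dict the patch removes, so configure_scheduler only has to fill in the scheduler instance itself.

# Sketch only: assumed shape of the shared defaults that configure_scheduler now reuses,
# reconstructed from the dict removed in the hunk above.
def _get_default_scheduler_config() -> dict:
    return {
        'scheduler': None,           # filled in by the caller
        'name': None,                # no custom name
        'interval': 'epoch',         # step the scheduler after each epoch
        'frequency': 1,              # every epoch/batch
        'reduce_on_plateau': False,  # most often not a ReduceLROnPlateau scheduler
        'monitor': None,             # value to monitor for ReduceLROnPlateau
        'strict': True,              # enforce that the monitor exists for ReduceLROnPlateau
    }

# Usage, matching the patched configure_scheduler:
#   scheduler = _get_default_scheduler_config()
#   scheduler["scheduler"] = lr_scheduler
#   return [scheduler]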