diff --git a/pytorch_lightning/trainer/optimizers.py b/pytorch_lightning/trainer/optimizers.py
index c5a1528566..9de9e83614 100644
--- a/pytorch_lightning/trainer/optimizers.py
+++ b/pytorch_lightning/trainer/optimizers.py
@@ -230,7 +230,7 @@ def _validate_optim_conf(optim_conf: Dict[str, Any]) -> None:
 def _validate_scheduler_optimizer(optimizers, lr_schedulers):
     if any(sch["scheduler"].optimizer not in optimizers for sch in lr_schedulers):
         raise MisconfigurationException(
-            "Some schedulers are attatched with an optimizer that wasn't returned from `configure_optimizers`."
+            "Some schedulers are attached with an optimizer that wasn't returned from `configure_optimizers`."
         )
 
 
diff --git a/tests/trainer/optimization/test_optimizers.py b/tests/trainer/optimization/test_optimizers.py
index 86499e2d8c..9872d90880 100644
--- a/tests/trainer/optimization/test_optimizers.py
+++ b/tests/trainer/optimization/test_optimizers.py
@@ -490,7 +490,7 @@ def test_lr_scheduler_with_no_actual_scheduler_raises(tmpdir):
 
 
 def test_invalid_optimizer_in_scheduler(tmpdir):
-    """Test exception when optimizer attatched to lr_schedulers wasn't returned."""
+    """Test exception when optimizer attached to lr_schedulers wasn't returned."""
 
     class InvalidOptimizerModel(BoringModel):
         def configure_optimizers(self):
@@ -501,7 +501,7 @@ def test_invalid_optimizer_in_scheduler(tmpdir):
 
     model = InvalidOptimizerModel()
     trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True)
-    with pytest.raises(MisconfigurationException, match="attatched with an optimizer that wasn't returned"):
+    with pytest.raises(MisconfigurationException, match="attached with an optimizer that wasn't returned"):
         trainer.fit(model)