diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 4e19f16b29..ac01227fd0 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -459,7 +459,7 @@ class Trainer(
         self.logger_connector = LoggerConnector(self, log_gpu_memory)
         self._callback_connector = CallbackConnector(self)
         self.checkpoint_connector = CheckpointConnector(self, resume_from_checkpoint)
-        self.signal_connector = SignalConnector(self)
+        self._signal_connector = SignalConnector(self)
         self.tuner = Tuner(self)

         min_steps, max_steps, min_epochs, max_epochs, max_time = _parse_loop_limits(
@@ -1242,7 +1242,7 @@ class Trainer(
         self._data_connector.teardown()
         self._active_loop.teardown()
         self.logger_connector.teardown()
-        self.signal_connector.teardown()
+        self._signal_connector.teardown()

     def run_stage(self) -> None:
         rank_zero_deprecation(
@@ -1267,7 +1267,7 @@ class Trainer(
         self.strategy.barrier("setup_training")

         # register signals
-        self.signal_connector.register_signal_handlers()
+        self._signal_connector.register_signal_handlers()

         # --------------------------
         # Pre-train
diff --git a/tests/trainer/connectors/test_signal_connector.py b/tests/trainer/connectors/test_signal_connector.py
index 3b4ccf5589..5988809af4 100644
--- a/tests/trainer/connectors/test_signal_connector.py
+++ b/tests/trainer/connectors/test_signal_connector.py
@@ -100,7 +100,7 @@ def test_auto_requeue_flag(auto_requeue):

 def _registering_signals():
     trainer = Trainer()
-    trainer.signal_connector.register_signal_handlers()
+    trainer._signal_connector.register_signal_handlers()


 @RunIf(skip_windows=True)