Remove deprecated functions from accelerator.py (#9019)
This commit is contained in:
parent 92e49795e1
commit 13e64e6a80
CHANGELOG.md

@@ -157,6 +157,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed deprecated `GradInformation` module in favor of `pytorch_lightning.utilities.grads` ([#8831](https://github.com/PyTorchLightning/pytorch-lightning/pull/8831/))
 
+- Removed deprecated `connect_precision_plugin` and `connect_training_type_plugin` from `Accelerator` ([#9019](https://github.com/PyTorchLightning/pytorch-lightning/pull/9019))
+
+
 ### Fixed
 
 - Ensure the existence of `DDPPlugin._sync_dir` in `reconciliate_processes` ([#8939](https://github.com/PyTorchLightning/pytorch-lightning/pull/8939))
accelerator.py

@@ -25,7 +25,7 @@ from pytorch_lightning.plugins import DataParallelPlugin
 from pytorch_lightning.plugins.precision import ApexMixedPrecisionPlugin, NativeMixedPrecisionPlugin, PrecisionPlugin
 from pytorch_lightning.plugins.training_type import TrainingTypePlugin
 from pytorch_lightning.trainer.states import TrainerFn
-from pytorch_lightning.utilities import _NATIVE_AMP_AVAILABLE, rank_zero_warn
+from pytorch_lightning.utilities import _NATIVE_AMP_AVAILABLE
 from pytorch_lightning.utilities.apply_func import apply_to_collection, move_data_to_device
 from pytorch_lightning.utilities.enums import AMPType, GradClipAlgorithmType, LightningEnum
 from pytorch_lightning.utilities.types import STEP_OUTPUT
@@ -429,32 +429,6 @@ class Accelerator:
         with self.training_type_plugin.model_sharded_context():
             yield
 
-    # todo: remove in v1.5
-    def connect_training_type_plugin(self, plugin: TrainingTypePlugin, model: "pl.LightningModule") -> None:
-        """
-        Attaches the training type plugin to the accelerator.
-        Also transfers ownership of the model to this plugin
-
-        .. deprecated::v1.3
-            Will be removed in v1.5.0.
-        """
-        rank_zero_warn(
-            "Accelerator method `connect_training_type_plugin` was deprecated in v1.3. It will be removed in v1.5."
-        )
-        self.setup_training_type_plugin()
-
-    # todo: remove in v1.5
-    def connect_precision_plugin(self, plugin: PrecisionPlugin) -> None:
-        """Attaches the precision plugin to the accelerator
-
-        .. deprecated::v1.3
-            Will be removed in v1.5.0.
-        """
-        rank_zero_warn(
-            "Accelerator method `connect_precision_plugin` was deprecated in v1.3. It will be removed in v1.5."
-        )
-        self.setup_precision_plugin()
-
     def save_checkpoint(self, checkpoint: Dict[str, Any], filepath: str) -> None:
         """Save model/training states as a checkpoint file through state-dump and file-write.
 
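The two methods deleted above were thin deprecation shims: each emitted a `rank_zero_warn` and then forwarded to the corresponding `setup_*` method, so any remaining callers migrate by invoking `setup_training_type_plugin()` or `setup_precision_plugin()` directly. Below is a minimal, self-contained sketch of that shim pattern and the migration; the classes are illustrative stand-ins, not PyTorch Lightning's real `Accelerator` or plugin APIs.

```python
import warnings


class PrecisionPlugin:
    """Illustrative stand-in for a precision plugin (not the real PL class)."""


class TrainingTypePlugin:
    """Illustrative stand-in for a training-type plugin (not the real PL class)."""


class Accelerator:
    """Simplified stand-in showing the shim pattern removed in #9019."""

    def __init__(self, precision_plugin: PrecisionPlugin, training_type_plugin: TrainingTypePlugin) -> None:
        # The accelerator already owns its plugins, so the setup methods need no arguments.
        self.precision_plugin = precision_plugin
        self.training_type_plugin = training_type_plugin

    def setup_training_type_plugin(self) -> None:
        print("training type plugin set up")

    def setup_precision_plugin(self) -> None:
        print("precision plugin set up")

    # The pattern this commit deletes: a deprecated pass-through that only
    # warned and then delegated to the setup method above.
    def connect_precision_plugin(self, plugin: PrecisionPlugin) -> None:
        warnings.warn(
            "`connect_precision_plugin` is deprecated; call `setup_precision_plugin()` instead.",
            DeprecationWarning,
        )
        self.setup_precision_plugin()


accelerator = Accelerator(PrecisionPlugin(), TrainingTypePlugin())
accelerator.setup_precision_plugin()  # migration: call setup_* directly, no connect_* shim
```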