Remove deprecated `DistributedType` and `DeviceType` enum classes (#14045)

Carlos Mocholí 2022-08-08 10:07:54 +02:00 committed by GitHub
parent 76836a33cd
commit aaeff90254
4 changed files with 8 additions and 109 deletions

View File

@@ -30,7 +30,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Deprecated `amp_level` from `Trainer` in favour of passing it explicitly via precision plugin ([#13898](https://github.com/Lightning-AI/lightning/pull/13898))
-
- Deprecated the calls to `pytorch_lightning.utilities.meta` functions in favor of built-in https://github.com/pytorch/torchdistx support ([#13868](https://github.com/Lightning-AI/lightning/pull/13868))
### Removed
@@ -44,6 +44,12 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Removed the deprecated `DDP2Strategy` ([#14026](https://github.com/Lightning-AI/lightning/pull/14026))
- Removed the deprecated `DistributedType` and `DeviceType` enum classes ([#14045](https://github.com/Lightning-AI/lightning/pull/14045))
- Removed the experimental `pytorch_lightning.utilities.meta` functions in favor of built-in https://github.com/pytorch/torchdistx support ([#13868](https://github.com/Lightning-AI/lightning/pull/13868))
### Fixed
- Cast only floating point tensors to fp16 with IPUs ([#13983](https://github.com/Lightning-AI/lightning/pull/13983))
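For downstream code, the removal means any remaining references to the old enums must switch to the plain string values that the deprecation warnings have pointed to since v1.6. A minimal before/after sketch using the standard `Trainer` arguments (the commented-out lines show the pre-1.8 usage):

```python
from pytorch_lightning import Trainer

# Before (<= 1.7, each access emitted a deprecation warning):
#   from pytorch_lightning.utilities.enums import DeviceType, DistributedType
#   strategy, accelerator = DistributedType.DDP, DeviceType.GPU

# After (1.8+): pass the string values directly.
trainer = Trainer(strategy="ddp", accelerator="gpu", devices=2)
```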

View File

@@ -21,7 +21,6 @@ from pytorch_lightning.utilities.enums import ( # noqa: F401
_AcceleratorType,
_StrategyType,
AMPType,
DistributedType,
GradClipAlgorithmType,
LightningEnum,
)

View File

@@ -15,11 +15,9 @@
from __future__ import annotations
import os
from enum import Enum, EnumMeta
from typing import Any
from enum import Enum
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.warnings import rank_zero_deprecation
class LightningEnum(str, Enum):
@@ -43,37 +41,6 @@ class LightningEnum(str, Enum):
return hash(self.value.lower())
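The lowered hash pairs with `LightningEnum`'s case-insensitive equality: since the enum also subclasses `str`, members compare equal to plain strings regardless of case, and objects that compare equal must hash equal. A standalone sketch of the pattern (the `Color` enum is illustrative, not part of the library):

```python
from enum import Enum

class CaseInsensitiveEnum(str, Enum):
    """A str-backed enum whose members compare equal to strings, ignoring case."""

    def __eq__(self, other: object) -> bool:
        if isinstance(other, str):
            return self.value.lower() == other.lower()
        return super().__eq__(other)

    def __hash__(self) -> int:
        # Members that compare equal must hash equal, so hash the lowered value.
        return hash(self.value.lower())

class Color(CaseInsensitiveEnum):
    RED = "red"

assert Color.RED == "RED"              # case-insensitive match
assert hash(Color.RED) == hash("red")  # consistent with the equality above
```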
class _DeprecatedEnumMeta(EnumMeta):
"""Enum that calls `deprecate()` whenever a member is accessed.
Adapted from: https://stackoverflow.com/a/62309159/208880
"""
def __getattribute__(cls, name: str) -> Any:
obj = super().__getattribute__(name)
# ignore __dunder__ names -- prevents potential recursion errors
if not (name.startswith("__") and name.endswith("__")) and isinstance(obj, Enum):
obj.deprecate()
return obj
def __getitem__(cls, name: str) -> Any:
member: Enum = super().__getitem__(name)
member.deprecate()
return member
def __call__(cls, *args: Any, **kwargs: Any) -> Any:
obj = super().__call__(*args, **kwargs)
if isinstance(obj, Enum):
obj.deprecate()
return obj
class _DeprecatedEnum(LightningEnum, metaclass=_DeprecatedEnumMeta):
"""_DeprecatedEnum calls an enum's `deprecate()` method on member access."""
pass
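The removed machinery is a general Python trick: a metaclass that intercepts every route to an enum member (attribute access, `Enum['NAME']` subscription, and `Enum(value)` lookup) so each one fires a warning. A self-contained sketch of the same pattern, substituting the standard `warnings` module for `rank_zero_deprecation` (the `OldColor` enum is illustrative):

```python
import warnings
from enum import Enum, EnumMeta
from typing import Any

class _WarnOnAccessEnumMeta(EnumMeta):
    """Metaclass that warns whenever a member of the enum is reached."""

    def __getattribute__(cls, name: str) -> Any:
        obj = super().__getattribute__(name)
        # Skip dunder lookups to avoid recursion during class construction.
        if not (name.startswith("__") and name.endswith("__")) and isinstance(obj, Enum):
            warnings.warn(f"`{cls.__name__}.{name}` is deprecated", DeprecationWarning)
        return obj

    def __getitem__(cls, name: str) -> Any:
        member = super().__getitem__(name)  # covers OldColor["RED"]
        warnings.warn(f"`{cls.__name__}[{name!r}]` is deprecated", DeprecationWarning)
        return member

    def __call__(cls, *args: Any, **kwargs: Any) -> Any:
        obj = super().__call__(*args, **kwargs)  # covers OldColor("red")
        if isinstance(obj, Enum):
            warnings.warn(f"`{cls.__name__}(...)` is deprecated", DeprecationWarning)
        return obj

class OldColor(Enum, metaclass=_WarnOnAccessEnumMeta):
    RED = "red"

member = OldColor.RED  # every access path emits a DeprecationWarning
```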
class AMPType(LightningEnum):
"""Type of Automatic Mixed Precission used for training.
@@ -110,66 +77,6 @@ class PrecisionType(LightningEnum):
return [x.value for x in PrecisionType]
class DistributedType(_DeprecatedEnum):
"""Define type of training strategy.
Deprecated since v1.6.0 and will be removed in v1.8.0.
Use `_StrategyType` instead.
"""
DP = "dp"
DDP = "ddp"
DDP_SPAWN = "ddp_spawn"
TPU_SPAWN = "tpu_spawn"
DEEPSPEED = "deepspeed"
HOROVOD = "horovod"
DDP_SHARDED = "ddp_sharded"
DDP_SHARDED_SPAWN = "ddp_sharded_spawn"
DDP_FULLY_SHARDED = "ddp_fully_sharded"
HPU_PARALLEL = "hpu_parallel"
@staticmethod
def interactive_compatible_types() -> list[DistributedType]:
"""Returns a list containing interactive compatible DistributeTypes."""
return [
DistributedType.DP,
DistributedType.DDP_SPAWN,
DistributedType.DDP_SHARDED_SPAWN,
DistributedType.TPU_SPAWN,
]
def is_interactive_compatible(self) -> bool:
"""Returns whether self is interactive compatible."""
return self in DistributedType.interactive_compatible_types()
def deprecate(self) -> None:
rank_zero_deprecation(
"`DistributedType` Enum has been deprecated in v1.6 and will be removed in v1.8."
f" Use the string value `{self.value!r}` instead."
)
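Call sites that used the interactive-compatibility helpers can fall back to the internal `_StrategyType`, which the docstring names as the replacement. A hedged sketch, assuming `_StrategyType` keeps an analogous `interactive_compatible_types()` classmethod (it is underscore-prefixed, so not a stable public contract):

```python
from pytorch_lightning.utilities.enums import _StrategyType

# Previously: DistributedType.DDP_SPAWN.is_interactive_compatible()
compatible = _StrategyType.DDP_SPAWN in _StrategyType.interactive_compatible_types()
```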
class DeviceType(_DeprecatedEnum):
"""Define Device type by its nature - accelerators.
Deprecated since v1.6.0 and will be removed in v1.8.0.
Use `_AcceleratorType` instead.
"""
CPU = "CPU"
GPU = "GPU"
IPU = "IPU"
TPU = "TPU"
def deprecate(self) -> None:
rank_zero_deprecation(
"`DeviceType` Enum has been deprecated in v1.6 and will be removed in v1.8."
f" Use the string value `{self.value!r}` instead."
)
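On the device side, the docstring points to the internal `_AcceleratorType`, and the user-facing replacement is the `Trainer(accelerator=...)` string. Because `LightningEnum` compares case-insensitively, the removed members' uppercase values and the lowercase accelerator strings refer to the same thing. A small sketch, assuming `_AcceleratorType` keeps its current members:

```python
from pytorch_lightning import Trainer
from pytorch_lightning.utilities.enums import _AcceleratorType

# Previously: DeviceType.GPU (warned on access since v1.6).
trainer = Trainer(accelerator="gpu", devices=1)

# LightningEnum equality ignores case, so both spellings match.
assert _AcceleratorType.GPU == "GPU" and _AcceleratorType.GPU == "gpu"
```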
class GradClipAlgorithmType(LightningEnum):
"""Define gradient_clip_algorithm types - training-tricks.
NORM type means "clipping gradients by norm". This is computed over all model parameters together.
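The norm variant is driven from the `Trainer` arguments; a short usage sketch (standard arguments, untouched by this commit):

```python
from pytorch_lightning import Trainer

# Clip the total gradient norm, computed over all model parameters
# together, to 0.5 before each optimizer step.
trainer = Trainer(gradient_clip_val=0.5, gradient_clip_algorithm="norm")
```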

View File

@@ -36,7 +36,6 @@ from pytorch_lightning.trainer.configuration_validator import _check_datamodule_
from pytorch_lightning.trainer.states import RunningStage
from pytorch_lightning.utilities import device_parser
from pytorch_lightning.utilities.apply_func import move_data_to_device
from pytorch_lightning.utilities.enums import DeviceType, DistributedType
from pytorch_lightning.utilities.imports import _TORCHTEXT_LEGACY
from pytorch_lightning.utilities.rank_zero import rank_zero_only, rank_zero_warn
from tests_pytorch.deprecated_api import no_deprecated_call
@@ -44,18 +43,6 @@ from tests_pytorch.helpers.runif import RunIf
from tests_pytorch.helpers.torchtext_utils import get_dummy_torchtext_data_iterator
def test_v1_8_0_deprecated_distributed_type_enum():
with pytest.deprecated_call(match="has been deprecated in v1.6 and will be removed in v1.8."):
_ = DistributedType.DDP
def test_v1_8_0_deprecated_device_type_enum():
with pytest.deprecated_call(match="has been deprecated in v1.6 and will be removed in v1.8."):
_ = DeviceType.CPU
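The deleted tests follow the standard `pytest.deprecated_call` idiom, which fails unless the enclosed code emits a `DeprecationWarning` (or `PendingDeprecationWarning`) matching the pattern. The same idiom shown standalone; `old_helper` is a hypothetical deprecated function, not part of the codebase:

```python
import warnings

import pytest

def old_helper() -> None:
    # Hypothetical deprecated function used only for illustration.
    warnings.warn("`old_helper` is deprecated, use `new_helper`", DeprecationWarning)

def test_old_helper_warns() -> None:
    with pytest.deprecated_call(match="deprecated"):
        old_helper()
```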
@pytest.mark.skipif(not _TORCHTEXT_LEGACY, reason="torchtext.legacy is deprecated.")
def test_v1_8_0_deprecated_torchtext_batch():