diff --git a/pyproject.toml b/pyproject.toml
index 5a8f632481..dd48b8126a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -52,7 +52,6 @@ warn_no_return = "False"
 # mypy --no-error-summary 2>&1 | tr ':' ' ' | awk '{print $1}' | sort | uniq | sed 's/\.py//g; s|src/||g; s|\/|\.|g' | xargs -I {} echo '"{}",'
 module = [
     "pytorch_lightning.callbacks.progress.rich_progress",
-    "pytorch_lightning.profilers.base",
     "pytorch_lightning.profilers.pytorch",
     "pytorch_lightning.trainer.trainer",
     "pytorch_lightning.tuner.batch_size_scaling",
diff --git a/src/pytorch_lightning/CHANGELOG.md b/src/pytorch_lightning/CHANGELOG.md
index 5b7bea5ce6..58baf7d2a6 100644
--- a/src/pytorch_lightning/CHANGELOG.md
+++ b/src/pytorch_lightning/CHANGELOG.md
@@ -164,6 +164,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed the deprecated class `TrainerCallbackHookMixin` ([#14401](https://github.com/Lightning-AI/lightning/pull/14401))
 
+- Removed the deprecated `BaseProfiler` and `AbstractProfiler` classes ([#14404](https://github.com/Lightning-AI/lightning/pull/14404))
+
+
 ### Fixed
 
 - Reset the dataloaders on OOM failure in batch size finder to use the last successful batch size ([#14372](https://github.com/Lightning-AI/lightning/pull/14372))
diff --git a/src/pytorch_lightning/profilers/__init__.py b/src/pytorch_lightning/profilers/__init__.py
index dad105135f..0e97d02feb 100644
--- a/src/pytorch_lightning/profilers/__init__.py
+++ b/src/pytorch_lightning/profilers/__init__.py
@@ -12,15 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from pytorch_lightning.profilers.advanced import AdvancedProfiler
-from pytorch_lightning.profilers.base import AbstractProfiler, BaseProfiler, PassThroughProfiler
+from pytorch_lightning.profilers.base import PassThroughProfiler
 from pytorch_lightning.profilers.profiler import Profiler
 from pytorch_lightning.profilers.pytorch import PyTorchProfiler
 from pytorch_lightning.profilers.simple import SimpleProfiler
 from pytorch_lightning.profilers.xla import XLAProfiler
 
 __all__ = [
-    "AbstractProfiler",
-    "BaseProfiler",
     "Profiler",
     "AdvancedProfiler",
     "PassThroughProfiler",
diff --git a/src/pytorch_lightning/profilers/base.py b/src/pytorch_lightning/profilers/base.py
index b91f628013..030205066d 100644
--- a/src/pytorch_lightning/profilers/base.py
+++ b/src/pytorch_lightning/profilers/base.py
@@ -12,56 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """Profiler to check if there are any bottlenecks in your code."""
-from abc import ABC, abstractmethod
-from typing import Any
 
 from pytorch_lightning.profilers.profiler import Profiler
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation
-
-
-class AbstractProfiler(ABC):
-    """Specification of a profiler.
-
-    See deprecation warning below
-
-    .. deprecated:: v1.6
-        `AbstractProfiler` was deprecated in v1.6 and will be removed in v1.8.
-        Please use `Profiler` instead.
-    """
-
-    @abstractmethod
-    def start(self, action_name: str) -> None:
-        """Defines how to start recording an action."""
-
-    @abstractmethod
-    def stop(self, action_name: str) -> None:
-        """Defines how to record the duration once an action is complete."""
-
-    @abstractmethod
-    def summary(self) -> str:
-        """Create profiler summary in text format."""
-
-    @abstractmethod
-    def setup(self, **kwargs: Any) -> None:
-        """Execute arbitrary pre-profiling set-up steps as defined by subclass."""
-
-    @abstractmethod
-    def teardown(self, **kwargs: Any) -> None:
-        """Execute arbitrary post-profiling tear-down steps as defined by subclass."""
-
-
-class BaseProfiler(Profiler):
-    """
-    .. deprecated:: v1.6
-        `BaseProfiler` was deprecated in v1.6 and will be removed in v1.8.
-        Please use `Profiler` instead.
-    """
-
-    def __init__(self, *args, **kwargs):
-        rank_zero_deprecation(
-            "`BaseProfiler` was deprecated in v1.6 and will be removed in v1.8. Please use `Profiler` instead."
-        )
-        super().__init__(*args, **kwargs)
 
 
 class PassThroughProfiler(Profiler):
diff --git a/tests/tests_pytorch/deprecated_api/test_remove_1-8.py b/tests/tests_pytorch/deprecated_api/test_remove_1-8.py
index 77e007951e..2d51ed6c5c 100644
--- a/tests/tests_pytorch/deprecated_api/test_remove_1-8.py
+++ b/tests/tests_pytorch/deprecated_api/test_remove_1-8.py
@@ -28,8 +28,7 @@ from pytorch_lightning.callbacks import ModelCheckpoint
 from pytorch_lightning.demos.boring_classes import BoringDataModule, BoringModel
 from pytorch_lightning.loggers import CSVLogger, Logger
 from pytorch_lightning.plugins.precision.precision_plugin import PrecisionPlugin
-from pytorch_lightning.profiler import AbstractProfiler, BaseProfiler
-from pytorch_lightning.profilers import AdvancedProfiler, Profiler, SimpleProfiler
+from pytorch_lightning.profilers import AdvancedProfiler, SimpleProfiler
 from pytorch_lightning.strategies import ParallelStrategy
 from pytorch_lightning.strategies.ipu import LightningIPUModule
 from pytorch_lightning.trainer.configuration_validator import _check_datamodule_checkpoint_hooks
@@ -469,10 +468,6 @@ def test_v1_8_0_precision_plugin_checkpoint_hooks(tmpdir):
     trainer.fit(model)
 
 
-def test_v1_8_0_abstract_profiler():
-    assert "`AbstractProfiler` was deprecated in v1.6" in AbstractProfiler.__doc__
-
-
 def test_v1_8_0_datamodule_checkpointhooks():
     class CustomBoringDataModuleSave(BoringDataModule):
         def on_save_checkpoint(self, checkpoint):
@@ -641,28 +636,6 @@ def test_trainer_num_gpu_0(monkeypatch, gpus, expected_num_gpus, strategy):
     assert Trainer(gpus=gpus, strategy=strategy).num_gpus == expected_num_gpus
 
 
-def test_v1_8_0_base_profiler(tmpdir):
-    class CustomProfiler1(BaseProfiler):
-        def start(self, action_name: str) -> None:
-            pass
-
-        def stop(self, action_name: str) -> None:
-            pass
-
-    class CustomProfiler2(Profiler):
-        def start(self, action_name: str) -> None:
-            pass
-
-        def stop(self, action_name: str) -> None:
-            pass
-
-    with pytest.deprecated_call(match="`BaseProfiler` was deprecated in v1.6"):
-        CustomProfiler1()
-
-    # No deprecation message
-    CustomProfiler2()
-
-
 @pytest.mark.parametrize(
     ["trainer_kwargs", "expected_ipus"],
     [