scheduled removal of BaseProfiler.output_filename in favor of dirpath… (#9214)

B. Kerim Tshimanga 2021-08-31 02:30:43 -07:00 committed by GitHub
parent 861f8afeea
commit f6614b370c
8 changed files with 10 additions and 28 deletions

View File

@@ -241,8 +241,13 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Removed deprecated property `Trainer.running_sanity_check` in favor of `Trainer.sanity_checking` ([#9209](https://github.com/PyTorchLightning/pytorch-lightning/pull/9209))
+ - Removed deprecated `BaseProfiler.output_filename` arg from it and its descendants in favor of `dirpath` and `filename` ([#9214](https://github.com/PyTorchLightning/pytorch-lightning/pull/9214))
- Removed deprecated property `ModelCheckpoint.period` in favor of `ModelCheckpoint.every_n_epochs` ([#9213](https://github.com/PyTorchLightning/pytorch-lightning/pull/9213))
### Fixed
- Fixed save/load/resume from checkpoint for DeepSpeed Plugin (

View File

@@ -67,7 +67,7 @@ This option uses Python's cProfiler_ to provide a report of time spent on *each*
trainer = Trainer(..., profiler=profiler)
The profiler's results will be printed at the completion of a training `fit()`. This profiler
- report can be quite long, so you can also specify an `output_filename` to save the report instead
+ report can be quite long, so you can also specify a `dirpath` and `filename` to save the report instead
of logging it to the output in your terminal. The output below shows the profiling for the action
`get_train_batch`.
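
For instance, with the new arguments the report can be written to disk like this (a minimal sketch; the directory and file names are placeholders):

from pytorch_lightning import Trainer
from pytorch_lightning.profiler import AdvancedProfiler

# Save the cProfile report under `dirpath`, using `filename` as the stem,
# instead of dumping it to the terminal at the end of fit().
profiler = AdvancedProfiler(dirpath="profiler_reports", filename="perf")
trainer = Trainer(profiler=profiler)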

View File

@@ -36,7 +36,6 @@ class AdvancedProfiler(BaseProfiler):
dirpath: Optional[Union[str, Path]] = None,
filename: Optional[str] = None,
line_count_restriction: float = 1.0,
- output_filename: Optional[str] = None,
) -> None:
"""
Args:
@@ -55,7 +54,7 @@ class AdvancedProfiler(BaseProfiler):
ValueError:
If you attempt to stop recording an action which was never started.
"""
- super().__init__(dirpath=dirpath, filename=filename, output_filename=output_filename)
+ super().__init__(dirpath=dirpath, filename=filename)
self.profiled_actions: Dict[str, cProfile.Profile] = {}
self.line_count_restriction = line_count_restriction
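
After this change the constructor only accepts `dirpath`, `filename`, and `line_count_restriction`. A minimal sketch of the updated call (the 0.5 value is illustrative and assumes `line_count_restriction` trims the per-action report as its docstring describes):

from pytorch_lightning.profiler import AdvancedProfiler

# `output_filename` is no longer a valid keyword argument here.
profiler = AdvancedProfiler(
    dirpath="profiler_reports",
    filename="advanced",
    line_count_restriction=0.5,
)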

View File

@@ -19,7 +19,6 @@ from contextlib import contextmanager
from pathlib import Path
from typing import Any, Callable, Dict, Generator, Iterable, Optional, TextIO, Union
- from pytorch_lightning.utilities import rank_zero_deprecation
from pytorch_lightning.utilities.cloud_io import get_filesystem
log = logging.getLogger(__name__)
@@ -58,18 +57,9 @@ class BaseProfiler(AbstractProfiler):
self,
dirpath: Optional[Union[str, Path]] = None,
filename: Optional[str] = None,
- output_filename: Optional[str] = None,
) -> None:
self.dirpath = dirpath
self.filename = filename
- if output_filename is not None:
- rank_zero_deprecation(
- "`Profiler` signature has changed in v1.3. The `output_filename` parameter has been removed in"
- " favor of `dirpath` and `filename`. Support for the old signature will be removed in v1.5"
- )
- filepath = Path(output_filename)
- self.dirpath = filepath.parent
- self.filename = filepath.stem
self._output_file: Optional[TextIO] = None
self._write_stream: Optional[Callable] = None
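
The removed shim split `output_filename` into its parent directory and stem, so callers migrating old code now do that split themselves. A minimal sketch (the path is a placeholder, and SimpleProfiler stands in for any BaseProfiler subclass):

from pathlib import Path

from pytorch_lightning.profiler import SimpleProfiler

# Before (removed): SimpleProfiler(output_filename="logs/profile.txt")
# After: reproduce the parent/stem split that the deprecation shim used to do.
filepath = Path("logs/profile.txt")
profiler = SimpleProfiler(dirpath=filepath.parent, filename=filepath.stem)  # dirpath="logs", filename="profile"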

View File

@@ -222,7 +222,6 @@ class PyTorchProfiler(BaseProfiler):
sort_by_key: Optional[str] = None,
record_functions: Set[str] = None,
record_module_names: bool = True,
- output_filename: Optional[str] = None,
**profiler_kwargs: Any,
) -> None:
"""
@@ -274,7 +273,7 @@
If arg ``schedule`` is not a ``Callable``.
If arg ``schedule`` does not return a ``torch.profiler.ProfilerAction``.
"""
- super().__init__(dirpath=dirpath, filename=filename, output_filename=output_filename)
+ super().__init__(dirpath=dirpath, filename=filename)
self._group_by_input_shapes = group_by_input_shapes and profiler_kwargs.get("record_shapes", False)
self._emit_nvtx = emit_nvtx
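
The same pattern applies to `PyTorchProfiler`: `output_filename` disappears from the signature while everything else, including the remaining `**profiler_kwargs`, is unchanged. A minimal sketch (directory and file names are placeholders):

from pytorch_lightning import Trainer
from pytorch_lightning.profiler import PyTorchProfiler

# dirpath/filename control where the report is written; record_module_names is
# one of the keyword arguments that survives this commit untouched.
profiler = PyTorchProfiler(dirpath="profiler_reports", filename="pytorch", record_module_names=True)
trainer = Trainer(profiler=profiler)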

View File

@@ -37,7 +37,6 @@ class SimpleProfiler(BaseProfiler):
dirpath: Optional[Union[str, Path]] = None,
filename: Optional[str] = None,
extended: bool = True,
- output_filename: Optional[str] = None,
) -> None:
"""
Args:
@@ -53,7 +52,7 @@ class SimpleProfiler(BaseProfiler):
If you attempt to start an action which has already started, or
if you attempt to stop recording an action which was never started.
"""
- super().__init__(dirpath=dirpath, filename=filename, output_filename=output_filename)
+ super().__init__(dirpath=dirpath, filename=filename)
self.current_actions: Dict[str, float] = {}
self.recorded_durations = defaultdict(list)
self.extended = extended
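
A minimal sketch of the trimmed `SimpleProfiler` signature (names are placeholders; `extended` is the pre-existing flag that selects the more detailed summary table):

from pytorch_lightning import Trainer
from pytorch_lightning.profiler import SimpleProfiler

profiler = SimpleProfiler(dirpath="profiler_reports", filename="simple", extended=True)
trainer = Trainer(profiler=profiler)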

View File

@@ -69,7 +69,7 @@ class XLAProfiler(BaseProfiler):
This Profiler will help you debug and optimize training workload performance
for your models using Cloud TPU performance tools.
"""
- super().__init__(dirpath=None, filename=None, output_filename=None)
+ super().__init__(dirpath=None, filename=None)
self.port = port
self._recording_map: Dict = {}
self._step_recoding_map: Dict = {}
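
`XLAProfiler` never exposed `output_filename` itself, so only the call to the base class changes. A minimal usage sketch, assuming the class is importable from `pytorch_lightning.profiler` like the other profilers and that a TPU/torch_xla environment is available (the port value is an illustrative default):

from pytorch_lightning import Trainer
from pytorch_lightning.profiler import XLAProfiler

# Requires a TPU environment with torch_xla installed.
profiler = XLAProfiler(port=9012)
trainer = Trainer(profiler=profiler)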

View File

@@ -17,21 +17,11 @@ import pytest
from pytorch_lightning import Trainer
from pytorch_lightning.core.decorators import auto_move_data
from pytorch_lightning.plugins import DeepSpeedPlugin
- from pytorch_lightning.profiler import AdvancedProfiler, BaseProfiler, PyTorchProfiler, SimpleProfiler
from tests.deprecated_api import no_deprecated_call
from tests.helpers import BoringDataModule, BoringModel
from tests.helpers.runif import RunIf
- @pytest.mark.parametrize("cls", (BaseProfiler, SimpleProfiler, AdvancedProfiler, PyTorchProfiler))
- def test_v1_5_0_profiler_output_filename(tmpdir, cls):
- filepath = str(tmpdir / "test.txt")
- with pytest.deprecated_call(match="`output_filename` parameter has been removed"):
- profiler = cls(output_filename=filepath)
- assert profiler.dirpath == tmpdir
- assert profiler.filename == "test"
def test_v1_5_0_auto_move_data():
with pytest.deprecated_call(match="deprecated in v1.3 and will be removed in v1.5.*was applied to `bar`"):