# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import time
from distutils.version import LooseVersion
from pathlib import Path

import numpy as np
import pytest
import torch

from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.profiler import AdvancedProfiler, PyTorchProfiler, SimpleProfiler
from tests.helpers import BoringModel
from tests.helpers.runif import RunIf

PROFILER_OVERHEAD_MAX_TOLERANCE = 0.0005


def _get_python_cprofile_total_duration(profile):
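    # cProfile's inlinetime is each entry's self-time (excluding sub-calls),
    # so summing it gives the total time spent inside the profiled code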
    return sum([x.inlinetime for x in profile.getstats()])


def _sleep_generator(durations):
    """
    Simulate an iterable whose ``__next__`` calls each take a known amount of time,
    so that ``profile_iterable`` timings can be verified.
    """
    for duration in durations:
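        # sleep before yielding so the delay is attributed to this __next__ call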
        time.sleep(duration)
        yield duration


@pytest.fixture
def simple_profiler():
    profiler = SimpleProfiler()
    return profiler


@pytest.mark.parametrize(["action", "expected"], [
    pytest.param("a", [3, 1]),
    pytest.param("b", [2]),
    pytest.param("c", [1]),
])
def test_simple_profiler_durations(simple_profiler, action: str, expected: list):
    """Ensure the reported durations are reasonably accurate."""

    for duration in expected:
        with simple_profiler.profile(action):
            time.sleep(duration)

    # different environments have different precision when it comes to time.sleep()
    # see: https://github.com/PyTorchLightning/pytorch-lightning/issues/796
    np.testing.assert_allclose(simple_profiler.recorded_durations[action], expected, rtol=0.2)


@pytest.mark.parametrize(["action", "expected"], [
    pytest.param("a", [3, 1]),
    pytest.param("b", [2]),
    pytest.param("c", [1]),
])
def test_simple_profiler_iterable_durations(simple_profiler, action: str, expected: list):
    """Ensure the reported durations are reasonably accurate."""
    iterable = _sleep_generator(expected)

    for _ in simple_profiler.profile_iterable(iterable, action):
        pass

    # we exclude the last item in the recorded durations since that's when StopIteration is raised
    np.testing.assert_allclose(simple_profiler.recorded_durations[action][:-1], expected, rtol=0.2)


def test_simple_profiler_overhead(simple_profiler, n_iter=5):
    """Ensure that the profiler doesn't introduce too much overhead during training."""
    for _ in range(n_iter):
        with simple_profiler.profile("no-op"):
            pass
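    # the profiled body is a no-op, so each recorded duration is pure profiler overhead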
    durations = np.array(simple_profiler.recorded_durations["no-op"])
    assert all(durations < PROFILER_OVERHEAD_MAX_TOLERANCE)


def test_simple_profiler_describe(caplog, simple_profiler):
    """Ensure the profiler won't fail when reporting the summary."""
    with caplog.at_level(logging.INFO):
        simple_profiler.describe()

    assert "Profiler Report" in caplog.text


def test_simple_profiler_value_errors(simple_profiler):
    """Ensure errors are raised where expected."""

    action = "test"
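    # stopping an action that was never started should raise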
    with pytest.raises(ValueError):
        simple_profiler.stop(action)

    simple_profiler.start(action)
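    # starting the same action twice should also raise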
    with pytest.raises(ValueError):
        simple_profiler.start(action)

    simple_profiler.stop(action)


@pytest.fixture
def advanced_profiler(tmpdir):
    profiler = AdvancedProfiler(output_filename=os.path.join(tmpdir, "profiler.txt"))
    return profiler


@pytest.mark.parametrize(["action", "expected"], [
    pytest.param("a", [3, 1]),
    pytest.param("b", [2]),
    pytest.param("c", [1]),
])
def test_advanced_profiler_durations(advanced_profiler, action: str, expected: list):
    """Ensure the reported durations are reasonably accurate."""
    for duration in expected:
        with advanced_profiler.profile(action):
            time.sleep(duration)

    # different environments have different precision when it comes to time.sleep()
    # see: https://github.com/PyTorchLightning/pytorch-lightning/issues/796
    recorded_total_duration = _get_python_cprofile_total_duration(advanced_profiler.profiled_actions[action])
    expected_total_duration = np.sum(expected)
    np.testing.assert_allclose(recorded_total_duration, expected_total_duration, rtol=0.2)


@pytest.mark.parametrize(["action", "expected"], [
    pytest.param("a", [3, 1]),
    pytest.param("b", [2]),
    pytest.param("c", [1]),
])
def test_advanced_profiler_iterable_durations(advanced_profiler, action: str, expected: list):
    """Ensure the reported durations are reasonably accurate."""
    iterable = _sleep_generator(expected)

    for _ in advanced_profiler.profile_iterable(iterable, action):
        pass
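    # cProfile reports per-function self-times; their sum should match the total requested sleep time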
    recorded_total_duration = _get_python_cprofile_total_duration(advanced_profiler.profiled_actions[action])
    expected_total_duration = np.sum(expected)
    np.testing.assert_allclose(recorded_total_duration, expected_total_duration, rtol=0.2)


def test_advanced_profiler_overhead(advanced_profiler, n_iter=5):
    """Ensure that the profiler doesn't introduce too much overhead during training."""
    for _ in range(n_iter):
        with advanced_profiler.profile("no-op"):
            pass

    action_profile = advanced_profiler.profiled_actions["no-op"]
    total_duration = _get_python_cprofile_total_duration(action_profile)
    average_duration = total_duration / n_iter
    assert average_duration < PROFILER_OVERHEAD_MAX_TOLERANCE


def test_advanced_profiler_describe(tmpdir, advanced_profiler):
    """Ensure the profiler won't fail when reporting the summary."""
    # record at least one event
    with advanced_profiler.profile("test"):
        pass
    # log to stdout and print to file
    advanced_profiler.describe()
    data = Path(advanced_profiler.output_fname).read_text()
    assert len(data) > 0


def test_advanced_profiler_value_errors(advanced_profiler):
    """Ensure errors are raised where expected."""

    action = "test"
    with pytest.raises(ValueError):
        advanced_profiler.stop(action)

    advanced_profiler.start(action)
    advanced_profiler.stop(action)


@pytest.fixture
def pytorch_profiler(tmpdir):
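    # local_rank=0 marks this profiler as the main (rank-0) process, so recording is enabled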
    profiler = PyTorchProfiler(output_filename=os.path.join(tmpdir, "profiler.txt"), local_rank=0)
    return profiler


def test_pytorch_profiler_describe(pytorch_profiler):
    """Ensure the profiler won't fail when reporting the summary."""
    with pytorch_profiler.profile("test_step"):
        pass

    # log to stdout and print to file
    pytorch_profiler.describe()
    data = Path(pytorch_profiler.output_fname).read_text()
    assert len(data) > 0


def test_pytorch_profiler_value_errors(pytorch_profiler):
    """Ensure errors are raised where expected."""

    action = "test_step"
    with pytest.raises(ValueError):
        pytorch_profiler.stop(action)

    pytorch_profiler.start(action)
    pytorch_profiler.stop(action)


@RunIf(min_gpus=2, special=True)
@pytest.mark.parametrize("use_output_filename", [False, True])
def test_pytorch_profiler_trainer_ddp(tmpdir, use_output_filename):
    """Ensure that the profiler can be given to the trainer and that the default steps are properly recorded."""

    if use_output_filename:
        output_filename = os.path.join(tmpdir, "profiler.txt")
    else:
        output_filename = None

    profiler = PyTorchProfiler(output_filename=output_filename)

    model = BoringModel()
    trainer = Trainer(
        fast_dev_run=True,
        profiler=profiler,
        accelerator="ddp",
        gpus=2,
    )
    trainer.fit(model)
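    # with an output file every rank profiles; otherwise only the rank-0 profiler is enabled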
    enabled = use_output_filename or profiler.local_rank == 0

    if enabled:
        assert len(profiler.summary()) > 0
        assert set(profiler.profiled_actions.keys()) == {'training_step_and_backward', 'validation_step'}
    else:
        assert profiler.summary() is None
        assert set(profiler.profiled_actions.keys()) == set()

    # todo (tchaton) add support for all ranks
    if use_output_filename and os.getenv("LOCAL_RANK") == "0":
        data = Path(profiler.output_fname).read_text()
        assert len(data) > 0


def test_pytorch_profiler_nested(tmpdir):
    """Ensure that the profiler handles nested contexts."""

    pytorch_profiler = PyTorchProfiler(
        profiled_functions=["a", "b", "c"], use_cuda=False, output_filename=os.path.join(tmpdir, "profiler.txt")
    )
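    # "b" and "c" are nested inside the enclosing "a" region, so "a" also records their ops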
    with pytorch_profiler.profile("a"):
        a = torch.ones(42)
        with pytorch_profiler.profile("b"):
            b = torch.zeros(42)
        with pytorch_profiler.profile("c"):
            _ = a + b

    pa = pytorch_profiler.profiled_actions

    # From PyTorch 1.8.0, fewer operations are traced.
    if LooseVersion(torch.__version__) >= LooseVersion("1.8.0"):
        expected_ = {
            'a': ['ones', 'empty', 'fill_', 'zeros', 'empty', 'zero_', 'add'],
            'b': ['zeros', 'empty', 'zero_'],
            'c': ['add'],
        }
    # From PyTorch 1.6.0, more operations are traced.
    elif LooseVersion(torch.__version__) >= LooseVersion("1.6.0"):
        expected_ = {
            'a': ['ones', 'empty', 'fill_', 'zeros', 'empty', 'zero_', 'fill_', 'add', 'empty'],
            'b': ['zeros', 'empty', 'zero_', 'fill_'],
            'c': ['add', 'empty'],
        }
    else:
        expected_ = {
            'a': ['add'],
            'b': [],
            'c': ['add'],
        }

    for n in ('a', 'b', 'c'):
        pa[n] = [e.name for e in pa[n]]
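        # PyTorch >= 1.7.1 prefixes traced op names with "aten::"; strip it before comparing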
        if LooseVersion(torch.__version__) >= LooseVersion("1.7.1"):
            pa[n] = [e.replace("aten::", "") for e in pa[n]]
        assert pa[n] == expected_[n]


@RunIf(min_gpus=1, special=True)
def test_pytorch_profiler_nested_emit_nvtx(tmpdir):
    """This test checks that emit_nvtx is correctly supported."""
    profiler = PyTorchProfiler(use_cuda=True, emit_nvtx=True)

    model = BoringModel()
    trainer = Trainer(
        fast_dev_run=True,
        profiler=profiler,
        gpus=1,
    )
    trainer.fit(model)


@pytest.mark.parametrize("cls", (SimpleProfiler, AdvancedProfiler, PyTorchProfiler))
def test_profiler_teardown(tmpdir, cls):
    """Ensure the profiler's teardown method is called when the trainer exits."""

    class TestCallback(Callback):

        def on_fit_end(self, trainer, pl_module) -> None:
            assert trainer.profiler.output_file is not None

    profiler = cls(output_filename=os.path.join(tmpdir, "profiler.txt"))

    model = BoringModel()
    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, profiler=profiler, callbacks=[TestCallback()])
    trainer.fit(model)
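    # once fit returns, teardown should have closed and cleared the output file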
    assert profiler.output_file is None