From 32003159f0a26ebbcc58dfbe6d362314be0095c5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Wed, 29 Sep 2021 19:08:26 +0200
Subject: [PATCH] Remove legacy pytest markers (#9761)

---
 setup.cfg                |  5 -----
 tests/conftest.py        | 18 +-----------------
 tests/models/test_gpu.py | 11 -----------
 3 files changed, 1 insertion(+), 33 deletions(-)

diff --git a/setup.cfg b/setup.cfg
index 99f3a513b0..e399d35016 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -24,11 +24,6 @@ addopts =
     --strict
     --doctest-modules
     --color=yes
-markers =
-    slow
-    remote_data
-    filterwarnings
-    gpus_param_tests
 junit_duration_report = call
 
 
diff --git a/tests/conftest.py b/tests/conftest.py
index 58a2fb1c2f..36110e6c57 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -14,13 +14,12 @@
 import os
 import sys
 import threading
-from functools import partial, wraps
+from functools import partial
 from http.server import SimpleHTTPRequestHandler
 from pathlib import Path
 
 import pytest
 import torch.distributed
-import torch.multiprocessing as mp
 
 from pytorch_lightning.plugins.environments.lightning_environment import find_free_network_port
 from tests import _PATH_DATASETS
@@ -88,21 +87,6 @@ def teardown_process_group():
     torch.distributed.destroy_process_group()
 
 
-def pytest_configure(config):
-    config.addinivalue_line("markers", "spawn: spawn test in a separate process using torch.multiprocessing.spawn")
-
-
-@pytest.mark.tryfirst
-def pytest_pyfunc_call(pyfuncitem):
-    if pyfuncitem.get_closest_marker("spawn"):
-        testfunction = pyfuncitem.obj
-        funcargs = pyfuncitem.funcargs
-        testargs = tuple(funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames)
-
-        mp.spawn(wraps, (testfunction, testargs))
-        return True
-
-
 @pytest.fixture
 def tmpdir_server(tmpdir):
     if sys.version_info >= (3, 7):
diff --git a/tests/models/test_gpu.py b/tests/models/test_gpu.py
index be70dd033b..96d8874209 100644
--- a/tests/models/test_gpu.py
+++ b/tests/models/test_gpu.py
@@ -92,7 +92,6 @@ def mocked_device_count_0(monkeypatch):
     monkeypatch.setattr(torch.cuda, "device_count", device_count)
 
 
-@pytest.mark.gpus_param_tests
 @pytest.mark.parametrize(
     ["gpus", "expected_num_gpus", "distributed_backend"],
     [
@@ -108,7 +107,6 @@ def test_trainer_gpu_parse(mocked_device_count, gpus, expected_num_gpus, distrib
     assert Trainer(gpus=gpus, accelerator=distributed_backend).num_gpus == expected_num_gpus
 
 
-@pytest.mark.gpus_param_tests
 @pytest.mark.parametrize(
     ["gpus", "expected_num_gpus", "distributed_backend"],
     [
@@ -120,7 +118,6 @@ def test_trainer_num_gpu_0(mocked_device_count_0, gpus, expected_num_gpus, distr
     assert Trainer(gpus=gpus, accelerator=distributed_backend).num_gpus == expected_num_gpus
 
 
-@pytest.mark.gpus_param_tests
 @pytest.mark.parametrize(
     ["gpus", "expected_root_gpu", "distributed_backend"],
     [
@@ -136,7 +133,6 @@ def test_root_gpu_property(mocked_device_count, gpus, expected_root_gpu, distrib
     assert Trainer(gpus=gpus, accelerator=distributed_backend).root_gpu == expected_root_gpu
 
 
-@pytest.mark.gpus_param_tests
 @pytest.mark.parametrize(
     ["gpus", "expected_root_gpu", "distributed_backend"],
     [
@@ -150,7 +146,6 @@ def test_root_gpu_property_0_passing(mocked_device_count_0, gpus, expected_root_
 
 
 # Asking for a gpu when non are available will result in a MisconfigurationException
-@pytest.mark.gpus_param_tests
 @pytest.mark.parametrize(
     ["gpus", "expected_root_gpu", "distributed_backend"],
     [
@@ -168,7 +163,6 @@ def test_root_gpu_property_0_raising(mocked_device_count_0, gpus, expected_root_
         Trainer(gpus=gpus, accelerator=distributed_backend)
 
 
-@pytest.mark.gpus_param_tests
 @pytest.mark.parametrize(
     ["gpus", "expected_root_gpu"],
     [
@@ -183,7 +177,6 @@ def test_determine_root_gpu_device(gpus, expected_root_gpu):
     assert device_parser.determine_root_gpu_device(gpus) == expected_root_gpu
 
 
-@pytest.mark.gpus_param_tests
 @pytest.mark.parametrize(
     ["gpus", "expected_gpu_ids"],
     [
@@ -206,7 +199,6 @@ def test_parse_gpu_ids(mocked_device_count, gpus, expected_gpu_ids):
     assert device_parser.parse_gpu_ids(gpus) == expected_gpu_ids
 
 
-@pytest.mark.gpus_param_tests
 @pytest.mark.parametrize(
     ["gpus"],
     [
@@ -225,20 +217,17 @@ def test_parse_gpu_fail_on_unsupported_inputs(mocked_device_count, gpus):
         device_parser.parse_gpu_ids(gpus)
 
 
-@pytest.mark.gpus_param_tests
 @pytest.mark.parametrize("gpus", [[1, 2, 19], -1, "-1"])
 def test_parse_gpu_fail_on_non_existent_id(mocked_device_count_0, gpus):
     with pytest.raises(MisconfigurationException):
         device_parser.parse_gpu_ids(gpus)
 
 
-@pytest.mark.gpus_param_tests
 def test_parse_gpu_fail_on_non_existent_id_2(mocked_device_count):
     with pytest.raises(MisconfigurationException):
         device_parser.parse_gpu_ids([1, 2, 19])
 
 
-@pytest.mark.gpus_param_tests
 @pytest.mark.parametrize("gpus", [-1, "-1"])
 def test_parse_gpu_returns_none_when_no_devices_are_available(mocked_device_count_0, gpus):
     with pytest.raises(MisconfigurationException):
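
Note on the removed `spawn` marker: the deleted `pytest_pyfunc_call` hook passed `functools.wraps` (rather than the collected test function) as the first argument to `mp.spawn`, so the marker never actually executed a test body in a child process, which is part of why it could be dropped. A test that genuinely needs a separate process can call `torch.multiprocessing.spawn` directly instead. Below is a minimal sketch of that pattern; the test and worker names are hypothetical and not part of this patch, and it assumes only that `torch` is installed.

# Minimal sketch (hypothetical; not part of this patch): a test that runs
# its body in a separate process, replacing the removed ``spawn`` marker.
import torch.multiprocessing as mp


def _assert_in_child(rank, value):
    # Runs in the spawned child process; ``rank`` is injected by ``mp.spawn``.
    assert value == 42


def test_runs_in_spawned_process():
    # ``nprocs=1`` starts a single child and joins it; an exception raised
    # in the child surfaces in the parent process and fails the test.
    mp.spawn(_assert_in_child, args=(42,), nprocs=1)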