CI: fixture for global rank variable reset (#6839)
This commit is contained in:
parent
a17c027ea1
commit
b7a22ba046
|
@ -21,6 +21,16 @@ import pytest
|
|||
import torch.multiprocessing as mp
|
||||
|
||||
|
||||
@pytest.fixture(scope="function", autouse=True)
def preserve_global_rank_variable():
    """Snapshot ``rank_zero_only.rank`` before each test and restore it afterwards.

    Tests that simulate non-zero ranks mutate this module-level attribute;
    without a restore step the value would leak into subsequent tests.
    The attribute is only written back if it existed before the test ran.
    """
    from pytorch_lightning.utilities.distributed import rank_zero_only

    # Remember the pre-test value (None means the attribute was not set).
    saved_rank = getattr(rank_zero_only, "rank", None)
    yield
    # Put the original value back so rank state cannot leak between tests.
    if saved_rank is not None:
        rank_zero_only.rank = saved_rank
|
||||
|
||||
|
||||
@pytest.fixture(scope="function", autouse=True)
|
||||
def restore_env_variables():
|
||||
""" Ensures that environment variables set during the test do not leak out. """
|
||||
|
|
Loading…
Reference in New Issue