Update module path for `LightningDeprecationWarning` in setup.cfg (#11793)

This commit is contained in:
ananthsub 2022-02-09 19:29:32 -08:00 committed by GitHub
parent 1b107c5892
commit 8d23f6287a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 5 additions and 5 deletions

View File

@@ -289,7 +289,7 @@ def register_ddp_comm_hook(
if not _TORCH_GREATER_EQUAL_1_9:
rank_zero_warn("Not applying DDP comm wrapper. To use communication wrapper, please use pytorch>=1.9.0.")
else:
-        rank_zero_info(
+        new_rank_zero_info(
f"DDP comm wrapper is provided, apply {ddp_comm_wrapper.__qualname__}({ddp_comm_hook.__qualname__})."
)
ddp_comm_hook = ddp_comm_wrapper(ddp_comm_hook)
@@ -336,7 +336,7 @@ def init_dist_connection(
torch.distributed.init_process_group(torch_distributed_backend, rank=global_rank, world_size=world_size, **kwargs)
# on rank=0 let everyone know training is starting
-    rank_zero_info(
+    new_rank_zero_info(
f"{'-' * 100}\n"
f"distributed_backend={torch_distributed_backend}\n"
f"All distributed processes registered. Starting with {world_size} processes\n"

View File

@@ -26,12 +26,12 @@ addopts =
--disable-pytest-warnings
filterwarnings =
# error out on our deprecation warnings - ensures the code and tests are kept up-to-date
-    error::pytorch_lightning.utilities.warnings.LightningDeprecationWarning
+    error::pytorch_lightning.utilities.rank_zero.LightningDeprecationWarning
error::FutureWarning
# warnings from deprecated modules on import
# TODO: remove in 1.7
-    ignore::pytorch_lightning.utilities.warnings.LightningDeprecationWarning:pytorch_lightning.core.decorators
-    ignore::pytorch_lightning.utilities.warnings.LightningDeprecationWarning:pytorch_lightning.core.memory
+    ignore::pytorch_lightning.utilities.rank_zero.LightningDeprecationWarning:pytorch_lightning.core.decorators
+    ignore::pytorch_lightning.utilities.rank_zero.LightningDeprecationWarning:pytorch_lightning.core.memory
junit_duration_report = call