fix warnings on windows (#3555)

This commit is contained in:
Adrian Wälchli 2020-09-20 00:29:06 +02:00 committed by GitHub
parent 0284f7ab5a
commit 99f05ed23f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 4 additions and 12 deletions

View File

@@ -26,18 +26,14 @@ import numpy as np
import torch
from torch.utils.data._utils.collate import np_str_obj_array_pattern
from pytorch_lightning.utilities import rank_zero_warn
from pytorch_lightning.utilities.apply_func import apply_to_collection
try:
if torch.distributed.is_available():
from torch.distributed import ReduceOp
except ImportError:
else:
class ReduceOp:
SUM = None
rank_zero_warn("Unsupported `ReduceOp` for distributed computing")
def _apply_to_inputs(func_to_apply: Callable, *dec_args, **dec_kwargs) -> Callable:
"""

View File

@@ -19,17 +19,13 @@ import torch
from pytorch_lightning import _logger as lightning_logger
from pytorch_lightning.metrics.metric import NumpyMetric
from pytorch_lightning.utilities import rank_zero_warn
try:
if torch.distributed.is_available():
from torch.distributed import group
except ImportError:
else:
class group:
WORLD = None
rank_zero_warn("Unsupported `group` for distributed computing.")
class SklearnMetric(NumpyMetric):
"""