2020-08-06 14:58:51 +00:00
|
|
|
import sys
|
|
|
|
|
2020-05-19 15:05:07 +00:00
|
|
|
import numpy as np
|
|
|
|
import pytest
|
|
|
|
import torch
|
|
|
|
import torch.distributed as dist
|
|
|
|
import torch.multiprocessing as mp
|
|
|
|
|
2020-06-27 01:38:25 +00:00
|
|
|
import tests.base.develop_utils as tutils
|
2020-05-19 15:05:07 +00:00
|
|
|
from pytorch_lightning.metrics.converters import (
|
2020-06-13 12:47:25 +00:00
|
|
|
_apply_to_inputs,
|
|
|
|
_apply_to_outputs,
|
2020-08-26 11:01:29 +00:00
|
|
|
convert_to_tensor,
|
|
|
|
convert_to_numpy,
|
2020-06-13 12:47:25 +00:00
|
|
|
_numpy_metric_conversion,
|
|
|
|
_tensor_metric_conversion,
|
2020-08-26 11:01:29 +00:00
|
|
|
sync_ddp_if_available,
|
2020-09-03 10:27:32 +00:00
|
|
|
gather_all_tensors_if_available,
|
2020-06-13 12:47:25 +00:00
|
|
|
tensor_metric,
|
|
|
|
numpy_metric
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
def test_apply_to_inputs():
    """``_apply_to_inputs`` must transform positional and keyword args alike."""

    def apply_fn(inputs, factor):
        # Recursively scale numbers nested inside dicts, lists and tuples.
        if isinstance(inputs, (float, int)):
            return inputs * factor
        if isinstance(inputs, dict):
            return {key: apply_fn(val, factor) for key, val in inputs.items()}
        if isinstance(inputs, (tuple, list)):
            return [apply_fn(item, factor) for item in inputs]

    @_apply_to_inputs(apply_fn, factor=2.)
    def test_fn(*args, **kwargs):
        return args, kwargs

    arg_variants = [[], [1., 2.]]
    kwarg_variants = [{}, {'a': 1., 'b': 2.}]
    for args in arg_variants:
        for kwargs in kwarg_variants:
            out_args, out_kwargs = test_fn(*args, **kwargs)

            # Containers and sizes are preserved by the decorator.
            assert isinstance(out_args, (list, tuple))
            assert isinstance(out_kwargs, dict)
            assert len(out_args) == len(args)
            assert len(out_kwargs) == len(kwargs)
            assert all([k in out_kwargs for k in kwargs.keys()])

            # Every value was doubled by apply_fn.
            for original, transformed in zip(args, out_args):
                assert original * 2. == transformed
            for key in kwargs.keys():
                assert kwargs[key] * 2. == out_kwargs[key]
|
2020-05-19 15:05:07 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_apply_to_outputs():
    """``_apply_to_outputs`` must post-process the wrapped function's result."""

    def apply_fn(inputs, additional_str):
        # Stringify the output and append a suffix.
        return str(inputs) + additional_str

    @_apply_to_outputs(apply_fn, additional_str='_str')
    def test_fn(*args, **kwargs):
        return 'dummy'

    assert test_fn() == 'dummy_str'
|
|
|
|
|
|
|
|
|
|
|
|
def test_convert_to_tensor():
    """``convert_to_tensor`` must wrap python scalars and numpy arrays."""
    for candidate in (1., np.array([1.])):
        tensor = convert_to_tensor(candidate)
        assert isinstance(tensor, torch.Tensor)
        assert tensor.item() == 1.
|
|
|
|
|
|
|
|
|
|
|
|
def test_convert_to_numpy():
    """``convert_to_numpy`` must handle python scalars and torch tensors."""
    for candidate in (1., torch.tensor([1.])):
        array = convert_to_numpy(candidate)
        assert isinstance(array, np.ndarray)
        assert array.item() == 1.
|
|
|
|
|
|
|
|
|
|
|
|
def test_numpy_metric_conversion():
    """``_numpy_metric_conversion`` feeds numpy inputs and returns a tensor."""

    @_numpy_metric_conversion
    def numpy_test_metric(*args, **kwargs):
        # Every positional and keyword value must arrive already converted.
        assert all(isinstance(arg, np.ndarray) for arg in args)
        assert all(isinstance(val, np.ndarray) for val in kwargs.values())
        return 5.

    result = numpy_test_metric(torch.tensor([1.]), dummy_kwarg=2.)
    # The plain float return value is converted back to a tensor.
    assert isinstance(result, torch.Tensor)
    assert result.item() == 5.
|
|
|
|
|
|
|
|
|
|
|
|
def test_tensor_metric_conversion():
    """``_tensor_metric_conversion`` feeds tensor inputs and returns a tensor."""

    @_tensor_metric_conversion
    def tensor_test_metric(*args, **kwargs):
        # Every positional and keyword value must arrive already converted.
        assert all(isinstance(arg, torch.Tensor) for arg in args)
        assert all(isinstance(val, torch.Tensor) for val in kwargs.values())
        return 5.

    result = tensor_test_metric(np.array([1.]), dummy_kwarg=2.)
    # The plain float return value is converted back to a tensor.
    assert isinstance(result, torch.Tensor)
    assert result.item() == 5.
|
|
|
|
|
|
|
|
|
2020-06-13 12:47:25 +00:00
|
|
|
def _setup_ddp(rank, worldsize):
    """Join the 'gloo' process group as process ``rank`` of ``worldsize``."""
    import os

    # MASTER_PORT is presumably set by the parent process (see
    # tutils.set_random_master_port in the callers) — env is inherited by spawn.
    os.environ['MASTER_ADDR'] = 'localhost'

    # initialize the process group
    dist.init_process_group("gloo", rank=rank, world_size=worldsize)
|
|
|
|
|
|
|
|
|
2020-08-04 16:32:20 +00:00
|
|
|
def _ddp_test_fn(rank, worldsize, add_offset: bool, reduction_mean: bool = False):
    """Per-process DDP worker: sync a tensor and verify the reduction result.

    Args:
        rank: process rank assigned by ``mp.spawn``.
        worldsize: total number of processes in the group.
        add_offset: if True each rank contributes its own rank value,
            otherwise every rank contributes ``1.``.
        reduction_mean: if True reduce with ``reduce_op='avg'`` instead of
            the default (sum) reduction.
    """
    _setup_ddp(rank, worldsize)

    if add_offset:
        tensor = torch.tensor([float(rank)])
    else:
        tensor = torch.tensor([1.])

    if reduction_mean:
        reduced_tensor = sync_ddp_if_available(tensor, reduce_op='avg')

        # mean of the contributed rank values 0..world_size-1
        manual_reduction = sum(range(dist.get_world_size())) / dist.get_world_size()
        assert reduced_tensor.item() == manual_reduction, \
            'Sync-Reduce does not work properly with DDP and Tensors'
    else:
        reduced_tensor = sync_ddp_if_available(tensor)

        # default reduction sums the per-rank ones -> world size
        assert reduced_tensor.item() == dist.get_world_size(), \
            'Sync-Reduce does not work properly with DDP and Tensors'
|
2020-05-19 15:05:07 +00:00
|
|
|
|
|
|
|
|
2020-09-03 10:27:32 +00:00
|
|
|
def _ddp_test_gather_all_tensors(rank, worldsize):
    """Per-process DDP worker: gather every rank's tensor and verify order.

    Args:
        rank: process rank assigned by ``mp.spawn``.
        worldsize: total number of processes in the group.
    """
    _setup_ddp(rank, worldsize)

    tensor = torch.tensor([rank])
    gathered = gather_all_tensors_if_available(tensor)
    # expected: one tensor per rank, in rank order
    expected = [torch.tensor([i]) for i in range(worldsize)]

    for actual, reference in zip(gathered, expected):
        assert actual.equal(reference)
|
|
|
|
|
|
|
|
|
2020-08-04 16:32:20 +00:00
|
|
|
@pytest.mark.skipif(sys.platform == "win32", reason="DDP not available on windows")
def test_sync_reduce_ddp():
    """Make sure sync-reduce works with DDP"""
    tutils.reset_seed()
    tutils.set_random_master_port()

    # spawn two workers; each checks the default (sum) reduction
    worldsize = 2
    mp.spawn(_ddp_test_fn, args=(worldsize, False), nprocs=worldsize)
|
2020-06-13 12:47:25 +00:00
|
|
|
|
2020-08-04 16:32:20 +00:00
|
|
|
|
|
|
|
@pytest.mark.skipif(sys.platform == "win32", reason="DDP not available on windows")
def test_sync_reduce_ddp_mean():
    """Make sure sync-reduce works with DDP"""
    tutils.reset_seed()
    tutils.set_random_master_port()

    # spawn two workers; each contributes its rank and checks the 'avg' reduction
    worldsize = 2
    mp.spawn(_ddp_test_fn, args=(worldsize, True, True), nprocs=worldsize)
|
2020-05-19 15:05:07 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_sync_reduce_simple():
    """Make sure sync-reduce works without DDP"""
    tensor = torch.tensor([1.], device='cpu')
    reduced_tensor = sync_ddp_if_available(tensor)

    # with no process group initialized the tensor must come back unchanged
    assert torch.allclose(tensor, reduced_tensor), \
        'Sync-Reduce does not work properly without DDP and Tensors'
|
|
|
|
|
|
|
|
|
2020-09-03 10:27:32 +00:00
|
|
|
@pytest.mark.skipif(sys.platform == "win32", reason="DDP not available on windows")
def test_gather_all_tensors_ddp():
    """Make sure gather_all_tensors works with DDP"""
    tutils.reset_seed()
    tutils.set_random_master_port()

    # spawn two workers; each gathers all ranks' tensors and verifies them
    worldsize = 2
    mp.spawn(_ddp_test_gather_all_tensors, args=(worldsize, ), nprocs=worldsize)
|
|
|
|
|
|
|
|
|
2020-05-19 15:05:07 +00:00
|
|
|
def _test_tensor_metric(is_ddp: bool):
    """Exercise ``@tensor_metric()``: inputs arrive as tensors, output is a tensor.

    Args:
        is_ddp: whether this runs inside an initialized DDP process group.
    """

    @tensor_metric()
    def tensor_test_metric(*args, **kwargs):
        # The decorator must hand us tensors regardless of the caller's types.
        assert all(isinstance(arg, torch.Tensor) for arg in args)
        assert all(isinstance(val, torch.Tensor) for val in kwargs.values())
        return 5.

    # under DDP the result is expected to scale with the world size
    factor = dist.get_world_size() if is_ddp else 1.

    result = tensor_test_metric(np.array([1.]), dummy_kwarg=2.)
    assert isinstance(result, torch.Tensor)
    assert result.item() == 5. * factor
|
|
|
|
|
|
|
|
|
|
|
|
def _ddp_test_tensor_metric(rank, worldsize):
    """Per-process worker: run the tensor-metric check inside a DDP group."""
    _setup_ddp(rank, worldsize)
    _test_tensor_metric(True)
|
|
|
|
|
|
|
|
|
2020-08-04 16:32:20 +00:00
|
|
|
@pytest.mark.skipif(sys.platform == "win32", reason="DDP not available on windows")
def test_tensor_metric_ddp():
    """``tensor_metric`` must aggregate results across DDP processes."""
    tutils.reset_seed()
    tutils.set_random_master_port()

    world_size = 2
    mp.spawn(_ddp_test_tensor_metric, args=(world_size,), nprocs=world_size)
|
2020-05-19 15:05:07 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_tensor_metric_simple():
    """``tensor_metric`` must also work with no DDP setup at all."""
    _test_tensor_metric(False)
|
|
|
|
|
|
|
|
|
|
|
|
def _test_numpy_metric(is_ddp: bool):
    """Exercise ``@numpy_metric()``: inputs arrive as arrays, output is a tensor.

    Args:
        is_ddp: whether this runs inside an initialized DDP process group.
    """

    @numpy_metric()
    def numpy_test_metric(*args, **kwargs):
        # The decorator must hand us numpy arrays regardless of the caller's types.
        assert all(isinstance(arg, np.ndarray) for arg in args)
        assert all(isinstance(val, np.ndarray) for val in kwargs.values())
        return 5.

    # under DDP the result is expected to scale with the world size
    factor = dist.get_world_size() if is_ddp else 1.

    result = numpy_test_metric(torch.tensor([1.]), dummy_kwarg=2.)
    assert isinstance(result, torch.Tensor)
    assert result.item() == 5. * factor
|
|
|
|
|
|
|
|
|
|
|
|
def _ddp_test_numpy_metric(rank, worldsize):
    """Per-process worker: run the numpy-metric check inside a DDP group."""
    _setup_ddp(rank, worldsize)
    _test_numpy_metric(True)
|
|
|
|
|
|
|
|
|
2020-08-04 16:32:20 +00:00
|
|
|
@pytest.mark.skipif(sys.platform == "win32", reason="DDP not available on windows")
def test_numpy_metric_ddp():
    """``numpy_metric`` must aggregate results across DDP processes."""
    tutils.reset_seed()
    tutils.set_random_master_port()

    world_size = 2
    mp.spawn(_ddp_test_numpy_metric, args=(world_size,), nprocs=world_size)
|
2020-05-19 15:05:07 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_numpy_metric_simple():
    """``numpy_metric`` must also work with no DDP setup at all."""
    _test_numpy_metric(False)
|