from collections import OrderedDict

import torch

from pytorch_lightning.core.decorators import data_loader


class LightningValidationStepMixin:
    """
    Add val_dataloader and validation_step methods for the case
    when val_dataloader returns a single dataloader.
    """

    @data_loader
    def val_dataloader(self):
        return self._dataloader(train=False)

    def validation_step(self, batch, batch_idx):
        """
        Lightning calls this inside the validation loop.

        :param batch: current validation batch
        :param batch_idx: index of the batch within the validation epoch
        :return: loss and accuracy in one of several formats, to exercise
            the different return types Lightning supports
        """
        x, y = batch
        x = x.view(x.size(0), -1)
        y_hat = self.forward(x)

        loss_val = self.loss(y, y_hat)

        # acc
        labels_hat = torch.argmax(y_hat, dim=1)
        val_acc = torch.sum(y == labels_hat).item() / (len(y) * 1.0)
        val_acc = torch.tensor(val_acc)

        if self.on_gpu:
            val_acc = val_acc.cuda(loss_val.device.index)

        # in DP mode (default), make sure a scalar result gets an extra dim at the beginning
        if self.trainer.use_dp:
            loss_val = loss_val.unsqueeze(0)
            val_acc = val_acc.unsqueeze(0)

        # alternate possible outputs to test
        # (note: `batch_idx % 1 == 0` is always true, so the first branch always
        # returns; the later branches only document other valid output formats)
        if batch_idx % 1 == 0:
            output = OrderedDict({
                'val_loss': loss_val,
                'val_acc': val_acc,
            })
            return output
        if batch_idx % 2 == 0:
            return val_acc

        if batch_idx % 3 == 0:
            output = OrderedDict({
                'val_loss': loss_val,
                'val_acc': val_acc,
                'test_dic': {'val_loss_a': loss_val},
            })
            return output


class LightningValidationMixin(LightningValidationStepMixin):
    """
    Add val_dataloader, validation_step, and validation_end methods for the case
    when val_dataloader returns a single dataloader.
    """

    def validation_end(self, outputs):
        """
        Called at the end of validation to aggregate outputs.

        :param outputs: list of individual outputs of each validation step
        :return: dict with a 'progress_bar' dict and a 'log' dict of aggregated metrics
        """
        # if validation_step returned a scalar, outputs is a list of scalar tensors
        # and we could return just the average:
        # return torch.stack(outputs).mean()
        val_loss_mean = 0
        val_acc_mean = 0
        for output in outputs:
            val_loss = output['val_loss']

            # reduce manually when using dp
            if self.trainer.use_dp or self.trainer.use_ddp2:
                val_loss = torch.mean(val_loss)
            val_loss_mean += val_loss

            # reduce manually when using dp
            val_acc = output['val_acc']
            if self.trainer.use_dp or self.trainer.use_ddp2:
                val_acc = torch.mean(val_acc)

            val_acc_mean += val_acc

        val_loss_mean /= len(outputs)
        val_acc_mean /= len(outputs)

        tqdm_dict = {'val_loss': val_loss_mean.item(), 'val_acc': val_acc_mean.item()}
        results = {'progress_bar': tqdm_dict, 'log': tqdm_dict}
        return results
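

# Usage sketch (illustrative, not part of the mixins above): these mixins are
# meant to be combined with a LightningModule that provides `forward`, `loss`,
# and a `_dataloader(train=...)` helper. The base-class and model names below
# are hypothetical.
#
#     import pytorch_lightning as pl
#
#     class CoolValModel(LightningValidationMixin, SomeModelBase,
#                        pl.LightningModule):
#         pass  # SomeModelBase is assumed to define forward/loss/_dataloader
#
#     trainer = pl.Trainer(max_epochs=1)
#     trainer.fit(CoolValModel())  # runs validation_step per batch, then validation_end

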
class LightningValidationStepMultipleDataloadersMixin:
    """
    Add val_dataloader and validation_step methods for the case
    when val_dataloader returns multiple dataloaders.
    """

    @data_loader
    def val_dataloader(self):
        return [self._dataloader(train=False), self._dataloader(train=False)]

    def validation_step(self, batch, batch_idx, dataloader_idx):
        """
        Lightning calls this inside the validation loop.

        :param batch: current validation batch
        :param batch_idx: index of the batch within the validation epoch
        :param dataloader_idx: index of the dataloader the batch came from
        :return: loss and accuracy in one of several formats, to exercise
            the different return types Lightning supports
        """
        x, y = batch
        x = x.view(x.size(0), -1)
        y_hat = self.forward(x)

        loss_val = self.loss(y, y_hat)

        # acc
        labels_hat = torch.argmax(y_hat, dim=1)
        val_acc = torch.sum(y == labels_hat).item() / (len(y) * 1.0)
        val_acc = torch.tensor(val_acc)

        if self.on_gpu:
            val_acc = val_acc.cuda(loss_val.device.index)

        # in DP mode (default), make sure a scalar result gets an extra dim at the beginning
        if self.trainer.use_dp:
            loss_val = loss_val.unsqueeze(0)
            val_acc = val_acc.unsqueeze(0)

        # alternate possible outputs to test
        if batch_idx % 1 == 0:
            output = OrderedDict({
                'val_loss': loss_val,
                'val_acc': val_acc,
            })
            return output
        if batch_idx % 2 == 0:
            return val_acc

        if batch_idx % 3 == 0:
            output = OrderedDict({
                'val_loss': loss_val,
                'val_acc': val_acc,
                'test_dic': {'val_loss_a': loss_val},
            })
            return output
        if batch_idx % 5 == 0:
            output = OrderedDict({
                f'val_loss_{dataloader_idx}': loss_val,
                f'val_acc_{dataloader_idx}': val_acc,
            })
            return output


class LightningValidationMultipleDataloadersMixin(LightningValidationStepMultipleDataloadersMixin):
    """
    Add val_dataloader, validation_step, and validation_end methods for the case
    when val_dataloader returns multiple dataloaders.
    """

    def validation_end(self, outputs):
        """
        Called at the end of validation to aggregate outputs.

        :param outputs: list (one entry per dataloader) of lists of individual
            outputs of each validation step
        :return: dict with a 'progress_bar' dict of aggregated metrics
        """
        # if validation_step returned a scalar, outputs is a list of scalar tensors
        # and we could return just the average:
        # return torch.stack(outputs).mean()
        val_loss_mean = 0
        val_acc_mean = 0
        i = 0
        for dl_output in outputs:
            for output in dl_output:
                val_loss = output['val_loss']

                # reduce manually when using dp
                if self.trainer.use_dp:
                    val_loss = torch.mean(val_loss)
                val_loss_mean += val_loss

                # reduce manually when using dp
                val_acc = output['val_acc']
                if self.trainer.use_dp:
                    val_acc = torch.mean(val_acc)

                val_acc_mean += val_acc
                i += 1

        val_loss_mean /= i
        val_acc_mean /= i

        tqdm_dict = {'val_loss': val_loss_mean.item(), 'val_acc': val_acc_mean.item()}
        result = {'progress_bar': tqdm_dict}
        return result
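

# Note on the `outputs` structure (illustrative): with multiple val dataloaders,
# Lightning hands validation_end a list with one entry per dataloader, each
# entry being that dataloader's list of per-batch outputs, e.g.
#
#     outputs = [
#         [{'val_loss': ..., 'val_acc': ...}, ...],  # dataloader 0, one dict per batch
#         [{'val_loss': ..., 'val_acc': ...}, ...],  # dataloader 1
#     ]
#
# hence the nested loop above, with `i` counting batches across all dataloaders
# so the means are per-batch averages.

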
class LightningTestStepMixin:
    """
    Add test_dataloader and test_step methods for the case
    when test_dataloader returns a single dataloader.
    """

    @data_loader
    def test_dataloader(self):
        return self._dataloader(train=False)

    def test_step(self, batch, batch_idx):
        """
        Lightning calls this inside the test loop.

        :param batch: current test batch
        :param batch_idx: index of the batch within the test epoch
        :return: loss and accuracy in one of several formats, to exercise
            the different return types Lightning supports
        """
        x, y = batch
        x = x.view(x.size(0), -1)
        y_hat = self.forward(x)

        loss_test = self.loss(y, y_hat)

        # acc
        labels_hat = torch.argmax(y_hat, dim=1)
        test_acc = torch.sum(y == labels_hat).item() / (len(y) * 1.0)
        test_acc = torch.tensor(test_acc)

        if self.on_gpu:
            test_acc = test_acc.cuda(loss_test.device.index)

        # in DP mode (default), make sure a scalar result gets an extra dim at the beginning
        if self.trainer.use_dp:
            loss_test = loss_test.unsqueeze(0)
            test_acc = test_acc.unsqueeze(0)

        # alternate possible outputs to test
        if batch_idx % 1 == 0:
            output = OrderedDict({
                'test_loss': loss_test,
                'test_acc': test_acc,
            })
            return output
        if batch_idx % 2 == 0:
            return test_acc

        if batch_idx % 3 == 0:
            output = OrderedDict({
                'test_loss': loss_test,
                'test_acc': test_acc,
                'test_dic': {'test_loss_a': loss_test},
            })
            return output


class LightningTestMixin(LightningTestStepMixin):
    """
    Add test_dataloader, test_step, and test_end methods for the case
    when test_dataloader returns a single dataloader.
    """

    def test_end(self, outputs):
        """
        Called at the end of testing to aggregate outputs.

        :param outputs: list of individual outputs of each test step
        :return: dict with a 'progress_bar' dict of aggregated metrics
        """
        # if test_step returned a scalar, outputs is a list of scalar tensors
        # and we could return just the average:
        # return torch.stack(outputs).mean()
        test_loss_mean = 0
        test_acc_mean = 0
        for output in outputs:
            test_loss = output['test_loss']

            # reduce manually when using dp
            if self.trainer.use_dp:
                test_loss = torch.mean(test_loss)
            test_loss_mean += test_loss

            # reduce manually when using dp
            test_acc = output['test_acc']
            if self.trainer.use_dp:
                test_acc = torch.mean(test_acc)

            test_acc_mean += test_acc

        test_loss_mean /= len(outputs)
        test_acc_mean /= len(outputs)

        tqdm_dict = {'test_loss': test_loss_mean.item(), 'test_acc': test_acc_mean.item()}
        result = {'progress_bar': tqdm_dict}
        return result


class LightningTestStepMultipleDataloadersMixin:
    """
    Add test_dataloader and test_step methods for the case
    when test_dataloader returns multiple dataloaders.
    """

    @data_loader
    def test_dataloader(self):
        return [self._dataloader(train=False), self._dataloader(train=False)]

    def test_step(self, batch, batch_idx, dataloader_idx):
        """
        Lightning calls this inside the test loop.

        :param batch: current test batch
        :param batch_idx: index of the batch within the test epoch
        :param dataloader_idx: index of the dataloader the batch came from
        :return: loss and accuracy in one of several formats, to exercise
            the different return types Lightning supports
        """
        x, y = batch
        x = x.view(x.size(0), -1)
        y_hat = self.forward(x)

        loss_test = self.loss(y, y_hat)

        # acc
        labels_hat = torch.argmax(y_hat, dim=1)
        test_acc = torch.sum(y == labels_hat).item() / (len(y) * 1.0)
        test_acc = torch.tensor(test_acc)

        if self.on_gpu:
            test_acc = test_acc.cuda(loss_test.device.index)

        # in DP mode (default), make sure a scalar result gets an extra dim at the beginning
        if self.trainer.use_dp:
            loss_test = loss_test.unsqueeze(0)
            test_acc = test_acc.unsqueeze(0)

        # alternate possible outputs to test
        if batch_idx % 1 == 0:
            output = OrderedDict({
                'test_loss': loss_test,
                'test_acc': test_acc,
            })
            return output
        if batch_idx % 2 == 0:
            return test_acc

        if batch_idx % 3 == 0:
            output = OrderedDict({
                'test_loss': loss_test,
                'test_acc': test_acc,
                'test_dic': {'test_loss_a': loss_test},
            })
            return output
        if batch_idx % 5 == 0:
            output = OrderedDict({
                f'test_loss_{dataloader_idx}': loss_test,
                f'test_acc_{dataloader_idx}': test_acc,
            })
            return output


class LightningTestMultipleDataloadersMixin(LightningTestStepMultipleDataloadersMixin):
    """
    Add test_dataloader, test_step, and test_end methods for the case
    when test_dataloader returns multiple dataloaders.
    """

    def test_end(self, outputs):
        """
        Called at the end of testing to aggregate outputs.

        :param outputs: list (one entry per dataloader) of lists of individual
            outputs of each test step
        :return: dict with a 'progress_bar' dict of aggregated metrics
        """
        # if test_step returned a scalar, outputs is a list of scalar tensors
        # and we could return just the average:
        # return torch.stack(outputs).mean()
        test_loss_mean = 0
        test_acc_mean = 0
        i = 0
        for dl_output in outputs:
            for output in dl_output:
                test_loss = output['test_loss']

                # reduce manually when using dp
                if self.trainer.use_dp:
                    test_loss = torch.mean(test_loss)
                test_loss_mean += test_loss

                # reduce manually when using dp
                test_acc = output['test_acc']
                if self.trainer.use_dp:
                    test_acc = torch.mean(test_acc)

                test_acc_mean += test_acc
                i += 1

        test_loss_mean /= i
        test_acc_mean /= i

        tqdm_dict = {'test_loss': test_loss_mean.item(), 'test_acc': test_acc_mean.item()}
        result = {'progress_bar': tqdm_dict}
        return result
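

# Usage sketch (illustrative; model/base names are hypothetical): the test
# mixins are exercised via `Trainer.test`, which calls test_dataloader, then
# test_step per batch, then test_end on the collected outputs.
#
#     import pytorch_lightning as pl
#
#     class CoolTestModel(LightningTestMixin, SomeModelBase, pl.LightningModule):
#         pass  # SomeModelBase is assumed to define forward/loss/_dataloader
#
#     model = CoolTestModel()
#     trainer = pl.Trainer(max_epochs=1)
#     trainer.fit(model)
#     trainer.test(model)  # runs test_step over each batch, then test_end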