From 88bfed371e9597e813384b3d951b0e5280be71bd Mon Sep 17 00:00:00 2001
From: SiddhantRanade
Date: Thu, 13 Aug 2020 15:06:17 -0600
Subject: [PATCH] Fix enforce_datamodule_dataloader_override() for iterable datasets (#2957)

This function contains the statement
`if (train_dataloader or val_dataloaders) and datamodule:`.
The issue is the same one fixed in
https://github.com/PyTorchLightning/pytorch-lightning/pull/1560: `if dl`
evaluates `bool(dl)`, and since DataLoader defines no `__bool__`, `bool()`
falls back to `DataLoader.__len__`. For a DataLoader wrapping an
IterableDataset, `__len__` delegates to `IterableDataset.__len__`, which is
undefined, so the check raises. The fix is also the same: replace `if dl`
with `if dl is not None`.

Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
---
 pytorch_lightning/trainer/configuration_validator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/configuration_validator.py b/pytorch_lightning/trainer/configuration_validator.py
index f4144563a7..d910701fa3 100644
--- a/pytorch_lightning/trainer/configuration_validator.py
+++ b/pytorch_lightning/trainer/configuration_validator.py
@@ -10,7 +10,7 @@ class ConfigValidator(object):
 
     def enforce_datamodule_dataloader_override(self, train_dataloader, val_dataloaders, datamodule):
         # If you supply a datamodule you can't supply train_dataloader or val_dataloaders
-        if (train_dataloader or val_dataloaders) and datamodule:
+        if (train_dataloader is not None or val_dataloaders is not None) and datamodule is not None:
             raise MisconfigurationException(
                 'You cannot pass train_dataloader or val_dataloaders to trainer.fit if you supply a datamodule'
             )
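
For context, the failure mode described in the commit message can be reproduced with a short, self-contained sketch (assuming torch is installed; `StreamDataset` below is a hypothetical stand-in for any IterableDataset that does not define `__len__`, and is not part of the patch):

    # Minimal sketch, assuming torch is installed. StreamDataset is a hypothetical
    # stand-in for any IterableDataset that does not define __len__.
    from torch.utils.data import DataLoader, IterableDataset


    class StreamDataset(IterableDataset):
        """Iterable-style dataset with no __len__, e.g. an unbounded stream."""

        def __iter__(self):
            return iter(range(10))


    dl = DataLoader(StreamDataset(), batch_size=2)

    # `if dl:` evaluates bool(dl). DataLoader defines no __bool__, so Python falls
    # back to __len__, which for an iterable-style dataset delegates to
    # len(self.dataset) and raises TypeError because StreamDataset has no __len__.
    try:
        if dl:
            pass
    except TypeError as err:
        print(f"truthiness check failed: {err}")

    # The explicit None comparison never touches __len__, which is why the patch
    # replaces `if dl` with `if dl is not None`.
    if dl is not None:
        print("explicit None check works for iterable datasets")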