From 30b25c8146f82a44d5ebcec2a1e1c0c6c9b1f0c5 Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Mon, 9 Sep 2019 11:36:24 -0400
Subject: [PATCH] Sai prasanna master (#219)

* Fix incorrect warning for DistributedSampler.

  Check whether `dataloader.sampler` is an instance of DistributedSampler
  instead of checking the `dataloader` itself.

* Update trainer.py

* merged
---
 pytorch_lightning/trainer/trainer.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 9ea5c0a919..012e36ab31 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -585,9 +585,9 @@ class Trainer(TrainerIO):
 
         if self.use_ddp and self.val_dataloader is not None:
             for dataloader in self.val_dataloader:
-                if not isinstance(dataloader, DistributedSampler):
+                if not isinstance(dataloader.sampler, DistributedSampler):
                     msg = """
-                    Your val_dataloader(s) are not all DistributedSamplers.
+                    Your val_dataloader(s) don't use DistributedSampler.
                     You're using multiple gpus and multiple nodes without using a DistributedSampler
                     to assign a subset of your data to each process. To silence this warning, pass a
                     DistributedSampler to your DataLoader.
@@ -608,9 +608,9 @@ class Trainer(TrainerIO):
 
         if self.use_ddp and self.test_dataloader is not None:
             for dataloader in self.test_dataloader:
-                if not isinstance(dataloader, DistributedSampler):
+                if not isinstance(dataloader.sampler, DistributedSampler):
                     msg = """
-                    Your test_dataloader(s) are not all DistributedSamplers.
+                    Your test_dataloader(s) don't use DistributedSampler.
                     You're using multiple gpus and multiple nodes without using a DistributedSampler
                     to assign a subset of your data to each process. To silence this warning, pass a
                     DistributedSampler to your DataLoader.
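
Note (not part of this patch): the corrected check looks at dataloader.sampler, so the warning
is silenced by constructing the DataLoader with a DistributedSampler, as the message suggests.
A minimal sketch follows; the dataset, batch size, and function name are illustrative placeholders,
not code from this repository.

    # Sketch: build a val/test DataLoader whose .sampler is a DistributedSampler,
    # so the patched isinstance(dataloader.sampler, DistributedSampler) check passes.
    from torch.utils.data import DataLoader
    from torch.utils.data.distributed import DistributedSampler

    def build_distributed_dataloader(my_dataset):
        # DistributedSampler assigns each process a disjoint subset of the dataset;
        # num_replicas and rank default to the initialized process group's values.
        sampler = DistributedSampler(my_dataset)
        # Passing sampler= here is what makes dataloader.sampler a DistributedSampler.
        # shuffle must stay False (the default) when an explicit sampler is given.
        return DataLoader(my_dataset, batch_size=32, sampler=sampler)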