Sai prasanna master (#219)

* Fix incorrect warning for DistributedSampler.

Check whether `dataloader.sampler` is an instance of `DistributedSampler` instead of checking the `dataloader` itself. A `DataLoader` is never a `DistributedSampler`, so the old check made the warning fire unconditionally, even when a proper sampler was attached (see the sketch just before the diff below).

* Update trainer.py

* merged
William Falcon 2019-09-09 11:36:24 -04:00 committed by GitHub
parent ac0111c196
commit 30b25c8146
1 changed file with 4 additions and 4 deletions
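
Why the old check always warned, as a minimal standalone sketch (assumptions: only torch is needed, the TensorDataset is a stand-in, and num_replicas/rank are passed explicitly so no process group has to be initialized):

    import torch
    from torch.utils.data import DataLoader, TensorDataset
    from torch.utils.data.distributed import DistributedSampler

    dataset = TensorDataset(torch.zeros(8, 1))  # stand-in dataset
    sampler = DistributedSampler(dataset, num_replicas=2, rank=0)
    loader = DataLoader(dataset, sampler=sampler)

    # Old check: a DataLoader is never a DistributedSampler, so this is
    # always False and the warning fired even with the sampler attached.
    print(isinstance(loader, DistributedSampler))          # False

    # Fixed check: look at the sampler hanging off the DataLoader.
    print(isinstance(loader.sampler, DistributedSampler))  # True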

@@ -585,9 +585,9 @@ class Trainer(TrainerIO):
         if self.use_ddp and self.val_dataloader is not None:
             for dataloader in self.val_dataloader:
-                if not isinstance(dataloader, DistributedSampler):
+                if not isinstance(dataloader.sampler, DistributedSampler):
                     msg = """
-                    Your val_dataloader(s) are not all DistributedSamplers.
+                    Your val_dataloader(s) don't use DistributedSampler.
                     You're using multiple gpus and multiple nodes without using a DistributedSampler
                     to assign a subset of your data to each process. To silence this warning, pass a
                     DistributedSampler to your DataLoader.
@@ -608,9 +608,9 @@ class Trainer(TrainerIO):
         if self.use_ddp and self.test_dataloader is not None:
             for dataloader in self.test_dataloader:
-                if not isinstance(dataloader, DistributedSampler):
+                if not isinstance(dataloader.sampler, DistributedSampler):
                     msg = """
-                    Your test_dataloader(s) are not all DistributedSamplers.
+                    Your test_dataloader(s) don't use DistributedSampler.
                     You're using multiple gpus and multiple nodes without using a DistributedSampler
                     to assign a subset of your data to each process. To silence this warning, pass a
                     DistributedSampler to your DataLoader.
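
To silence the warning the way its message suggests, give each eval DataLoader a DistributedSampler. A minimal sketch (in a real DDP run, num_replicas and rank are inferred from the initialized process group; they are explicit here only so the snippet runs standalone):

    import torch
    from torch.utils.data import DataLoader, TensorDataset
    from torch.utils.data.distributed import DistributedSampler

    val_set = TensorDataset(torch.arange(100).unsqueeze(1))  # stand-in val data

    # Shard the data so each process sees a disjoint subset. With an
    # initialized process group you can omit num_replicas and rank.
    val_sampler = DistributedSampler(val_set, num_replicas=2, rank=0)
    val_loader = DataLoader(val_set, batch_size=10, sampler=val_sampler)

    # val_loader.sampler is now a DistributedSampler, so the fixed check
    # in Trainer passes and no warning is emitted.
    assert isinstance(val_loader.sampler, DistributedSampler)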