bug fix for #138 (#143)

* bug fix for #138

* split if for readability
Ananya Harsh Jha 2019-08-19 15:03:04 -04:00 committed by William Falcon
parent 4bdb976284
commit 5b694c7e0e
1 changed file with 19 additions and 18 deletions


@@ -495,27 +495,28 @@ If you want each process to load the full dataset, ignore this warning.
             """
             warnings.warn(msg)
 
-        if self.use_ddp and\
-                not all(isinstance(dataloader, DistributedSampler)
-                        for dataloader in self.val_dataloader):
-            msg = """
-            You're val_dataloader(s) are not all DistributedSamplers.
-            You're using multiple gpus and multiple nodes without using a DistributedSampler
-            to assign a subset of your data to each process. To silence this warning, pass a
-            DistributedSampler to your DataLoader.
+        if self.use_ddp and self.val_dataloader is not None:
+            for dataloader in self.val_dataloader:
+                if not isinstance(dataloader, DistributedSampler):
+                    msg = """
+                    Your val_dataloader(s) are not all DistributedSamplers.
+                    You're using multiple gpus and multiple nodes without using a DistributedSampler
+                    to assign a subset of your data to each process. To silence this warning, pass a
+                    DistributedSampler to your DataLoader.
 
-            ie: this:
-            dataset = myDataset()
-            dataloader = Dataloader(dataset)
+                    ie: this:
+                    dataset = myDataset()
+                    dataloader = Dataloader(dataset)
 
-            becomes:
-            dataset = myDataset()
-            dist_sampler = torch.utils.data.distributed.DistributedSampler(dataset)
-            dataloader = Dataloader(dataset, sampler=dist_sampler)
+                    becomes:
+                    dataset = myDataset()
+                    dist_sampler = torch.utils.data.distributed.DistributedSampler(dataset)
+                    dataloader = Dataloader(dataset, sampler=dist_sampler)
 
-            If you want each process to load the full dataset, ignore this warning.
-            """
-            warnings.warn(msg)
+                    If you want each process to load the full dataset, ignore this warning.
+                    """
+                    warnings.warn(msg)
+                    break
 
         # -----------------------------
         # MODEL TRAINING
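
For reference, the pattern the warning asks for looks roughly like the sketch below. This is a minimal standalone example, not part of the commit: MyDataset and the explicit num_replicas/rank values are placeholders, and note that the class is spelled DataLoader in torch.utils.data (the "Dataloader" spelling inside the warning text is carried over verbatim from the diff).

import torch
from torch.utils.data import DataLoader, Dataset
from torch.utils.data.distributed import DistributedSampler

class MyDataset(Dataset):
    # Placeholder dataset used only for this sketch.
    def __len__(self):
        return 100

    def __getitem__(self, idx):
        return torch.tensor(idx, dtype=torch.float32)

dataset = MyDataset()
# In real DDP code, num_replicas/rank are normally taken from the initialized
# process group; they are passed explicitly here only so the sketch runs standalone.
dist_sampler = DistributedSampler(dataset, num_replicas=2, rank=0)
dataloader = DataLoader(dataset, batch_size=8, sampler=dist_sampler)

for batch in dataloader:
    pass  # each of the 2 hypothetical processes iterates over ~50 of the 100 samples

Because the fixed check breaks out of the loop after the first dataloader without a DistributedSampler, the warning is emitted at most once rather than once per offending dataloader.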