parent 4bdb976284
commit 5b694c7e0e
@@ -495,11 +495,11 @@ If you want each process to load the full dataset, ignore this warning.
 """
             warnings.warn(msg)
 
-        if self.use_ddp and\
-                not all(isinstance(dataloader, DistributedSampler)
-                        for dataloader in self.val_dataloader):
-            msg = """
-You're val_dataloader(s) are not all DistributedSamplers.
+        if self.use_ddp and self.val_dataloader is not None:
+            for dataloader in self.val_dataloader:
+                if not isinstance(dataloader, DistributedSampler):
+                    msg = """
+Your val_dataloader(s) are not all DistributedSamplers.
 You're using multiple gpus and multiple nodes without using a DistributedSampler
 to assign a subset of your data to each process. To silence this warning, pass a
 DistributedSampler to your DataLoader.
@@ -516,6 +516,7 @@ dataloader = Dataloader(dataset, sampler=dist_sampler)
 If you want each process to load the full dataset, ignore this warning.
 """
-            warnings.warn(msg)
+                    warnings.warn(msg)
+                    break
 
         # -----------------------------
         # MODEL TRAINING
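The restructured guard also gains a break (second hunk), so the warning fires once for the first offending val dataloader instead of once per dataloader. The remedy the warning text suggests, shown in its snippet as dataloader = Dataloader(dataset, sampler=dist_sampler) (a typo for DataLoader), amounts to the sketch below. This is illustrative only, not part of the commit: the TensorDataset is a toy stand-in for whatever val_dataloader() wraps, and num_replicas/rank are hard-coded only so the sketch runs without an initialized torch.distributed process group; under real DDP you would omit them and let the sampler read the process group.

import torch
from torch.utils.data import DataLoader, TensorDataset
from torch.utils.data.distributed import DistributedSampler

# Toy stand-in for the validation dataset (hypothetical).
dataset = TensorDataset(torch.randn(100, 3), torch.randint(0, 2, (100,)))

# DistributedSampler hands each DDP process a distinct shard of the dataset.
# num_replicas/rank normally come from the initialized process group; they are
# passed explicitly here only to keep the sketch runnable in a single process.
dist_sampler = DistributedSampler(dataset, num_replicas=2, rank=0)

# Passing the sampler to the DataLoader is the fix the warning asks for.
dataloader = DataLoader(dataset, batch_size=10, sampler=dist_sampler)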