From d6a0375974da1cbdf44c1a6c4ca4ee89d9f3496b Mon Sep 17 00:00:00 2001
From: Tian Wang
Date: Wed, 6 May 2020 02:07:26 +0800
Subject: [PATCH] Fixing logic (#1734)

---
 pytorch_lightning/trainer/training_loop.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/training_loop.py b/pytorch_lightning/trainer/training_loop.py
index 37bac3d997..b2ce8599bc 100644
--- a/pytorch_lightning/trainer/training_loop.py
+++ b/pytorch_lightning/trainer/training_loop.py
@@ -325,7 +325,7 @@ class TrainerTrainLoopMixin(ABC):
                 if self.reload_dataloaders_every_epoch:
                     self.reset_train_dataloader(model)
                 # set seed for distributed sampler (enables shuffling for each epoch)
-                if self.use_ddp or self.use_horovod \
+                if (self.use_ddp or self.use_horovod) \
                         and hasattr(self.train_dataloader.sampler, 'set_epoch'):
                     self.train_dataloader.sampler.set_epoch(epoch)