Added single-node DistributedDataParallel

This commit is contained in:
William Falcon 2019-07-03 15:17:02 -04:00
parent ac57dac235
commit b9f581ab87
1 changed file with 3 additions and 2 deletions

View File

@@ -282,11 +282,12 @@ class Trainer(TrainerIO):
         # when GPU is called, spawn off a single worker for each gpu
         if self.on_gpu:
             rank = 0
-            mp.spawn(self.__dp_train, nprocs=len(self.data_parallel_device_ids), args=(rank, model))
+            self.model = model
+            mp.spawn(self.__dp_train, nprocs=len(self.data_parallel_device_ids), args=(rank))
         else:
             self.__run_pretrain_routine(model)

-    def __dp_train(self, gpu_nb, proc_rank, model):
+    def __dp_train(self, gpu_nb, proc_rank):
        """
        Entry point into a DP thread
        :param gpu_nb: