added single node distdataparallel

commit 96ab78dc41
parent 7ef6db49d3
@@ -282,6 +282,7 @@ class Trainer(TrainerIO):
         # when GPU is called, spawn off a single worker for each gpu
         if self.on_gpu:
             rank = 0
+            pdb.set_trace()
             mp.spawn(self.__dp_train, nprocs=len(self.data_parallel_device_ids), args=(rank, model))
         else:
             self.__run_pretrain_routine(model)
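For context, the mp.spawn call in the hunk above launches one training worker per configured device id and hands each worker its process index. Below is a minimal, self-contained sketch of that same single-node pattern with DistributedDataParallel. The gloo backend, the toy linear model, the world size of 2, and the dp_train / MASTER_ADDR / MASTER_PORT names are illustrative assumptions for the sketch, not part of this commit or of the Trainer's actual __dp_train implementation.

# Minimal single-node DDP sketch (assumptions noted above); runs on CPU via gloo.
import os
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP


def dp_train(rank, world_size):
    # Each spawned worker joins the same process group; rank identifies it.
    os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
    os.environ.setdefault("MASTER_PORT", "29500")
    dist.init_process_group("gloo", rank=rank, world_size=world_size)

    # Wrap a toy model in DDP so gradients are all-reduced across workers.
    model = DDP(nn.Linear(10, 1))
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    for _ in range(3):
        optimizer.zero_grad()
        loss = model(torch.randn(8, 10)).sum()
        loss.backward()  # gradients are synchronized across workers here
        optimizer.step()

    dist.destroy_process_group()


if __name__ == "__main__":
    world_size = 2  # analogous to len(self.data_parallel_device_ids) in the diff
    # mp.spawn calls dp_train(i, world_size) for i in range(world_size).
    mp.spawn(dp_train, nprocs=world_size, args=(world_size,))

Note that mp.spawn supplies the worker index as the first positional argument, so in this sketch the rank comes from the spawner itself rather than being passed in args.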
|