Custom Plugin is_distributed (#6537)

* Return `is_distributed` from the training-type plugin when the plugin defines it.
* Do not defer to the plugin when running on TPU; combine the plugin flag with the TPU path instead.

Parent commit: 6453091b8a
This commit: 6a14146811
@ -273,6 +273,10 @@ class AcceleratorConnector(object):
|
|||
|
||||
@property
def is_distributed(self) -> bool:
    """Return whether the current accelerator configuration is distributed.

    Resolution order:
    1. If the training-type plugin exposes an ``is_distributed`` property
       (the hook custom plugins are expected to implement) and we are NOT
       on TPU, the plugin's answer is authoritative.
    2. Otherwise the flag is derived from the built-in distributed modes
       (DDP / DDP2 / Horovod), and on TPU it is additionally OR-ed with
       the plugin's own flag.

    NOTE(review): ``self.use_ddp`` / ``use_ddp2`` / ``use_horovod`` and
    ``self.on_tpu`` are attributes set elsewhere on this connector —
    assumed boolean; verify against the enclosing class.
    """
    # Used for custom plugins.
    # Custom plugins should implement is_distributed property.
    if hasattr(self.training_type_plugin, 'is_distributed') and not self.on_tpu:
        return self.training_type_plugin.is_distributed

    is_distributed = self.use_ddp or self.use_ddp2 or self.use_horovod
    if self.on_tpu:
        # On TPU the plugin flag augments (never overrides) the built-in modes.
        is_distributed |= self.training_type_plugin.is_distributed
    # The visible source was truncated here; without this return the method
    # would fall through and yield None, contradicting the ``-> bool`` hint.
    return is_distributed
|
|
Loading…
Reference in New Issue