Custom Plugin is_distributed (#6537)

* return from plugin

* don't return for TPU
This commit is contained in:
Amog Kamsetty 2021-03-15 12:38:30 -07:00 committed by GitHub
parent 6453091b8a
commit 6a14146811
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 4 additions and 0 deletions

View File

@ -273,6 +273,10 @@ class AcceleratorConnector(object):
@property
def is_distributed(self) -> bool:
# Used for custom plugins.
# Custom plugins should implement is_distributed property.
if hasattr(self.training_type_plugin, 'is_distributed') and not self.on_tpu:
return self.training_type_plugin.is_distributed
is_distributed = self.use_ddp or self.use_ddp2 or self.use_horovod
if self.on_tpu:
is_distributed |= self.training_type_plugin.is_distributed