Fix torchelastic detection with non-distributed installations (#13142)
* Fix torchelastic detection under Mac
* CHANGELOG
parent 29fe1dad15 · commit c5938f8fbc
@@ -223,6 +223,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Avoid redundant callback restore warning while tuning ([#13026](https://github.com/PyTorchLightning/pytorch-lightning/pull/13026))
 
+
+- Fixed torchelastic detection with non-distributed installations ([#13142](https://github.com/PyTorchLightning/pytorch-lightning/pull/13142))
+
 
 - Fixed an issue wrt unnecessary usage of habana mixed precision package for fp32 types ([#13028](https://github.com/PyTorchLightning/pytorch-lightning/pull/13028))
 
@@ -62,7 +62,8 @@ class TorchElasticEnvironment(ClusterEnvironment):
     def detect() -> bool:
         """Returns ``True`` if the current process was launched using the torchelastic command."""
         if _TORCH_GREATER_EQUAL_1_9_1:
-            return torch.distributed.is_torchelastic_launched()
+            # if not available (for example on MacOS), `is_torchelastic_launched` is not defined
+            return torch.distributed.is_available() and torch.distributed.is_torchelastic_launched()
         required_env_vars = {"RANK", "GROUP_RANK", "LOCAL_RANK", "LOCAL_WORLD_SIZE"}
         return required_env_vars.issubset(os.environ.keys())
 
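For context, the fixed detection logic can be sketched as a self-contained function. This is a minimal sketch, not Lightning's module: the name detect_torchelastic and the local _TORCH_GREATER_EQUAL_1_9_1 flag are reconstructed here for illustration.

import os

import torch
from packaging.version import Version

# stand-in for Lightning's `_TORCH_GREATER_EQUAL_1_9_1` version flag (reconstructed)
_TORCH_GREATER_EQUAL_1_9_1 = Version(torch.__version__.split("+")[0]) >= Version("1.9.1")


def detect_torchelastic() -> bool:
    """Return True if this process was launched by torchelastic."""
    if _TORCH_GREATER_EQUAL_1_9_1:
        # `is_available()` guards builds without distributed support (e.g. macOS
        # wheels), where `torch.distributed.is_torchelastic_launched` is not defined
        return torch.distributed.is_available() and torch.distributed.is_torchelastic_launched()
    # older torch versions: fall back to the environment variables torchelastic sets
    required_env_vars = {"RANK", "GROUP_RANK", "LOCAL_RANK", "LOCAL_WORLD_SIZE"}
    return required_env_vars.issubset(os.environ.keys())

The key design point is short-circuit evaluation: on a non-distributed build, torch.distributed.is_available() returns False, so the attribute that is missing on such builds is never touched and the AttributeError the original code raised can no longer occur.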