From 7a29898a5db47a4acb05969458f487107aeb413e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Tue, 26 Sep 2023 23:58:14 +0200
Subject: [PATCH] Remove `process_group` property (#18592)

---
 src/lightning/pytorch/strategies/fsdp.py | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/src/lightning/pytorch/strategies/fsdp.py b/src/lightning/pytorch/strategies/fsdp.py
index 35c7e1b10e..19d1265e27 100644
--- a/src/lightning/pytorch/strategies/fsdp.py
+++ b/src/lightning/pytorch/strategies/fsdp.py
@@ -58,7 +58,7 @@ from lightning.fabric.utilities.init import _EmptyInit
 from lightning.fabric.utilities.load import _lazy_load, _materialize_tensors
 from lightning.fabric.utilities.optimizer import _optimizers_to_device
 from lightning.fabric.utilities.seed import reset_seed
-from lightning.fabric.utilities.types import _PATH, ProcessGroup, ReduceOp
+from lightning.fabric.utilities.types import _PATH, ReduceOp
 from lightning.pytorch.core.optimizer import LightningOptimizer
 from lightning.pytorch.plugins.precision import PrecisionPlugin
 from lightning.pytorch.plugins.precision.fsdp import FSDPPrecisionPlugin
@@ -200,12 +200,6 @@ class FSDPStrategy(ParallelStrategy):
     def num_processes(self) -> int:
         return len(self.parallel_devices) if self.parallel_devices is not None else 0
 
-    @property
-    def process_group(self) -> Optional[ProcessGroup]:
-        from torch.distributed.fsdp import FullyShardedDataParallel as FSDP
-
-        return self.model.process_group if isinstance(self.model, FSDP) else None
-
     @property
     def process_group_backend(self) -> Optional[str]:
         return self._process_group_backend
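
Note: the removed property only delegated to the FSDP-wrapped module, so downstream code that needs the process group can still read it off the wrapped model directly. Below is a minimal sketch based on the removed property's body; the helper name get_fsdp_process_group is hypothetical and not part of the Lightning API, and it assumes the strategy has already wrapped the model during setup.

    from typing import Optional

    import torch.distributed
    from torch.distributed.fsdp import FullyShardedDataParallel as FSDP


    def get_fsdp_process_group(strategy) -> Optional["torch.distributed.ProcessGroup"]:
        """Return the process group of the FSDP-wrapped model, or None if the model is not wrapped yet."""
        model = strategy.model  # FSDPStrategy wraps the module in FSDP during setup
        return model.process_group if isinstance(model, FSDP) else None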