From abc634d17cdc9f97ea1fcd45e968b3a88fa9610a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Fri, 28 Apr 2023 02:31:17 +0200
Subject: [PATCH] Fix setup_model typos in Fabric (#17498)

---
 src/lightning/fabric/fabric.py                                | 2 +-
 tests/tests_fabric/plugins/precision/test_amp_integration.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/lightning/fabric/fabric.py b/src/lightning/fabric/fabric.py
index 2b4e05c73c..6e76a92f7c 100644
--- a/src/lightning/fabric/fabric.py
+++ b/src/lightning/fabric/fabric.py
@@ -835,7 +835,7 @@ class Fabric:
         if isinstance(self._strategy, FSDPStrategy) and not _TORCH_GREATER_EQUAL_2_0:
             raise RuntimeError(
                 f"The `{type(self).__name__}` requires the model and optimizer(s) to be set up separately."
-                " Create and set up the model first through `model = self.setup_model(model)`. Then create the"
+                " Create and set up the model first through `model = self.setup_module(model)`. Then create the"
                 " optimizer and set it up: `optimizer = self.setup_optimizer(optimizer)`."
             )
diff --git a/tests/tests_fabric/plugins/precision/test_amp_integration.py b/tests/tests_fabric/plugins/precision/test_amp_integration.py
index 70025428fd..d3fb995348 100644
--- a/tests/tests_fabric/plugins/precision/test_amp_integration.py
+++ b/tests/tests_fabric/plugins/precision/test_amp_integration.py
@@ -78,7 +78,7 @@ def test_amp_fused_optimizer_parity():
         seed_everything(1234)
         fabric = Fabric(accelerator="cuda", precision=16, devices=1)
-        model = nn.Linear(10, 10).to(fabric.device)  # TODO: replace with individual setup_model call
+        model = nn.Linear(10, 10).to(fabric.device)  # TODO: replace with individual setup_module call
         optimizer = torch.optim.Adam(model.parameters(), lr=1.0, fused=fused)
         model, optimizer = fabric.setup(model, optimizer)
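
For reference, the corrected error message describes the workflow Fabric expects when FSDP is used with PyTorch < 2.0: set up the module first, then the optimizer, instead of the combined `fabric.setup(model, optimizer)` call. A minimal sketch of that flow, assuming a CUDA machine; only `Fabric`, `setup_module`, and `setup_optimizer` come from the patch itself, while the strategy/device flags, the toy Linear model, and the optimizer choice are illustrative placeholders:

    import torch
    from torch import nn
    from lightning.fabric import Fabric

    # Strategy, device count, and the toy model below are illustrative assumptions.
    fabric = Fabric(accelerator="cuda", devices=2, strategy="fsdp")
    fabric.launch()

    # Set up the module first so the strategy can wrap/shard it ...
    model = fabric.setup_module(nn.Linear(10, 10))

    # ... then build the optimizer from the set-up module's parameters and set it up separately.
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    optimizer = fabric.setup_optimizer(optimizer)

On PyTorch >= 2.0, or with strategies other than FSDP, the combined `fabric.setup(model, optimizer)` call used in the test above remains valid; the RuntimeError in the patched code is raised only for the FSDP-on-older-PyTorch case.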