diff --git a/src/lightning/fabric/fabric.py b/src/lightning/fabric/fabric.py
index 2b4e05c73c..6e76a92f7c 100644
--- a/src/lightning/fabric/fabric.py
+++ b/src/lightning/fabric/fabric.py
@@ -835,7 +835,7 @@ class Fabric:
         if isinstance(self._strategy, FSDPStrategy) and not _TORCH_GREATER_EQUAL_2_0:
             raise RuntimeError(
                 f"The `{type(self).__name__}` requires the model and optimizer(s) to be set up separately."
-                " Create and set up the model first through `model = self.setup_model(model)`. Then create the"
+                " Create and set up the model first through `model = self.setup_module(model)`. Then create the"
                 " optimizer and set it up: `optimizer = self.setup_optimizer(optimizer)`."
             )
 
diff --git a/tests/tests_fabric/plugins/precision/test_amp_integration.py b/tests/tests_fabric/plugins/precision/test_amp_integration.py
index 70025428fd..d3fb995348 100644
--- a/tests/tests_fabric/plugins/precision/test_amp_integration.py
+++ b/tests/tests_fabric/plugins/precision/test_amp_integration.py
@@ -78,7 +78,7 @@ def test_amp_fused_optimizer_parity():
         seed_everything(1234)
         fabric = Fabric(accelerator="cuda", precision=16, devices=1)
 
-        model = nn.Linear(10, 10).to(fabric.device)  # TODO: replace with individual setup_model call
+        model = nn.Linear(10, 10).to(fabric.device)  # TODO: replace with individual setup_module call
         optimizer = torch.optim.Adam(model.parameters(), lr=1.0, fused=fused)
 
         model, optimizer = fabric.setup(model, optimizer)
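
The corrected error message points users at the separate-setup flow that both hunks reference: `Fabric.setup_module` for the model, then `Fabric.setup_optimizer` for the optimizer. A minimal sketch of that flow follows, assuming Lightning Fabric's public `setup_module`/`setup_optimizer` API named in the diff; the FSDP strategy choice, the `nn.Linear(10, 10)` model, and the learning rate are illustrative assumptions, not taken from this patch.

    # Hedged sketch: the separate-setup pattern the fixed error message recommends.
    # Strategy, model, and hyperparameters below are illustrative assumptions.
    import torch
    import torch.nn as nn
    from lightning.fabric import Fabric

    fabric = Fabric(accelerator="cuda", devices=2, strategy="fsdp")
    fabric.launch()

    # Set up the module first so the strategy (e.g. FSDP) can wrap/shard it...
    model = fabric.setup_module(nn.Linear(10, 10))

    # ...then build the optimizer over the wrapped module's parameters
    # and set it up separately, as the error message instructs.
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    optimizer = fabric.setup_optimizer(optimizer)

This separate flow is what the test's TODO comment alludes to as well: the joint `fabric.setup(model, optimizer)` call would eventually be replaced by an individual `setup_module` call plus `setup_optimizer`.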