From 7b185c7fc9a844d5afd1a7dd7ee101713948cbc6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrian=20W=C3=A4lchli?=
Date: Mon, 17 Oct 2022 16:23:56 -0400
Subject: [PATCH] Update 8-bit optimizer docs (#15155)

---
 docs/source-pytorch/common/precision_intermediate.rst | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/docs/source-pytorch/common/precision_intermediate.rst b/docs/source-pytorch/common/precision_intermediate.rst
index 231c186baf..2f3fbdf77f 100644
--- a/docs/source-pytorch/common/precision_intermediate.rst
+++ b/docs/source-pytorch/common/precision_intermediate.rst
@@ -143,5 +143,13 @@ It is also possible to use BFloat16 mixed precision on the CPU, relying on MKLDN
 8-bit Optimizer
 ***************
 
-It is possible to further reduce the precision using third-party libraries like `bitsandbytes <https://github.com/TimDettmers/bitsandbytes>`_. Although,
+It is possible to further reduce the precision using third-party libraries like `bitsandbytes <https://github.com/TimDettmers/bitsandbytes>`_. Although
 Lightning doesn't support it out of the box yet, you can still use it by configuring it in your LightningModule and setting ``Trainer(precision=32)``.
+
+.. code-block:: python
+
+    import bitsandbytes as bnb
+
+    # in your LightningModule, return the 8-bit optimizer
+    def configure_optimizers(self):
+        return bnb.optim.Adam8bit(self.parameters(), lr=0.001, betas=(0.9, 0.995))
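
For readers trying the snippet above, a minimal end-to-end sketch is shown below. The ``LitModel`` class, its single linear layer, and the ``training_step`` are illustrative assumptions and not part of the patch; only ``configure_optimizers()`` returning the 8-bit optimizer and ``Trainer(precision=32)`` come from the docs change itself.

.. code-block:: python

    import bitsandbytes as bnb
    from torch import nn
    from pytorch_lightning import LightningModule, Trainer


    class LitModel(LightningModule):
        # hypothetical module for illustration; only configure_optimizers()
        # mirrors the snippet added by the patch
        def __init__(self):
            super().__init__()
            self.layer = nn.Linear(32, 2)

        def training_step(self, batch, batch_idx):
            x, y = batch
            return nn.functional.cross_entropy(self.layer(x), y)

        def configure_optimizers(self):
            # 8-bit Adam from bitsandbytes in place of torch.optim.Adam
            return bnb.optim.Adam8bit(self.parameters(), lr=0.001, betas=(0.9, 0.995))


    # keep the Trainer at full precision: bitsandbytes quantizes the optimizer
    # state itself, so Lightning's mixed-precision plugins are not involved
    trainer = Trainer(precision=32)

Note that ``precision=32`` is deliberate here: the memory savings come from the optimizer quantizing its own state to 8 bits, not from Lightning's precision settings.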