[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
pre-commit-ci[bot] 2024-02-16 17:26:55 +00:00
parent f10b897d94
commit 965fc03b4c
1 changed file with 40 additions and 42 deletions

@@ -266,48 +266,46 @@ class HookedModel(BoringModel):
         using_deepspeed = kwargs.get("strategy") == "deepspeed"
         out = []
         for i in range(current_batch, batches):
-            out.extend(
-                [
-                    {"name": "on_before_batch_transfer", "args": (ANY, 0)},
-                    {"name": "transfer_batch_to_device", "args": (ANY, device, 0)},
-                    {"name": "on_after_batch_transfer", "args": (ANY, 0)},
-                    {"name": "Callback.on_train_batch_start", "args": (trainer, model, ANY, i)},
-                    {"name": "on_train_batch_start", "args": (ANY, i)},
-                    {"name": "forward", "args": (ANY,)},
-                    {"name": "training_step", "args": (ANY, i)},
-                    {"name": "Callback.on_before_zero_grad", "args": (trainer, model, ANY)},
-                    {"name": "on_before_zero_grad", "args": (ANY,)},
-                    {"name": "optimizer_zero_grad", "args": (current_epoch, i, ANY)},
-                    {"name": "Callback.on_before_backward", "args": (trainer, model, ANY)},
-                    {"name": "on_before_backward", "args": (ANY,)},
-                    # DeepSpeed handles backward internally
-                    *([{"name": "backward", "args": (ANY,)}] if not using_deepspeed else []),
-                    {"name": "Callback.on_after_backward", "args": (trainer, model)},
-                    {"name": "on_after_backward"},
-                    # note: unscaling happens here in the case of AMP
-                    {"name": "Callback.on_before_optimizer_step", "args": (trainer, model, ANY)},
-                    {"name": "on_before_optimizer_step", "args": (ANY,)},
-                    {
-                        "name": "clip_gradients",
-                        "args": (ANY,),
-                        "kwargs": {"gradient_clip_val": None, "gradient_clip_algorithm": None},
-                    },
-                    {
-                        "name": "configure_gradient_clipping",
-                        "args": (ANY,),
-                        "kwargs": {"gradient_clip_val": None, "gradient_clip_algorithm": None},
-                    },
-                    # this is after because it refers to the `LightningModule.optimizer_step` hook which encapsulates
-                    # the actual call to `Precision.optimizer_step`
-                    {
-                        "name": "optimizer_step",
-                        "args": (current_epoch, i, ANY, ANY),
-                    },
-                    *([{"name": "lr_scheduler_step", "args": ANY}] if i == (trainer.num_training_batches - 1) else []),
-                    {"name": "Callback.on_train_batch_end", "args": (trainer, model, {"loss": ANY}, ANY, i)},
-                    {"name": "on_train_batch_end", "args": ({"loss": ANY}, ANY, i)},
-                ]
-            )
+            out.extend([
+                {"name": "on_before_batch_transfer", "args": (ANY, 0)},
+                {"name": "transfer_batch_to_device", "args": (ANY, device, 0)},
+                {"name": "on_after_batch_transfer", "args": (ANY, 0)},
+                {"name": "Callback.on_train_batch_start", "args": (trainer, model, ANY, i)},
+                {"name": "on_train_batch_start", "args": (ANY, i)},
+                {"name": "forward", "args": (ANY,)},
+                {"name": "training_step", "args": (ANY, i)},
+                {"name": "Callback.on_before_zero_grad", "args": (trainer, model, ANY)},
+                {"name": "on_before_zero_grad", "args": (ANY,)},
+                {"name": "optimizer_zero_grad", "args": (current_epoch, i, ANY)},
+                {"name": "Callback.on_before_backward", "args": (trainer, model, ANY)},
+                {"name": "on_before_backward", "args": (ANY,)},
+                # DeepSpeed handles backward internally
+                *([{"name": "backward", "args": (ANY,)}] if not using_deepspeed else []),
+                {"name": "Callback.on_after_backward", "args": (trainer, model)},
+                {"name": "on_after_backward"},
+                # note: unscaling happens here in the case of AMP
+                {"name": "Callback.on_before_optimizer_step", "args": (trainer, model, ANY)},
+                {"name": "on_before_optimizer_step", "args": (ANY,)},
+                {
+                    "name": "clip_gradients",
+                    "args": (ANY,),
+                    "kwargs": {"gradient_clip_val": None, "gradient_clip_algorithm": None},
+                },
+                {
+                    "name": "configure_gradient_clipping",
+                    "args": (ANY,),
+                    "kwargs": {"gradient_clip_val": None, "gradient_clip_algorithm": None},
+                },
+                # this is after because it refers to the `LightningModule.optimizer_step` hook which encapsulates
+                # the actual call to `Precision.optimizer_step`
+                {
+                    "name": "optimizer_step",
+                    "args": (current_epoch, i, ANY, ANY),
+                },
+                *([{"name": "lr_scheduler_step", "args": ANY}] if i == (trainer.num_training_batches - 1) else []),
+                {"name": "Callback.on_train_batch_end", "args": (trainer, model, {"loss": ANY}, ANY, i)},
+                {"name": "on_train_batch_end", "args": ({"loss": ANY}, ANY, i)},
+            ])
         return out

     @staticmethod
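
For context: the list reformatted above is an expected sequence of hook calls, compared against entries that the test's HookedModel records at runtime; this commit only changes bracket style, not the entries themselves. Below is a minimal, hypothetical sketch of that recording pattern — the HookRecorder name and _record helper are illustrative stand-ins, not Lightning's actual code; unittest.mock.ANY is real and compares equal to any object.

from unittest.mock import ANY


class HookRecorder:
    """Hypothetical stand-in for HookedModel: logs each hook call as a dict."""

    def __init__(self):
        self.called = []

    def _record(self, name, *args, **kwargs):
        # Store only the keys that are present, matching the shape of the
        # expected entries above ({"name": ...} plus optional "args"/"kwargs").
        entry = {"name": name}
        if args:
            entry["args"] = args
        if kwargs:
            entry["kwargs"] = kwargs
        self.called.append(entry)

    def on_train_batch_start(self, batch, batch_idx):
        self._record("on_train_batch_start", batch, batch_idx)


recorder = HookRecorder()
recorder.on_train_batch_start({"x": 1}, 0)
# ANY compares equal to anything, so expected entries can leave the batch
# (and tensors such as the loss) unspecified, exactly as in the list above.
assert recorder.called == [{"name": "on_train_batch_start", "args": (ANY, 0)}]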