Remove duplicate no_grad context managers (#16773)

Carlos Mocholí authored on 2023-02-16 15:25:56 +01:00, committed by GitHub
parent 51d44f57dd
commit cc22ddc716
2 changed files with 2 additions and 6 deletions
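
Both deleted `with torch.no_grad():` blocks wrap a call to the validation loop's `run()`, which already disables grad mode internally, so the outer context manager was a no-op. A minimal sketch of the pattern, assuming a decorator like Lightning's `_no_grad_context` (the simplified body below is illustrative, not the library's exact implementation):

```python
from functools import wraps
from typing import Any, Callable

import torch


def _no_grad_context(loop_run: Callable) -> Callable:
    """Disable grad mode for the entire duration of a loop's ``run``."""

    @wraps(loop_run)
    def _decorator(self: Any, *args: Any, **kwargs: Any) -> Any:
        # Simplified: the real decorator can also choose torch.inference_mode()
        # instead of torch.no_grad().
        with torch.no_grad():
            return loop_run(self, *args, **kwargs)

    return _decorator


class _EvaluationLoop:
    @_no_grad_context
    def run(self) -> None:
        # Grad mode is already off here, so wrapping the *call site* in
        # another ``with torch.no_grad():`` adds nothing.
        assert not torch.is_grad_enabled()


_EvaluationLoop().run()  # passes: run() executes with grad mode disabled
```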

src/lightning/pytorch/loops/training_epoch_loop.py

@@ -15,8 +15,6 @@ import math
 from collections import OrderedDict
 from typing import Any, Dict, Optional, Union
 
-import torch
-
 import lightning.pytorch as pl
 from lightning.pytorch import loops  # import as loops to avoid circular imports
 from lightning.pytorch.loops.fetchers import _DataFetcher, _DataLoaderIterDataFetcher
@@ -284,8 +282,7 @@ class _TrainingEpochLoop(loops._Loop):
         # reload dataloaders
         self.val_loop._reload_evaluation_dataloaders()
 
-        with torch.no_grad():
-            self.val_loop.run()
+        self.val_loop.run()
 
     def _accumulated_batches_reached(self) -> bool:
         """Determine if accumulation will be finished by the end of the current batch."""

src/lightning/pytorch/trainer/trainer.py

@@ -956,8 +956,7 @@ class Trainer:
         ]
 
         # run eval step
-        with torch.no_grad():
-            val_loop.run()
+        val_loop.run()
 
         call._call_callback_hooks(self, "on_sanity_check_end")
 
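
A quick way to confirm the behavior is unchanged is to assert on `torch.is_grad_enabled()` from inside the training and validation hooks. The module below is a hypothetical minimal probe (the name `GradModeProbe` and its contents are made up for illustration):

```python
import torch
from torch.utils.data import DataLoader, TensorDataset

from lightning.pytorch import LightningModule, Trainer


class GradModeProbe(LightningModule):
    """Hypothetical minimal module that only checks grad mode in each hook."""

    def __init__(self) -> None:
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        assert torch.is_grad_enabled()  # training still computes gradients
        return self.layer(batch[0]).sum()

    def validation_step(self, batch, batch_idx):
        # Holds with or without the removed outer no_grad: the evaluation
        # loop disables grad mode around the whole validation run.
        assert not torch.is_grad_enabled()

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)


data = DataLoader(TensorDataset(torch.randn(8, 4)), batch_size=4)
trainer = Trainer(fast_dev_run=True, logger=False, enable_checkpointing=False)
trainer.fit(GradModeProbe(), train_dataloaders=data, val_dataloaders=data)
```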