debug
commit ccca6b6b9b
parent 072c272dde
@@ -206,8 +206,7 @@ class LightningOptimizer:
         else:
             # make sure to call optimizer_closure when accumulating
             with trainer.profiler.profile("closure"):
-                with trainer.train_loop.block_ddp_sync_behaviour():
-                    closure()
+                closure()
 
     def __repr__(self):
         groups = [
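Note: the removed wrapper only deferred DDP gradient synchronization; the `closure()` call itself follows the standard `torch.optim` closure pattern. A minimal sketch of that pattern in plain PyTorch (the model, optimizer, and loss below are illustrative stand-ins, not code from this commit):

```python
import torch

# Hypothetical setup for illustration only.
model = torch.nn.Linear(10, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
inputs = torch.randn(4, 10)

def closure():
    # A closure re-evaluates the loss and backpropagates, so the
    # optimizer can invoke it (possibly more than once per step).
    optimizer.zero_grad()
    loss = model(inputs).sum()
    loss.backward()
    return loss

# torch.optim optimizers accept the closure directly.
optimizer.step(closure)
```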
@@ -752,15 +752,7 @@ class TrainLoop:
 
     @contextmanager
     def block_ddp_sync_behaviour(self):
-        """
-        Blocks ddp sync gradients behaviour on backwards pass.
-        This is useful for skipping sync when accumulating gradients, reducing communication overhead
-        Returns: context manager with sync behaviour off
-        """
-        if self.trainer.accelerator_backend is not None:
-            yield self.trainer.accelerator_backend.block_ddp_plugin_sync_behaviour()
-        else:
-            yield
+        yield
 
     def _process_closure_result(
        self, batch_outputs: list, opt_idx: int
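For reference, the body this debug commit strips out chose between DDP's sync-skipping behaviour and a no-op: with an accelerator backend present it delegated to `block_ddp_plugin_sync_behaviour()`, otherwise it yielded plainly, so after this change sync is never blocked. A minimal sketch of what such a context manager typically wraps, using `torch.nn.parallel.DistributedDataParallel.no_sync()` (the `block_ddp_sync` name and `module` argument here are illustrative, not Lightning's API):

```python
from contextlib import contextmanager

import torch
from torch.nn.parallel import DistributedDataParallel

@contextmanager
def block_ddp_sync(module: torch.nn.Module):
    # no_sync() skips the gradient all-reduce on backward, avoiding
    # inter-process communication for each accumulation micro-batch;
    # gradients sync again on the first backward outside the block.
    if isinstance(module, DistributedDataParallel):
        with module.no_sync():
            yield
    else:
        yield  # nothing to synchronize for a plain module
```

Skipping the all-reduce this way only pays off while gradients are being accumulated; the final micro-batch must run outside the block so the accumulated gradients are reduced before `optimizer.step()`.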