Remove deprecated `self.log` arguments (#10423)
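The removed flags were already emitting deprecation warnings: `sync_dist_op` was superseded by passing the reduction through `reduce_fx` (whose default now becomes `"mean"`), while the two truncated-BPTT flags have no replacement and should simply be dropped from the call. A minimal migration sketch, assuming a typical `training_step` (the module below is illustrative and not part of this commit):

```python
import torch
import pytorch_lightning as pl


class MigratedModel(pl.LightningModule):
    """Illustrative module showing the `self.log` call before and after this change."""

    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def training_step(self, batch, batch_idx):
        loss = self.layer(batch).sum()

        # Before (removed by this commit): the cross-process reduction was chosen
        # via `sync_dist_op`.
        # self.log("train_loss", loss, sync_dist=True, sync_dist_op="sum")

        # After: pass the reduction directly through `reduce_fx`, as the old
        # deprecation message suggested; `tbptt_reduce_fx` and `tbptt_pad_token`
        # are simply dropped.
        self.log("train_loss", loss, sync_dist=True, reduce_fx="sum")
        return loss

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)
```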
@@ -84,6 +84,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed deprecated property `is_slurm_managing_tasks` from AcceleratorConnector ([#10353](https://github.com/PyTorchLightning/pytorch-lightning/pull/10353))
+- Removed deprecated `LightningModule.log(tbptt_reduce_fx, tbptt_pad_token, sync_dist_op)` ([#10423](https://github.com/PyTorchLightning/pytorch-lightning/pull/10423))
 - Removed PyTorch 1.6 support ([#10367](https://github.com/PyTorchLightning/pytorch-lightning/pull/10367))
@@ -305,12 +305,9 @@ class LightningModule(
         logger: bool = True,
         on_step: Optional[bool] = None,
         on_epoch: Optional[bool] = None,
-        reduce_fx: Union[str, Callable] = "default",  # TODO: change to 'mean' when `sync_dist_op` is removed in 1.6
-        tbptt_reduce_fx: Optional = None,  # todo: Remove in 1.6
-        tbptt_pad_token: Optional = None,  # todo: Remove in 1.6
+        reduce_fx: Union[str, Callable] = "mean",
         enable_graph: bool = False,
         sync_dist: bool = False,
-        sync_dist_op: Optional = None,  # todo: Remove in 1.6
         sync_dist_group: Optional[Any] = None,
         add_dataloader_idx: bool = True,
         batch_size: Optional[int] = None,
@@ -358,28 +355,6 @@ class LightningModule(
             rank_zero_only: Whether the value will be logged only on rank 0. This will prevent synchronization which
                 would produce a deadlock as not all processes would perform this log call.
         """
-        if tbptt_reduce_fx is not None:
-            rank_zero_deprecation(
-                "`self.log(tbptt_reduce_fx=...)` is no longer supported. The flag will be removed in v1.6."
-                " Please, open a discussion explaining your use-case in"
-                " `https://github.com/PyTorchLightning/pytorch-lightning/discussions`"
-            )
-        if tbptt_pad_token is not None:
-            rank_zero_deprecation(
-                "`self.log(tbptt_pad_token=...)` is no longer supported. The flag will be removed in v1.6."
-                " Please, open a discussion explaining your use-case in"
-                " `https://github.com/PyTorchLightning/pytorch-lightning/discussions`"
-            )
-        if sync_dist_op is not None:
-            rank_zero_deprecation(
-                f"`self.log(sync_dist_op='{sync_dist_op}')` is deprecated and will be removed in v.1.6."
-                f" Use `self.log(reduce_fx={sync_dist_op})` instead."
-            )
-            if reduce_fx == "default":
-                reduce_fx = sync_dist_op
-        elif reduce_fx == "default":
-            reduce_fx = "mean"
-
         # check for invalid values
         apply_to_collection(value, dict, self.__check_not_nested, name)
         apply_to_collection(
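With the compatibility shims above gone, the old keywords are no longer part of the `log` signature at all, so passing one now fails at argument binding with a plain `TypeError` instead of a deprecation warning. A quick, illustrative check of the new surface (assuming an installed Lightning version that includes this change):

```python
import inspect

from pytorch_lightning import LightningModule

# The removed flags no longer appear in the public signature of `self.log`,
# so `self.log("foo", 1, sync_dist_op="sum")` would raise a TypeError.
params = inspect.signature(LightningModule.log).parameters
assert "sync_dist_op" not in params
assert "tbptt_reduce_fx" not in params and "tbptt_pad_token" not in params
```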
@@ -481,12 +456,9 @@ class LightningModule(
         logger: bool = True,
         on_step: Optional[bool] = None,
         on_epoch: Optional[bool] = None,
-        reduce_fx: Union[str, Callable] = "default",  # TODO: change to 'mean' when `sync_dist_op` is removed in 1.6
-        tbptt_reduce_fx: Optional[Any] = None,  # todo: Remove in 1.6
-        tbptt_pad_token: Optional[Any] = None,  # todo: Remove in 1.6
+        reduce_fx: Union[str, Callable] = "mean",
         enable_graph: bool = False,
         sync_dist: bool = False,
-        sync_dist_op: Optional[Any] = None,  # todo: Remove in 1.6
         sync_dist_group: Optional[Any] = None,
         add_dataloader_idx: bool = True,
         batch_size: Optional[int] = None,
@@ -531,9 +503,6 @@ class LightningModule(
                 enable_graph=enable_graph,
                 sync_dist=sync_dist,
                 sync_dist_group=sync_dist_group,
-                sync_dist_op=sync_dist_op,
-                tbptt_pad_token=tbptt_pad_token,
-                tbptt_reduce_fx=tbptt_reduce_fx,
                 add_dataloader_idx=add_dataloader_idx,
                 batch_size=batch_size,
                 rank_zero_only=rank_zero_only,
@@ -65,39 +65,6 @@ def test_v1_6_0_reload_dataloaders_every_epoch(tmpdir):
     assert tracker.mock_calls == expected_sequence
 
 
-def test_v1_6_0_tbptt_reduce_fx(tmpdir):
-    class TestModel(BoringModel):
-        def training_step(self, *args):
-            self.log("foo", 1, tbptt_reduce_fx=lambda x: x)
-            return super().training_step(*args)
-
-    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True)
-    with pytest.deprecated_call(match=r"tbptt_reduce_fx=...\)` is no longer supported"):
-        trainer.fit(TestModel())
-
-
-def test_v1_6_0_tbptt_pad_token(tmpdir):
-    class TestModel(BoringModel):
-        def training_step(self, *args):
-            self.log("foo", 1, tbptt_pad_token=0)
-            return super().training_step(*args)
-
-    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True)
-    with pytest.deprecated_call(match=r"tbptt_pad_token=...\)` is no longer supported"):
-        trainer.fit(TestModel())
-
-
-def test_v1_6_0_sync_dist_op(tmpdir):
-    class TestModel(BoringModel):
-        def training_step(self, *args):
-            self.log("foo", 1, sync_dist_op="sum")
-            return super().training_step(*args)
-
-    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True)
-    with pytest.deprecated_call(match=r"`self.log\(sync_dist_op='sum'\)` is deprecated"):
-        trainer.fit(TestModel())
-
-
 def test_v1_6_0_is_overridden_model():
     model = BoringModel()
     with pytest.deprecated_call(match="and will be removed in v1.6"):