dummy logger (#1836)

Co-authored-by: Nicki Skafte <nugginea@gmail.com>
Nicki Skafte 2020-05-14 16:34:11 +02:00 committed by GitHub
parent 1c10560531
commit 88f816ed06
3 changed files with 39 additions and 2 deletions

pytorch_lightning/loggers/base.py

@@ -292,6 +292,41 @@ class LoggerCollection(LightningLoggerBase):
         return '_'.join([str(logger.version) for logger in self._logger_iterable])
 
 
+class DummyExperiment(object):
+    """ Dummy experiment """
+    def nop(*args, **kw):
+        pass
+
+    def __getattr__(self, _):
+        return self.nop
+
+
+class DummyLogger(LightningLoggerBase):
+    """ Dummy logger for internal use. Useful when we want to disable the user's
+        logger for a feature, but still ensure that the user's code can run. """
+    def __init__(self):
+        super().__init__()
+        self._experiment = DummyExperiment()
+
+    @property
+    def experiment(self):
+        return self._experiment
+
+    def log_metrics(self, metrics, step):
+        pass
+
+    def log_hyperparams(self, params):
+        pass
+
+    @property
+    def name(self):
+        pass
+
+    @property
+    def version(self):
+        pass
+
+
 def merge_dicts(
         dicts: Sequence[Mapping],
         agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None,
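
The trick that makes DummyExperiment safe to hand out is that Python falls back to __getattr__ only for attributes that do not exist, so any experiment method a logger integration might call resolves to the no-op nop, regardless of name or arguments. A minimal standalone sketch of that behavior (the add_scalar and log calls below are illustrative stand-ins for whatever a real experiment object exposes; they are not part of this diff):

class DummyExperiment(object):
    """ Dummy experiment """
    def nop(*args, **kw):
        pass

    def __getattr__(self, _):
        return self.nop

exp = DummyExperiment()
exp.add_scalar("loss", 0.1, 0)  # TensorBoard-style call: resolves to nop, silently ignored
exp.log({"loss": 0.1})          # wandb-style call: likewise a no-op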

pytorch_lightning/trainer/lr_finder.py

@@ -13,6 +13,7 @@ import os
 
 from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.callbacks import Callback
+from pytorch_lightning.loggers.base import DummyLogger
 from pytorch_lightning import _logger as log
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities import rank_zero_warn
@@ -133,7 +134,7 @@ class TrainerLRFinderMixin(ABC):
                                        progress_bar_refresh_rate=1)]
 
         # No logging
-        self.logger = None
+        self.logger = DummyLogger()
 
         # Max step set to number of iterations
         self.max_steps = num_training
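
Replacing None with DummyLogger matters here because callbacks or user code executed during the learning-rate finder's trial fit may still call methods on trainer.logger; on None those calls raise AttributeError, while DummyLogger absorbs them. A short sketch of the difference (the import works once this commit is applied; the specific calls are illustrative):

from pytorch_lightning.loggers.base import DummyLogger

logger = DummyLogger()
logger.log_metrics({"lr": 1e-3}, step=0)       # no-op, where None.log_metrics would raise
logger.log_hyperparams({"num_training": 100})  # no-op as well
logger.experiment.add_scalar("lr", 1e-3, 0)    # arbitrary experiment calls are absorbed too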

pytorch_lightning/trainer/training_tricks.py

@@ -12,6 +12,7 @@ from torch.utils.data import DataLoader
 
 from pytorch_lightning import _logger as log
 from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.callbacks import GradientAccumulationScheduler
+from pytorch_lightning.loggers.base import DummyLogger
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.memory import is_oom_error, garbage_collection_cuda
@@ -195,7 +196,7 @@ class TrainerTrainingTricksMixin(ABC):
         self.auto_scale_batch_size = None  # prevent recursion
         self.max_steps = steps_per_trial  # take few steps
         self.weights_summary = None  # not needed before full run
-        self.logger = None  # not needed before full run
+        self.logger = DummyLogger()  # not needed before full run
         self.callbacks = []  # not needed before full run
         self.checkpoint_callback = False  # required for saving
         self.early_stop_callback = None
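
Both call sites follow the same pattern: a tuner routine runs a short trial fit with the user's logger swapped out for DummyLogger, and the surrounding (unshown) code restores the original trainer state afterwards. A hedged usage sketch from the user's side (API as of PyTorch Lightning ~0.7.x, where these mixins lived; MyModel stands in for a user-defined LightningModule and is not defined here):

import pytorch_lightning as pl

model = MyModel()                   # hypothetical user LightningModule
trainer = pl.Trainer(max_epochs=5)

lr_finder = trainer.lr_find(model)          # trial fit runs with DummyLogger, real logger sees nothing
new_size = trainer.scale_batch_size(model)  # same silencing during the batch-size search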