Finalize logger (#337)
* Ensure logger.finalize is called
* Call logger.finalize
* Update mlflow_logger.py
* Update test_logging.py
* Update trainer.py
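The net effect: once fit() completes, the Trainer now calls the logger's finalize hook. A minimal sketch of the logger lifecycle this establishes, using a hypothetical LightningLoggerBase subclass named MyLogger (the hook names match the diffs below):

# MyLogger is a hypothetical logger used only for illustration.
logger = MyLogger()
logger.log_hyperparams({"lr": 0.02})      # once, when fitting starts
logger.log_metrics({"loss": 0.5}, 100)    # repeatedly during training
logger.finalize("success")                # now guaranteed at the end of training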
parent 49e04de5ac
commit 8088052825
mlflow_logger.py

@@ -53,4 +53,6 @@ class MLFlowLogger(LightningLoggerBase):
 
     @rank_zero_only
     def finalize(self, status="FINISHED"):
+        if status == 'success':
+            status = 'FINISHED'
         self.client.set_terminated(self.run_id, status)
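The new branch exists because the Trainer reports a generic "success" status, while MLflow terminates runs with its own status strings ("FINISHED", "FAILED", "KILLED"). A sketch of the resulting behaviour, assuming logger is an already-constructed MLFlowLogger:

# logger is assumed to be an MLFlowLogger wired to a tracking server.
logger.finalize("success")   # translated to MLflow's "FINISHED"
logger.finalize()            # default is already "FINISHED"
logger.finalize("KILLED")    # other MLflow statuses pass through unchanged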
trainer.py

@@ -1072,6 +1072,9 @@ class Trainer(TrainerIO):
             if stop:
                 return
 
+        if self.logger is not None:
+            self.logger.finalize("success")
+
     def run_training_epoch(self):
         # before epoch hook
         if self.__is_function_implemented('on_epoch_start'):
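The None check matters because a Trainer can be constructed without a logger; finalize must only run when one is attached. A sketch under that assumption, reusing the CustomLogger defined in the test diff below:

# Both configurations are valid once the guard is in place.
Trainer(logger=None)             # no logger: the finalize call is skipped
Trainer(logger=CustomLogger())   # logger.finalize("success") runs after fit()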
test_logging.py

@@ -7,6 +7,7 @@ import torch
 
 from pytorch_lightning import Trainer
 from pytorch_lightning.testing import LightningTestModel
+from pytorch_lightning.logging import LightningLoggerBase, rank_zero_only
 from .test_models import get_hparams, get_test_tube_logger, init_save_dir, clear_save_dir
 
 RANDOM_SEEDS = list(np.random.randint(0, 10000, 1000))
@@ -134,6 +135,46 @@ def test_mlflow_pickle():
     trainer2.logger.log_metrics({"acc": 1.0})
 
 
+def test_custom_logger():
+
+    class CustomLogger(LightningLoggerBase):
+        def __init__(self):
+            super().__init__()
+            self.hparams_logged = None
+            self.metrics_logged = None
+            self.finalized = False
+
+        @rank_zero_only
+        def log_hyperparams(self, params):
+            self.hparams_logged = params
+
+        @rank_zero_only
+        def log_metrics(self, metrics, step_num):
+            self.metrics_logged = metrics
+
+        @rank_zero_only
+        def finalize(self, status):
+            self.finalized_status = status
+
+    hparams = get_hparams()
+    model = LightningTestModel(hparams)
+
+    logger = CustomLogger()
+
+    trainer_options = dict(
+        max_nb_epochs=1,
+        train_percent_check=0.01,
+        logger=logger
+    )
+
+    trainer = Trainer(**trainer_options)
+    result = trainer.fit(model)
+    assert result == 1, "Training failed"
+    assert logger.hparams_logged == hparams
+    assert logger.metrics_logged != {}
+    assert logger.finalized_status == "success"
+
+
 def reset_seed():
     SEED = RANDOM_SEEDS.pop()
     torch.manual_seed(SEED)
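Each CustomLogger hook is decorated with @rank_zero_only so that, under multi-process training, only the rank-0 process writes logs. A rough sketch of the decorator's semantics (a hypothetical reimplementation; the real one is imported from pytorch_lightning.logging above):

from functools import wraps

def rank_zero_only(fn):
    # Hypothetical stand-in: call the wrapped logger method only on rank 0,
    # where `rank` is an attribute the framework sets on the logger.
    @wraps(fn)
    def wrapped(self, *args, **kwargs):
        if getattr(self, 'rank', 0) == 0:
            return fn(self, *args, **kwargs)
    return wrapped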