Commit 3177ae1
add utils for orange logs
SebChw committed Dec 1, 2023
1 parent 78c5de2 commit 3177ae1
Showing 2 changed files with 17 additions and 6 deletions.
art/loggers.py: 4 additions, 0 deletions

@@ -49,6 +49,10 @@ def supress_stdout(current_logger: 'Logger') -> 'Logger':
 art_logger = logger
 
 
+def log_yellow_warning(message: str):
+    art_logger.opt(ansi=True).warning(f"<yellow>{message}</yellow>")
+
+
 class NeptuneLoggerAdapter(NeptuneLogger):
     """
     This is a wrapper for LightningLogger for simplifying basic functionalities between different loggers.
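For context, a minimal self-contained sketch of what the new helper does, assuming loguru's default logger stands in for the module-level `art_logger` (the commit calls `opt(ansi=True)`, the older name of loguru's `colors=True` option):

```python
from loguru import logger as art_logger  # stand-in for art/loggers.py's art_logger

def log_yellow_warning(message: str):
    # colors=True makes loguru interpret the <yellow>...</yellow> markup tags,
    # so the whole warning line is rendered in yellow on the console;
    # the commit uses the older spelling opt(ansi=True) for the same behaviour.
    art_logger.opt(colors=True).warning(f"<yellow>{message}</yellow>")

log_yellow_warning("Datamodule does not have log_params method.")
```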
art/steps.py: 13 additions, 6 deletions

@@ -18,6 +18,7 @@
     art_logger,
     get_new_log_file_name,
     get_run_id,
+    log_yellow_warning,
     remove_logger,
 )
 from art.metrics import MetricCalculator, SkippedMetric

@@ -376,17 +377,13 @@ def log_params(self, model):
             self.results["parameters"].update(model_params)
 
         else:
-            art_logger.opt(ansi=True).warning(
-                "<yellow>Art/Lightning Module does not have log_params method. You don't want to regret lack of logs.</yellow>"
-            )
+            log_yellow_warning(f"Art/Lightning Module {msg}")
 
         if hasattr(self.datamodule, "log_params"):
             data_params = self.datamodule.log_params()
             self.results["parameters"].update(data_params)
         else:
-            art_logger.opt(ansi=True).warning(
-                "<yellow>Datamodule does not have log_params method. You don't want to regret lack of logs.</yellow>"
-            )
+            log_yellow_warning(f"Datamodule {msg}")
 
     def reset_trainer(self, logger: Optional[Logger] = None, trainer_kwargs: Dict = {}):
         """

@@ -411,6 +408,16 @@ def get_valloader(self):
         self.datamodule.setup(stage=TrainingStage.VALIDATION.value)
         return self.datamodule.val_dataloader()
 
+    def check_ckpt_callback(self, trainer_kwargs: Dict):
+        if self.requires_ckpt_callback:
+            except_msg = f"At stage {self.name} it is very likely to train some useful model. Please provide a checkpoint callback. You can check how to do this here: https://pytorch-lightning.readthedocs.io/en/1.5.10/extensions/generated/pytorch_lightning.callbacks.ModelCheckpoint.html"
+            if "callbacks" not in trainer_kwargs:
+                log_yellow_warning(except_msg)
+            for callback in trainer_kwargs["callbacks"]:
+                if isinstance(callback, ModelCheckpoint):
+                    return
+            log_yellow_warning(except_msg)
+
 
 class ExploreData(Step):
     """This class checks whether we have some markdown file description of the dataset + we implemented visualizations"""
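To illustrate the new check, a hypothetical call site (the ModelCheckpoint arguments are illustrative, not taken from the diff): passing a ModelCheckpoint inside trainer_kwargs["callbacks"] is what makes check_ckpt_callback return without warning, and since the method iterates over trainer_kwargs["callbacks"] even after warning, the key should be present whenever requires_ckpt_callback is set.

```python
from pytorch_lightning.callbacks import ModelCheckpoint

# Hypothetical configuration for a step with requires_ckpt_callback = True.
trainer_kwargs = {
    "callbacks": [
        # Keep the best checkpoint according to validation loss.
        ModelCheckpoint(monitor="val_loss", mode="min", save_top_k=1),
    ],
}

# step.check_ckpt_callback(trainer_kwargs) finds the ModelCheckpoint instance
# in the callbacks list and returns without emitting the yellow warning.
```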
