Merged
Changes from 5 commits
14 changes: 6 additions & 8 deletions src/pytorch_lightning/callbacks/model_checkpoint.py
@@ -369,7 +369,8 @@ def _save_checkpoint(self, trainer: "pl.Trainer", filepath: str) -> None:
         # notify loggers
         if trainer.is_global_zero:
             for logger in trainer.loggers:
-                logger.after_save_checkpoint(proxy(self))
+                if hasattr(logger, "after_save_checkpoint"):
+                    logger.after_save_checkpoint(proxy(self))

     def _should_skip_saving_checkpoint(self, trainer: "pl.Trainer") -> bool:
         from pytorch_lightning.trainer.states import TrainerFn
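For context, the added hasattr guard means a logger that does not implement the after_save_checkpoint hook is simply skipped instead of raising AttributeError. A minimal standalone sketch of the pattern (the logger classes below are hypothetical, not Lightning's):

class HookedLogger:
    """Hypothetical logger that implements the checkpoint hook."""

    def after_save_checkpoint(self, checkpoint_callback) -> None:
        print("notified of a new checkpoint")


class BareLogger:
    """Hypothetical logger without after_save_checkpoint."""


def notify_loggers(loggers, checkpoint_callback=None) -> None:
    for logger in loggers:
        # Call the hook only when the logger actually provides it,
        # so loggers without the method no longer break checkpoint saving.
        if hasattr(logger, "after_save_checkpoint"):
            logger.after_save_checkpoint(checkpoint_callback)


notify_loggers([HookedLogger(), BareLogger()])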
@@ -582,14 +583,11 @@ def __resolve_ckpt_dir(self, trainer: "pl.Trainer") -> _PATH:
             return self.dirpath

         if len(trainer.loggers) > 0:
-            if trainer.loggers[0].save_dir is not None:
-                save_dir = trainer.loggers[0].save_dir
-            else:
-                save_dir = trainer.default_root_dir
-            name = trainer.loggers[0].name
-            version = trainer.loggers[0].version
+            logger_ = trainer.loggers[0]
+            save_dir = getattr(logger_, "save_dir") or trainer.default_root_dir
+            version = logger_.version
             version = version if isinstance(version, str) else f"version_{version}"
-            ckpt_path = os.path.join(save_dir, str(name), version, "checkpoints")
+            ckpt_path = os.path.join(save_dir, str(logger_.name), version, "checkpoints")
         else:
             # if no loggers, use default_root_dir
             ckpt_path = os.path.join(trainer.default_root_dir, "checkpoints")
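As a rough illustration of the directory that __resolve_ckpt_dir composes from the first logger, here is a small sketch with made-up values (save_dir, name, and version below are stand-ins, not calls into Lightning):

import os

# Hypothetical stand-ins for logger_.save_dir, logger_.name, logger_.version.
save_dir = "lightning_logs_root"  # would fall back to trainer.default_root_dir if falsy
name = "my_experiment"
version = 3                       # non-string versions are rendered as "version_<n>"

version = version if isinstance(version, str) else f"version_{version}"
ckpt_path = os.path.join(save_dir, str(name), version, "checkpoints")
print(ckpt_path)  # lightning_logs_root/my_experiment/version_3/checkpoints

The "or trainer.default_root_dir" fallback in the diff covers the case where save_dir is None, mirroring the removed if/else.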
2 changes: 1 addition & 1 deletion src/pytorch_lightning/loggers/csv_logs.py
@@ -39,7 +39,7 @@ class ExperimentWriter(_FabricExperimentWriter):
    r"""
    Experiment writer for CSVLogger.

-    Currently supports to log hyperparameters and metrics in YAML and CSV
+    Currently, supports to log hyperparameters and metrics in YAML and CSV
    format, respectively.

    Args:
6 changes: 2 additions & 4 deletions src/pytorch_lightning/trainer/trainer.py
@@ -1807,10 +1807,8 @@ def model(self, model: torch.nn.Module) -> None:
     @property
     def log_dir(self) -> Optional[str]:
         if len(self.loggers) > 0:
-            if not isinstance(self.loggers[0], TensorBoardLogger):
-                dirpath = self.loggers[0].save_dir
-            else:
-                dirpath = self.loggers[0].log_dir
+            logger_ = self.loggers[0]
+            dirpath = getattr(logger_, "log_dir", getattr(logger_, "save_dir", None))
Contributor:
Why does log_dir take precedence over save_dir? It should be the other way around, to be consistent with the way it was before.

Member Author (@Borda, Jan 17, 2023):
👍 sorry for the confusion :)

Contributor:
What I'm saying is: dirpath = getattr(logger_, "save_dir", getattr(logger_, "log_dir", None)), i.e. the order in which we access save_dir and log_dir, because that's how it's done in the if-else above.

         else:
             dirpath = self.default_root_dir

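To make the precedence discussion above concrete, here is a standalone sketch (with hypothetical logger classes, not Lightning's) showing that the two getattr orderings differ only when a logger exposes both attributes:

class TensorBoardLikeLogger:
    """Hypothetical logger exposing both attributes, like TensorBoardLogger."""
    save_dir = "logs"
    log_dir = "logs/default/version_0"


class GenericLogger:
    """Hypothetical logger exposing only save_dir."""
    save_dir = "logs"


for logger_ in (TensorBoardLikeLogger(), GenericLogger()):
    # Order used in the diff: log_dir wins when both attributes exist.
    log_dir_first = getattr(logger_, "log_dir", getattr(logger_, "save_dir", None))
    # Order suggested in the review: save_dir wins when both attributes exist.
    save_dir_first = getattr(logger_, "save_dir", getattr(logger_, "log_dir", None))
    print(type(logger_).__name__, log_dir_first, save_dir_first)

For a logger that defines only one of the two attributes, both orderings return the same value, so the choice only matters for loggers such as TensorBoardLogger that define both.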