Skip to content
Snippets Groups Projects
Commit 41d1ecf2 authored by André Anjos's avatar André Anjos :speech_balloon:
Browse files

[engine.loggers] Avoid submodule with single module inside

parent e68471ad
No related branches found
No related tags found
1 merge request!6Making use of LightningDataModule and simplification of data loading
...@@ -3,69 +3,65 @@ ...@@ -3,69 +3,65 @@
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
import os import os
import typing
from typing import Any, Optional, Union
from lightning.fabric.utilities.types import _PATH from lightning.fabric.utilities.types import _PATH
from lightning.pytorch.loggers import TensorBoardLogger from lightning.pytorch.loggers import TensorBoardLogger
class CustomTensorboardLogger(TensorBoardLogger): class CustomTensorboardLogger(TensorBoardLogger):
r"""Custom implementation of r"""Custom implementation of lightning's TensorboardLogger.
lightning.pytorch.loggers.TensorBoardLogger.
This implementation puts all logs inside the same directory, instead of a
separate "version_n" directories, which is the default lightning behaviour.
Allows us to put all logs inside the same directory, instead of a separate "version_n" directory which is the default behaviour.
Parameters Parameters
---------- ----------
save_dir
save_dir: Directory where to save the logs to.
Save directory name
Experiment name. Defaults to ``default``. If it is the empty string
name: then no per-experiment subdirectory is used.
Experiment name. Defaults to ``'default'``. If it is the empty string then no per-experiment version
subdirectory is used. Experiment version. If version is not specified the logger inspects the
save directory for existing versions, then automatically assigns the
version: next available version. If it is a string then it is used as the
Experiment version. If version is not specified the logger inspects the save run-specific subdirectory name, otherwise ``version_${version}`` is
directory for existing versions, then automatically assigns the next available version. used.
If it is a string then it is used as the run-specific subdirectory name, log_graph
otherwise ``'version_${version}'`` is used. Adds the computational graph to tensorboard. This requires that the
user has defined the `self.example_input_array` attribute in their
log_graph:
Adds the computational graph to tensorboard. This requires that
the user has defined the `self.example_input_array` attribute in their
model. model.
default_hp_metric
default_hp_metric: Enables a placeholder metric with key `hp_metric` when
Enables a placeholder metric with key `hp_metric` when `log_hyperparams` is `log_hyperparams` is called without a metric (otherwise calls to
called without a metric (otherwise calls to log_hyperparams without a metric are ignored). log_hyperparams without a metric are ignored).
prefix
prefix:
A string to put at the beginning of metric keys. A string to put at the beginning of metric keys.
sub_dir
sub_dir: Sub-directory to group TensorBoard logs. If a sub_dir argument is
Sub-directory to group TensorBoard logs. If a sub_dir argument is passed passed then logs are saved in ``/save_dir/name/version/sub_dir/``.
then logs are saved in ``/save_dir/name/version/sub_dir/``. Defaults to ``None`` in which Defaults to ``None`` in which logs are saved in
logs are saved in ``/save_dir/name/version/``. ``/save_dir/name/version/``.
\**kwargs: \**kwargs:
Additional arguments used by :class:`tensorboardX.SummaryWriter` can be passed as keyword Additional arguments used by :py:class:`tensorboardX.SummaryWriter` can
arguments in this logger. To automatically flush to disk, `max_queue` sets the size be passed as keyword arguments in this logger. To automatically flush
of the queue for pending logs before flushing. `flush_secs` determines how many seconds to disk, ``max_queue`` sets the size of the queue for pending logs before
elapses before flushing. flushing. ``flush_secs`` determines how many seconds elapses before
flushing.
""" """
def __init__( def __init__(
self, self,
save_dir: _PATH, save_dir: _PATH,
name: Optional[str] = "lightning_logs", name: str = "lightning-logs",
version: Optional[Union[int, str]] = None, version: int | str | None = None,
log_graph: bool = False, log_graph: bool = False,
default_hp_metric: bool = True, default_hp_metric: bool = True,
prefix: str = "", prefix: str = "",
sub_dir: Optional[_PATH] = None, sub_dir: _PATH | None = None,
**kwargs: Any, **kwargs: dict[str, typing.Any],
): ):
super().__init__( super().__init__(
save_dir, save_dir,
...@@ -75,6 +71,7 @@ class CustomTensorboardLogger(TensorBoardLogger): ...@@ -75,6 +71,7 @@ class CustomTensorboardLogger(TensorBoardLogger):
default_hp_metric, default_hp_metric,
prefix, prefix,
sub_dir, sub_dir,
**kwargs,
) )
@property @property
......
...@@ -44,7 +44,7 @@ def run( ...@@ -44,7 +44,7 @@ def run(
trainer ``predict()`` method. trainer ``predict()`` method.
""" """
from .loggers.custom_tensorboard_logger import CustomTensorboardLogger from .loggers import CustomTensorboardLogger
log_dir = "logs" log_dir = "logs"
tensorboard_logger = CustomTensorboardLogger( tensorboard_logger = CustomTensorboardLogger(
......
...@@ -156,7 +156,7 @@ def run( ...@@ -156,7 +156,7 @@ def run(
# Save model summary # Save model summary
_, no_of_parameters = save_model_summary(output_folder, model) _, no_of_parameters = save_model_summary(output_folder, model)
from .loggers.custom_tensorboard_logger import CustomTensorboardLogger from .loggers import CustomTensorboardLogger
log_dir = "logs" log_dir = "logs"
tensorboard_logger = CustomTensorboardLogger( tensorboard_logger = CustomTensorboardLogger(
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment