Skip to content
Snippets Groups Projects
Commit 41d1ecf2 authored by André Anjos's avatar André Anjos :speech_balloon:
Browse files

[engine.loggers] Avoid submodule with single module inside

parent e68471ad
No related branches found
No related tags found
1 merge request!6Making use of LightningDataModule and simplification of data loading
......@@ -3,69 +3,65 @@
# SPDX-License-Identifier: GPL-3.0-or-later
import os
from typing import Any, Optional, Union
import typing
from lightning.fabric.utilities.types import _PATH
from lightning.pytorch.loggers import TensorBoardLogger
class CustomTensorboardLogger(TensorBoardLogger):
r"""Custom implementation of
lightning.pytorch.loggers.TensorBoardLogger.
r"""Custom implementation of lightning's TensorboardLogger.
This implementation puts all logs inside the same directory, instead of a
separate "version_n" directories, which is the default lightning behaviour.
Allows us to put all logs inside the same directory, instead of a separate "version_n" directory which is the default behaviour.
Parameters
----------
save_dir:
Save directory
name:
Experiment name. Defaults to ``'default'``. If it is the empty string then no per-experiment
subdirectory is used.
version:
Experiment version. If version is not specified the logger inspects the save
directory for existing versions, then automatically assigns the next available version.
If it is a string then it is used as the run-specific subdirectory name,
otherwise ``'version_${version}'`` is used.
log_graph:
Adds the computational graph to tensorboard. This requires that
the user has defined the `self.example_input_array` attribute in their
save_dir
Directory where to save the logs to.
name
Experiment name. Defaults to ``default``. If it is the empty string
then no per-experiment subdirectory is used.
version
Experiment version. If version is not specified the logger inspects the
save directory for existing versions, then automatically assigns the
next available version. If it is a string then it is used as the
run-specific subdirectory name, otherwise ``version_${version}`` is
used.
log_graph
Adds the computational graph to tensorboard. This requires that the
user has defined the `self.example_input_array` attribute in their
model.
default_hp_metric:
Enables a placeholder metric with key `hp_metric` when `log_hyperparams` is
called without a metric (otherwise calls to log_hyperparams without a metric are ignored).
prefix:
default_hp_metric
Enables a placeholder metric with key `hp_metric` when
`log_hyperparams` is called without a metric (otherwise calls to
log_hyperparams without a metric are ignored).
prefix
A string to put at the beginning of metric keys.
sub_dir:
Sub-directory to group TensorBoard logs. If a sub_dir argument is passed
then logs are saved in ``/save_dir/name/version/sub_dir/``. Defaults to ``None``, in which
case logs are saved in ``/save_dir/name/version/``.
sub_dir
Sub-directory to group TensorBoard logs. If a sub_dir argument is
passed then logs are saved in ``/save_dir/name/version/sub_dir/``.
Defaults to ``None``, in which case logs are saved in
``/save_dir/name/version/``.
\**kwargs:
Additional arguments used by :class:`tensorboardX.SummaryWriter` can be passed as keyword
arguments in this logger. To automatically flush to disk, `max_queue` sets the size
of the queue for pending logs before flushing. `flush_secs` determines how many seconds
elapses before flushing.
Additional arguments used by :py:class:`tensorboardX.SummaryWriter` can
be passed as keyword arguments in this logger. To automatically flush
to disk, ``max_queue`` sets the size of the queue for pending logs before
flushing. ``flush_secs`` determines how many seconds elapses before
flushing.
"""
def __init__(
self,
save_dir: _PATH,
name: Optional[str] = "lightning_logs",
version: Optional[Union[int, str]] = None,
name: str = "lightning-logs",
version: int | str | None = None,
log_graph: bool = False,
default_hp_metric: bool = True,
prefix: str = "",
sub_dir: Optional[_PATH] = None,
**kwargs: Any,
sub_dir: _PATH | None = None,
**kwargs: dict[str, typing.Any],
):
super().__init__(
save_dir,
......@@ -75,6 +71,7 @@ class CustomTensorboardLogger(TensorBoardLogger):
default_hp_metric,
prefix,
sub_dir,
**kwargs,
)
@property
......
......@@ -44,7 +44,7 @@ def run(
trainer ``predict()`` method.
"""
from .loggers.custom_tensorboard_logger import CustomTensorboardLogger
from .loggers import CustomTensorboardLogger
log_dir = "logs"
tensorboard_logger = CustomTensorboardLogger(
......
......@@ -156,7 +156,7 @@ def run(
# Save model summary
_, no_of_parameters = save_model_summary(output_folder, model)
from .loggers.custom_tensorboard_logger import CustomTensorboardLogger
from .loggers import CustomTensorboardLogger
log_dir = "logs"
tensorboard_logger = CustomTensorboardLogger(
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment