diff --git a/src/ptbench/engine/loggers.py b/src/ptbench/engine/loggers.py
new file mode 100644
index 0000000000000000000000000000000000000000..94c2ca3b64324fcaef701d2941bfc0ad84177310
--- /dev/null
+++ b/src/ptbench/engine/loggers.py
@@ -0,0 +1,79 @@
+# SPDX-FileCopyrightText: Copyright © 2023 Idiap Research Institute <contact@idiap.ch>
+#
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+import os
+import typing
+
+from lightning.fabric.utilities.types import _PATH
+from lightning.pytorch.loggers import TensorBoardLogger
+
+
+class CustomTensorboardLogger(TensorBoardLogger):
+    r"""Custom implementation of lightning's TensorboardLogger.
+
+    This implementation puts all logs inside the same directory, instead of a
+    separate "version_n" directories, which is the default lightning behaviour.
+
+
+    Parameters
+    ----------
+    save_dir
+        Directory where to save the logs to.
+    name
+        Experiment name. Defaults to ``default``. If it is the empty string
+        then no per-experiment subdirectory is used.
+    version
+        Experiment version. If version is not specified the logger inspects the
+        save directory for existing versions, then automatically assigns the
+        next available version. If it is a string then it is used as the
+        run-specific subdirectory name, otherwise ``version_${version}`` is
+        used.
+    log_graph
+        Adds the computational graph to tensorboard. This requires that the
+        user has defined the `self.example_input_array` attribute in their
+        model.
+    default_hp_metric
+        Enables a placeholder metric with key `hp_metric` when
+        `log_hyperparams` is called without a metric (otherwise calls to
+        log_hyperparams without a metric are ignored).
+    prefix
+        A string to put at the beginning of metric keys.
+    sub_dir
+        Sub-directory to group TensorBoard logs. If a sub_dir argument is
+        passed then logs are saved in ``/save_dir/name/version/sub_dir/``.
+        Defaults to ``None``, in which case logs are saved in
+        ``/save_dir/name/version/``.
+    \**kwargs:
+        Additional arguments used by :py:class:`tensorboardX.SummaryWriter` can
+        be passed as keyword arguments in this logger. To automatically flush
+        to disk, ``max_queue`` sets the size of the queue for pending logs before
+        flushing. ``flush_secs`` determines how many seconds elapses before
+        flushing.
+    """
+
+    def __init__(
+        self,
+        save_dir: _PATH,
+        name: str = "lightning-logs",
+        version: int | str | None = None,
+        log_graph: bool = False,
+        default_hp_metric: bool = True,
+        prefix: str = "",
+        sub_dir: _PATH | None = None,
+        **kwargs: typing.Any,
+    ):
+        super().__init__(
+            save_dir,
+            name,
+            version,
+            log_graph,
+            default_hp_metric,
+            prefix,
+            sub_dir,
+            **kwargs,
+        )
+
+    @property
+    def log_dir(self) -> str:
+        return os.path.join(self.save_dir, self.name)
diff --git a/src/ptbench/engine/loggers/custom_tensorboard_logger.py b/src/ptbench/engine/loggers/custom_tensorboard_logger.py
deleted file mode 100644
index 188f0f7988f2da4ee2763a2168a18354f70265e1..0000000000000000000000000000000000000000
--- a/src/ptbench/engine/loggers/custom_tensorboard_logger.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# SPDX-FileCopyrightText: Copyright © 2023 Idiap Research Institute <contact@idiap.ch>
-#
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-import os
-
-from typing import Any, Optional, Union
-
-from lightning.fabric.utilities.types import _PATH
-from lightning.pytorch.loggers import TensorBoardLogger
-
-
-class CustomTensorboardLogger(TensorBoardLogger):
-    r"""Custom implementation implementation of
-    lightning.pytorch.loggers.TensorBoardLogger.
-
-    Allows us to put all logs inside the same directory, instead of a separate "version_n" directory which is the default behaviour.
-
-    Parameters
-    ----------
-
-    save_dir:
-        Save directory
-
-    name:
-        Experiment name. Defaults to ``'default'``. If it is the empty string then no per-experiment
-        subdirectory is used.
-
-    version:
-        Experiment version. If version is not specified the logger inspects the save
-        directory for existing versions, then automatically assigns the next available version.
-        If it is a string then it is used as the run-specific subdirectory name,
-        otherwise ``'version_${version}'`` is used.
-
-    log_graph:
-        Adds the computational graph to tensorboard. This requires that
-        the user has defined the `self.example_input_array` attribute in their
-        model.
-
-    default_hp_metric:
-        Enables a placeholder metric with key `hp_metric` when `log_hyperparams` is
-        called without a metric (otherwise calls to log_hyperparams without a metric are ignored).
-
-    prefix:
-        A string to put at the beginning of metric keys.
-
-    sub_dir:
-        Sub-directory to group TensorBoard logs. If a sub_dir argument is passed
-        then logs are saved in ``/save_dir/name/version/sub_dir/``. Defaults to ``None`` in which
-        logs are saved in ``/save_dir/name/version/``.
-
-    \**kwargs:
-        Additional arguments used by :class:`tensorboardX.SummaryWriter` can be passed as keyword
-        arguments in this logger. To automatically flush to disk, `max_queue` sets the size
-        of the queue for pending logs before flushing. `flush_secs` determines how many seconds
-        elapses before flushing.
-    """
-
-    def __init__(
-        self,
-        save_dir: _PATH,
-        name: Optional[str] = "lightning_logs",
-        version: Optional[Union[int, str]] = None,
-        log_graph: bool = False,
-        default_hp_metric: bool = True,
-        prefix: str = "",
-        sub_dir: Optional[_PATH] = None,
-        **kwargs: Any,
-    ):
-        super().__init__(
-            save_dir,
-            name,
-            version,
-            log_graph,
-            default_hp_metric,
-            prefix,
-            sub_dir,
-        )
-
-    @property
-    def log_dir(self) -> str:
-        return os.path.join(self.save_dir, self.name)
diff --git a/src/ptbench/engine/predictor.py b/src/ptbench/engine/predictor.py
index dd515789a0218333b3404fa97c3b8a3964de6039..edae044b0a0df037751b8c86fa42f68ef9526fbc 100644
--- a/src/ptbench/engine/predictor.py
+++ b/src/ptbench/engine/predictor.py
@@ -44,7 +44,7 @@ def run(
         trainer ``predict()`` method.
     """
 
-    from .loggers.custom_tensorboard_logger import CustomTensorboardLogger
+    from .loggers import CustomTensorboardLogger
 
     log_dir = "logs"
     tensorboard_logger = CustomTensorboardLogger(
diff --git a/src/ptbench/engine/trainer.py b/src/ptbench/engine/trainer.py
index 86f282523dbcfc5edc05fb6ae516e2b05ecea17e..3173fcbca73f392470dfdd1914692379fca1cbd9 100644
--- a/src/ptbench/engine/trainer.py
+++ b/src/ptbench/engine/trainer.py
@@ -156,7 +156,7 @@ def run(
     # Save model summary
     _, no_of_parameters = save_model_summary(output_folder, model)
 
-    from .loggers.custom_tensorboard_logger import CustomTensorboardLogger
+    from .loggers import CustomTensorboardLogger
 
     log_dir = "logs"
     tensorboard_logger = CustomTensorboardLogger(