diff --git a/bob/ip/binseg/configs/datasets/augmentation.py b/bob/ip/binseg/configs/datasets/augmentation.py
new file mode 100644
index 0000000000000000000000000000000000000000..bb124df1c605d4cec3c6512a4fceb1e267388ebc
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/augmentation.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""Dataset augmentation constants"""
+
+from ...data.transforms import (
+    RandomRotation,
+    RandomHorizontalFlip,
+    RandomVerticalFlip,
+    ColorJitter,
+)
+
+ROTATION = [RandomRotation()]
+"""Shared data augmentation based on random rotation only"""
+
+DEFAULT_WITHOUT_ROTATION = [
+    RandomHorizontalFlip(),
+    RandomVerticalFlip(),
+    ColorJitter(),
+]
+"""Shared data augmentation transforms without random rotation"""
+
+DEFAULT = ROTATION + DEFAULT_WITHOUT_ROTATION
+"""Shared data augmentation transforms"""
diff --git a/bob/ip/binseg/configs/datasets/chasedb1.py b/bob/ip/binseg/configs/datasets/chasedb1.py
index 553b9c2fcec8ef2796b53c5fb47678b1458f2b2d..8874e80323b769b52a59df67f87457baa22bb1c2 100644
--- a/bob/ip/binseg/configs/datasets/chasedb1.py
+++ b/bob/ip/binseg/configs/datasets/chasedb1.py
@@ -24,10 +24,13 @@ bright strip running down the centre known as the central vessel reflex.
 """
 
 from bob.ip.binseg.data.transforms import Crop
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Crop(0, 18, 960, 960)] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.chasedb1 import dataset as chasedb1
-dataset = SampleList2TorchDataset(chasedb1.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    chasedb1.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/chasedb1_test.py b/bob/ip/binseg/configs/datasets/chasedb1_test.py
index 945096cfcc393254834d4b85744eee105e0c9947..7b891ea87cd0e2550c0f177e101f88594805e40c 100644
--- a/bob/ip/binseg/configs/datasets/chasedb1_test.py
+++ b/bob/ip/binseg/configs/datasets/chasedb1_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """CHASE-DB1 (test set) for Vessel Segmentation
 
@@ -24,9 +24,12 @@ bright strip running down the centre known as the central vessel reflex.
 """
 
 from bob.ip.binseg.data.transforms import Crop
-_transforms = [Crop(0, 18, 960, 960)]  #(upper, left, height, width)
+
+_transforms = [Crop(0, 18, 960, 960)]  # (upper, left, height, width)
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.chasedb1 import dataset as chasedb1
-dataset = SampleList2TorchDataset(chasedb1.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    chasedb1.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/covd_chasedb1.py b/bob/ip/binseg/configs/datasets/covd_chasedb1.py
index f24dd304db33ba65c9e90aed7f12450a281da631..d47850d55a2a4fb252e486d53437f21d7859d5d6 100644
--- a/bob/ip/binseg/configs/datasets/covd_chasedb1.py
+++ b/bob/ip/binseg/configs/datasets/covd_chasedb1.py
@@ -10,43 +10,44 @@ vessel and HRF (with annotated samples).
 """
 
 from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import (
+    DEFAULT as _DA,
+    DEFAULT_WITHOUT_ROTATION as _DA_NOROT,
+    ROTATION as _ROT,
+)
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 
 from bob.ip.binseg.data.drive import dataset as _raw_drive
-_drive_transforms = [
-        RandomRotation(),
-        CenterCrop((544, 544)),
-        Resize(960),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_drive = SampleList2TorchDataset(_raw_drive.subsets("default")["train"],
-        transforms=_drive_transforms)
+
+_drive_transforms = _ROT + [CenterCrop((544, 544)), Resize(960)] + _DA_NOROT
+_drive = SampleList2TorchDataset(
+    _raw_drive.subsets("default")["train"], transforms=_drive_transforms
+)
 
 from bob.ip.binseg.data.stare import dataset as _raw_stare
-_stare_transforms = [
-        RandomRotation(),
-        Pad((0, 32, 0, 32)),
-        Resize(960),
-        CenterCrop(960),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-    ]
-_stare = SampleList2TorchDataset(_raw_stare.subsets("default")["train"],
-        transforms=_stare_transforms)
+
+_stare_transforms = (
+    _ROT + [Pad((0, 32, 0, 32)), Resize(960), CenterCrop(960)] + _DA_NOROT
+)
+
+_stare = SampleList2TorchDataset(
+    _raw_stare.subsets("default")["train"], transforms=_stare_transforms
+)
 
 from bob.ip.binseg.data.hrf import dataset as _raw_hrf
+
 _hrf_transforms = [Pad((0, 584, 0, 584)), Resize(960)] + _DA
-_hrf = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
-        transforms=_hrf_transforms)
+_hrf = SampleList2TorchDataset(
+    _raw_hrf.subsets("default")["train"], transforms=_hrf_transforms
+)
 
 from bob.ip.binseg.data.iostar import dataset as _raw_iostar
+
 _iostar_transforms = [Resize(960)] + _DA
-_iostar = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
-        transforms=_iostar_transforms)
+_iostar = SampleList2TorchDataset(
+    _raw_iostar.subsets("vessel")["train"], transforms=_iostar_transforms
+)
 
 import torch.utils.data
+
 dataset = torch.utils.data.ConcatDataset([_drive, _stare, _hrf, _iostar])
diff --git a/bob/ip/binseg/configs/datasets/covd_chasedb1_ssl.py b/bob/ip/binseg/configs/datasets/covd_chasedb1_ssl.py
index 370562e3781d7e38beb05650b6066d3521f1c312..332cecde8ffff8541b09c2008bdb2c8150be979a 100644
--- a/bob/ip/binseg/configs/datasets/covd_chasedb1_ssl.py
+++ b/bob/ip/binseg/configs/datasets/covd_chasedb1_ssl.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """COVD-CHASE-DB1 + SSL (training set) for Vessel Segmentation
 
@@ -12,4 +12,5 @@ vessel and HRF (with annotated samples) and CHASE-DB1 without labels.
 from bob.ip.binseg.configs.datasets.covd_chasedb1 import dataset as _labelled
 from bob.ip.binseg.configs.datasets.chasedb1 import dataset as _unlabelled
 from bob.ip.binseg.data.utils import SSLDataset
+
 dataset = SSLDataset(_labelled, _unlabelled)
diff --git a/bob/ip/binseg/configs/datasets/covd_drive.py b/bob/ip/binseg/configs/datasets/covd_drive.py
index 0b56c91c066d8806cd371bc1484acecd85244ad6..f60d1563f13e5b0a04540c5bde9a787dea5f9ac2 100644
--- a/bob/ip/binseg/configs/datasets/covd_drive.py
+++ b/bob/ip/binseg/configs/datasets/covd_drive.py
@@ -11,34 +11,40 @@ vessel and HRF (with annotated samples).
 
 from bob.ip.binseg.data.transforms import *
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import (
+    DEFAULT as _DA,
+    DEFAULT_WITHOUT_ROTATION as _DA_NOROT,
+    ROTATION as _ROT,
+)
 
 from bob.ip.binseg.data.stare import dataset as _raw_stare
-_stare_transforms = [
-        RandomRotation(),
-        Resize(471),
-        Pad((0, 37, 0, 36)),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_stare = SampleList2TorchDataset(_raw_stare.subsets("default")["train"],
-        transforms=_stare_transforms)
+
+_stare_transforms = _ROT + [Resize(471), Pad((0, 37, 0, 36))] + _DA_NOROT
+_stare = SampleList2TorchDataset(
+    _raw_stare.subsets("default")["train"], transforms=_stare_transforms
+)
 
 from bob.ip.binseg.data.chasedb1 import dataset as _raw_chase
+
 _chase_transforms = [Resize(544), Crop(0, 12, 544, 544)] + _DA
-_chase = SampleList2TorchDataset(_raw_chase.subsets("default")["train"],
-        transforms=_chase_transforms)
+_chase = SampleList2TorchDataset(
+    _raw_chase.subsets("default")["train"], transforms=_chase_transforms
+)
 
 from bob.ip.binseg.data.iostar import dataset as _raw_iostar
+
 _iostar_transforms = [Resize(544)] + _DA
-_iostar = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
-        transforms=_iostar_transforms)
+_iostar = SampleList2TorchDataset(
+    _raw_iostar.subsets("vessel")["train"], transforms=_iostar_transforms
+)
 
 from bob.ip.binseg.data.hrf import dataset as _raw_hrf
+
 _hrf_transforms = [Resize((363)), Pad((0, 90, 0, 91))] + _DA
-_hrf = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
-        transforms=_hrf_transforms)
+_hrf = SampleList2TorchDataset(
+    _raw_hrf.subsets("default")["train"], transforms=_hrf_transforms
+)
 
 import torch.utils.data
+
 dataset = torch.utils.data.ConcatDataset([_stare, _chase, _iostar, _hrf])
diff --git a/bob/ip/binseg/configs/datasets/covd_drive_ssl.py b/bob/ip/binseg/configs/datasets/covd_drive_ssl.py
index a26a81e76f3453fdcdfa93e676ca00e142591e8d..e02480ae8a4a674c677462769221ea91a52ffd79 100644
--- a/bob/ip/binseg/configs/datasets/covd_drive_ssl.py
+++ b/bob/ip/binseg/configs/datasets/covd_drive_ssl.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """COVD-DRIVE + SSL (training set) for Vessel Segmentation
 
@@ -12,4 +12,5 @@ vessel and HRF (with annotated samples) and DRIVE without labels.
 from bob.ip.binseg.configs.datasets.covd_drive import dataset as _labelled
 from bob.ip.binseg.configs.datasets.drive import dataset as _unlabelled
 from bob.ip.binseg.data.utils import SSLDataset
+
 dataset = SSLDataset(_labelled, _unlabelled)
diff --git a/bob/ip/binseg/configs/datasets/covd_hrf.py b/bob/ip/binseg/configs/datasets/covd_hrf.py
index 369cf9d36ee1c20e700bdf9e3274c21190c739ae..ec085c39864f48fa68107bf56aff35c0f36dae45 100644
--- a/bob/ip/binseg/configs/datasets/covd_hrf.py
+++ b/bob/ip/binseg/configs/datasets/covd_hrf.py
@@ -11,57 +11,46 @@ IOSTAR vessel (with annotated samples).
 
 from bob.ip.binseg.data.transforms import *
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
+from bob.ip.binseg.configs.datasets.augmentation import (
+    DEFAULT as _DA,
+    DEFAULT_WITHOUT_ROTATION as _DA_NOROT,
+    ROTATION as _ROT,
+)
 
 from bob.ip.binseg.data.drive import dataset as _raw_drive
-_drive_transforms = [
-        RandomRotation(),
-        Crop(75, 10, 416, 544),
-        Pad((21, 0, 22, 0)),
-        Resize(1168),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_drive = SampleList2TorchDataset(_raw_drive.subsets("default")["train"],
-        transforms=_drive_transforms)
+
+_drive_transforms = (
+    _ROT + [Crop(75, 10, 416, 544), Pad((21, 0, 22, 0)), Resize(1168)] + _DA_NOROT
+)
+_drive = SampleList2TorchDataset(
+    _raw_drive.subsets("default")["train"], transforms=_drive_transforms
+)
 
 from bob.ip.binseg.data.stare import dataset as _raw_stare
-_stare_transforms = [
-        RandomRotation(),
-        Crop(50, 0, 500, 705),
-        Resize(1168),
-        Pad((1, 0, 1, 0)),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_stare = SampleList2TorchDataset(_raw_stare.subsets("default")["train"],
-        transforms=_stare_transforms)
+
+_stare_transforms = (
+    _ROT + [Crop(50, 0, 500, 705), Resize(1168), Pad((1, 0, 1, 0))] + _DA_NOROT
+)
+_stare = SampleList2TorchDataset(
+    _raw_stare.subsets("default")["train"], transforms=_stare_transforms
+)
 
 from bob.ip.binseg.data.chasedb1 import dataset as _raw_chase
-_chase_transforms = [
-        RandomRotation(),
-        Crop(140, 18, 680, 960),
-        Resize(1168),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_chase = SampleList2TorchDataset(_raw_chase.subsets("default")["train"],
-        transforms=_chase_transforms)
+
+_chase_transforms = _ROT + [Crop(140, 18, 680, 960), Resize(1168)] + _DA_NOROT
+_chase = SampleList2TorchDataset(
+    _raw_chase.subsets("default")["train"], transforms=_chase_transforms
+)
 
 from bob.ip.binseg.data.iostar import dataset as _raw_iostar
-_iostar_transforms = [
-        RandomRotation(),
-        Crop(144, 0, 768, 1024),
-        Pad((30, 0, 30, 0)),
-        Resize(1168),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_iostar = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
-        transforms=_iostar_transforms)
+
+_iostar_transforms = (
+    _ROT + [Crop(144, 0, 768, 1024), Pad((30, 0, 30, 0)), Resize(1168)] + _DA_NOROT
+)
+_iostar = SampleList2TorchDataset(
+    _raw_iostar.subsets("vessel")["train"], transforms=_iostar_transforms
+)
 
 import torch.utils.data
+
 dataset = torch.utils.data.ConcatDataset([_drive, _stare, _chase, _iostar])
diff --git a/bob/ip/binseg/configs/datasets/covd_hrf_ssl.py b/bob/ip/binseg/configs/datasets/covd_hrf_ssl.py
index ec769cfbe3be35a7945be3f0eb5620f6fcb65aa7..07d1bf327e9add5f9794012f71c8053d1f135cb3 100644
--- a/bob/ip/binseg/configs/datasets/covd_hrf_ssl.py
+++ b/bob/ip/binseg/configs/datasets/covd_hrf_ssl.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """COVD-HRF + SSL (training set) for Vessel Segmentation
 
@@ -12,4 +12,5 @@ IOSTAR vessel (with annotated samples), and HRF without labels.
 from bob.ip.binseg.configs.datasets.covd_hrf import dataset as _labelled
 from bob.ip.binseg.configs.datasets.hrf_1168 import dataset as _unlabelled
 from bob.ip.binseg.data.utils import SSLDataset
+
 dataset = SSLDataset(_labelled, _unlabelled)
diff --git a/bob/ip/binseg/configs/datasets/covd_iostar_vessel.py b/bob/ip/binseg/configs/datasets/covd_iostar_vessel.py
index b101553eb1266ef32839714017c2f2821e9ae967..31ca1ee4204832849f5f27d1c14693d2702014cf 100644
--- a/bob/ip/binseg/configs/datasets/covd_iostar_vessel.py
+++ b/bob/ip/binseg/configs/datasets/covd_iostar_vessel.py
@@ -11,51 +11,44 @@ HRF (with annotated samples).
 
 from bob.ip.binseg.data.transforms import *
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import (
+    DEFAULT as _DA,
+    DEFAULT_WITHOUT_ROTATION as _DA_NOROT,
+    ROTATION as _ROT,
+)
 
 from bob.ip.binseg.data.drive import dataset as _raw_drive
-_drive_transforms = [
-        RandomRotation(),
-        CenterCrop((540, 540)),
-        Resize(1024),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_drive = SampleList2TorchDataset(_raw_drive.subsets("default")["train"],
-        transforms=_drive_transforms)
+
+_drive_transforms = _ROT + [CenterCrop((540, 540)), Resize(1024)] + _DA_NOROT
+_drive = SampleList2TorchDataset(
+    _raw_drive.subsets("default")["train"], transforms=_drive_transforms
+)
 
 
 from bob.ip.binseg.data.stare import dataset as _raw_stare
-_stare_transforms = [
-        RandomRotation(),
-        Pad((0, 32, 0, 32)),
-        Resize(1024),
-        CenterCrop(1024),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-    ]
-_stare = SampleList2TorchDataset(_raw_stare.subsets("default")["train"],
-        transforms=_stare_transforms)
+
+_stare_transforms = (
+    _ROT + [Pad((0, 32, 0, 32)), Resize(1024), CenterCrop(1024)] + _DA_NOROT
+)
+_stare = SampleList2TorchDataset(
+    _raw_stare.subsets("default")["train"], transforms=_stare_transforms
+)
 
 
 from bob.ip.binseg.data.hrf import dataset as _raw_hrf
+
 _hrf_transforms = [Pad((0, 584, 0, 584)), Resize(1024)] + _DA
-_hrf = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
-        transforms=_hrf_transforms)
+_hrf = SampleList2TorchDataset(
+    _raw_hrf.subsets("default")["train"], transforms=_hrf_transforms
+)
 
 from bob.ip.binseg.data.chasedb1 import dataset as _chase_raw
-_chase_transforms = [
-        RandomRotation(),
-        Crop(0, 18, 960, 960),
-        Resize(1024),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_chase = SampleList2TorchDataset(_chase_raw.subsets("default")["train"],
-        transforms=_chase_transforms)
+
+_chase_transforms = _ROT + [Crop(0, 18, 960, 960), Resize(1024)] + _DA_NOROT
+_chase = SampleList2TorchDataset(
+    _chase_raw.subsets("default")["train"], transforms=_chase_transforms
+)
 
 import torch.utils.data
+
 dataset = torch.utils.data.ConcatDataset([_drive, _stare, _hrf, _chase])
diff --git a/bob/ip/binseg/configs/datasets/covd_iostar_vessel_ssl.py b/bob/ip/binseg/configs/datasets/covd_iostar_vessel_ssl.py
index 6357d7109fd3934406872c2bc2fc3c55cf5f9732..877b01e42ff010107d2d626f341693e044fee96d 100644
--- a/bob/ip/binseg/configs/datasets/covd_iostar_vessel_ssl.py
+++ b/bob/ip/binseg/configs/datasets/covd_iostar_vessel_ssl.py
@@ -12,4 +12,5 @@ HRF (with annotated samples) and IOSTAR without labels.
 from bob.ip.binseg.configs.datasets.covd_iostar_vessel import dataset as _labelled
 from bob.ip.binseg.configs.datasets.iostar_vessel import dataset as _unlabelled
 from bob.ip.binseg.data.utils import SSLDataset
+
 dataset = SSLDataset(_labelled, _unlabelled)
diff --git a/bob/ip/binseg/configs/datasets/covd_stare.py b/bob/ip/binseg/configs/datasets/covd_stare.py
index d77e85a7b5bb6f553663cbbe610e5555d286b85c..4821534132a649efe5290be0e916459e328c13b3 100644
--- a/bob/ip/binseg/configs/datasets/covd_stare.py
+++ b/bob/ip/binseg/configs/datasets/covd_stare.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """COVD-STARE (training set) for Vessel Segmentation
 
@@ -9,44 +9,44 @@ The dataset available in this file is composed of DRIVE, CHASE-DB1, IOSTAR
 vessel and HRF (with annotated samples).
 """
 
-from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.transforms import CenterCrop, Pad, Resize
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import (
+    DEFAULT as _DA,
+    DEFAULT_WITHOUT_ROTATION as _DA_NOROT,
+    ROTATION as _ROT,
+)
 
 from bob.ip.binseg.data.drive import dataset as _raw_drive
-_drive_transforms = [
-        RandomRotation(),
-        CenterCrop((470, 544)),
-        Pad((10, 9, 10, 8)),
-        Resize(608),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_drive = SampleList2TorchDataset(_raw_drive.subsets("default")["train"],
-        transforms=_drive_transforms)
+
+_drive_transforms = (
+    _ROT + [CenterCrop((470, 544)), Pad((10, 9, 10, 8)), Resize(608)] + _DA_NOROT
+)
+_drive = SampleList2TorchDataset(
+    _raw_drive.subsets("default")["train"], transforms=_drive_transforms
+)
 
 from bob.ip.binseg.data.chasedb1 import dataset as _raw_chase
-_chase_transforms = [
-        RandomRotation(),
-        CenterCrop((829, 960)),
-        Resize(608),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_chase = SampleList2TorchDataset(_raw_chase.subsets("default")["train"],
-        transforms=_chase_transforms)
+
+_chase_transforms = _ROT + [CenterCrop((829, 960)), Resize(608)] + _DA_NOROT
+_chase = SampleList2TorchDataset(
+    _raw_chase.subsets("default")["train"], transforms=_chase_transforms
+)
 
 from bob.ip.binseg.data.iostar import dataset as _raw_iostar
+
 _iostar_transforms = [Pad((81, 0, 81, 0)), Resize(608)] + _DA
-_iostar = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
-        transforms=_iostar_transforms)
+_iostar = SampleList2TorchDataset(
+    _raw_iostar.subsets("vessel")["train"], transforms=_iostar_transforms
+)
 
 from bob.ip.binseg.data.hrf import dataset as _raw_hrf
+
 _hrf_transforms = [Pad((0, 345, 0, 345)), Resize(608)] + _DA
-_hrf = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
-        transforms=_hrf_transforms)
+_hrf = SampleList2TorchDataset(
+    _raw_hrf.subsets("default")["train"], transforms=_hrf_transforms
+)
 
 import torch.utils.data
+
 dataset = torch.utils.data.ConcatDataset([_drive, _chase, _iostar, _hrf])
diff --git a/bob/ip/binseg/configs/datasets/covd_stare_ssl.py b/bob/ip/binseg/configs/datasets/covd_stare_ssl.py
index ff0fce683463c325f361be758107797555872348..702df051b8b3c38df43318ac17219184297bca4e 100644
--- a/bob/ip/binseg/configs/datasets/covd_stare_ssl.py
+++ b/bob/ip/binseg/configs/datasets/covd_stare_ssl.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """COVD-STARE + SSL (training set) for Vessel Segmentation
 
@@ -12,4 +12,5 @@ vessel and HRF (with annotated samples) and STARE without labels.
 from bob.ip.binseg.configs.datasets.covd_stare import dataset as _labelled
 from bob.ip.binseg.configs.datasets.stare import dataset as _unlabelled
 from bob.ip.binseg.data.utils import SSLDataset
+
 dataset = SSLDataset(_labelled, _unlabelled)
diff --git a/bob/ip/binseg/configs/datasets/csv.py b/bob/ip/binseg/configs/datasets/csv.py
index a03fcbad072fcff39f0fb84fc90e3f532ce3273e..62aa3b11a1ecee75eb9ed3a7132b14e6b1cb16ff 100644
--- a/bob/ip/binseg/configs/datasets/csv.py
+++ b/bob/ip/binseg/configs/datasets/csv.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """Example CSV-based filelist dataset
 
@@ -91,6 +91,7 @@ def _loader(context, sample):
     # compact.  Of course, you can make those paths absolute and then simplify
     # it here.
     import os
+
     root_path = "/path/where/raw/files/sit"
 
     return dict(
@@ -98,16 +99,18 @@ def _loader(context, sample):
         label=load_pil_1(os.path.join(root_path, sample["label"])),
     )
 
+
 # This is just a class that puts everything together: the CSV file, how to load
 # each sample defined in the dataset, names for the various columns of the CSV
 # file and how to make unique keys for each sample (keymaker).  Once created,
 # this object can be called to generate sample lists.
 from bob.ip.binseg.data.dataset import CSVDataset
+
 raw_dataset = CSVDataset(
     # path to the CSV file(s) - you may add as many subsets as you want, each
     # with an unique name, you'll use later to generate sample lists
     subsets=dict(data="<path/to/train.csv>"),
-    fieldnames=("data", "label"),  #these are the column names
+    fieldnames=("data", "label"),  # these are the column names
     loader=_loader,
     keymaker=data_path_keymaker,
 )
@@ -119,7 +122,7 @@ raw_dataset = CSVDataset(
 # model that requires image inputs of 544 x 544 pixels.
 from bob.ip.binseg.data.transforms import CenterCrop
 
-# from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+# from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
 _transforms = [
     CenterCrop((544, 544)),
 ]  # + _DA
@@ -129,4 +132,5 @@ _transforms = [
 # This class also inherits from pytorch Dataset and respect its required API.
 # See the documentation for details.
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
-#dataset = SampleList2TorchDataset(raw_dataset.samples("data"), _transforms)
+
+# dataset = SampleList2TorchDataset(raw_dataset.samples("data"), _transforms)
diff --git a/bob/ip/binseg/configs/datasets/drionsdb.py b/bob/ip/binseg/configs/datasets/drionsdb.py
index 24556208cff56c37898021bd5d9b3d291679bb76..2f9772adbe0c1edacad7236745cddee0fd4fcef1 100644
--- a/bob/ip/binseg/configs/datasets/drionsdb.py
+++ b/bob/ip/binseg/configs/datasets/drionsdb.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRIONS-DB (training set) for Optic Disc Segmentation
 
@@ -19,10 +19,13 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Pad((4, 8, 4, 8))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drionsdb import dataset as drionsdb
-dataset = SampleList2TorchDataset(drionsdb.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drionsdb.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/drionsdb_test.py b/bob/ip/binseg/configs/datasets/drionsdb_test.py
index ed38279ef5ea4b0b96d3d0ef357c91db22ae0a4d..f377f750b4486f2edb02dacc78d34124b47f5788 100644
--- a/bob/ip/binseg/configs/datasets/drionsdb_test.py
+++ b/bob/ip/binseg/configs/datasets/drionsdb_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRIONS-DB (test set) for Optic Disc Segmentation
 
@@ -19,9 +19,12 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
+
 _transforms = [Pad((4, 8, 4, 8))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drionsdb import dataset as drionsdb
-dataset = SampleList2TorchDataset(drionsdb.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drionsdb.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1_cup.py b/bob/ip/binseg/configs/datasets/dristhigs1_cup.py
index 50ed0f1f404a9a5f3d1b401a95e3bc5d8425e3f0..4872ee9211c44475c2e84478f0ff4c524157c0b3 100644
--- a/bob/ip/binseg/configs/datasets/dristhigs1_cup.py
+++ b/bob/ip/binseg/configs/datasets/dristhigs1_cup.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRISHTI-GS1 (training set) for Cup Segmentation
 
@@ -21,10 +21,13 @@ and notching information.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [CenterCrop((1760, 2048))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drishtigs1 import dataset as drishtigs1
-dataset = SampleList2TorchDataset(drishtigs1.subsets("optic-cup-all")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drishtigs1.subsets("optic-cup-all")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1_cup_test.py b/bob/ip/binseg/configs/datasets/dristhigs1_cup_test.py
index 365a17235e073a8df80b7cf69226816276ffc2ea..8c0af6b056cd4b31775a0e0e492fd2ee7f8aed6f 100644
--- a/bob/ip/binseg/configs/datasets/dristhigs1_cup_test.py
+++ b/bob/ip/binseg/configs/datasets/dristhigs1_cup_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRISHTI-GS1 (test set) for Cup Segmentation
 
@@ -21,9 +21,12 @@ and notching information.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop((1760, 2048))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drishtigs1 import dataset as drishtigs1
-dataset = SampleList2TorchDataset(drishtigs1.subsets("optic-cup-all")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drishtigs1.subsets("optic-cup-all")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1_od.py b/bob/ip/binseg/configs/datasets/dristhigs1_od.py
index b35d635d87b9a467f5730cfa16d9f334ca362b13..d07af7eb6da4d417f59b7fa1f4850aeeecc8325d 100644
--- a/bob/ip/binseg/configs/datasets/dristhigs1_od.py
+++ b/bob/ip/binseg/configs/datasets/dristhigs1_od.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRISHTI-GS1 (training set) for Optic Disc Segmentation
 
@@ -21,10 +21,13 @@ and notching information.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [CenterCrop((1760, 2048))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drishtigs1 import dataset as drishtigs1
-dataset = SampleList2TorchDataset(drishtigs1.subsets("optic-disc-all")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drishtigs1.subsets("optic-disc-all")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1_od_test.py b/bob/ip/binseg/configs/datasets/dristhigs1_od_test.py
index 77fb604a0f15f4c659c6645195aa9c2a8fb22dfe..19a1878e2aaccb8180e5071d090a62afacd63154 100644
--- a/bob/ip/binseg/configs/datasets/dristhigs1_od_test.py
+++ b/bob/ip/binseg/configs/datasets/dristhigs1_od_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRISHTI-GS1 (test set) for Optic Disc Segmentation
 
@@ -21,9 +21,12 @@ and notching information.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop((1760, 2048))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drishtigs1 import dataset as drishtigs1
-dataset = SampleList2TorchDataset(drishtigs1.subsets("optic-disc-all")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drishtigs1.subsets("optic-disc-all")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/drive.py b/bob/ip/binseg/configs/datasets/drive.py
index 1d3d9a3ea94a0f939ab47e4d88122b6e879ce4d6..0af4e6929a3a9e5b8df08eaf5d7927eaf119c5ad 100644
--- a/bob/ip/binseg/configs/datasets/drive.py
+++ b/bob/ip/binseg/configs/datasets/drive.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRIVE (training set) for Vessel Segmentation
 
@@ -14,10 +14,13 @@ segmentation of blood vessels in retinal images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [CenterCrop((544, 544))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drive import dataset as drive
-dataset = SampleList2TorchDataset(drive.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drive.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/drive_test.py b/bob/ip/binseg/configs/datasets/drive_test.py
index d7f49c4b123a386d317084828f3178cfc8a5f870..31fea70fb6f53a4f719ad06de1126abefeaa7911 100644
--- a/bob/ip/binseg/configs/datasets/drive_test.py
+++ b/bob/ip/binseg/configs/datasets/drive_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRIVE (test set) for Vessel Segmentation
 
@@ -14,9 +14,12 @@ segmentation of blood vessels in retinal images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop((544, 544))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drive import dataset as drive
-dataset = SampleList2TorchDataset(drive.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drive.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/hrf.py b/bob/ip/binseg/configs/datasets/hrf.py
index 1c9c34262cd841540e37c4470c8357eb9bcab1d8..556b67b3793368d9439063e0ecdac51a418329d2 100644
--- a/bob/ip/binseg/configs/datasets/hrf.py
+++ b/bob/ip/binseg/configs/datasets/hrf.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """HRF (training set) for Vessel Segmentation
 
@@ -15,10 +15,13 @@ x 2336. One set of ground-truth vessel annotations is available.
 """
 
 from bob.ip.binseg.data.transforms import Crop
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Crop(0, 108, 2336, 3296)] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.hrf import dataset as hrf
-dataset = SampleList2TorchDataset(hrf.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    hrf.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/hrf_1168.py b/bob/ip/binseg/configs/datasets/hrf_1168.py
index cd2bcea84b45b8f0b0cd175bedae9ed987006aaf..5a70a9e5768d6812484c393c968bdd21a2eba0e5 100644
--- a/bob/ip/binseg/configs/datasets/hrf_1168.py
+++ b/bob/ip/binseg/configs/datasets/hrf_1168.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """HRF (training set) for Vessel Segmentation
 
@@ -15,10 +15,13 @@ x 2336. One set of ground-truth vessel annotations is available.
 """
 
 from bob.ip.binseg.data.transforms import Crop, Resize
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Crop(0, 108, 2336, 3296), Resize(1168)] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.hrf import dataset as hrf
-dataset = SampleList2TorchDataset(hrf.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    hrf.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/hrf_1168_test.py b/bob/ip/binseg/configs/datasets/hrf_1168_test.py
index 89b5df45990b941e68308bb8738b256261337616..16ebe9afa34744c91ae06d97a7632c03d982e1c0 100644
--- a/bob/ip/binseg/configs/datasets/hrf_1168_test.py
+++ b/bob/ip/binseg/configs/datasets/hrf_1168_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """HRF (test set) for Vessel Segmentation
 
@@ -15,9 +15,12 @@ x 2336. One set of ground-truth vessel annotations is available.
 """
 
 from bob.ip.binseg.data.transforms import Crop, Resize
+
 _transforms = [Crop(0, 108, 2336, 3296), Resize(1168)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.hrf import dataset as hrf
-dataset = SampleList2TorchDataset(hrf.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    hrf.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/hrf_test.py b/bob/ip/binseg/configs/datasets/hrf_test.py
index 2c926af4372cff3a753bd8092a03509b40c6da15..d4a364ed267483c8941a3cb6aa7215b1cfc29645 100644
--- a/bob/ip/binseg/configs/datasets/hrf_test.py
+++ b/bob/ip/binseg/configs/datasets/hrf_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """HRF (test set) for Vessel Segmentation
 
@@ -15,9 +15,12 @@ x 2336. One set of ground-truth vessel annotations is available.
 """
 
 from bob.ip.binseg.data.transforms import Crop
+
 _transforms = [Crop(0, 108, 2336, 3296)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.hrf import dataset as hrf
-dataset = SampleList2TorchDataset(hrf.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    hrf.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/iostar_od.py b/bob/ip/binseg/configs/datasets/iostar_od.py
index 55b0db4a631450ddba5699a4777238f68957ce1f..67d8b45a23204dbb9d7c613ef1e6878deeff31c6 100644
--- a/bob/ip/binseg/configs/datasets/iostar_od.py
+++ b/bob/ip/binseg/configs/datasets/iostar_od.py
@@ -16,8 +16,8 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 """
 
 from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = SampleList2TorchDataset(iostar.subsets("optic-disc")["train"],
-        transforms=_DA)
+
+dataset = SampleList2TorchDataset(iostar.subsets("optic-disc")["train"], transforms=_DA)
diff --git a/bob/ip/binseg/configs/datasets/iostar_od_test.py b/bob/ip/binseg/configs/datasets/iostar_od_test.py
index d9503af9e08407446b0df69982cb9b59dc4427d6..059cfe3556e08b161d290397a790fbb41c44477e 100644
--- a/bob/ip/binseg/configs/datasets/iostar_od_test.py
+++ b/bob/ip/binseg/configs/datasets/iostar_od_test.py
@@ -17,4 +17,5 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
+
 dataset = SampleList2TorchDataset(iostar.subsets("optic-disc")["test"])
diff --git a/bob/ip/binseg/configs/datasets/iostar_vessel.py b/bob/ip/binseg/configs/datasets/iostar_vessel.py
index ae749759e32f737dcd146a8cc356376966b3cdb1..3167c9fb0209f42d6a2b6b4a85eeb94233823423 100644
--- a/bob/ip/binseg/configs/datasets/iostar_vessel.py
+++ b/bob/ip/binseg/configs/datasets/iostar_vessel.py
@@ -16,8 +16,8 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 """
 
 from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = SampleList2TorchDataset(iostar.subsets("vessel")["train"],
-        transforms=_DA)
+
+dataset = SampleList2TorchDataset(iostar.subsets("vessel")["train"], transforms=_DA)
diff --git a/bob/ip/binseg/configs/datasets/iostar_vessel_test.py b/bob/ip/binseg/configs/datasets/iostar_vessel_test.py
index c05b845527cc129bcdff46f2e441a8babf9f385d..c23eeb8c8dedb9194e9dd06ac565f2ed3ec288a3 100644
--- a/bob/ip/binseg/configs/datasets/iostar_vessel_test.py
+++ b/bob/ip/binseg/configs/datasets/iostar_vessel_test.py
@@ -17,4 +17,5 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
+
 dataset = SampleList2TorchDataset(iostar.subsets("vessel")["test"])
diff --git a/bob/ip/binseg/configs/datasets/refuge_cup.py b/bob/ip/binseg/configs/datasets/refuge_cup.py
index 13af1da700902c31757027a824cea2a05db7e75b..d762b9e67e07920e98540b85af7b0a2cc839cf78 100644
--- a/bob/ip/binseg/configs/datasets/refuge_cup.py
+++ b/bob/ip/binseg/configs/datasets/refuge_cup.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (training set) for Optic Cup Segmentation
 
@@ -16,10 +16,13 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import Resize, Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Resize(1539), Pad((21, 46, 22, 47))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-cup")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-cup")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/refuge_cup_dev.py b/bob/ip/binseg/configs/datasets/refuge_cup_dev.py
index 9136756bc96ee1b3bac9d9a25cd4612c7e276382..d8fe4c81bad23f28bdfc20eb55bfff950d76db56 100644
--- a/bob/ip/binseg/configs/datasets/refuge_cup_dev.py
+++ b/bob/ip/binseg/configs/datasets/refuge_cup_dev.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (validation set) for Optic Cup Segmentation
 
@@ -16,9 +16,12 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop(1632)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-cup")["validation"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-cup")["validation"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/refuge_cup_test.py b/bob/ip/binseg/configs/datasets/refuge_cup_test.py
index a42c6a54f6ab4f994dca857906c9cb1345e1edc0..878f38417726f8a863d50fe217f92ea758918150 100644
--- a/bob/ip/binseg/configs/datasets/refuge_cup_test.py
+++ b/bob/ip/binseg/configs/datasets/refuge_cup_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (validation set) for Optic Cup Segmentation
 
@@ -16,9 +16,12 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop(1632)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-cup")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-cup")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/refuge_od.py b/bob/ip/binseg/configs/datasets/refuge_od.py
index 352d161dbeaa236379f0f98e7595cc6ce371fc89..a415bfbcb9ca8cb9754cfeb50d42d4c80c912eb4 100644
--- a/bob/ip/binseg/configs/datasets/refuge_od.py
+++ b/bob/ip/binseg/configs/datasets/refuge_od.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (training set) for Optic Disc Segmentation
 
@@ -16,10 +16,13 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import Resize, Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Resize(1539), Pad((21, 46, 22, 47))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-disc")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-disc")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/refuge_od_dev.py b/bob/ip/binseg/configs/datasets/refuge_od_dev.py
index e404f9cd1ac5c7ee17e5945f44534ef0ffade4f5..c4cccd892b5615804f897b4dbfbf867585517924 100644
--- a/bob/ip/binseg/configs/datasets/refuge_od_dev.py
+++ b/bob/ip/binseg/configs/datasets/refuge_od_dev.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (validation set) for Optic Disc Segmentation
 
@@ -16,9 +16,12 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop(1632)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-disc")["validation"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-disc")["validation"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/refuge_od_test.py b/bob/ip/binseg/configs/datasets/refuge_od_test.py
index 8da7a8bb39c35870a7030eab98a4d68b1dda45b2..0f2c50f2e4c1ba62cc401c1dadceaa9bc9bd7bbb 100644
--- a/bob/ip/binseg/configs/datasets/refuge_od_test.py
+++ b/bob/ip/binseg/configs/datasets/refuge_od_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (validation set) for Optic Disc Segmentation
 
@@ -16,9 +16,12 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop(1632)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-disc")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-disc")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3_cup.py b/bob/ip/binseg/configs/datasets/rimoner3_cup.py
index e7dea012233fb1bc067d2466cc72f383688f2fe5..021d58fbb4cba2e40cc1232f012c92b89f446e14 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3_cup.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3_cup.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """RIM-ONE r3 (training set) for Cup Segmentation
 
@@ -17,10 +17,13 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Pad((8, 8, 8, 8))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.rimoner3 import dataset as rimoner3
-dataset = SampleList2TorchDataset(rimoner3.subsets("optic-cup-exp1")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    rimoner3.subsets("optic-cup-exp1")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3_cup_test.py b/bob/ip/binseg/configs/datasets/rimoner3_cup_test.py
index c7f17fca7e9e5c0cb9b66de86c1a50b14ba29746..d3da003ae95616ede14e10a8990115a0d4451b2f 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3_cup_test.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3_cup_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """RIM-ONE r3 (test set) for Cup Segmentation
 
@@ -17,9 +17,12 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
+
 _transforms = [Pad((8, 8, 8, 8))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.rimoner3 import dataset as rimoner3
-dataset = SampleList2TorchDataset(rimoner3.subsets("optic-cup-exp1")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    rimoner3.subsets("optic-cup-exp1")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3_od.py b/bob/ip/binseg/configs/datasets/rimoner3_od.py
index b7da94d1e0bf9d09bf0f003067c862cef002d7a4..9084153de8d8942341a83386fbd84e9cd0d683cb 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3_od.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3_od.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """RIM-ONE r3 (training set) for Optic Disc Segmentation
 
@@ -17,10 +17,13 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Pad((8, 8, 8, 8))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.rimoner3 import dataset as rimoner3
-dataset = SampleList2TorchDataset(rimoner3.subsets("optic-disc-exp1")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    rimoner3.subsets("optic-disc-exp1")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3_od_test.py b/bob/ip/binseg/configs/datasets/rimoner3_od_test.py
index 4012858ffc5bd07c1506a82f42d8f35aac408dc0..0593ead842385eb7a7888dcff7d4f4b1cf69b91b 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3_od_test.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3_od_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """RIM-ONE r3 (test set) for Optic Disc Segmentation
 
@@ -17,9 +17,12 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
+
 _transforms = [Pad((8, 8, 8, 8))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.rimoner3 import dataset as rimoner3
-dataset = SampleList2TorchDataset(rimoner3.subsets("optic-disc-exp1")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    rimoner3.subsets("optic-disc-exp1")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/stare.py b/bob/ip/binseg/configs/datasets/stare.py
index 4ef2d51c928810f92872d9afa660d6cdc8b6f6f3..4784ce3bf0f5dad250a5321412e81adc4a7b2a7b 100644
--- a/bob/ip/binseg/configs/datasets/stare.py
+++ b/bob/ip/binseg/configs/datasets/stare.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """STARE (training set) for Vessel Segmentation
 
@@ -17,10 +17,13 @@ for training and testing. The second set by Valentina Kouznetsova acts as a
 """
 
 from bob.ip.binseg.data.transforms import Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Pad((2, 1, 2, 2))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.stare import dataset as stare
-dataset = SampleList2TorchDataset(stare.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    stare.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/stare_test.py b/bob/ip/binseg/configs/datasets/stare_test.py
index e8d78bcbe68cefcb4f5e0f24770c176549f41779..18b360f679c3bc5a7a7c6bf485bd7e42650f2bfb 100644
--- a/bob/ip/binseg/configs/datasets/stare_test.py
+++ b/bob/ip/binseg/configs/datasets/stare_test.py
@@ -17,9 +17,12 @@ for training and testing. The second set by Valentina Kouznetsova acts as a
 """
 
 from bob.ip.binseg.data.transforms import Pad
+
 _transforms = [Pad((2, 1, 2, 2))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.stare import dataset as stare
-dataset = SampleList2TorchDataset(stare.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    stare.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/utils.py b/bob/ip/binseg/configs/datasets/utils.py
deleted file mode 100644
index 35850c10f5b8666224790684251921f66b08fbcc..0000000000000000000000000000000000000000
--- a/bob/ip/binseg/configs/datasets/utils.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-"""Dataset configuration utilities"""
-
-from ...data.transforms import (
-    RandomHFlip,
-    RandomVFlip,
-    RandomRotation,
-    ColorJitter,
-)
-
-DATA_AUGMENTATION = [
-        RandomHFlip(),
-        RandomVFlip(),
-        RandomRotation(),
-        ColorJitter(),
-        ]
-"""Shared data augmentation transforms"""
diff --git a/bob/ip/binseg/data/transforms.py b/bob/ip/binseg/data/transforms.py
index 3c8a09dc10735050f7326a924ec82967b1c51741..ea621590e85343a0536efe9c877531b0cb4635ce 100644
--- a/bob/ip/binseg/data/transforms.py
+++ b/bob/ip/binseg/data/transforms.py
@@ -51,7 +51,22 @@ class Compose(torchvision.transforms.Compose):
         return args
 
 
-class _Crop:
+class SingleCrop:
+    """
+    Crops one image at the given coordinates.
+
+    Attributes
+    ----------
+    i : int
+        upper pixel coordinate.
+    j : int
+        left pixel coordinate.
+    h : int
+        height of the cropped image.
+    w : int
+        width of the cropped image.
+    """
+
     def __init__(self, i, j, h, w):
         self.i = i
         self.j = j
@@ -62,9 +77,9 @@ class _Crop:
         return img.crop((self.j, self.i, self.j + self.w, self.i + self.h))
 
 
-class Crop(TupleMixin, _Crop):
+class Crop(TupleMixin, SingleCrop):
     """
-    Crops one image at the given coordinates.
+    Crops multiple images at the given coordinates.
 
     Attributes
     ----------
@@ -81,15 +96,24 @@ class Crop(TupleMixin, _Crop):
     pass
 
 
-class _AutoLevel16to8:
+class SingleAutoLevel16to8:
+    """Converts a 16-bit image to 8-bit representation using "auto-level"
+
+    This transform assumes that the input image is gray-scaled.
+
+    To auto-level, we calculate the maximum and the minimum of the image, and
+    consider such a range should be mapped to the [0,255] range of the
+    destination image.
+    """
+
     def __call__(self, img):
         return PIL.Image.fromarray(
             bob.core.convert(img, "uint8", (0, 255), img.getextrema())
         )
 
 
-class AutoLevel16to8(TupleMixin, _AutoLevel16to8):
-    """Converts a 16-bit image to 8-bit representation using "auto-level"
+class AutoLevel16to8(TupleMixin, SingleAutoLevel16to8):
+    """Converts multiple 16-bit images to 8-bit representations using "auto-level"
 
     This transform assumes that the input images are gray-scaled.
 
@@ -97,16 +121,22 @@ class AutoLevel16to8(TupleMixin, _AutoLevel16to8):
     consider such a range should be mapped to the [0,255] range of the
     destination image.
     """
-
     pass
 
 
-class _ToRGB:
+class SingleToRGB:
+    """Converts from any input format to RGB, using an ADAPTIVE conversion.
+
+    This transform takes the input image and converts it to RGB using
+    :py:meth:`PIL.Image.Image.convert`, with `mode='RGB'` and using all other
+    defaults.  This may be aggressive if applied to 16-bit images without
+    further considerations.
+    """
     def __call__(self, img):
         return img.convert(mode="RGB")
 
 
-class ToRGB(TupleMixin, _ToRGB):
+class ToRGB(TupleMixin, SingleToRGB):
     """Converts from any input format to RGB, using an ADAPTIVE conversion.
 
     This transform takes the input image and converts it to RGB using
diff --git a/doc/api.rst b/doc/api.rst
index 0062dc3fb263282391799916ecc8e9898354007a..79f8148ca4475e12365d0d352e89323ae7766385 100644
--- a/doc/api.rst
+++ b/doc/api.rst
@@ -131,7 +131,7 @@ Datasets
    :template: config.rst
 
    bob.ip.binseg.configs.datasets.csv
-   bob.ip.binseg.configs.datasets.utils
+   bob.ip.binseg.configs.datasets.augmentation
 
    bob.ip.binseg.configs.datasets.chasedb1
    bob.ip.binseg.configs.datasets.chasedb1_test
diff --git a/doc/benchmarkresults.rst b/doc/benchmarkresults.rst
deleted file mode 100644
index 5023a3ce86bebf46c6919a2acfdc092d221b0748..0000000000000000000000000000000000000000
--- a/doc/benchmarkresults.rst
+++ /dev/null
@@ -1,59 +0,0 @@
-.. -*- coding: utf-8 -*-
-
-.. _bob.ip.binseg.benchmarkresults:
-
-===================
- Benchmark Results
-===================
-
-F1 Scores (micro-level)
------------------------
-
-* Benchmark results for models: DRIU, HED, M2U-Net and U-Net.
-* Models are trained and tested on the same dataset using the
-  train-test split as indicated in :ref:`bob.ip.binseg.configs.datasets` (i.e.,
-  these are *intra*-datasets tests)
-* Standard-deviations across all test images are indicated in brakets
-* Database and Model links (table top row and left column) are linked to the
-  originating configuration files used to obtain these results.
-* For some results, the actual deep neural network models are provided (by
-  clicking on the associated F1 Score).
-* Check `our paper`_ for details on the calculation of the F1 Score and standard
-  deviations.
-
-.. list-table::
-   :header-rows: 1
-
-   * - F1 (std)
-     - :py:mod:`DRIU <bob.ip.binseg.configs.models.driu>`
-     - :py:mod:`HED <bob.ip.binseg.configs.models.hed>`
-     - :py:mod:`M2U-Net <bob.ip.binseg.configs.models.m2unet>`
-     - :py:mod:`U-Net <bob.ip.binseg.configs.models.unet>`
-   * - :py:mod:`CHASE-DB1 <bob.ip.binseg.configs.datasets.chasedb1>`
-     - `0.810 (0.021) <driu_chasedb1.pth_>`_
-     - 0.810 (0.022)
-     - `0.802 (0.019) <m2unet_chasedb1.pth_>`_
-     - 0.812 (0.020)
-   * - :py:mod:`DRIVE <bob.ip.binseg.configs.datasets.drive>`
-     - `0.820 (0.014) <driu_drive.pth_>`_
-     - 0.817 (0.013)
-     - `0.803 (0.014) <m2unet_drive.pth_>`_
-     - 0.822 (0.015)
-   * - :py:mod:`HRF <bob.ip.binseg.configs.datasets.hrf_1168>`
-     - `0.783 (0.055) <driu_hrf.pth_>`_
-     - 0.783 (0.058)
-     - `0.780 (0.057) <m2unet_hrf.pth_>`_
-     - 0.788 (0.051)
-   * - :py:mod:`IOSTAR (vessel) <bob.ip.binseg.configs.datasets.iostar_vessel>`
-     - `0.825 (0.020) <driu_iostar.pth_>`_
-     - 0.825 (0.020)
-     - `0.817 (0.020) <m2unet_iostar.pth_>`_
-     - 0.818 (0.019)
-   * - :py:mod:`STARE <bob.ip.binseg.configs.datasets.stare>`
-     - `0.827 (0.037) <driu_stare.pth_>`_
-     - 0.823 (0.037)
-     - `0.815 (0.041) <m2unet_stare.pth_>`_
-     - 0.829 (0.042)
-
-
-.. include:: links.rst
diff --git a/doc/covdresults.rst b/doc/covdresults.rst
deleted file mode 100644
index 3abe84458751bdc56d4ba55e0799a14f92e6e499..0000000000000000000000000000000000000000
--- a/doc/covdresults.rst
+++ /dev/null
@@ -1,115 +0,0 @@
-.. -*- coding: utf-8 -*-
-
-.. _bob.ip.binseg.covdresults:
-
-============================
- COVD- and COVD-SLL Results
-============================
-
-In addition to the M2U-Net architecture, we also evaluated the larger DRIU
-network and a variation of it that contains batch normalization (DRIU+BN) on
-COVD- (Combined Vessel Dataset from all training data minus target test set)
-and COVD-SSL (COVD- and Semi-Supervised Learning). Perhaps surprisingly, for
-the majority of combinations, the performance of the DRIU variants are roughly
-equal or worse to the ones obtained with the much smaller M2U-Net.  We
-anticipate that one reason for this could be overparameterization of large
-VGG-16 models that are pretrained on ImageNet.
-
-
-F1 Scores
----------
-
-Comparison of F1 Scores (micro-level and standard deviation) of DRIU and
-M2U-Net on COVD- and COVD-SSL.  Standard deviation across test-images in
-brackets.
-
-.. list-table::
-   :header-rows: 1
-
-   * - F1 score
-     - :py:mod:`DRIU <bob.ip.binseg.configs.models.driu>`/:py:mod:`DRIU@SSL <bob.ip.binseg.configs.models.driu_ssl>`
-     - :py:mod:`DRIU+BN <bob.ip.binseg.configs.models.driu_bn>`/:py:mod:`DRIU+BN@SSL <bob.ip.binseg.configs.models.driu_bn_ssl>`
-     - :py:mod:`M2U-Net <bob.ip.binseg.configs.models.m2unet>`/:py:mod:`M2U-Net@SSL <bob.ip.binseg.configs.models.m2unet_ssl>`
-   * - :py:mod:`COVD-DRIVE <bob.ip.binseg.configs.datasets.covd_drive>`
-     - 0.788 (0.018)
-     - 0.797 (0.019)
-     - `0.789 (0.018) <m2unet_covd-drive.pth>`_
-   * - :py:mod:`COVD-DRIVE+SSL <bob.ip.binseg.configs.datasets.covd_drive_ssl>`
-     - 0.785 (0.018)
-     - 0.783 (0.019)
-     - `0.791 (0.014) <m2unet_covd-drive_ssl.pth>`_
-   * - :py:mod:`COVD-STARE <bob.ip.binseg.configs.datasets.covd_stare>`
-     - 0.778 (0.117)
-     - 0.778 (0.122)
-     - `0.812 (0.046) <m2unet_covd-stare.pth>`_
-   * - :py:mod:`COVD-STARE+SSL <bob.ip.binseg.configs.datasets.covd_stare_ssl>`
-     - 0.788 (0.102)
-     - 0.811 (0.074)
-     - `0.820 (0.044) <m2unet_covd-stare_ssl.pth>`_
-   * - :py:mod:`COVD-CHASEDB1 <bob.ip.binseg.configs.datasets.covd_chasedb1>`
-     - 0.796 (0.027)
-     - 0.791 (0.025)
-     - `0.788 (0.024) <m2unet_covd-chasedb1.pth>`_
-   * - :py:mod:`COVD-CHASEDB1+SSL <bob.ip.binseg.configs.datasets.covd_chasedb1_ssl>`
-     - 0.796 (0.024)
-     - 0.798 (0.025)
-     - `0.799 (0.026) <m2unet_covd-chasedb1_ssl.pth>`_
-   * - :py:mod:`COVD-HRF <bob.ip.binseg.configs.datasets.covd_hrf>`
-     - 0.799 (0.044)
-     - 0.800 (0.045)
-     - `0.802 (0.045) <m2unet_covd-hrf.pth>`_
-   * - :py:mod:`COVD-HRF+SSL <bob.ip.binseg.configs.datasets.covd_hrf_ssl>`
-     - 0.799 (0.044)
-     - 0.784 (0.048)
-     - `0.797 (0.044) <m2unet_covd-hrf_ssl.pth>`_
-   * - :py:mod:`COVD-IOSTAR-VESSEL <bob.ip.binseg.configs.datasets.covd_iostar_vessel>`
-     - 0.791 (0.021)
-     - 0.777 (0.032)
-     - `0.793 (0.015) <m2unet_covd-iostar.pth>`_
-   * - :py:mod:`COVD-IOSTAR-VESSEL+SSL <bob.ip.binseg.configs.datasets.covd_iostar_vessel_ssl>`
-     - 0.797 (0.017)
-     - 0.811 (0.074)
-     - `0.785 (0.018) <m2unet_covd-iostar_ssl.pth>`_
-
-
-M2U-Net Precision vs. Recall Curves
------------------------------------
-
-Precision vs. recall curves for each evaluated dataset.  Note that here the
-F1-score is calculated on a macro level (see paper for more details).
-
-.. figure:: img/pr_CHASEDB1.png
-   :scale: 50 %
-   :align: center
-   :alt: model comparisons
-
-   CHASE_DB1: Precision vs Recall curve and F1 scores
-
-.. figure:: img/pr_DRIVE.png
-   :scale: 50 %
-   :align: center
-   :alt: model comparisons
-
-   DRIVE: Precision vs Recall curve and F1 scores
-
-.. figure:: img/pr_HRF.png
-   :scale: 50 %
-   :align: center
-   :alt: model comparisons
-
-   HRF: Precision vs Recall curve and F1 scores
-
-.. figure:: img/pr_IOSTARVESSEL.png
-   :scale: 50 %
-   :align: center
-   :alt: model comparisons
-
-   IOSTAR: Precision vs Recall curve and F1 scores
-
-.. figure:: img/pr_STARE.png
-   :scale: 50 %
-   :align: center
-   :alt: model comparisons
-
-   STARE: Precision vs Recall curve and F1 scores
-
diff --git a/doc/extras.inv b/doc/extras.inv
index 11e31176fd52152ea8ea7fcf6c79f00e4e2c6e92..55baaba61aa2392fa690178e23d9dbea964b8fc4 100644
Binary files a/doc/extras.inv and b/doc/extras.inv differ
diff --git a/doc/extras.txt b/doc/extras.txt
index 1c1776cdac843269ea00ec78ad08e1b8766787d9..4bd227b7d23d3ad0c631ffc5a285e08378c44468 100644
--- a/doc/extras.txt
+++ b/doc/extras.txt
@@ -7,4 +7,14 @@ torch.nn.Module py:class 1 https://pytorch.org/docs/stable/nn.html?highlight=mod
 torch.nn.modules.module.Module py:class 1 https://pytorch.org/docs/stable/nn.html?highlight=module#torch.nn.Module -
 torch.utils.data.dataset.Dataset py:class 1 https://pytorch.org/docs/stable/data.html?highlight=dataset#torch.utils.data.Dataset -
 unittest.case.TestCase py:class 1 https://docs.python.org/3/library/unittest.html?highlight=testcase#unittest.TestCase -
-click.core.Option py:class 1 https://click.palletsprojects.com/en/7.x/api/#click.Option
+click.core.Option py:class 1 https://click.palletsprojects.com/en/7.x/api/#click.Option -
+torchvision.transforms.transforms.ColorJitter py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.ColorJitter -
+torchvision.transforms.transforms.RandomRotation py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.RandomRotation -
+torchvision.transforms.transforms.RandomVerticalFlip py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.RandomVerticalFlip -
+torchvision.transforms.transforms.RandomHorizontalFlip py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.RandomHorizontalFlip -
+torchvision.transforms.transforms.Compose py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.Compose -
+torchvision.transforms.transforms.ToTensor py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.ToTensor -
+torchvision.transforms.transforms.Resize py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.Resize -
+torchvision.transforms.transforms.Pad py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.Pad -
+torchvision.transforms.transforms.CenterCrop py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.CenterCrop -
+torchvision.transforms py:module 1 https://pytorch.org/docs/stable/torchvision/transforms.html -
diff --git a/doc/nitpick-exceptions.txt b/doc/nitpick-exceptions.txt
index 4237c341319162532cd53dd45d34c15c8bbc3226..8f6fe3b338c959f4f3113d1280c57228351a8c0a 100644
--- a/doc/nitpick-exceptions.txt
+++ b/doc/nitpick-exceptions.txt
@@ -1,3 +1,2 @@
 py:class torch.nn.modules.loss._Loss
 py:class Module
-py:class click.core.Option