From 427ffd732abef0111851c858b320d58f32d550b2 Mon Sep 17 00:00:00 2001
From: Andre Anjos <andre.dos.anjos@gmail.com>
Date: Fri, 17 Apr 2020 15:56:07 +0200
Subject: [PATCH] [data.transforms] Refactor to re-use torchvision as much as
 possible; Harmonize naming convention with torchvision; Add complete test
 coverage for this module

---
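Notes (review aid, not part of the commit message): below is a minimal sketch
of how the new augmentation constants from configs/datasets/augmentation.py
are meant to be composed with dataset-specific geometric transforms, following
the pattern the covd_* configs in this patch adopt.  The crop/resize values
are illustrative only (they mirror one of the existing configs), not a
recommendation.

    # Sketch under the conventions introduced by this patch; values are
    # illustrative only.
    from bob.ip.binseg.configs.datasets.augmentation import (
        DEFAULT,                   # rotation + flips + color jitter
        DEFAULT_WITHOUT_ROTATION,  # flips + color jitter only
        ROTATION,                  # random rotation only
    )
    from bob.ip.binseg.data.transforms import CenterCrop, Resize
    from bob.ip.binseg.data.utils import SampleList2TorchDataset
    from bob.ip.binseg.data.drive import dataset as drive

    # Keep the random rotation before the geometric operations, then append
    # the remaining augmentations (the covd_* configs follow this order):
    _transforms = (
        ROTATION + [CenterCrop((544, 544)), Resize(960)] + DEFAULT_WITHOUT_ROTATION
    )

    # When no dataset-specific geometry needs to precede the rotation,
    # appending DEFAULT alone is enough:
    # _transforms = [CenterCrop((544, 544))] + DEFAULT

    dataset = SampleList2TorchDataset(
        drive.subsets("default")["train"], transforms=_transforms
    )

The ROTATION/DEFAULT_WITHOUT_ROTATION split exists so that configs needing a
dataset-specific crop or resize can still apply the random rotation first,
which is exactly what the covd_* configs do.
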
 .../binseg/configs/datasets/augmentation.py   |  24 ++++
 bob/ip/binseg/configs/datasets/chasedb1.py    |   9 +-
 .../binseg/configs/datasets/chasedb1_test.py  |  11 +-
 .../binseg/configs/datasets/covd_chasedb1.py  |  53 ++++----
 .../configs/datasets/covd_chasedb1_ssl.py     |   3 +-
 bob/ip/binseg/configs/datasets/covd_drive.py  |  40 +++---
 .../binseg/configs/datasets/covd_drive_ssl.py |   3 +-
 bob/ip/binseg/configs/datasets/covd_hrf.py    |  75 +++++-------
 .../binseg/configs/datasets/covd_hrf_ssl.py   |   3 +-
 .../configs/datasets/covd_iostar_vessel.py    |  61 ++++------
 .../datasets/covd_iostar_vessel_ssl.py        |   1 +
 bob/ip/binseg/configs/datasets/covd_stare.py  |  56 ++++-----
 .../binseg/configs/datasets/covd_stare_ssl.py |   3 +-
 bob/ip/binseg/configs/datasets/csv.py         |  12 +-
 bob/ip/binseg/configs/datasets/drionsdb.py    |  11 +-
 .../binseg/configs/datasets/drionsdb_test.py  |   9 +-
 .../binseg/configs/datasets/dristhigs1_cup.py |  11 +-
 .../configs/datasets/dristhigs1_cup_test.py   |   9 +-
 .../binseg/configs/datasets/dristhigs1_od.py  |  11 +-
 .../configs/datasets/dristhigs1_od_test.py    |   9 +-
 bob/ip/binseg/configs/datasets/drive.py       |  11 +-
 bob/ip/binseg/configs/datasets/drive_test.py  |   9 +-
 bob/ip/binseg/configs/datasets/hrf.py         |  11 +-
 bob/ip/binseg/configs/datasets/hrf_1168.py    |  11 +-
 .../binseg/configs/datasets/hrf_1168_test.py  |   9 +-
 bob/ip/binseg/configs/datasets/hrf_test.py    |   9 +-
 bob/ip/binseg/configs/datasets/iostar_od.py   |   6 +-
 .../binseg/configs/datasets/iostar_od_test.py |   1 +
 .../binseg/configs/datasets/iostar_vessel.py  |   6 +-
 .../configs/datasets/iostar_vessel_test.py    |   1 +
 bob/ip/binseg/configs/datasets/refuge_cup.py  |  11 +-
 .../binseg/configs/datasets/refuge_cup_dev.py |   9 +-
 .../configs/datasets/refuge_cup_test.py       |   9 +-
 bob/ip/binseg/configs/datasets/refuge_od.py   |  11 +-
 .../binseg/configs/datasets/refuge_od_dev.py  |   9 +-
 .../binseg/configs/datasets/refuge_od_test.py |   9 +-
 .../binseg/configs/datasets/rimoner3_cup.py   |  11 +-
 .../configs/datasets/rimoner3_cup_test.py     |   9 +-
 bob/ip/binseg/configs/datasets/rimoner3_od.py |  11 +-
 .../configs/datasets/rimoner3_od_test.py      |   9 +-
 bob/ip/binseg/configs/datasets/stare.py       |  11 +-
 bob/ip/binseg/configs/datasets/stare_test.py  |   7 +-
 bob/ip/binseg/configs/datasets/utils.py       |  19 ---
 bob/ip/binseg/data/transforms.py              |  48 ++++++--
 doc/api.rst                                   |   2 +-
 doc/benchmarkresults.rst                      |  59 ---------
 doc/covdresults.rst                           | 115 ------------------
 doc/extras.inv                                | Bin 319 -> 513 bytes
 doc/extras.txt                                |  12 +-
 doc/nitpick-exceptions.txt                    |   1 -
 50 files changed, 404 insertions(+), 456 deletions(-)
 create mode 100644 bob/ip/binseg/configs/datasets/augmentation.py
 delete mode 100644 bob/ip/binseg/configs/datasets/utils.py
 delete mode 100644 doc/benchmarkresults.rst
 delete mode 100644 doc/covdresults.rst

diff --git a/bob/ip/binseg/configs/datasets/augmentation.py b/bob/ip/binseg/configs/datasets/augmentation.py
new file mode 100644
index 00000000..bb124df1
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/augmentation.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""Dataset augmentation constants"""
+
+from ...data.transforms import (
+    RandomRotation,
+    RandomHorizontalFlip,
+    RandomVerticalFlip,
+    ColorJitter,
+)
+
+ROTATION = [RandomRotation()]
+"""Shared data augmentation based on random rotation only"""
+
+DEFAULT_WITHOUT_ROTATION = [
+    RandomHorizontalFlip(),
+    RandomVerticalFlip(),
+    ColorJitter(),
+]
+"""Shared data augmentation transforms without random rotation"""
+
+DEFAULT = ROTATION + DEFAULT_WITHOUT_ROTATION
+"""Shared data augmentation transforms"""
diff --git a/bob/ip/binseg/configs/datasets/chasedb1.py b/bob/ip/binseg/configs/datasets/chasedb1.py
index 553b9c2f..8874e803 100644
--- a/bob/ip/binseg/configs/datasets/chasedb1.py
+++ b/bob/ip/binseg/configs/datasets/chasedb1.py
@@ -24,10 +24,13 @@ bright strip running down the centre known as the central vessel reflex.
 """
 
 from bob.ip.binseg.data.transforms import Crop
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Crop(0, 18, 960, 960)] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.chasedb1 import dataset as chasedb1
-dataset = SampleList2TorchDataset(chasedb1.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    chasedb1.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/chasedb1_test.py b/bob/ip/binseg/configs/datasets/chasedb1_test.py
index 945096cf..7b891ea8 100644
--- a/bob/ip/binseg/configs/datasets/chasedb1_test.py
+++ b/bob/ip/binseg/configs/datasets/chasedb1_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """CHASE-DB1 (test set) for Vessel Segmentation
 
@@ -24,9 +24,12 @@ bright strip running down the centre known as the central vessel reflex.
 """
 
 from bob.ip.binseg.data.transforms import Crop
-_transforms = [Crop(0, 18, 960, 960)]  #(upper, left, height, width)
+
+_transforms = [Crop(0, 18, 960, 960)]  # (upper, left, height, width)
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.chasedb1 import dataset as chasedb1
-dataset = SampleList2TorchDataset(chasedb1.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    chasedb1.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/covd_chasedb1.py b/bob/ip/binseg/configs/datasets/covd_chasedb1.py
index f24dd304..d47850d5 100644
--- a/bob/ip/binseg/configs/datasets/covd_chasedb1.py
+++ b/bob/ip/binseg/configs/datasets/covd_chasedb1.py
@@ -10,43 +10,44 @@ vessel and HRF (with annotated samples).
 """
 
 from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import (
+    DEFAULT as _DA,
+    DEFAULT_WITHOUT_ROTATION as _DA_NOROT,
+    ROTATION as _ROT,
+)
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 
 from bob.ip.binseg.data.drive import dataset as _raw_drive
-_drive_transforms = [
-        RandomRotation(),
-        CenterCrop((544, 544)),
-        Resize(960),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_drive = SampleList2TorchDataset(_raw_drive.subsets("default")["train"],
-        transforms=_drive_transforms)
+
+_drive_transforms = _ROT + [CenterCrop((544, 544)), Resize(960)] + _DA_NOROT
+_drive = SampleList2TorchDataset(
+    _raw_drive.subsets("default")["train"], transforms=_drive_transforms
+)
 
 from bob.ip.binseg.data.stare import dataset as _raw_stare
-_stare_transforms = [
-        RandomRotation(),
-        Pad((0, 32, 0, 32)),
-        Resize(960),
-        CenterCrop(960),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-    ]
-_stare = SampleList2TorchDataset(_raw_stare.subsets("default")["train"],
-        transforms=_stare_transforms)
+
+_stare_transforms = (
+    _ROT + [Pad((0, 32, 0, 32)), Resize(960), CenterCrop(960)] + _DA_NOROT
+)
+
+_stare = SampleList2TorchDataset(
+    _raw_stare.subsets("default")["train"], transforms=_stare_transforms
+)
 
 from bob.ip.binseg.data.hrf import dataset as _raw_hrf
+
 _hrf_transforms = [Pad((0, 584, 0, 584)), Resize(960)] + _DA
-_hrf = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
-        transforms=_hrf_transforms)
+_hrf = SampleList2TorchDataset(
+    _raw_hrf.subsets("default")["train"], transforms=_hrf_transforms
+)
 
 from bob.ip.binseg.data.iostar import dataset as _raw_iostar
+
 _iostar_transforms = [Resize(960)] + _DA
-_iostar = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
-        transforms=_iostar_transforms)
+_iostar = SampleList2TorchDataset(
+    _raw_iostar.subsets("vessel")["train"], transforms=_iostar_transforms
+)
 
 import torch.utils.data
+
 dataset = torch.utils.data.ConcatDataset([_drive, _stare, _hrf, _iostar])
diff --git a/bob/ip/binseg/configs/datasets/covd_chasedb1_ssl.py b/bob/ip/binseg/configs/datasets/covd_chasedb1_ssl.py
index 370562e3..332cecde 100644
--- a/bob/ip/binseg/configs/datasets/covd_chasedb1_ssl.py
+++ b/bob/ip/binseg/configs/datasets/covd_chasedb1_ssl.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """COVD-CHASE-DB1 + SSL (training set) for Vessel Segmentation
 
@@ -12,4 +12,5 @@ vessel and HRF (with annotated samples) and CHASE-DB1 without labels.
 from bob.ip.binseg.configs.datasets.covd_chasedb1 import dataset as _labelled
 from bob.ip.binseg.configs.datasets.chasedb1 import dataset as _unlabelled
 from bob.ip.binseg.data.utils import SSLDataset
+
 dataset = SSLDataset(_labelled, _unlabelled)
diff --git a/bob/ip/binseg/configs/datasets/covd_drive.py b/bob/ip/binseg/configs/datasets/covd_drive.py
index 0b56c91c..f60d1563 100644
--- a/bob/ip/binseg/configs/datasets/covd_drive.py
+++ b/bob/ip/binseg/configs/datasets/covd_drive.py
@@ -11,34 +11,40 @@ vessel and HRF (with annotated samples).
 
 from bob.ip.binseg.data.transforms import *
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import (
+    DEFAULT as _DA,
+    DEFAULT_WITHOUT_ROTATION as _DA_NOROT,
+    ROTATION as _ROT,
+)
 
 from bob.ip.binseg.data.stare import dataset as _raw_stare
-_stare_transforms = [
-        RandomRotation(),
-        Resize(471),
-        Pad((0, 37, 0, 36)),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_stare = SampleList2TorchDataset(_raw_stare.subsets("default")["train"],
-        transforms=_stare_transforms)
+
+_stare_transforms = _ROT + [Resize(471), Pad((0, 37, 0, 36))] + _DA_NOROT
+_stare = SampleList2TorchDataset(
+    _raw_stare.subsets("default")["train"], transforms=_stare_transforms
+)
 
 from bob.ip.binseg.data.chasedb1 import dataset as _raw_chase
+
 _chase_transforms = [Resize(544), Crop(0, 12, 544, 544)] + _DA
-_chase = SampleList2TorchDataset(_raw_chase.subsets("default")["train"],
-        transforms=_chase_transforms)
+_chase = SampleList2TorchDataset(
+    _raw_chase.subsets("default")["train"], transforms=_chase_transforms
+)
 
 from bob.ip.binseg.data.iostar import dataset as _raw_iostar
+
 _iostar_transforms = [Resize(544)] + _DA
-_iostar = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
-        transforms=_iostar_transforms)
+_iostar = SampleList2TorchDataset(
+    _raw_iostar.subsets("vessel")["train"], transforms=_iostar_transforms
+)
 
 from bob.ip.binseg.data.hrf import dataset as _raw_hrf
+
 _hrf_transforms = [Resize((363)), Pad((0, 90, 0, 91))] + _DA
-_hrf = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
-        transforms=_hrf_transforms)
+_hrf = SampleList2TorchDataset(
+    _raw_hrf.subsets("default")["train"], transforms=_hrf_transforms
+)
 
 import torch.utils.data
+
 dataset = torch.utils.data.ConcatDataset([_stare, _chase, _iostar, _hrf])
diff --git a/bob/ip/binseg/configs/datasets/covd_drive_ssl.py b/bob/ip/binseg/configs/datasets/covd_drive_ssl.py
index a26a81e7..e02480ae 100644
--- a/bob/ip/binseg/configs/datasets/covd_drive_ssl.py
+++ b/bob/ip/binseg/configs/datasets/covd_drive_ssl.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """COVD-DRIVE + SSL (training set) for Vessel Segmentation
 
@@ -12,4 +12,5 @@ vessel and HRF (with annotated samples) and DRIVE without labels.
 from bob.ip.binseg.configs.datasets.covd_drive import dataset as _labelled
 from bob.ip.binseg.configs.datasets.drive import dataset as _unlabelled
 from bob.ip.binseg.data.utils import SSLDataset
+
 dataset = SSLDataset(_labelled, _unlabelled)
diff --git a/bob/ip/binseg/configs/datasets/covd_hrf.py b/bob/ip/binseg/configs/datasets/covd_hrf.py
index 369cf9d3..ec085c39 100644
--- a/bob/ip/binseg/configs/datasets/covd_hrf.py
+++ b/bob/ip/binseg/configs/datasets/covd_hrf.py
@@ -11,57 +11,46 @@ IOSTAR vessel (with annotated samples).
 
 from bob.ip.binseg.data.transforms import *
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
+from bob.ip.binseg.configs.datasets.augmentation import (
+    DEFAULT as _DA,
+    DEFAULT_WITHOUT_ROTATION as _DA_NOROT,
+    ROTATION as _ROT,
+)
 
 from bob.ip.binseg.data.drive import dataset as _raw_drive
-_drive_transforms = [
-        RandomRotation(),
-        Crop(75, 10, 416, 544),
-        Pad((21, 0, 22, 0)),
-        Resize(1168),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_drive = SampleList2TorchDataset(_raw_drive.subsets("default")["train"],
-        transforms=_drive_transforms)
+
+_drive_transforms = (
+    _ROT + [Crop(75, 10, 416, 544), Pad((21, 0, 22, 0)), Resize(1168)] + _DA_NOROT
+)
+_drive = SampleList2TorchDataset(
+    _raw_drive.subsets("default")["train"], transforms=_drive_transforms
+)
 
 from bob.ip.binseg.data.stare import dataset as _raw_stare
-_stare_transforms = [
-        RandomRotation(),
-        Crop(50, 0, 500, 705),
-        Resize(1168),
-        Pad((1, 0, 1, 0)),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_stare = SampleList2TorchDataset(_raw_stare.subsets("default")["train"],
-        transforms=_stare_transforms)
+
+_stare_transforms = (
+    _ROT + [Crop(50, 0, 500, 705), Resize(1168), Pad((1, 0, 1, 0))] + _DA_NOROT
+)
+_stare = SampleList2TorchDataset(
+    _raw_stare.subsets("default")["train"], transforms=_stare_transforms
+)
 
 from bob.ip.binseg.data.chasedb1 import dataset as _raw_chase
-_chase_transforms = [
-        RandomRotation(),
-        Crop(140, 18, 680, 960),
-        Resize(1168),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_chase = SampleList2TorchDataset(_raw_chase.subsets("default")["train"],
-        transforms=_chase_transforms)
+
+_chase_transforms = _ROT + [Crop(140, 18, 680, 960), Resize(1168)] + _DA_NOROT
+_chase = SampleList2TorchDataset(
+    _raw_chase.subsets("default")["train"], transforms=_chase_transforms
+)
 
 from bob.ip.binseg.data.iostar import dataset as _raw_iostar
-_iostar_transforms = [
-        RandomRotation(),
-        Crop(144, 0, 768, 1024),
-        Pad((30, 0, 30, 0)),
-        Resize(1168),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_iostar = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
-        transforms=_iostar_transforms)
+
+_iostar_transforms = (
+    _ROT + [Crop(144, 0, 768, 1024), Pad((30, 0, 30, 0)), Resize(1168)] + _DA_NOROT
+)
+_iostar = SampleList2TorchDataset(
+    _raw_iostar.subsets("vessel")["train"], transforms=_iostar_transforms
+)
 
 import torch.utils.data
+
 dataset = torch.utils.data.ConcatDataset([_drive, _stare, _chase, _iostar])
diff --git a/bob/ip/binseg/configs/datasets/covd_hrf_ssl.py b/bob/ip/binseg/configs/datasets/covd_hrf_ssl.py
index ec769cfb..07d1bf32 100644
--- a/bob/ip/binseg/configs/datasets/covd_hrf_ssl.py
+++ b/bob/ip/binseg/configs/datasets/covd_hrf_ssl.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """COVD-HRF + SSL (training set) for Vessel Segmentation
 
@@ -12,4 +12,5 @@ IOSTAR vessel (with annotated samples), and HRF without labels.
 from bob.ip.binseg.configs.datasets.covd_hrf import dataset as _labelled
 from bob.ip.binseg.configs.datasets.hrf_1168 import dataset as _unlabelled
 from bob.ip.binseg.data.utils import SSLDataset
+
 dataset = SSLDataset(_labelled, _unlabelled)
diff --git a/bob/ip/binseg/configs/datasets/covd_iostar_vessel.py b/bob/ip/binseg/configs/datasets/covd_iostar_vessel.py
index b101553e..31ca1ee4 100644
--- a/bob/ip/binseg/configs/datasets/covd_iostar_vessel.py
+++ b/bob/ip/binseg/configs/datasets/covd_iostar_vessel.py
@@ -11,51 +11,44 @@ HRF (with annotated samples).
 
 from bob.ip.binseg.data.transforms import *
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import (
+    DEFAULT as _DA,
+    DEFAULT_WITHOUT_ROTATION as _DA_NOROT,
+    ROTATION as _ROT,
+)
 
 from bob.ip.binseg.data.drive import dataset as _raw_drive
-_drive_transforms = [
-        RandomRotation(),
-        CenterCrop((540, 540)),
-        Resize(1024),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_drive = SampleList2TorchDataset(_raw_drive.subsets("default")["train"],
-        transforms=_drive_transforms)
+
+_drive_transforms = _ROT + [CenterCrop((540, 540)), Resize(1024)] + _DA_NOROT
+_drive = SampleList2TorchDataset(
+    _raw_drive.subsets("default")["train"], transforms=_drive_transforms
+)
 
 
 from bob.ip.binseg.data.stare import dataset as _raw_stare
-_stare_transforms = [
-        RandomRotation(),
-        Pad((0, 32, 0, 32)),
-        Resize(1024),
-        CenterCrop(1024),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-    ]
-_stare = SampleList2TorchDataset(_raw_stare.subsets("default")["train"],
-        transforms=_stare_transforms)
+
+_stare_transforms = (
+    _ROT + [Pad((0, 32, 0, 32)), Resize(1024), CenterCrop(1024)] + _DA_NOROT
+)
+_stare = SampleList2TorchDataset(
+    _raw_stare.subsets("default")["train"], transforms=_stare_transforms
+)
 
 
 from bob.ip.binseg.data.hrf import dataset as _raw_hrf
+
 _hrf_transforms = [Pad((0, 584, 0, 584)), Resize(1024)] + _DA
-_hrf = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
-        transforms=_hrf_transforms)
+_hrf = SampleList2TorchDataset(
+    _raw_hrf.subsets("default")["train"], transforms=_hrf_transforms
+)
 
 from bob.ip.binseg.data.chasedb1 import dataset as _chase_raw
-_chase_transforms = [
-        RandomRotation(),
-        Crop(0, 18, 960, 960),
-        Resize(1024),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_chase = SampleList2TorchDataset(_chase_raw.subsets("default")["train"],
-        transforms=_chase_transforms)
+
+_chase_transforms = _ROT + [Crop(0, 18, 960, 960), Resize(1024)] + _DA_NOROT
+_chase = SampleList2TorchDataset(
+    _chase_raw.subsets("default")["train"], transforms=_chase_transforms
+)
 
 import torch.utils.data
+
 dataset = torch.utils.data.ConcatDataset([_drive, _stare, _hrf, _chase])
diff --git a/bob/ip/binseg/configs/datasets/covd_iostar_vessel_ssl.py b/bob/ip/binseg/configs/datasets/covd_iostar_vessel_ssl.py
index 6357d710..877b01e4 100644
--- a/bob/ip/binseg/configs/datasets/covd_iostar_vessel_ssl.py
+++ b/bob/ip/binseg/configs/datasets/covd_iostar_vessel_ssl.py
@@ -12,4 +12,5 @@ HRF (with annotated samples) and IOSTAR without labels.
 from bob.ip.binseg.configs.datasets.covd_iostar_vessel import dataset as _labelled
 from bob.ip.binseg.configs.datasets.iostar_vessel import dataset as _unlabelled
 from bob.ip.binseg.data.utils import SSLDataset
+
 dataset = SSLDataset(_labelled, _unlabelled)
diff --git a/bob/ip/binseg/configs/datasets/covd_stare.py b/bob/ip/binseg/configs/datasets/covd_stare.py
index d77e85a7..48215341 100644
--- a/bob/ip/binseg/configs/datasets/covd_stare.py
+++ b/bob/ip/binseg/configs/datasets/covd_stare.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """COVD-STARE (training set) for Vessel Segmentation
 
@@ -9,44 +9,44 @@ The dataset available in this file is composed of DRIVE, CHASE-DB1, IOSTAR
 vessel and HRF (with annotated samples).
 """
 
-from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.transforms import CenterCrop, Pad, Resize
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import (
+    DEFAULT as _DA,
+    DEFAULT_WITHOUT_ROTATION as _DA_NOROT,
+    ROTATION as _ROT,
+)
 
 from bob.ip.binseg.data.drive import dataset as _raw_drive
-_drive_transforms = [
-        RandomRotation(),
-        CenterCrop((470, 544)),
-        Pad((10, 9, 10, 8)),
-        Resize(608),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_drive = SampleList2TorchDataset(_raw_drive.subsets("default")["train"],
-        transforms=_drive_transforms)
+
+_drive_transforms = (
+    _ROT + [CenterCrop((470, 544)), Pad((10, 9, 10, 8)), Resize(608)] + _DA_NOROT
+)
+_drive = SampleList2TorchDataset(
+    _raw_drive.subsets("default")["train"], transforms=_drive_transforms
+)
 
 from bob.ip.binseg.data.chasedb1 import dataset as _raw_chase
-_chase_transforms = [
-        RandomRotation(),
-        CenterCrop((829, 960)),
-        Resize(608),
-        RandomHFlip(),
-        RandomVFlip(),
-        ColorJitter(),
-        ]
-_chase = SampleList2TorchDataset(_raw_chase.subsets("default")["train"],
-        transforms=_chase_transforms)
+
+_chase_transforms = _ROT + [CenterCrop((829, 960)), Resize(608)] + _DA_NOROT
+_chase = SampleList2TorchDataset(
+    _raw_chase.subsets("default")["train"], transforms=_chase_transforms
+)
 
 from bob.ip.binseg.data.iostar import dataset as _raw_iostar
+
 _iostar_transforms = [Pad((81, 0, 81, 0)), Resize(608)] + _DA
-_iostar = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
-        transforms=_iostar_transforms)
+_iostar = SampleList2TorchDataset(
+    _raw_iostar.subsets("vessel")["train"], transforms=_iostar_transforms
+)
 
 from bob.ip.binseg.data.hrf import dataset as _raw_hrf
+
 _hrf_transforms = [Pad((0, 345, 0, 345)), Resize(608)] + _DA
-_hrf = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
-        transforms=_hrf_transforms)
+_hrf = SampleList2TorchDataset(
+    _raw_hrf.subsets("default")["train"], transforms=_hrf_transforms
+)
 
 import torch.utils.data
+
 dataset = torch.utils.data.ConcatDataset([_drive, _chase, _iostar, _hrf])
diff --git a/bob/ip/binseg/configs/datasets/covd_stare_ssl.py b/bob/ip/binseg/configs/datasets/covd_stare_ssl.py
index ff0fce68..702df051 100644
--- a/bob/ip/binseg/configs/datasets/covd_stare_ssl.py
+++ b/bob/ip/binseg/configs/datasets/covd_stare_ssl.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """COVD-STARE + SSL (training set) for Vessel Segmentation
 
@@ -12,4 +12,5 @@ vessel and HRF (with annotated samples) and STARE without labels.
 from bob.ip.binseg.configs.datasets.covd_stare import dataset as _labelled
 from bob.ip.binseg.configs.datasets.stare import dataset as _unlabelled
 from bob.ip.binseg.data.utils import SSLDataset
+
 dataset = SSLDataset(_labelled, _unlabelled)
diff --git a/bob/ip/binseg/configs/datasets/csv.py b/bob/ip/binseg/configs/datasets/csv.py
index a03fcbad..62aa3b11 100644
--- a/bob/ip/binseg/configs/datasets/csv.py
+++ b/bob/ip/binseg/configs/datasets/csv.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """Example CSV-based filelist dataset
 
@@ -91,6 +91,7 @@ def _loader(context, sample):
     # compact.  Of course, you can make those paths absolute and then simplify
     # it here.
     import os
+
     root_path = "/path/where/raw/files/sit"
 
     return dict(
@@ -98,16 +99,18 @@ def _loader(context, sample):
         label=load_pil_1(os.path.join(root_path, sample["label"])),
     )
 
+
 # This is just a class that puts everything together: the CSV file, how to load
 # each sample defined in the dataset, names for the various columns of the CSV
 # file and how to make unique keys for each sample (keymaker).  Once created,
 # this object can be called to generate sample lists.
 from bob.ip.binseg.data.dataset import CSVDataset
+
 raw_dataset = CSVDataset(
     # path to the CSV file(s) - you may add as many subsets as you want, each
     # with an unique name, you'll use later to generate sample lists
     subsets=dict(data="<path/to/train.csv>"),
-    fieldnames=("data", "label"),  #these are the column names
+    fieldnames=("data", "label"),  # these are the column names
     loader=_loader,
     keymaker=data_path_keymaker,
 )
@@ -119,7 +122,7 @@ raw_dataset = CSVDataset(
 # model that requires image inputs of 544 x 544 pixels.
 from bob.ip.binseg.data.transforms import CenterCrop
 
-# from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+# from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
 _transforms = [
     CenterCrop((544, 544)),
 ]  # + _DA
@@ -129,4 +132,5 @@ _transforms = [
 # This class also inherits from pytorch Dataset and respect its required API.
 # See the documentation for details.
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
-#dataset = SampleList2TorchDataset(raw_dataset.samples("data"), _transforms)
+
+# dataset = SampleList2TorchDataset(raw_dataset.samples("data"), _transforms)
diff --git a/bob/ip/binseg/configs/datasets/drionsdb.py b/bob/ip/binseg/configs/datasets/drionsdb.py
index 24556208..2f9772ad 100644
--- a/bob/ip/binseg/configs/datasets/drionsdb.py
+++ b/bob/ip/binseg/configs/datasets/drionsdb.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRIONS-DB (training set) for Optic Disc Segmentation
 
@@ -19,10 +19,13 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Pad((4, 8, 4, 8))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drionsdb import dataset as drionsdb
-dataset = SampleList2TorchDataset(drionsdb.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drionsdb.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/drionsdb_test.py b/bob/ip/binseg/configs/datasets/drionsdb_test.py
index ed38279e..f377f750 100644
--- a/bob/ip/binseg/configs/datasets/drionsdb_test.py
+++ b/bob/ip/binseg/configs/datasets/drionsdb_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRIONS-DB (test set) for Optic Disc Segmentation
 
@@ -19,9 +19,12 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
+
 _transforms = [Pad((4, 8, 4, 8))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drionsdb import dataset as drionsdb
-dataset = SampleList2TorchDataset(drionsdb.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drionsdb.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1_cup.py b/bob/ip/binseg/configs/datasets/dristhigs1_cup.py
index 50ed0f1f..4872ee92 100644
--- a/bob/ip/binseg/configs/datasets/dristhigs1_cup.py
+++ b/bob/ip/binseg/configs/datasets/dristhigs1_cup.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRISHTI-GS1 (training set) for Cup Segmentation
 
@@ -21,10 +21,13 @@ and notching information.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [CenterCrop((1760, 2048))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drishtigs1 import dataset as drishtigs1
-dataset = SampleList2TorchDataset(drishtigs1.subsets("optic-cup-all")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drishtigs1.subsets("optic-cup-all")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1_cup_test.py b/bob/ip/binseg/configs/datasets/dristhigs1_cup_test.py
index 365a1723..8c0af6b0 100644
--- a/bob/ip/binseg/configs/datasets/dristhigs1_cup_test.py
+++ b/bob/ip/binseg/configs/datasets/dristhigs1_cup_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRISHTI-GS1 (test set) for Cup Segmentation
 
@@ -21,9 +21,12 @@ and notching information.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop((1760, 2048))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drishtigs1 import dataset as drishtigs1
-dataset = SampleList2TorchDataset(drishtigs1.subsets("optic-cup-all")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drishtigs1.subsets("optic-cup-all")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1_od.py b/bob/ip/binseg/configs/datasets/dristhigs1_od.py
index b35d635d..d07af7eb 100644
--- a/bob/ip/binseg/configs/datasets/dristhigs1_od.py
+++ b/bob/ip/binseg/configs/datasets/dristhigs1_od.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRISHTI-GS1 (training set) for Optic Disc Segmentation
 
@@ -21,10 +21,13 @@ and notching information.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [CenterCrop((1760, 2048))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drishtigs1 import dataset as drishtigs1
-dataset = SampleList2TorchDataset(drishtigs1.subsets("optic-disc-all")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drishtigs1.subsets("optic-disc-all")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1_od_test.py b/bob/ip/binseg/configs/datasets/dristhigs1_od_test.py
index 77fb604a..19a1878e 100644
--- a/bob/ip/binseg/configs/datasets/dristhigs1_od_test.py
+++ b/bob/ip/binseg/configs/datasets/dristhigs1_od_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRISHTI-GS1 (test set) for Optic Disc Segmentation
 
@@ -21,9 +21,12 @@ and notching information.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop((1760, 2048))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drishtigs1 import dataset as drishtigs1
-dataset = SampleList2TorchDataset(drishtigs1.subsets("optic-disc-all")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drishtigs1.subsets("optic-disc-all")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/drive.py b/bob/ip/binseg/configs/datasets/drive.py
index 1d3d9a3e..0af4e692 100644
--- a/bob/ip/binseg/configs/datasets/drive.py
+++ b/bob/ip/binseg/configs/datasets/drive.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRIVE (training set) for Vessel Segmentation
 
@@ -14,10 +14,13 @@ segmentation of blood vessels in retinal images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [CenterCrop((544, 544))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drive import dataset as drive
-dataset = SampleList2TorchDataset(drive.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drive.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/drive_test.py b/bob/ip/binseg/configs/datasets/drive_test.py
index d7f49c4b..31fea70f 100644
--- a/bob/ip/binseg/configs/datasets/drive_test.py
+++ b/bob/ip/binseg/configs/datasets/drive_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """DRIVE (test set) for Vessel Segmentation
 
@@ -14,9 +14,12 @@ segmentation of blood vessels in retinal images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop((544, 544))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.drive import dataset as drive
-dataset = SampleList2TorchDataset(drive.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    drive.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/hrf.py b/bob/ip/binseg/configs/datasets/hrf.py
index 1c9c3426..556b67b3 100644
--- a/bob/ip/binseg/configs/datasets/hrf.py
+++ b/bob/ip/binseg/configs/datasets/hrf.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """HRF (training set) for Vessel Segmentation
 
@@ -15,10 +15,13 @@ x 2336. One set of ground-truth vessel annotations is available.
 """
 
 from bob.ip.binseg.data.transforms import Crop
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Crop(0, 108, 2336, 3296)] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.hrf import dataset as hrf
-dataset = SampleList2TorchDataset(hrf.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    hrf.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/hrf_1168.py b/bob/ip/binseg/configs/datasets/hrf_1168.py
index cd2bcea8..5a70a9e5 100644
--- a/bob/ip/binseg/configs/datasets/hrf_1168.py
+++ b/bob/ip/binseg/configs/datasets/hrf_1168.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """HRF (training set) for Vessel Segmentation
 
@@ -15,10 +15,13 @@ x 2336. One set of ground-truth vessel annotations is available.
 """
 
 from bob.ip.binseg.data.transforms import Crop, Resize
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Crop(0, 108, 2336, 3296), Resize(1168)] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.hrf import dataset as hrf
-dataset = SampleList2TorchDataset(hrf.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    hrf.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/hrf_1168_test.py b/bob/ip/binseg/configs/datasets/hrf_1168_test.py
index 89b5df45..16ebe9af 100644
--- a/bob/ip/binseg/configs/datasets/hrf_1168_test.py
+++ b/bob/ip/binseg/configs/datasets/hrf_1168_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """HRF (test set) for Vessel Segmentation
 
@@ -15,9 +15,12 @@ x 2336. One set of ground-truth vessel annotations is available.
 """
 
 from bob.ip.binseg.data.transforms import Crop, Resize
+
 _transforms = [Crop(0, 108, 2336, 3296), Resize(1168)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.hrf import dataset as hrf
-dataset = SampleList2TorchDataset(hrf.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    hrf.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/hrf_test.py b/bob/ip/binseg/configs/datasets/hrf_test.py
index 2c926af4..d4a364ed 100644
--- a/bob/ip/binseg/configs/datasets/hrf_test.py
+++ b/bob/ip/binseg/configs/datasets/hrf_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """HRF (test set) for Vessel Segmentation
 
@@ -15,9 +15,12 @@ x 2336. One set of ground-truth vessel annotations is available.
 """
 
 from bob.ip.binseg.data.transforms import Crop
+
 _transforms = [Crop(0, 108, 2336, 3296)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.hrf import dataset as hrf
-dataset = SampleList2TorchDataset(hrf.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    hrf.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/iostar_od.py b/bob/ip/binseg/configs/datasets/iostar_od.py
index 55b0db4a..67d8b45a 100644
--- a/bob/ip/binseg/configs/datasets/iostar_od.py
+++ b/bob/ip/binseg/configs/datasets/iostar_od.py
@@ -16,8 +16,8 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 """
 
 from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = SampleList2TorchDataset(iostar.subsets("optic-disc")["train"],
-        transforms=_DA)
+
+dataset = SampleList2TorchDataset(iostar.subsets("optic-disc")["train"], transforms=_DA)
diff --git a/bob/ip/binseg/configs/datasets/iostar_od_test.py b/bob/ip/binseg/configs/datasets/iostar_od_test.py
index d9503af9..059cfe35 100644
--- a/bob/ip/binseg/configs/datasets/iostar_od_test.py
+++ b/bob/ip/binseg/configs/datasets/iostar_od_test.py
@@ -17,4 +17,5 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
+
 dataset = SampleList2TorchDataset(iostar.subsets("optic-disc")["test"])
diff --git a/bob/ip/binseg/configs/datasets/iostar_vessel.py b/bob/ip/binseg/configs/datasets/iostar_vessel.py
index ae749759..3167c9fb 100644
--- a/bob/ip/binseg/configs/datasets/iostar_vessel.py
+++ b/bob/ip/binseg/configs/datasets/iostar_vessel.py
@@ -16,8 +16,8 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 """
 
 from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = SampleList2TorchDataset(iostar.subsets("vessel")["train"],
-        transforms=_DA)
+
+dataset = SampleList2TorchDataset(iostar.subsets("vessel")["train"], transforms=_DA)
diff --git a/bob/ip/binseg/configs/datasets/iostar_vessel_test.py b/bob/ip/binseg/configs/datasets/iostar_vessel_test.py
index c05b8455..c23eeb8c 100644
--- a/bob/ip/binseg/configs/datasets/iostar_vessel_test.py
+++ b/bob/ip/binseg/configs/datasets/iostar_vessel_test.py
@@ -17,4 +17,5 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
+
 dataset = SampleList2TorchDataset(iostar.subsets("vessel")["test"])
diff --git a/bob/ip/binseg/configs/datasets/refuge_cup.py b/bob/ip/binseg/configs/datasets/refuge_cup.py
index 13af1da7..d762b9e6 100644
--- a/bob/ip/binseg/configs/datasets/refuge_cup.py
+++ b/bob/ip/binseg/configs/datasets/refuge_cup.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (training set) for Optic Cup Segmentation
 
@@ -16,10 +16,13 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import Resize, Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Resize(1539), Pad((21, 46, 22, 47))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-cup")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-cup")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/refuge_cup_dev.py b/bob/ip/binseg/configs/datasets/refuge_cup_dev.py
index 9136756b..d8fe4c81 100644
--- a/bob/ip/binseg/configs/datasets/refuge_cup_dev.py
+++ b/bob/ip/binseg/configs/datasets/refuge_cup_dev.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (validation set) for Optic Cup Segmentation
 
@@ -16,9 +16,12 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop(1632)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-cup")["validation"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-cup")["validation"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/refuge_cup_test.py b/bob/ip/binseg/configs/datasets/refuge_cup_test.py
index a42c6a54..878f3841 100644
--- a/bob/ip/binseg/configs/datasets/refuge_cup_test.py
+++ b/bob/ip/binseg/configs/datasets/refuge_cup_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (validation set) for Optic Cup Segmentation
 
@@ -16,9 +16,12 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop(1632)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-cup")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-cup")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/refuge_od.py b/bob/ip/binseg/configs/datasets/refuge_od.py
index 352d161d..a415bfbc 100644
--- a/bob/ip/binseg/configs/datasets/refuge_od.py
+++ b/bob/ip/binseg/configs/datasets/refuge_od.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (training set) for Optic Disc Segmentation
 
@@ -16,10 +16,13 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import Resize, Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Resize(1539), Pad((21, 46, 22, 47))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-disc")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-disc")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/refuge_od_dev.py b/bob/ip/binseg/configs/datasets/refuge_od_dev.py
index e404f9cd..c4cccd89 100644
--- a/bob/ip/binseg/configs/datasets/refuge_od_dev.py
+++ b/bob/ip/binseg/configs/datasets/refuge_od_dev.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (validation set) for Optic Disc Segmentation
 
@@ -16,9 +16,12 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop(1632)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-disc")["validation"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-disc")["validation"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/refuge_od_test.py b/bob/ip/binseg/configs/datasets/refuge_od_test.py
index 8da7a8bb..0f2c50f2 100644
--- a/bob/ip/binseg/configs/datasets/refuge_od_test.py
+++ b/bob/ip/binseg/configs/datasets/refuge_od_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """REFUGE (validation set) for Optic Disc Segmentation
 
@@ -16,9 +16,12 @@ dataset of retinal fundus images.
 """
 
 from bob.ip.binseg.data.transforms import CenterCrop
+
 _transforms = [CenterCrop(1632)]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.refuge import dataset as refuge
-dataset = SampleList2TorchDataset(refuge.subsets("optic-disc")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    refuge.subsets("optic-disc")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3_cup.py b/bob/ip/binseg/configs/datasets/rimoner3_cup.py
index e7dea012..021d58fb 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3_cup.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3_cup.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """RIM-ONE r3 (training set) for Cup Segmentation
 
@@ -17,10 +17,13 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Pad((8, 8, 8, 8))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.rimoner3 import dataset as rimoner3
-dataset = SampleList2TorchDataset(rimoner3.subsets("optic-cup-exp1")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    rimoner3.subsets("optic-cup-exp1")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3_cup_test.py b/bob/ip/binseg/configs/datasets/rimoner3_cup_test.py
index c7f17fca..d3da003a 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3_cup_test.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3_cup_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """RIM-ONE r3 (test set) for Cup Segmentation
 
@@ -17,9 +17,12 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
+
 _transforms = [Pad((8, 8, 8, 8))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.rimoner3 import dataset as rimoner3
-dataset = SampleList2TorchDataset(rimoner3.subsets("optic-cup-exp1")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    rimoner3.subsets("optic-cup-exp1")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3_od.py b/bob/ip/binseg/configs/datasets/rimoner3_od.py
index b7da94d1..9084153d 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3_od.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3_od.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """RIM-ONE r3 (training set) for Optic Disc Segmentation
 
@@ -17,10 +17,13 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Pad((8, 8, 8, 8))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.rimoner3 import dataset as rimoner3
-dataset = SampleList2TorchDataset(rimoner3.subsets("optic-disc-exp1")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    rimoner3.subsets("optic-disc-exp1")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3_od_test.py b/bob/ip/binseg/configs/datasets/rimoner3_od_test.py
index 4012858f..0593ead8 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3_od_test.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3_od_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """RIM-ONE r3 (test set) for Optic Disc Segmentation
 
@@ -17,9 +17,12 @@ baseline.
 """
 
 from bob.ip.binseg.data.transforms import Pad
+
 _transforms = [Pad((8, 8, 8, 8))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.rimoner3 import dataset as rimoner3
-dataset = SampleList2TorchDataset(rimoner3.subsets("optic-disc-exp1")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    rimoner3.subsets("optic-disc-exp1")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/stare.py b/bob/ip/binseg/configs/datasets/stare.py
index 4ef2d51c..4784ce3b 100644
--- a/bob/ip/binseg/configs/datasets/stare.py
+++ b/bob/ip/binseg/configs/datasets/stare.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# coding=utf-8
 
 """STARE (training set) for Vessel Segmentation
 
@@ -17,10 +17,13 @@ for training and testing. The second set by Valentina Kouznetsova acts as a
 """
 
 from bob.ip.binseg.data.transforms import Pad
-from bob.ip.binseg.configs.datasets.utils import DATA_AUGMENTATION as _DA
+from bob.ip.binseg.configs.datasets.augmentation import DEFAULT as _DA
+
 _transforms = [Pad((2, 1, 2, 2))] + _DA
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.stare import dataset as stare
-dataset = SampleList2TorchDataset(stare.subsets("default")["train"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    stare.subsets("default")["train"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/stare_test.py b/bob/ip/binseg/configs/datasets/stare_test.py
index e8d78bcb..18b360f6 100644
--- a/bob/ip/binseg/configs/datasets/stare_test.py
+++ b/bob/ip/binseg/configs/datasets/stare_test.py
@@ -17,9 +17,12 @@ for training and testing. The second set by Valentina Kouznetsova acts as a
 """
 
 from bob.ip.binseg.data.transforms import Pad
+
 _transforms = [Pad((2, 1, 2, 2))]
 
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.stare import dataset as stare
-dataset = SampleList2TorchDataset(stare.subsets("default")["test"],
-        transforms=_transforms)
+
+dataset = SampleList2TorchDataset(
+    stare.subsets("default")["test"], transforms=_transforms
+)
diff --git a/bob/ip/binseg/configs/datasets/utils.py b/bob/ip/binseg/configs/datasets/utils.py
deleted file mode 100644
index 35850c10..00000000
--- a/bob/ip/binseg/configs/datasets/utils.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-"""Dataset configuration utilities"""
-
-from ...data.transforms import (
-    RandomHFlip,
-    RandomVFlip,
-    RandomRotation,
-    ColorJitter,
-)
-
-DATA_AUGMENTATION = [
-        RandomHFlip(),
-        RandomVFlip(),
-        RandomRotation(),
-        ColorJitter(),
-        ]
-"""Shared data augmentation transforms"""
diff --git a/bob/ip/binseg/data/transforms.py b/bob/ip/binseg/data/transforms.py
index 3c8a09dc..ea621590 100644
--- a/bob/ip/binseg/data/transforms.py
+++ b/bob/ip/binseg/data/transforms.py
@@ -51,7 +51,22 @@ class Compose(torchvision.transforms.Compose):
         return args
 
 
-class _Crop:
+class SingleCrop:
+    """
+    Crops one image at the given coordinates.
+
+    Attributes
+    ----------
+    i : int
+        upper pixel coordinate.
+    j : int
+        left pixel coordinate.
+    h : int
+        height of the cropped image.
+    w : int
+        width of the cropped image.
+    """
+
     def __init__(self, i, j, h, w):
         self.i = i
         self.j = j
@@ -62,9 +77,9 @@ class _Crop:
         return img.crop((self.j, self.i, self.j + self.w, self.i + self.h))
 
 
-class Crop(TupleMixin, _Crop):
+class Crop(TupleMixin, SingleCrop):
     """
-    Crops one image at the given coordinates.
+    Crops multiple images at the given coordinates.
 
     Attributes
     ----------
@@ -81,15 +96,24 @@ class Crop(TupleMixin, _Crop):
     pass
 
 
-class _AutoLevel16to8:
+class SingleAutoLevel16to8:
+    """Converts a 16-bit image to 8-bit representation using "auto-level"
+
+    This transform assumes that the input image is gray-scaled.
+
+    To auto-level, we calculate the maximum and the minimum of the image, and
+    consider such a range should be mapped to the [0,255] range of the
+    destination image.
+    """
+
     def __call__(self, img):
         return PIL.Image.fromarray(
             bob.core.convert(img, "uint8", (0, 255), img.getextrema())
         )
 
 
-class AutoLevel16to8(TupleMixin, _AutoLevel16to8):
-    """Converts a 16-bit image to 8-bit representation using "auto-level"
+class AutoLevel16to8(TupleMixin, SingleAutoLevel16to8):
+    """Converts multiple 16-bit images to 8-bit representations using "auto-level"
 
     This transform assumes that the input images are gray-scaled.
 
@@ -97,16 +121,22 @@ class AutoLevel16to8(TupleMixin, _AutoLevel16to8):
     consider such a range should be mapped to the [0,255] range of the
     destination image.
     """
-
     pass
 
 
-class _ToRGB:
+class SingleToRGB:
+    """Converts from any input format to RGB, using an ADAPTIVE conversion.
+
+    This transform takes the input image and converts it to RGB using
+    :py:meth:`PIL.Image.Image.convert`, with `mode='RGB'` and using all other
+    defaults.  This may be aggressive if applied to 16-bit images without
+    further considerations.
+    """
     def __call__(self, img):
         return img.convert(mode="RGB")
 
 
-class ToRGB(TupleMixin, _ToRGB):
+class ToRGB(TupleMixin, SingleToRGB):
     """Converts from any input format to RGB, using an ADAPTIVE conversion.
 
     This transform takes the input image and converts it to RGB using
diff --git a/doc/api.rst b/doc/api.rst
index 0062dc3f..79f8148c 100644
--- a/doc/api.rst
+++ b/doc/api.rst
@@ -131,7 +131,7 @@ Datasets
    :template: config.rst
 
    bob.ip.binseg.configs.datasets.csv
-   bob.ip.binseg.configs.datasets.utils
+   bob.ip.binseg.configs.datasets.augmentation
 
    bob.ip.binseg.configs.datasets.chasedb1
    bob.ip.binseg.configs.datasets.chasedb1_test
diff --git a/doc/benchmarkresults.rst b/doc/benchmarkresults.rst
deleted file mode 100644
index 5023a3ce..00000000
--- a/doc/benchmarkresults.rst
+++ /dev/null
@@ -1,59 +0,0 @@
-.. -*- coding: utf-8 -*-
-
-.. _bob.ip.binseg.benchmarkresults:
-
-===================
- Benchmark Results
-===================
-
-F1 Scores (micro-level)
------------------------
-
-* Benchmark results for models: DRIU, HED, M2U-Net and U-Net.
-* Models are trained and tested on the same dataset using the
-  train-test split as indicated in :ref:`bob.ip.binseg.configs.datasets` (i.e.,
-  these are *intra*-datasets tests)
-* Standard-deviations across all test images are indicated in brakets
-* Database and Model links (table top row and left column) are linked to the
-  originating configuration files used to obtain these results.
-* For some results, the actual deep neural network models are provided (by
-  clicking on the associated F1 Score).
-* Check `our paper`_ for details on the calculation of the F1 Score and standard
-  deviations.
-
-.. list-table::
-   :header-rows: 1
-
-   * - F1 (std)
-     - :py:mod:`DRIU <bob.ip.binseg.configs.models.driu>`
-     - :py:mod:`HED <bob.ip.binseg.configs.models.hed>`
-     - :py:mod:`M2U-Net <bob.ip.binseg.configs.models.m2unet>`
-     - :py:mod:`U-Net <bob.ip.binseg.configs.models.unet>`
-   * - :py:mod:`CHASE-DB1 <bob.ip.binseg.configs.datasets.chasedb1>`
-     - `0.810 (0.021) <driu_chasedb1.pth_>`_
-     - 0.810 (0.022)
-     - `0.802 (0.019) <m2unet_chasedb1.pth_>`_
-     - 0.812 (0.020)
-   * - :py:mod:`DRIVE <bob.ip.binseg.configs.datasets.drive>`
-     - `0.820 (0.014) <driu_drive.pth_>`_
-     - 0.817 (0.013)
-     - `0.803 (0.014) <m2unet_drive.pth_>`_
-     - 0.822 (0.015)
-   * - :py:mod:`HRF <bob.ip.binseg.configs.datasets.hrf_1168>`
-     - `0.783 (0.055) <driu_hrf.pth_>`_
-     - 0.783 (0.058)
-     - `0.780 (0.057) <m2unet_hrf.pth_>`_
-     - 0.788 (0.051)
-   * - :py:mod:`IOSTAR (vessel) <bob.ip.binseg.configs.datasets.iostar_vessel>`
-     - `0.825 (0.020) <driu_iostar.pth_>`_
-     - 0.825 (0.020)
-     - `0.817 (0.020) <m2unet_iostar.pth_>`_
-     - 0.818 (0.019)
-   * - :py:mod:`STARE <bob.ip.binseg.configs.datasets.stare>`
-     - `0.827 (0.037) <driu_stare.pth_>`_
-     - 0.823 (0.037)
-     - `0.815 (0.041) <m2unet_stare.pth_>`_
-     - 0.829 (0.042)
-
-
-.. include:: links.rst
diff --git a/doc/covdresults.rst b/doc/covdresults.rst
deleted file mode 100644
index 3abe8445..00000000
--- a/doc/covdresults.rst
+++ /dev/null
@@ -1,115 +0,0 @@
-.. -*- coding: utf-8 -*-
-
-.. _bob.ip.binseg.covdresults:
-
-============================
- COVD- and COVD-SLL Results
-============================
-
-In addition to the M2U-Net architecture, we also evaluated the larger DRIU
-network and a variation of it that contains batch normalization (DRIU+BN) on
-COVD- (Combined Vessel Dataset from all training data minus target test set)
-and COVD-SSL (COVD- and Semi-Supervised Learning). Perhaps surprisingly, for
-the majority of combinations, the performance of the DRIU variants are roughly
-equal or worse to the ones obtained with the much smaller M2U-Net.  We
-anticipate that one reason for this could be overparameterization of large
-VGG-16 models that are pretrained on ImageNet.
-
-
-F1 Scores
----------
-
-Comparison of F1 Scores (micro-level and standard deviation) of DRIU and
-M2U-Net on COVD- and COVD-SSL.  Standard deviation across test-images in
-brackets.
-
-.. list-table::
-   :header-rows: 1
-
-   * - F1 score
-     - :py:mod:`DRIU <bob.ip.binseg.configs.models.driu>`/:py:mod:`DRIU@SSL <bob.ip.binseg.configs.models.driu_ssl>`
-     - :py:mod:`DRIU+BN <bob.ip.binseg.configs.models.driu_bn>`/:py:mod:`DRIU+BN@SSL <bob.ip.binseg.configs.models.driu_bn_ssl>`
-     - :py:mod:`M2U-Net <bob.ip.binseg.configs.models.m2unet>`/:py:mod:`M2U-Net@SSL <bob.ip.binseg.configs.models.m2unet_ssl>`
-   * - :py:mod:`COVD-DRIVE <bob.ip.binseg.configs.datasets.covd_drive>`
-     - 0.788 (0.018)
-     - 0.797 (0.019)
-     - `0.789 (0.018) <m2unet_covd-drive.pth>`_
-   * - :py:mod:`COVD-DRIVE+SSL <bob.ip.binseg.configs.datasets.covd_drive_ssl>`
-     - 0.785 (0.018)
-     - 0.783 (0.019)
-     - `0.791 (0.014) <m2unet_covd-drive_ssl.pth>`_
-   * - :py:mod:`COVD-STARE <bob.ip.binseg.configs.datasets.covd_stare>`
-     - 0.778 (0.117)
-     - 0.778 (0.122)
-     - `0.812 (0.046) <m2unet_covd-stare.pth>`_
-   * - :py:mod:`COVD-STARE+SSL <bob.ip.binseg.configs.datasets.covd_stare_ssl>`
-     - 0.788 (0.102)
-     - 0.811 (0.074)
-     - `0.820 (0.044) <m2unet_covd-stare_ssl.pth>`_
-   * - :py:mod:`COVD-CHASEDB1 <bob.ip.binseg.configs.datasets.covd_chasedb1>`
-     - 0.796 (0.027)
-     - 0.791 (0.025)
-     - `0.788 (0.024) <m2unet_covd-chasedb1.pth>`_
-   * - :py:mod:`COVD-CHASEDB1+SSL <bob.ip.binseg.configs.datasets.covd_chasedb1_ssl>`
-     - 0.796 (0.024)
-     - 0.798 (0.025)
-     - `0.799 (0.026) <m2unet_covd-chasedb1_ssl.pth>`_
-   * - :py:mod:`COVD-HRF <bob.ip.binseg.configs.datasets.covd_hrf>`
-     - 0.799 (0.044)
-     - 0.800 (0.045)
-     - `0.802 (0.045) <m2unet_covd-hrf.pth>`_
-   * - :py:mod:`COVD-HRF+SSL <bob.ip.binseg.configs.datasets.covd_hrf_ssl>`
-     - 0.799 (0.044)
-     - 0.784 (0.048)
-     - `0.797 (0.044) <m2unet_covd-hrf_ssl.pth>`_
-   * - :py:mod:`COVD-IOSTAR-VESSEL <bob.ip.binseg.configs.datasets.covd_iostar_vessel>`
-     - 0.791 (0.021)
-     - 0.777 (0.032)
-     - `0.793 (0.015) <m2unet_covd-iostar.pth>`_
-   * - :py:mod:`COVD-IOSTAR-VESSEL+SSL <bob.ip.binseg.configs.datasets.covd_iostar_vessel_ssl>`
-     - 0.797 (0.017)
-     - 0.811 (0.074)
-     - `0.785 (0.018) <m2unet_covd-iostar_ssl.pth>`_
-
-
-M2U-Net Precision vs. Recall Curves
------------------------------------
-
-Precision vs. recall curves for each evaluated dataset.  Note that here the
-F1-score is calculated on a macro level (see paper for more details).
-
-.. figure:: img/pr_CHASEDB1.png
-   :scale: 50 %
-   :align: center
-   :alt: model comparisons
-
-   CHASE_DB1: Precision vs Recall curve and F1 scores
-
-.. figure:: img/pr_DRIVE.png
-   :scale: 50 %
-   :align: center
-   :alt: model comparisons
-
-   DRIVE: Precision vs Recall curve and F1 scores
-
-.. figure:: img/pr_HRF.png
-   :scale: 50 %
-   :align: center
-   :alt: model comparisons
-
-   HRF: Precision vs Recall curve and F1 scores
-
-.. figure:: img/pr_IOSTARVESSEL.png
-   :scale: 50 %
-   :align: center
-   :alt: model comparisons
-
-   IOSTAR: Precision vs Recall curve and F1 scores
-
-.. figure:: img/pr_STARE.png
-   :scale: 50 %
-   :align: center
-   :alt: model comparisons
-
-   STARE: Precision vs Recall curve and F1 scores
-
diff --git a/doc/extras.inv b/doc/extras.inv
index 11e31176fd52152ea8ea7fcf6c79f00e4e2c6e92..55baaba61aa2392fa690178e23d9dbea964b8fc4 100644
GIT binary patch
delta 400
zcmdnb)W|ZSx_)Y)_hAEp<M+R7vem{l{ZQaJ(%#vja7toddS=YxOuZ+0kG{S1oxN`L
zT{g*EwkA8<cK?6xUzE$SM@PcqQmV|?u*z?nx{u`<Pjyr1TYrpo;$h9_dCnV8tgqk`
zvYZqlb#$?Z<I<BW7Vo+~tLM?Kt#2ehZ4G?BqJ5FquXPtzA8KjOt3Ms3{5rc#!;pFQ
zPoWE@naw)lDz6wtH~zMm-@Nv9^#eCO>j!VXs4w1n)rG6`<c0%cinCfDNbG3Z=%9b#
z+K24CFPFIYKV0j^AHOzqQ{?Omul(IEW&Av;QV}}e>_+mL8BbaayDn7z2$*CbGBqZ$
z?(n{y1+&r(R|jU6ef3VRNPf~<f3*CAYT02)i4Xo$mv5f6Rr!o{*6(+=EoGjX=bjyU
z7I3cd$V%Ox?!~qdyY`zu_4H1Z>nRie_=9`KyU&7q?lmptwtB00dDq60hO7^MJyS&7
zxg#{X4Za+|m^8^RByop8o84g!&ySzAJ~f^VX<70w(CklKnq1qv4}34CtUu+r=iNWf
NG)0Cw1|J?n0RS4#&0_!n

delta 204
zcmZo<+0Qhgy1sWmUz34Ad-=SkmYHV9dXHGV$cm|H43fIFWQ*^6i4QN|$~!da>G73+
zKmGLG?+aCWouw<<?pzJ7D2fkyxvVXlEkMqz^TmRNSA1v8ukp7&-N>{ljC+=%5PR&y
zwqI#iUGGUd=WUbT`lD5Dp3*O!Q2kO38{W>qw=<=^_cc`hbGC9_bjiW0O~+KgV_oIH
zMZW*7KlZG)v*F&a=eAgYRpDWxqnu)_(+{;iBcV5!W~tZ*t&KYW$0hujpM1Gs^jdc9
M{d#fdWv^WU030n|k^lez

diff --git a/doc/extras.txt b/doc/extras.txt
index 1c1776cd..4bd227b7 100644
--- a/doc/extras.txt
+++ b/doc/extras.txt
@@ -7,4 +7,14 @@ torch.nn.Module py:class 1 https://pytorch.org/docs/stable/nn.html?highlight=mod
 torch.nn.modules.module.Module py:class 1 https://pytorch.org/docs/stable/nn.html?highlight=module#torch.nn.Module -
 torch.utils.data.dataset.Dataset py:class 1 https://pytorch.org/docs/stable/data.html?highlight=dataset#torch.utils.data.Dataset -
 unittest.case.TestCase py:class 1 https://docs.python.org/3/library/unittest.html?highlight=testcase#unittest.TestCase -
-click.core.Option py:class 1 https://click.palletsprojects.com/en/7.x/api/#click.Option
+click.core.Option py:class 1 https://click.palletsprojects.com/en/7.x/api/#click.Option -
+torchvision.transforms.transforms.ColorJitter py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.ColorJitter -
+torchvision.transforms.transforms.RandomRotation py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.RandomRotation -
+torchvision.transforms.transforms.RandomVerticalFlip py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.RandomVerticalFlip -
+torchvision.transforms.transforms.RandomHorizontalFlip py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.RandomHorizontalFlip -
+torchvision.transforms.transforms.Compose py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.Compose -
+torchvision.transforms.transforms.ToTensor py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.ToTensor -
+torchvision.transforms.transforms.Resize py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.Resize -
+torchvision.transforms.transforms.Pad py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.Pad -
+torchvision.transforms.transforms.CenterCrop py:class 1 https://pytorch.org/docs/stable/torchvision/transforms.html#torchvision.transforms.CenterCrop -
+torchvision.transforms py:module 1 https://pytorch.org/docs/stable/torchvision/transforms.html -
diff --git a/doc/nitpick-exceptions.txt b/doc/nitpick-exceptions.txt
index 4237c341..8f6fe3b3 100644
--- a/doc/nitpick-exceptions.txt
+++ b/doc/nitpick-exceptions.txt
@@ -1,3 +1,2 @@
 py:class torch.nn.modules.loss._Loss
 py:class Module
-py:class click.core.Option
-- 
GitLab