diff --git a/bob/ip/binseg/configs/datasets/avdrive/__init__.py b/bob/ip/binseg/configs/datasets/avdrive/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8037e9e6e42e6212c4d855c67cd582c21cafdbba
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/avdrive/__init__.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+
+def _maker(protocol):
+
+    from ....data.avdrive import dataset as raw
+    from ....data.transforms import CenterCrop as ccrop
+    from .. import make_dataset as mk
+
+    return mk(raw.subsets(protocol), [ccrop((544, 544))])
diff --git a/bob/ip/binseg/configs/datasets/avdrive/default.py b/bob/ip/binseg/configs/datasets/avdrive/default.py
new file mode 100644
index 0000000000000000000000000000000000000000..1363210b0654ec7a8b4942770e9455663546c1ea
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/avdrive/default.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""DRIVE dataset for Vessel Segmentation (default protocol)
+
+* Split reference: [DRIVE-2004]_
+* This configuration resolution: 544 x 544 (center-crop)
+* See :py:mod:`bob.ip.binseg.data.drive` for dataset details
+* We are using DRIVE dataset for artery vein segmentation
+"""
+
+from bob.ip.binseg.configs.datasets.avdrive import _maker
+
+dataset = _maker("default")
diff --git a/bob/ip/binseg/configs/datasets/chasedb1/__init__.py b/bob/ip/binseg/configs/datasets/chasedb1/__init__.py
index 287fda3c50cbaf1a50236eb71074939480bae83c..c4ce99bf00ca484bb442799ec76cf57d6c0731f2 100644
--- a/bob/ip/binseg/configs/datasets/chasedb1/__init__.py
+++ b/bob/ip/binseg/configs/datasets/chasedb1/__init__.py
@@ -11,10 +11,12 @@ def _maker(protocol):
     return mk(raw.subsets(protocol), [Crop(0, 18, 960, 960)])
 
 
-def _maker_square(protocol):
+def _maker_square(protocol, size):
 
     from ....data.chasedb1 import dataset as raw
     from ....data.transforms import Pad, Resize
     from .. import make_dataset as mk
 
-    return mk(raw.subsets(protocol), [Pad((1, 20, 0, 20)), Resize((768, 768))])
+    return mk(
+        raw.subsets(protocol), [Pad((1, 20, 0, 20)), Resize((size, size))]
+    )
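With the ``size`` parameter, every square-resolution CHASE-DB1 configuration reduces to a single call into this shared maker. A minimal usage sketch (the same calls appear in the per-resolution modules that follow; the returned object is whatever ``make_dataset`` produces for this package):

from bob.ip.binseg.configs.datasets.chasedb1 import _maker_square

# pad to a square aspect ratio, then resize to the requested edge length
dataset_768 = _maker_square("first-annotator", 768)
dataset_1024 = _maker_square("first-annotator", 1024)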
diff --git a/bob/ip/binseg/configs/datasets/chasedb1/first_annotator_1024.py b/bob/ip/binseg/configs/datasets/chasedb1/first_annotator_1024.py
new file mode 100644
index 0000000000000000000000000000000000000000..18fb01dd6ec96f7844322f0a478381904650ee3a
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/chasedb1/first_annotator_1024.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""CHASE-DB1 dataset for Vessel Segmentation
+
+Configuration resolution: 1024 x 1024 (after Pad and resize)
+
+"""
+
+from bob.ip.binseg.configs.datasets.chasedb1 import _maker_square
+
+dataset = _maker_square("first-annotator", 1024)
diff --git a/bob/ip/binseg/configs/datasets/chasedb1/first_annotator_768.py b/bob/ip/binseg/configs/datasets/chasedb1/first_annotator_768.py
index 7afe056bfe5c5831389747fe111b9ffcf69118fc..3975d55c8c921dcd54cd799d22695fea6f3ccb36 100644
--- a/bob/ip/binseg/configs/datasets/chasedb1/first_annotator_768.py
+++ b/bob/ip/binseg/configs/datasets/chasedb1/first_annotator_768.py
@@ -9,4 +9,4 @@ Configuration resolution: 768 x 768 (after Pad and resize)
 
 from bob.ip.binseg.configs.datasets.chasedb1 import _maker_square
 
-dataset = _maker_square("first-annotator")
+dataset = _maker_square("first-annotator", 768)
diff --git a/bob/ip/binseg/configs/datasets/drionsdb/__init__.py b/bob/ip/binseg/configs/datasets/drionsdb/__init__.py
index 29a7977f0f1d2bef29d749092249c51d66cff49d..e345eb28bc06b928d77e0109b511ebd8e722dd42 100644
--- a/bob/ip/binseg/configs/datasets/drionsdb/__init__.py
+++ b/bob/ip/binseg/configs/datasets/drionsdb/__init__.py
@@ -11,10 +11,10 @@ def _maker(protocol):
     return mk(raw.subsets(protocol), [Pad((4, 8, 4, 8))])
 
 
-def _maker_square(protocol):
+def _maker_square(protocol, size):
 
     from ....data.drionsdb import dataset as raw
     from ....data.transforms import Pad, Resize
     from .. import make_dataset as mk
 
-    return mk(raw.subsets(protocol), [Pad((0, 100)), Resize((512, 512))])
+    return mk(raw.subsets(protocol), [Pad((0, 100)), Resize((size, size))])
diff --git a/bob/ip/binseg/configs/datasets/drionsdb/expert1_512.py b/bob/ip/binseg/configs/datasets/drionsdb/expert1_512.py
index 8d97c6ad6bb97a11c99fed7f423715755e76096f..dde12a77e9410eb8b5fa68987973f7bb6d12d4ae 100644
--- a/bob/ip/binseg/configs/datasets/drionsdb/expert1_512.py
+++ b/bob/ip/binseg/configs/datasets/drionsdb/expert1_512.py
@@ -9,4 +9,4 @@ Configuration resolution: 512x512 (after padding and resizing)
 
 from bob.ip.binseg.configs.datasets.drionsdb import _maker_square
 
-dataset = _maker_square("expert1")
+dataset = _maker_square("expert1", 512)
diff --git a/bob/ip/binseg/configs/datasets/drionsdb/expert1_768.py b/bob/ip/binseg/configs/datasets/drionsdb/expert1_768.py
new file mode 100644
index 0000000000000000000000000000000000000000..24d4a3cb3cff80deea4d58cd0cae9be5649982f9
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drionsdb/expert1_768.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""DRIONS-DB for Optic Disc Segmentation (expert #1 annotations)
+
+Configuration resolution: 768x768 (after padding and resizing)
+
+"""
+
+from bob.ip.binseg.configs.datasets.drionsdb import _maker_square
+
+dataset = _maker_square("expert1", 768)
diff --git a/bob/ip/binseg/configs/datasets/drionsdb/expert2_512.py b/bob/ip/binseg/configs/datasets/drionsdb/expert2_512.py
index df1dcc93d3b46cc15b7d9865285485e3cfa3089d..da277c9680a8b9ac9ef6961057d5928472f6666f 100644
--- a/bob/ip/binseg/configs/datasets/drionsdb/expert2_512.py
+++ b/bob/ip/binseg/configs/datasets/drionsdb/expert2_512.py
@@ -9,4 +9,4 @@ Configuration resolution: 512x512 (after padding and resizing)
 
 from bob.ip.binseg.configs.datasets.drionsdb import _maker_square
 
-dataset = _maker_square("expert2")
+dataset = _maker_square("expert2", 512)
diff --git a/bob/ip/binseg/configs/datasets/drionsdb/expert2_768.py b/bob/ip/binseg/configs/datasets/drionsdb/expert2_768.py
new file mode 100644
index 0000000000000000000000000000000000000000..b79e875b5ddc108c495c621080197e5ad2e712ab
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drionsdb/expert2_768.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""DRIONS-DB for Optic Disc Segmentation (expert #2 annotations)
+
+Configuration resolution: 768x768 (after padding and resizing)
+
+"""
+
+from bob.ip.binseg.configs.datasets.drionsdb import _maker_square
+
+dataset = _maker_square("expert2", 768)
diff --git a/bob/ip/binseg/configs/datasets/drishtigs1/__init__.py b/bob/ip/binseg/configs/datasets/drishtigs1/__init__.py
index c338cda5b0fc228ddb6d9bdd8f5453b2df9edd1e..bccf081e7d3909e36bb367d054577201312f08ab 100644
--- a/bob/ip/binseg/configs/datasets/drishtigs1/__init__.py
+++ b/bob/ip/binseg/configs/datasets/drishtigs1/__init__.py
@@ -11,7 +11,7 @@ def _maker(protocol):
     return mk(raw.subsets(protocol), [ccrop((1760, 2048))])
 
 
-def _maker_square(protocol):
+def _maker_square(protocol, size):
 
     from ....data.drishtigs1 import dataset as raw
     from ....data.transforms import CenterCrop as ccrop
@@ -20,5 +20,5 @@ def _maker_square(protocol):
 
     return mk(
         raw.subsets(protocol),
-        [ccrop((1760, 2048)), Pad((0, 144)), Resize((512, 512))],
+        [ccrop((1760, 2048)), Pad((0, 144)), Resize((size, size))],
     )
diff --git a/bob/ip/binseg/configs/datasets/drishtigs1/cup_all_512.py b/bob/ip/binseg/configs/datasets/drishtigs1/cup_all_512.py
index 11d4e5c5ae08c8d141137921a760542b45d2eaa3..db0166c74802a4e82f94f7c4e5bbdb216e04be78 100644
--- a/bob/ip/binseg/configs/datasets/drishtigs1/cup_all_512.py
+++ b/bob/ip/binseg/configs/datasets/drishtigs1/cup_all_512.py
@@ -9,4 +9,4 @@
 
 from bob.ip.binseg.configs.datasets.drishtigs1 import _maker_square
 
-dataset = _maker_square("optic-cup-all")
+dataset = _maker_square("optic-cup-all", 512)
diff --git a/bob/ip/binseg/configs/datasets/drishtigs1/cup_all_768.py b/bob/ip/binseg/configs/datasets/drishtigs1/cup_all_768.py
new file mode 100644
index 0000000000000000000000000000000000000000..01eaac51cf0a7ec30b9287d6a0217146a66c62cf
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drishtigs1/cup_all_768.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""DRISHTI-GS1 dataset for Cup Segmentation (agreed by all annotators)
+
+* Configuration resolution: 768 x 768 (after center cropping, padding and resizing)
+
+"""
+
+from bob.ip.binseg.configs.datasets.drishtigs1 import _maker_square
+
+dataset = _maker_square("optic-cup-all", 768)
diff --git a/bob/ip/binseg/configs/datasets/drishtigs1/disc_all_512.py b/bob/ip/binseg/configs/datasets/drishtigs1/disc_all_512.py
index fce36f5b1c82ca3334ce1d26d542fadce3915d6e..256f75377492f06ddb59f09663b67a3c0b43d082 100644
--- a/bob/ip/binseg/configs/datasets/drishtigs1/disc_all_512.py
+++ b/bob/ip/binseg/configs/datasets/drishtigs1/disc_all_512.py
@@ -9,4 +9,4 @@
 
 from bob.ip.binseg.configs.datasets.drishtigs1 import _maker_square
 
-dataset = _maker_square("optic-disc-all")
+dataset = _maker_square("optic-disc-all", 512)
diff --git a/bob/ip/binseg/configs/datasets/drishtigs1/disc_all_768.py b/bob/ip/binseg/configs/datasets/drishtigs1/disc_all_768.py
new file mode 100644
index 0000000000000000000000000000000000000000..56f2114db10ad7d97110ea490f56a3770fd2e16d
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drishtigs1/disc_all_768.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""DRISHTI-GS1 dataset for Optic Disc Segmentation (agreed by all annotators)
+
+* Configuration resolution: 768 x 768 (after center cropping, padding and resizing)
+
+"""
+
+from bob.ip.binseg.configs.datasets.drishtigs1 import _maker_square
+
+dataset = _maker_square("optic-disc-all", 768)
diff --git a/bob/ip/binseg/configs/datasets/drive/__init__.py b/bob/ip/binseg/configs/datasets/drive/__init__.py
index ca9e465bab5f7ca915a49a79cb0aaebdaef77de1..dae2a4e06e9b3ed3c9ebb49a614026d471bb2838 100644
--- a/bob/ip/binseg/configs/datasets/drive/__init__.py
+++ b/bob/ip/binseg/configs/datasets/drive/__init__.py
@@ -11,10 +11,12 @@ def _maker(protocol):
     return mk(raw.subsets(protocol), [ccrop((544, 544))])
 
 
-def _maker_square(protocol):
+def _maker_square(protocol, size):
 
     from ....data.drive import dataset as raw
     from ....data.transforms import Pad, Resize
     from .. import make_dataset as mk
 
-    return mk(raw.subsets(protocol), [Pad((10, 1, 10, 0)), Resize((768, 768))])
+    return mk(
+        raw.subsets(protocol), [Pad((10, 1, 10, 0)), Resize((size, size))]
+    )
diff --git a/bob/ip/binseg/configs/datasets/drive/default_1024.py b/bob/ip/binseg/configs/datasets/drive/default_1024.py
new file mode 100644
index 0000000000000000000000000000000000000000..3be5a23d901049142a6c8fc5490a6924e3825639
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drive/default_1024.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""DRIVE dataset for Vessel Segmentation (Resolution used for MTL models)
+
+This configuration resolution: 1024 x 1024 (Pad + resize)
+
+"""
+
+from bob.ip.binseg.configs.datasets.drive import _maker_square
+
+dataset = _maker_square("default", 1024)
diff --git a/bob/ip/binseg/configs/datasets/drive/default_768.py b/bob/ip/binseg/configs/datasets/drive/default_768.py
index 9467bece2f3d8e56721145e9a0554aef5f7cd75a..cc69b6c597f2906cc8295877d537b0792173f5fc 100644
--- a/bob/ip/binseg/configs/datasets/drive/default_768.py
+++ b/bob/ip/binseg/configs/datasets/drive/default_768.py
@@ -9,4 +9,4 @@ This configuration resolution: 768 x 768 (Pad + resize)
 
 from bob.ip.binseg.configs.datasets.drive import _maker_square
 
-dataset = _maker_square("default")
+dataset = _maker_square("default", 768)
diff --git a/bob/ip/binseg/configs/datasets/hrf/__init__.py b/bob/ip/binseg/configs/datasets/hrf/__init__.py
index 4d799c2dfbf2ad27b45bc9651608fa932e9f37c8..c215b0dacad51ea26d6a28cfdbc2802b4c1fffc0 100644
--- a/bob/ip/binseg/configs/datasets/hrf/__init__.py
+++ b/bob/ip/binseg/configs/datasets/hrf/__init__.py
@@ -20,10 +20,28 @@ def _maker(protocol):
     return mk(raw.subsets(protocol), [Crop(0, 108, 2336, 3296)])
 
 
-def _maker_square(protocol):
+def _maker_square_768(protocol):
 
     from ....data.hrf import dataset as raw
     from ....data.transforms import Pad, Resize
     from .. import make_dataset as mk
 
     return mk(raw.subsets(protocol), [Pad((0, 584)), Resize((768, 768))])
+
+
+def _maker_square_1024(protocol):
+
+    from ....data.hrf import dataset as raw
+    from ....data.transforms import Pad, Resize
+    from .. import make_dataset as mk
+
+    return mk(raw.subsets(protocol), [Pad((0, 584)), Resize((1024, 1024))])
+
+
+def _maker_square(protocol, size):
+
+    from ....data.hrf import dataset as raw
+    from ....data.transforms import Pad, Resize
+    from .. import make_dataset as mk
+
+    return mk(raw.subsets(protocol), [Pad((0, 584)), Resize((size, size))])
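A minimal sketch of how the HRF makers above are exercised by the per-resolution config modules (all code paths apply Pad((0, 584)) followed by a square Resize; the fixed-size helpers produce the same pipelines as the size-parametrized one):

from bob.ip.binseg.configs.datasets.hrf import _maker_square, _maker_square_768

d_768 = _maker_square("default", 768)        # generic, size-parametrized maker
d_768_fixed = _maker_square_768("default")   # fixed-size variant, same transforms
d_1024 = _maker_square("default", 1024)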
diff --git a/bob/ip/binseg/configs/datasets/hrf/default_1024.py b/bob/ip/binseg/configs/datasets/hrf/default_1024.py
new file mode 100644
index 0000000000000000000000000000000000000000..ceb92992f097f7f5c9a0289675414d7579d50744
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/hrf/default_1024.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""HRF dataset for Vessel Segmentation
+
+Configuration resolution: 1024 x 1024 (Pad + Resize)
+
+"""
+
+from bob.ip.binseg.configs.datasets.hrf import _maker_square
+
+dataset = _maker_square("default", 1024)
diff --git a/bob/ip/binseg/configs/datasets/hrf/default_768.py b/bob/ip/binseg/configs/datasets/hrf/default_768.py
index 28c0e9b5e9a9f66d59ad006895f98aae64301dae..3b5bd9d10d8e61efcf6e342344b0091fc044ba43 100644
--- a/bob/ip/binseg/configs/datasets/hrf/default_768.py
+++ b/bob/ip/binseg/configs/datasets/hrf/default_768.py
@@ -9,4 +9,4 @@ Configuration resolution: 768 x 768 (Pad + Resize)
 
 from bob.ip.binseg.configs.datasets.hrf import _maker_square
 
-dataset = _maker_square("default")
+dataset = _maker_square("default", 768)
diff --git a/bob/ip/binseg/configs/datasets/iostar/__init__.py b/bob/ip/binseg/configs/datasets/iostar/__init__.py
index f5204433551788a32b9b4e7c4833711057683f5b..e523d40c2bb755bfdeb06ea16c60e7e77a0539f8 100644
--- a/bob/ip/binseg/configs/datasets/iostar/__init__.py
+++ b/bob/ip/binseg/configs/datasets/iostar/__init__.py
@@ -10,19 +10,10 @@ def _maker(protocol):
     return mk(raw.subsets(protocol), [])
 
 
-def _maker_vessel_square(protocol):
+def _maker_square(protocol, size):
 
     from ....data.iostar import dataset as raw
     from ....data.transforms import Resize
     from .. import make_dataset as mk
 
-    return mk(raw.subsets(protocol), [Resize((768, 768))])
-
-
-def _maker_od_square(protocol):
-
-    from ....data.iostar import dataset as raw
-    from ....data.transforms import Resize
-    from .. import make_dataset as mk
-
-    return mk(raw.subsets(protocol), [Resize((512, 512))])
+    return mk(raw.subsets(protocol), [Resize((size, size))])
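Both former helpers are now covered by the single size-parametrized maker; a minimal sketch of the replacement call sites, matching the config modules below:

from bob.ip.binseg.configs.datasets.iostar import _maker_square

vessel_768 = _maker_square("vessel", 768)           # was _maker_vessel_square("vessel")
optic_disc_512 = _maker_square("optic-disc", 512)   # was _maker_od_square("optic-disc")
optic_disc_768 = _maker_square("optic-disc", 768)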
diff --git a/bob/ip/binseg/configs/datasets/iostar/optic_disc_512.py b/bob/ip/binseg/configs/datasets/iostar/optic_disc_512.py
index ff84d196ad3904bdb3e8f135a167e440ef3d3d68..059c442866ecaad8d22e8293e5687e3b04fb0519 100644
--- a/bob/ip/binseg/configs/datasets/iostar/optic_disc_512.py
+++ b/bob/ip/binseg/configs/datasets/iostar/optic_disc_512.py
@@ -7,6 +7,6 @@ Configuration resolution: 512 x 512 (Resized )
 
 """
 
-from bob.ip.binseg.configs.datasets.iostar import _maker_od_square
+from bob.ip.binseg.configs.datasets.iostar import _maker_square
 
-dataset = _maker_od_square("optic-disc")
+dataset = _maker_square("optic-disc", 512)
diff --git a/bob/ip/binseg/configs/datasets/iostar/optic_disc_768.py b/bob/ip/binseg/configs/datasets/iostar/optic_disc_768.py
new file mode 100644
index 0000000000000000000000000000000000000000..02b44b15f9e425f9fdff5c7dcfb955aff29f2a8f
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/iostar/optic_disc_768.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""IOSTAR dataset for Optic Disc Segmentation
+
+Configuration resolution: 768 x 768 (resized)
+
+"""
+
+from bob.ip.binseg.configs.datasets.iostar import _maker_square
+
+dataset = _maker_square("optic-disc", 768)
diff --git a/bob/ip/binseg/configs/datasets/iostar/vessel_768.py b/bob/ip/binseg/configs/datasets/iostar/vessel_768.py
index 9cb175b3dee052ce811d3592342fd9a6251c7aea..049ca873783aeecb7a1a44b684469323e1aff351 100644
--- a/bob/ip/binseg/configs/datasets/iostar/vessel_768.py
+++ b/bob/ip/binseg/configs/datasets/iostar/vessel_768.py
@@ -7,6 +7,6 @@ Configuration resolution: 768 x 768 (Resize)
 
 """
 
-from bob.ip.binseg.configs.datasets.iostar import _maker_vessel_square
+from bob.ip.binseg.configs.datasets.iostar import _maker_square
 
-dataset = _maker_vessel_square("vessel")
+dataset = _maker_square("vessel", 768)
diff --git a/bob/ip/binseg/configs/datasets/refuge/__init__.py b/bob/ip/binseg/configs/datasets/refuge/__init__.py
index 7396585dcea63fd7afb749bf1db42a5f190e14ae..8ed58216c042a352d58c7ad9fef7755209eb39a9 100644
--- a/bob/ip/binseg/configs/datasets/refuge/__init__.py
+++ b/bob/ip/binseg/configs/datasets/refuge/__init__.py
@@ -17,16 +17,19 @@ def _maker(protocol):
     return retval
 
 
-def _maker_square(protocol):
+def _maker_square(protocol, size):
 
     from ....data.refuge import dataset as raw
     from ....data.transforms import CenterCrop, Pad, Resize
     from .. import make_dataset as mk
 
     # due to different sizes, we need to make the dataset twice
-    train = mk(raw.subsets(protocol), [Resize(1539), Pad((21, 46, 22, 47))])
+    train = mk(
+        raw.subsets(protocol),
+        [Resize(1539), Pad((21, 46, 22, 47)), Resize((size, size))],
+    )
     # we'll keep "dev" and "test" from the next one
-    retval = mk(raw.subsets(protocol), [CenterCrop(1632), Resize((512, 512))])
+    retval = mk(raw.subsets(protocol), [CenterCrop(1632), Resize((size, size))])
     # and we keep the "train" set with the right transforms
     retval["train"] = train["train"]
     return retval
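Because REFUGE training images and validation/test images come in different raw sizes, the maker builds the dataset twice and merges the splits. A minimal sketch of the resulting behaviour (split names as used elsewhere in this package):

from bob.ip.binseg.configs.datasets.refuge import _maker_square

d = _maker_square("optic-disc", 512)
# d["train"] went through Resize(1539) + Pad((21, 46, 22, 47)) + Resize((512, 512));
# d["validation"] and d["test"] went through CenterCrop(1632) + Resize((512, 512)),
# so every split ends up at the same square resolution.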
diff --git a/bob/ip/binseg/configs/datasets/refuge/cup_512.py b/bob/ip/binseg/configs/datasets/refuge/cup_512.py
index e3a8a1427bb5c119dbefac66f6d58a35fe1ec348..0c1cc1555438276ff3e792c973b1c38e7d855312 100644
--- a/bob/ip/binseg/configs/datasets/refuge/cup_512.py
+++ b/bob/ip/binseg/configs/datasets/refuge/cup_512.py
@@ -9,4 +9,4 @@
 
 from bob.ip.binseg.configs.datasets.refuge import _maker_square
 
-dataset = _maker_square("optic-cup")
+dataset = _maker_square("optic-cup", 512)
diff --git a/bob/ip/binseg/configs/datasets/refuge/cup_768.py b/bob/ip/binseg/configs/datasets/refuge/cup_768.py
new file mode 100644
index 0000000000000000000000000000000000000000..512b6f690d0a56c5d8bc6eb7f1df7daf73526d66
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/refuge/cup_768.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""REFUGE dataset for Optic Cup Segmentation
+
+* Configuration resolution: 768 x 768 (after resizing and padding)
+
+"""
+
+from bob.ip.binseg.configs.datasets.refuge import _maker_square
+
+dataset = _maker_square("optic-cup", 768)
diff --git a/bob/ip/binseg/configs/datasets/refuge/disc_512.py b/bob/ip/binseg/configs/datasets/refuge/disc_512.py
index fce36f5b1c82ca3334ce1d26d542fadce3915d6e..3bab59d638e060c90e62d2108435ec1a31e7dbdd 100644
--- a/bob/ip/binseg/configs/datasets/refuge/disc_512.py
+++ b/bob/ip/binseg/configs/datasets/refuge/disc_512.py
@@ -7,6 +7,6 @@
 
 """
 
-from bob.ip.binseg.configs.datasets.drishtigs1 import _maker_square
+from bob.ip.binseg.configs.datasets.refuge import _maker_square
 
-dataset = _maker_square("optic-disc-all")
+dataset = _maker_square("optic-disc", 512)
diff --git a/bob/ip/binseg/configs/datasets/refuge/disc_768.py b/bob/ip/binseg/configs/datasets/refuge/disc_768.py
new file mode 100644
index 0000000000000000000000000000000000000000..ceecf8f95aecc3904175260454689184cc760b1a
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/refuge/disc_768.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""DRISHTI-GS1 dataset for Optic Disc Segmentation (agreed by all annotators)
+
+* Configuration resolution: 768 x 768 (after center cropping, padding and resizing)
+
+"""
+
+from bob.ip.binseg.configs.datasets.refuge import _maker_square
+
+dataset = _maker_square("optic-disc", 768)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3/__init__.py b/bob/ip/binseg/configs/datasets/rimoner3/__init__.py
index 3aefd9bf9e1e175eabf0e95b63b6e91b07d23b4b..c04bfa380ccb319db86ec06505df0e1ddbaa43af 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3/__init__.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3/__init__.py
@@ -11,10 +11,10 @@ def _maker(protocol):
     return mk(raw.subsets(protocol), [Pad((8, 8, 8, 8))])
 
 
-def _maker_square(protocol):
+def _maker_square(protocol, size):
 
     from ....data.rimoner3 import dataset as raw
     from ....data.transforms import Pad, Resize
     from .. import make_dataset as mk
 
-    return mk(raw.subsets(protocol), [Pad((176, 0)), Resize((512, 512))])
+    return mk(raw.subsets(protocol), [Pad((176, 0)), Resize((size, size))])
diff --git a/bob/ip/binseg/configs/datasets/rimoner3/cup_exp1_512.py b/bob/ip/binseg/configs/datasets/rimoner3/cup_exp1_512.py
index e2a413241ffaa0f4b4fff0c3804b399f60d9c880..afbf299226549a15a5ccee2aa99cf4748c8c2d88 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3/cup_exp1_512.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3/cup_exp1_512.py
@@ -9,4 +9,4 @@ Configuration resolution: 512 x 512 (after padding and resizing)
 
 from bob.ip.binseg.configs.datasets.rimoner3 import _maker_square
 
-dataset = _maker_square("optic-cup-exp1")
+dataset = _maker_square("optic-cup-exp1", 512)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3/cup_exp1_768.py b/bob/ip/binseg/configs/datasets/rimoner3/cup_exp1_768.py
new file mode 100644
index 0000000000000000000000000000000000000000..d954d8a260639e272296f02882c3e76bba30981f
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/rimoner3/cup_exp1_768.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""RIM-ONE r3 for Optic Cup Segmentation (expert #1 annotations)
+
+Configuration resolution: 768 x 768 (after padding and resizing)
+
+"""
+
+from bob.ip.binseg.configs.datasets.rimoner3 import _maker_square
+
+dataset = _maker_square("optic-cup-exp1", 768)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3/disc_exp1_512.py b/bob/ip/binseg/configs/datasets/rimoner3/disc_exp1_512.py
index 1e0b37c44404e4483228bda0eace8922018e9ec0..d18cb188b4e7258ee9e9a1cd48f1b259b8b304d2 100644
--- a/bob/ip/binseg/configs/datasets/rimoner3/disc_exp1_512.py
+++ b/bob/ip/binseg/configs/datasets/rimoner3/disc_exp1_512.py
@@ -9,4 +9,4 @@ Configuration resolution: 512 x 512 (after padding and resizing)
 
 from bob.ip.binseg.configs.datasets.rimoner3 import _maker_square
 
-dataset = _maker_square("optic-disc-exp1")
+dataset = _maker_square("optic-disc-exp1", 512)
diff --git a/bob/ip/binseg/configs/datasets/rimoner3/disc_exp1_768.py b/bob/ip/binseg/configs/datasets/rimoner3/disc_exp1_768.py
new file mode 100644
index 0000000000000000000000000000000000000000..31fb96e7d917e1ff4d3e25bc0488ae0755b6288c
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/rimoner3/disc_exp1_768.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""RIM-ONE r3 for Optic Disc Segmentation (expert #1 annotations)
+
+Configuration resolution: 768 x 768 (after padding and resizing)
+
+"""
+
+from bob.ip.binseg.configs.datasets.rimoner3 import _maker_square
+
+dataset = _maker_square("optic-disc-exp1", 768)
diff --git a/bob/ip/binseg/configs/datasets/stare/__init__.py b/bob/ip/binseg/configs/datasets/stare/__init__.py
index 7b43cfca3b12740acfd23e87c51e95c01057d745..a03e777d2e3be9590e1621640dd4094b23d85886 100644
--- a/bob/ip/binseg/configs/datasets/stare/__init__.py
+++ b/bob/ip/binseg/configs/datasets/stare/__init__.py
@@ -13,7 +13,7 @@ def _maker(protocol, raw=None):
     return mk(raw.subsets(protocol), [Pad((2, 1, 2, 2))])
 
 
-def _maker_square(protocol, raw=None):
+def _maker_square(protocol, size, raw=None):
 
     from ....data.stare import dataset as _raw
     from ....data.transforms import Pad, Resize
@@ -21,4 +21,6 @@ def _maker_square(protocol, raw=None):
     raw = raw or _raw  # allows user to recreate dataset for testing purposes
     from .. import make_dataset as mk
 
-    return mk(raw.subsets(protocol), [Pad((1, 48, 0, 48)), Resize((768, 768))])
+    return mk(
+        raw.subsets(protocol), [Pad((1, 48, 0, 48)), Resize((size, size))]
+    )
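A minimal usage sketch of the STARE maker, including the optional ``raw`` injection point that the test suite relies on (see test_stare further down):

from bob.ip.binseg.configs.datasets.stare import _maker_square

dataset = _maker_square("ah", 1024)  # normal use: packaged STARE raw dataset
# the test suite instead injects its own raw object, e.g.
# dataset = _maker_square("ah", 1024, stare_dataset)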
diff --git a/bob/ip/binseg/configs/datasets/stare/ah_1024.py b/bob/ip/binseg/configs/datasets/stare/ah_1024.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1eb6ac394a1ab3d1198b73814aef15728529ef0
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/stare/ah_1024.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""STARE dataset for Vessel Segmentation (annotator AH)
+
+Configuration resolution: 1024 x 1024 (after padding and resizing)
+
+"""
+from bob.ip.binseg.configs.datasets.stare import _maker_square
+
+dataset = _maker_square("ah", 1024)
diff --git a/bob/ip/binseg/configs/datasets/stare/ah_768.py b/bob/ip/binseg/configs/datasets/stare/ah_768.py
index 59e8b8cdd572306078a96ec9417167e4d19a411b..3a2c35d07a9fa167d0df61d394f7f0aed92c0b18 100644
--- a/bob/ip/binseg/configs/datasets/stare/ah_768.py
+++ b/bob/ip/binseg/configs/datasets/stare/ah_768.py
@@ -8,4 +8,4 @@ Configuration resolution: 768 x 768 (after padding and resizing)
 """
 from bob.ip.binseg.configs.datasets.stare import _maker_square
 
-dataset = _maker_square("ah")
+dataset = _maker_square("ah", 768)
diff --git a/bob/ip/binseg/data/avdrive/__init__.py b/bob/ip/binseg/data/avdrive/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f58432bf0a83557a20fba65fc059ff26b2aba557
--- /dev/null
+++ b/bob/ip/binseg/data/avdrive/__init__.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""AV_DRIVE dataset for Vessel Segmentation
+
+The DRIVE database has been established to enable comparative studies on
+segmentation of blood vessels in retinal images.
+
+* Reference: [DRIVE-2004]_
+* Original resolution (height x width): 584 x 565
+* Split reference: [DRIVE-2004]_
+* Protocol ``default``:
+
+  * Training samples: 20 (including labels and masks)
+  * Test samples: 20 (including labels from annotator 1 and masks)
+
+"""
+
+import os
+
+import pkg_resources
+
+import bob.extension
+
+from ..dataset import JSONDataset
+from ..loader import load_pil_1, load_pil_rgb, make_delayed
+
+_protocols = [pkg_resources.resource_filename(__name__, "default.json")]
+
+_root_path_drive = bob.extension.rc.get(
+    "bob.ip.binseg.drive.datadir", os.path.realpath(os.curdir)
+)
+
+_root_path_av_drive = bob.extension.rc.get(
+    "bob.ip.binseg.avdrive.datadir", os.path.realpath(os.curdir)
+)
+
+
+def _raw_data_loader(sample):
+    return dict(
+        data=load_pil_rgb(os.path.join(_root_path_drive, sample["data"])),
+        label=load_pil_1(os.path.join(_root_path_av_drive, sample["label"])),
+        mask=load_pil_1(os.path.join(_root_path_drive, sample["mask"])),
+    )
+
+
+def _loader(context, sample):
+    # "context" is ignored in this case - database is homogeneous
+    # we returned delayed samples to avoid loading all images at once
+    return make_delayed(sample, _raw_data_loader)
+
+
+dataset = JSONDataset(
+    protocols=_protocols,
+    fieldnames=("data", "label", "mask"),
+    loader=_loader,
+)
+"""DRIVE dataset object"""
diff --git a/bob/ip/binseg/data/avdrive/default.json b/bob/ip/binseg/data/avdrive/default.json
new file mode 100644
index 0000000000000000000000000000000000000000..bf23b9873081489455514db00e4501e7a25ff1a6
--- /dev/null
+++ b/bob/ip/binseg/data/avdrive/default.json
@@ -0,0 +1,206 @@
+{
+ "train": [
+  [
+   "training/images/21_training.tif",
+   "training/21_manual1.mat",
+   "training/mask/21_training_mask.gif"
+  ],
+  [
+   "training/images/22_training.tif",
+   "training/22_manual1.mat",
+   "training/mask/22_training_mask.gif"
+  ],
+  [
+   "training/images/23_training.tif",
+   "training/23_manual1.mat",
+   "training/mask/23_training_mask.gif"
+  ],
+  [
+   "training/images/24_training.tif",
+   "training/24_manual1.mat",
+   "training/mask/24_training_mask.gif"
+  ],
+  [
+   "training/images/25_training.tif",
+   "training/25_manual1.mat",
+   "training/mask/25_training_mask.gif"
+  ],
+  [
+   "training/images/26_training.tif",
+   "training/26_manual1.mat",
+   "training/mask/26_training_mask.gif"
+  ],
+  [
+   "training/images/27_training.tif",
+   "training/27_manual1.mat",
+   "training/mask/27_training_mask.gif"
+  ],
+  [
+   "training/images/28_training.tif",
+   "training/28_manual1.mat",
+   "training/mask/28_training_mask.gif"
+  ],
+  [
+   "training/images/29_training.tif",
+   "training/29_manual1.mat",
+   "training/mask/29_training_mask.gif"
+  ],
+  [
+   "training/images/30_training.tif",
+   "training/30_manual1.mat",
+   "training/mask/30_training_mask.gif"
+  ],
+  [
+   "training/images/31_training.tif",
+   "training/31_manual1.mat",
+   "training/mask/31_training_mask.gif"
+  ],
+  [
+   "training/images/32_training.tif",
+   "training/32_manual1.mat",
+   "training/mask/32_training_mask.gif"
+  ],
+  [
+   "training/images/33_training.tif",
+   "training/33_manual1.mat",
+   "training/mask/33_training_mask.gif"
+  ],
+  [
+   "training/images/34_training.tif",
+   "training/34_manual1.mat",
+   "training/mask/34_training_mask.gif"
+  ],
+  [
+   "training/images/35_training.tif",
+   "training/35_manual1.mat",
+   "training/mask/35_training_mask.gif"
+  ],
+  [
+   "training/images/36_training.tif",
+   "training/36_manual1.mat",
+   "training/mask/36_training_mask.gif"
+  ],
+  [
+   "training/images/37_training.tif",
+   "training/37_manual1.mat",
+   "training/mask/37_training_mask.gif"
+  ],
+  [
+   "training/images/38_training.tif",
+   "training/38_manual1.mat",
+   "training/mask/38_training_mask.gif"
+  ],
+  [
+   "training/images/39_training.tif",
+   "training/39_manual1.mat",
+   "training/mask/39_training_mask.gif"
+  ],
+  [
+   "training/images/40_training.tif",
+   "training/40_manual1.mat",
+   "training/mask/40_training_mask.gif"
+  ]
+ ],
+ "test": [
+  [
+   "test/images/01_test.tif",
+   "test/01_manual1.mat",
+   "test/mask/01_test_mask.gif"
+  ],
+  [
+   "test/images/02_test.tif",
+   "test/02_manual1.mat",
+   "test/mask/02_test_mask.gif"
+  ],
+  [
+   "test/images/03_test.tif",
+   "test/03_manual1.mat",
+   "test/mask/03_test_mask.gif"
+  ],
+  [
+   "test/images/04_test.tif",
+   "test/04_manual1.mat",
+   "test/mask/04_test_mask.gif"
+  ],
+  [
+   "test/images/05_test.tif",
+   "test/05_manual1.mat",
+   "test/mask/05_test_mask.gif"
+  ],
+  [
+   "test/images/06_test.tif",
+   "test/06_manual1.mat",
+   "test/mask/06_test_mask.gif"
+  ],
+  [
+   "test/images/07_test.tif",
+   "test/07_manual1.mat",
+   "test/mask/07_test_mask.gif"
+  ],
+  [
+   "test/images/08_test.tif",
+   "test/08_manual1.mat",
+   "test/mask/08_test_mask.gif"
+  ],
+  [
+   "test/images/09_test.tif",
+   "test/09_manual1.mat",
+   "test/mask/09_test_mask.gif"
+  ],
+  [
+   "test/images/10_test.tif",
+   "test/10_manual1.mat",
+   "test/mask/10_test_mask.gif"
+  ],
+  [
+   "test/images/11_test.tif",
+   "test/11_manual1.mat",
+   "test/mask/11_test_mask.gif"
+  ],
+  [
+   "test/images/12_test.tif",
+   "test/12_manual1.mat",
+   "test/mask/12_test_mask.gif"
+  ],
+  [
+   "test/images/13_test.tif",
+   "test/13_manual1.mat",
+   "test/mask/13_test_mask.gif"
+  ],
+  [
+   "test/images/14_test.tif",
+   "test/14_manual1.mat",
+   "test/mask/14_test_mask.gif"
+  ],
+  [
+   "test/images/15_test.tif",
+   "test/15_manual1.mat",
+   "test/mask/15_test_mask.gif"
+  ],
+  [
+   "test/images/16_test.tif",
+   "test/16_manual1.mat",
+   "test/mask/16_test_mask.gif"
+  ],
+  [
+   "test/images/17_test.tif",
+   "test/17_manual1.mat",
+   "test/mask/17_test_mask.gif"
+  ],
+  [
+   "test/images/18_test.tif",
+   "test/18_manual1.mat",
+   "test/mask/18_test_mask.gif"
+  ],
+  [
+   "test/images/19_test.tif",
+   "test/19_manual1.mat",
+   "test/mask/19_test_mask.gif"
+  ],
+  [
+   "test/images/20_test.tif",
+   "test/20_manual1.mat",
+   "test/mask/20_test_mask.gif"
+  ]
+ ]
+}
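Each three-element entry above maps positionally onto the ``fieldnames`` declared in __init__.py; a sketch of the mapping for the first training entry (paths resolved by _raw_data_loader against the two configured roots):

entry = [
    "training/images/21_training.tif",     # "data"  -> bob.ip.binseg.drive.datadir
    "training/21_manual1.mat",             # "label" -> bob.ip.binseg.avdrive.datadir
    "training/mask/21_training_mask.gif",  # "mask"  -> bob.ip.binseg.drive.datadir
]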
diff --git a/bob/ip/binseg/data/drionsdb/__init__.py b/bob/ip/binseg/data/drionsdb/__init__.py
index 8fb90410a8e694849656e9e2c7705b76a863e053..396154f01b136388e91382c0824671a9f3983b28 100644
--- a/bob/ip/binseg/data/drionsdb/__init__.py
+++ b/bob/ip/binseg/data/drionsdb/__init__.py
@@ -67,7 +67,7 @@ def _pad_right(img):
 def _raw_data_loader(sample):
     data = load_pil_rgb(os.path.join(_root_path, sample["data"]))
     label = _txt_to_pil_1(os.path.join(_root_path, sample["label"]), data.size)
-    mask = load_pil_1(os.path.join(_root_path, sample["mask"]))
+    mask = load_pil_1(os.path.join(_pkg_path, sample["mask"]))
     return dict(data=data, label=label, mask=mask)
 
 
diff --git a/bob/ip/binseg/data/drionsdb/masks/images/image_069.png b/bob/ip/binseg/data/drionsdb/masks/images/image_069.png
new file mode 100644
index 0000000000000000000000000000000000000000..6af9a17fcbfdb6bee0279f7cd503dc6e865209e7
Binary files /dev/null and b/bob/ip/binseg/data/drionsdb/masks/images/image_069.png differ
diff --git a/bob/ip/binseg/script/experiment.py b/bob/ip/binseg/script/experiment.py
index a65ddd45fabae9a29a51c80094ad9c3f29abe77a..37b560f59e5db04136c05a5cff1ff8aaa8bc540a 100644
--- a/bob/ip/binseg/script/experiment.py
+++ b/bob/ip/binseg/script/experiment.py
@@ -307,7 +307,6 @@ def experiment(
     from .train import train
 
     train_output_folder = os.path.join(output_folder, "model")
-
     ctx.invoke(
         train,
         model=model,
diff --git a/bob/ip/binseg/test/test_config.py b/bob/ip/binseg/test/test_config.py
index 9a3c87bd093955ccbc0109cf8f441b361859bce0..634e1036e74afab96051f65124d8cffd47f4cfd4 100644
--- a/bob/ip/binseg/test/test_config.py
+++ b/bob/ip/binseg/test/test_config.py
@@ -18,16 +18,16 @@ N = 10
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.drive.datadir")
 def test_drive():
-    def _check_subset(samples, size):
+    def _check_subset(samples, size, height, width):
         assert len(samples) == size
         for s in samples:
             assert len(s) == 4
             assert isinstance(s[0], str)
-            assert s[1].shape, (3, 544 == 544)  # planes, height, width
+            assert s[1].shape, (3, height == width)  # planes, height, width
             assert s[1].dtype == torch.float32
-            assert s[2].shape, (1, 544 == 544)  # planes, height, width
+            assert s[2].shape, (1, height == width)  # planes, height, width
             assert s[2].dtype == torch.float32
-            assert s[3].shape, (1, 544 == 544)  # planes, height, width
+            assert s[3].shape, (1, height == width)  # planes, height, width
             assert s[3].dtype == torch.float32
             assert s[1].max() <= 1.0
             assert s[1].min() >= 0.0
@@ -35,15 +35,29 @@ def test_drive():
     from ..configs.datasets.drive.default import dataset
 
     assert len(dataset) == 4
-    _check_subset(dataset["__train__"], 20)
-    _check_subset(dataset["__valid__"], 20)
-    _check_subset(dataset["train"], 20)
-    _check_subset(dataset["test"], 20)
+    _check_subset(dataset["__train__"], 20, 544, 544)
+    _check_subset(dataset["__valid__"], 20, 544, 544)
+    _check_subset(dataset["train"], 20, 544, 544)
+    _check_subset(dataset["test"], 20, 544, 544)
 
     from ..configs.datasets.drive.second_annotator import dataset
 
     assert len(dataset) == 1
-    _check_subset(dataset["test"], 20)
+    _check_subset(dataset["test"], 20, 544, 544)
+
+    from ..configs.datasets.drive.default_768 import dataset
+
+    _check_subset(dataset["__train__"], 20, 768, 768)
+    _check_subset(dataset["__valid__"], 20, 768, 768)
+    _check_subset(dataset["train"], 20, 768, 768)
+    _check_subset(dataset["test"], 20, 768, 768)
+
+    from ..configs.datasets.drive.default_1024 import dataset
+
+    _check_subset(dataset["__train__"], 20, 1024, 1024)
+    _check_subset(dataset["__valid__"], 20, 1024, 1024)
+    _check_subset(dataset["train"], 20, 1024, 1024)
+    _check_subset(dataset["test"], 20, 1024, 1024)
 
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.drive.datadir")
@@ -160,29 +174,41 @@ def test_stare_augmentation_manipulation():
 
 
 def test_stare():
-    def _check_subset(samples, size):
+    def _check_subset(samples, size, height, width):
         assert len(samples) == size
         for s in samples:
             assert len(s) == 4
             assert isinstance(s[0], str)
-            assert s[1].shape, (3, 608 == 704)  # planes, height, width
+            assert s[1].shape, (3, height == width)  # planes, height, width
             assert s[1].dtype == torch.float32
-            assert s[2].shape, (1, 608 == 704)  # planes, height, width
+            assert s[2].shape, (1, height == width)  # planes, height, width
             assert s[2].dtype == torch.float32
-            assert s[3].shape, (1, 608 == 704)  # planes, height, width
+            assert s[3].shape, (1, height == width)  # planes, height, width
             assert s[3].dtype == torch.float32
             assert s[1].max() <= 1.0
             assert s[1].min() >= 0.0
 
     # hack to allow testing on the CI
-    from ..configs.datasets.stare import _maker
+    from ..configs.datasets.stare import _maker, _maker_square
 
     for protocol in "ah", "vk":
         dataset = _maker(protocol, stare_dataset)
         assert len(dataset) == 4
-        _check_subset(dataset["__train__"], 10)
-        _check_subset(dataset["train"], 10)
-        _check_subset(dataset["test"], 10)
+        _check_subset(dataset["__train__"], 10, 608, 704)
+        _check_subset(dataset["train"], 10, 608, 704)
+        _check_subset(dataset["test"], 10, 608, 704)
+
+    dataset = _maker_square("ah", 768, stare_dataset)
+    assert len(dataset) == 4
+    _check_subset(dataset["__train__"], 10, 768, 768)
+    _check_subset(dataset["train"], 10, 768, 768)
+    _check_subset(dataset["test"], 10, 768, 768)
+
+    dataset = _maker_square("ah", 1024, stare_dataset)
+    assert len(dataset) == 4
+    _check_subset(dataset["__train__"], 10, 1024, 1024)
+    _check_subset(dataset["train"], 10, 1024, 1024)
+    _check_subset(dataset["test"], 10, 1024, 1024)
 
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.drive.datadir")
@@ -247,16 +273,16 @@ def test_stare_covd():
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.chasedb1.datadir")
 def test_chasedb1():
-    def _check_subset(samples, size):
+    def _check_subset(samples, size, height, width):
         assert len(samples) == size
         for s in samples:
             assert len(s) == 4
             assert isinstance(s[0], str)
-            assert s[1].shape, (3, 960 == 960)  # planes, height, width
+            assert s[1].shape, (3, height == width)  # planes, height, width
             assert s[1].dtype == torch.float32
-            assert s[2].shape, (1, 960 == 960)  # planes, height, width
+            assert s[2].shape, (1, height == width)  # planes, height, width
             assert s[2].dtype == torch.float32
-            assert s[3].shape, (1, 960 == 960)  # planes, height, width
+            assert s[3].shape, (1, height == width)  # planes, height, width
             assert s[3].dtype == torch.float32
             assert s[1].max() <= 1.0
             assert s[1].min() >= 0.0
@@ -266,10 +292,26 @@ def test_chasedb1():
             f"...configs.datasets.chasedb1.{m}", package=__name__
         ).dataset
         assert len(d) == 4
-        _check_subset(d["__train__"], 8)
-        _check_subset(d["__valid__"], 8)
-        _check_subset(d["train"], 8)
-        _check_subset(d["test"], 20)
+        _check_subset(d["__train__"], 8, 960, 960)
+        _check_subset(d["__valid__"], 8, 960, 960)
+        _check_subset(d["train"], 8, 960, 960)
+        _check_subset(d["test"], 20, 960, 960)
+
+    from ..configs.datasets.chasedb1.first_annotator_768 import dataset
+
+    assert len(dataset) == 4
+    _check_subset(dataset["__train__"], 8, 768, 768)
+    _check_subset(dataset["__valid__"], 8, 768, 768)
+    _check_subset(dataset["train"], 8, 768, 768)
+    _check_subset(dataset["test"], 20, 768, 768)
+
+    from ..configs.datasets.chasedb1.first_annotator_1024 import dataset
+
+    assert len(dataset) == 4
+    _check_subset(dataset["__train__"], 8, 1024, 1024)
+    _check_subset(dataset["__valid__"], 8, 1024, 1024)
+    _check_subset(dataset["train"], 8, 1024, 1024)
+    _check_subset(dataset["test"], 20, 1024, 1024)
 
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.drive.datadir")
@@ -334,16 +376,16 @@ def test_chasedb1_covd():
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.hrf.datadir")
 def test_hrf():
-    def _check_subset(samples, size):
+    def _check_subset(samples, size, height, width):
         assert len(samples) == size
         for s in samples:
             assert len(s) == 4
             assert isinstance(s[0], str)
-            assert s[1].shape, (3, 1168 == 1648)  # planes, height, width
+            assert s[1].shape, (3, height == width)  # planes, height, width
             assert s[1].dtype == torch.float32
-            assert s[2].shape, (1, 1168 == 1648)  # planes, height, width
+            assert s[2].shape, (1, height == width)  # planes, height, width
             assert s[2].dtype == torch.float32
-            assert s[3].shape, (1, 1168 == 1648)  # planes, height, width
+            assert s[3].shape, (1, height == width)  # planes, height, width
             assert s[3].dtype == torch.float32
             assert s[1].max() <= 1.0
             assert s[1].min() >= 0.0
@@ -365,12 +407,26 @@ def test_hrf():
     from ..configs.datasets.hrf.default import dataset
 
     assert len(dataset) == 6
-    _check_subset(dataset["__train__"], 15)
-    _check_subset(dataset["train"], 15)
-    _check_subset(dataset["test"], 30)
+    _check_subset(dataset["__train__"], 15, 1168, 1648)
+    _check_subset(dataset["train"], 15, 1168, 1648)
+    _check_subset(dataset["test"], 30, 1168, 1648)
     _check_subset_fullres(dataset["train (full resolution)"], 15)
     _check_subset_fullres(dataset["test (full resolution)"], 30)
 
+    from ..configs.datasets.hrf.default_768 import dataset
+
+    assert len(dataset) == 4
+    _check_subset(dataset["__train__"], 15, 768, 768)
+    _check_subset(dataset["train"], 15, 768, 768)
+    _check_subset(dataset["test"], 30, 768, 768)
+
+    from ..configs.datasets.hrf.default_1024 import dataset
+
+    assert len(dataset) == 4
+    _check_subset(dataset["__train__"], 15, 1024, 1024)
+    _check_subset(dataset["train"], 15, 1024, 1024)
+    _check_subset(dataset["test"], 30, 1024, 1024)
+
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.drive.datadir")
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.chasedb1.datadir")
@@ -442,16 +498,16 @@ def test_hrf_covd():
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.iostar.datadir")
 def test_iostar():
-    def _check_subset(samples, size):
+    def _check_subset(samples, size, height, width):
         assert len(samples) == size
         for s in samples:
             assert len(s) == 4
             assert isinstance(s[0], str)
-            assert s[1].shape, (3, 1024 == 1024)  # planes, height, width
+            assert s[1].shape, (3, height == width)  # planes, height, width
             assert s[1].dtype == torch.float32
-            assert s[2].shape, (1, 1024 == 1024)  # planes, height, width
+            assert s[2].shape, (1, height == width)  # planes, height, width
             assert s[2].dtype == torch.float32
-            assert s[3].shape, (1, 1024 == 1024)  # planes, height, width
+            assert s[3].shape, (1, height == width)  # planes, height, width
             assert s[3].dtype == torch.float32
             assert s[1].max() <= 1.0
             assert s[1].min() >= 0.0
@@ -461,9 +517,25 @@ def test_iostar():
             f"...configs.datasets.iostar.{m}", package=__name__
         ).dataset
         assert len(d) == 4
-        _check_subset(d["__train__"], 20)
-        _check_subset(d["train"], 20)
-        _check_subset(d["test"], 10)
+        _check_subset(d["__train__"], 20, 1024, 1024)
+        _check_subset(d["train"], 20, 1024, 1024)
+        _check_subset(d["test"], 10, 1024, 1024)
+
+    for m in ("vessel_768", "optic_disc_768"):
+        d = importlib.import_module(
+            f"...configs.datasets.iostar.{m}", package=__name__
+        ).dataset
+        assert len(d) == 4
+        _check_subset(d["__train__"], 20, 768, 768)
+        _check_subset(d["train"], 20, 768, 768)
+        _check_subset(d["test"], 10, 768, 768)
+
+    from ..configs.datasets.iostar.optic_disc_512 import dataset
+
+    assert len(dataset) == 4
+    _check_subset(dataset["__train__"], 20, 512, 512)
+    _check_subset(dataset["train"], 20, 512, 512)
+    _check_subset(dataset["test"], 10, 512, 512)
 
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.drive.datadir")
@@ -528,14 +600,14 @@ def test_iostar_covd():
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.refuge.datadir")
 def test_refuge():
-    def _check_subset(samples, size):
+    def _check_subset(samples, size, height, width):
         assert len(samples) == size
         for s in samples[:N]:
-            assert len(s) == 3
+            assert len(s) == 4
             assert isinstance(s[0], str)
-            assert s[1].shape, (3, 1632 == 1632)  # planes, height, width
+            assert s[1].shape, (3, height == width)  # planes, height, width
             assert s[1].dtype == torch.float32
-            assert s[2].shape, (1, 1632 == 1632)  # planes, height, width
+            assert s[2].shape, (1, height == width)  # planes, height, width
             assert s[2].dtype == torch.float32
             assert s[1].max() <= 1.0
             assert s[1].min() >= 0.0
@@ -545,22 +617,42 @@ def test_refuge():
             f"...configs.datasets.refuge.{m}", package=__name__
         ).dataset
         assert len(d) == 5
-        _check_subset(d["__train__"], 400)
-        _check_subset(d["train"], 400)
-        _check_subset(d["validation"], 400)
-        _check_subset(d["test"], 400)
+        _check_subset(d["__train__"], 400, 1632, 1632)
+        _check_subset(d["train"], 400, 1632, 1632)
+        _check_subset(d["validation"], 400, 1632, 1632)
+        _check_subset(d["test"], 400, 1632, 1632)
+
+    for m in ("disc_512", "cup_512"):
+        d = importlib.import_module(
+            f"...configs.datasets.refuge.{m}", package=__name__
+        ).dataset
+        assert len(d) == 5
+        _check_subset(d["__train__"], 400, 512, 512)
+        _check_subset(d["train"], 400, 512, 512)
+        _check_subset(d["validation"], 400, 512, 512)
+        _check_subset(d["test"], 400, 512, 512)
+
+    for m in ("disc_768", "cup_768"):
+        d = importlib.import_module(
+            f"...configs.datasets.refuge.{m}", package=__name__
+        ).dataset
+        assert len(d) == 5
+        _check_subset(d["__train__"], 400, 768, 768)
+        _check_subset(d["train"], 400, 768, 768)
+        _check_subset(d["validation"], 400, 768, 768)
+        _check_subset(d["test"], 400, 768, 768)
 
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.drishtigs1.datadir")
 def test_drishtigs1():
-    def _check_subset(samples, size):
+    def _check_subset(samples, size, height, width):
         assert len(samples) == size
         for s in samples[:N]:
-            assert len(s) == 3
+            assert len(s) == 4
             assert isinstance(s[0], str)
-            assert s[1].shape, (3, 1760 == 2048)  # planes, height, width
+            assert s[1].shape, (3, height == width)  # planes, height, width
             assert s[1].dtype == torch.float32
-            assert s[2].shape, (1, 1760 == 2048)  # planes, height, width
+            assert s[2].shape, (1, height == width)  # planes, height, width
             assert s[2].dtype == torch.float32
             assert s[1].max() <= 1.0
             assert s[1].min() >= 0.0
@@ -570,21 +662,38 @@ def test_drishtigs1():
             f"...configs.datasets.drishtigs1.{m}", package=__name__
         ).dataset
         assert len(d) == 4
-        _check_subset(d["__train__"], 50)
-        _check_subset(d["train"], 50)
-        _check_subset(d["test"], 51)
+        _check_subset(d["__train__"], 50, 1760, 2048)
+        _check_subset(d["train"], 50, 1760, 2048)
+        _check_subset(d["test"], 51, 1760, 2048)
+
+    for m in ("disc_all_512", "cup_all_512"):
+        d = importlib.import_module(
+            f"...configs.datasets.drishtigs1.{m}", package=__name__
+        ).dataset
+        assert len(d) == 4
+        _check_subset(d["__train__"], 50, 512, 512)
+        _check_subset(d["train"], 50, 512, 512)
+        _check_subset(d["test"], 51, 512, 512)
+    for m in ("disc_all_768", "cup_all_768"):
+        d = importlib.import_module(
+            f"...configs.datasets.drishtigs1.{m}", package=__name__
+        ).dataset
+        assert len(d) == 4
+        _check_subset(d["__train__"], 50, 768, 768)
+        _check_subset(d["train"], 50, 768, 768)
+        _check_subset(d["test"], 51, 768, 768)
 
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.rimoner3.datadir")
 def test_rimoner3():
-    def _check_subset(samples, size):
+    def _check_subset(samples, size, height, width):
         assert len(samples) == size
         for s in samples[:N]:
-            assert len(s) == 3
+            assert len(s) == 4
             assert isinstance(s[0], str)
-            assert s[1].shape, (3, 1440 == 1088)  # planes, height, width
+            assert s[1].shape, (3, height == width)  # planes, height, width
             assert s[1].dtype == torch.float32
-            assert s[2].shape, (1, 1440 == 1088)  # planes, height, width
+            assert s[2].shape, (1, height == width)  # planes, height, width
             assert s[2].dtype == torch.float32
             assert s[1].max() <= 1.0
             assert s[1].min() >= 0.0
@@ -594,21 +703,39 @@ def test_rimoner3():
             f"...configs.datasets.rimoner3.{m}", package=__name__
         ).dataset
         assert len(d) == 4
-        _check_subset(d["__train__"], 99)
-        _check_subset(d["train"], 99)
-        _check_subset(d["test"], 60)
+        _check_subset(d["__train__"], 99, 1440, 1088)
+        _check_subset(d["train"], 99, 1440, 1088)
+        _check_subset(d["test"], 60, 1440, 1088)
+
+    for m in ("disc_exp1_512", "cup_exp1_512"):
+        d = importlib.import_module(
+            f"...configs.datasets.rimoner3.{m}", package=__name__
+        ).dataset
+        assert len(d) == 4
+        _check_subset(d["__train__"], 99, 512, 512)
+        _check_subset(d["train"], 99, 512, 512)
+        _check_subset(d["test"], 60, 512, 512)
+
+    for m in ("disc_exp1_768", "cup_exp1_768"):
+        d = importlib.import_module(
+            f"...configs.datasets.rimoner3.{m}", package=__name__
+        ).dataset
+        assert len(d) == 4
+        _check_subset(d["__train__"], 99, 768, 768)
+        _check_subset(d["train"], 99, 768, 768)
+        _check_subset(d["test"], 60, 768, 768)
 
 
 @pytest.mark.skip_if_rc_var_not_set("bob.ip.binseg.drionsdb.datadir")
 def test_drionsdb():
-    def _check_subset(samples, size):
+    def _check_subset(samples, size, height, width):
         assert len(samples) == size
         for s in samples[:N]:
-            assert len(s) == 3
+            assert len(s) == 4
             assert isinstance(s[0], str)
-            assert s[1].shape, (3, 416 == 608)  # planes, height, width
+            assert s[1].shape, (3, height == width)  # planes, height, width
             assert s[1].dtype == torch.float32
-            assert s[2].shape, (1, 416 == 608)  # planes, height, width
+            assert s[2].shape, (1, height == width)  # planes, height, width
             assert s[2].dtype == torch.float32
             assert s[1].max() <= 1.0
             assert s[1].min() >= 0.0
@@ -618,6 +745,24 @@ def test_drionsdb():
             f"...configs.datasets.drionsdb.{m}", package=__name__
         ).dataset
         assert len(d) == 4
-        _check_subset(d["__train__"], 60)
-        _check_subset(d["train"], 60)
-        _check_subset(d["test"], 50)
+        _check_subset(d["__train__"], 60, 416, 608)
+        _check_subset(d["train"], 60, 416, 608)
+        _check_subset(d["test"], 50, 416, 608)
+
+    for m in ("expert1_512", "expert2_512"):
+        d = importlib.import_module(
+            f"...configs.datasets.drionsdb.{m}", package=__name__
+        ).dataset
+        assert len(d) == 4
+        _check_subset(d["__train__"], 60, 512, 512)
+        _check_subset(d["train"], 60, 512, 512)
+        _check_subset(d["test"], 50, 512, 512)
+
+    for m in ("expert1_768", "expert2_768"):
+        d = importlib.import_module(
+            f"...configs.datasets.drionsdb.{m}", package=__name__
+        ).dataset
+        assert len(d) == 4
+        _check_subset(d["__train__"], 60, 768, 768)
+        _check_subset(d["train"], 60, 768, 768)
+        _check_subset(d["test"], 50, 768, 768)
diff --git a/doc/api.rst b/doc/api.rst
index a97b25b3cd3cd2032896030812f1bd364e7a972e..152c714195536d55fb21d68ff0627bd54771634c 100644
--- a/doc/api.rst
+++ b/doc/api.rst
@@ -152,6 +152,8 @@ Datasets
    bob.ip.binseg.configs.datasets.csv
 
    bob.ip.binseg.configs.datasets.chasedb1.first_annotator
+   bob.ip.binseg.configs.datasets.chasedb1.first_annotator_768
+   bob.ip.binseg.configs.datasets.chasedb1.first_annotator_1024
    bob.ip.binseg.configs.datasets.chasedb1.second_annotator
    bob.ip.binseg.configs.datasets.chasedb1.xtest
    bob.ip.binseg.configs.datasets.chasedb1.mtest
@@ -159,6 +161,8 @@ Datasets
    bob.ip.binseg.configs.datasets.chasedb1.ssl
 
    bob.ip.binseg.configs.datasets.drive.default
+   bob.ip.binseg.configs.datasets.drive.default_768
+   bob.ip.binseg.configs.datasets.drive.default_1024
    bob.ip.binseg.configs.datasets.drive.second_annotator
    bob.ip.binseg.configs.datasets.drive.xtest
    bob.ip.binseg.configs.datasets.drive.mtest
@@ -166,6 +170,8 @@ Datasets
    bob.ip.binseg.configs.datasets.drive.ssl
 
    bob.ip.binseg.configs.datasets.hrf.default
+   bob.ip.binseg.configs.datasets.hrf.default_768
+   bob.ip.binseg.configs.datasets.hrf.default_1024
    bob.ip.binseg.configs.datasets.hrf.xtest
    bob.ip.binseg.configs.datasets.hrf.mtest
    bob.ip.binseg.configs.datasets.hrf.default_fullres
@@ -173,13 +179,18 @@ Datasets
    bob.ip.binseg.configs.datasets.hrf.ssl
 
    bob.ip.binseg.configs.datasets.iostar.vessel
+   bob.ip.binseg.configs.datasets.iostar.vessel_768
    bob.ip.binseg.configs.datasets.iostar.vessel_xtest
    bob.ip.binseg.configs.datasets.iostar.vessel_mtest
    bob.ip.binseg.configs.datasets.iostar.optic_disc
+   bob.ip.binseg.configs.datasets.iostar.optic_disc_512
+   bob.ip.binseg.configs.datasets.iostar.optic_disc_768
    bob.ip.binseg.configs.datasets.iostar.covd
    bob.ip.binseg.configs.datasets.iostar.ssl
 
    bob.ip.binseg.configs.datasets.stare.ah
+   bob.ip.binseg.configs.datasets.stare.ah_768
+   bob.ip.binseg.configs.datasets.stare.ah_1024
    bob.ip.binseg.configs.datasets.stare.vk
    bob.ip.binseg.configs.datasets.stare.xtest
    bob.ip.binseg.configs.datasets.stare.mtest
@@ -188,19 +199,35 @@ Datasets
 
    bob.ip.binseg.configs.datasets.refuge.cup
    bob.ip.binseg.configs.datasets.refuge.disc
+   bob.ip.binseg.configs.datasets.refuge.cup_512
+   bob.ip.binseg.configs.datasets.refuge.cup_768
+   bob.ip.binseg.configs.datasets.refuge.disc_512
+   bob.ip.binseg.configs.datasets.refuge.disc_768
 
    bob.ip.binseg.configs.datasets.rimoner3.cup_exp1
    bob.ip.binseg.configs.datasets.rimoner3.cup_exp2
    bob.ip.binseg.configs.datasets.rimoner3.disc_exp1
    bob.ip.binseg.configs.datasets.rimoner3.disc_exp2
+   bob.ip.binseg.configs.datasets.rimoner3.cup_exp1_512
+   bob.ip.binseg.configs.datasets.rimoner3.disc_exp1_512
+   bob.ip.binseg.configs.datasets.rimoner3.cup_exp1_768
+   bob.ip.binseg.configs.datasets.rimoner3.disc_exp1_768
+
    bob.ip.binseg.configs.datasets.drishtigs1.cup_all
+   bob.ip.binseg.configs.datasets.drishtigs1.cup_all_512
+   bob.ip.binseg.configs.datasets.drishtigs1.cup_all_768
    bob.ip.binseg.configs.datasets.drishtigs1.cup_any
    bob.ip.binseg.configs.datasets.drishtigs1.disc_all
+   bob.ip.binseg.configs.datasets.drishtigs1.disc_all_512
+   bob.ip.binseg.configs.datasets.drishtigs1.disc_all_768
    bob.ip.binseg.configs.datasets.drishtigs1.disc_any
 
    bob.ip.binseg.configs.datasets.drionsdb.expert1
    bob.ip.binseg.configs.datasets.drionsdb.expert2
+   bob.ip.binseg.configs.datasets.drionsdb.expert1_512
+   bob.ip.binseg.configs.datasets.drionsdb.expert2_512
+   bob.ip.binseg.configs.datasets.drionsdb.expert1_768
+   bob.ip.binseg.configs.datasets.drionsdb.expert2_768
 
    bob.ip.binseg.configs.datasets.drhagis.default
 
diff --git a/doc/results/baselines/index.rst b/doc/results/baselines/index.rst
index 94416e1221e5065325d33e7cff39fc140d42f2c1..b4d9ec55533d646c063fb6080f9d8b2c3a1f418e 100644
--- a/doc/results/baselines/index.rst
+++ b/doc/results/baselines/index.rst
@@ -42,6 +42,7 @@ Tasks
 
    vessel
    lung
+   od_oc
 
 
 .. include:: ../../links.rst
diff --git a/doc/results/baselines/od_oc.rst b/doc/results/baselines/od_oc.rst
new file mode 100644
index 0000000000000000000000000000000000000000..12cdad3b1aaf313ddaabf6a9752cf90a3fe05bd3
--- /dev/null
+++ b/doc/results/baselines/od_oc.rst
@@ -0,0 +1,197 @@
+.. -*- coding: utf-8 -*-
+
+.. _bob.ip.binseg.results.baselines.od_oc:
+
+========================================================
+ Optic Disc and Optic Cup Segmentation for Retinography
+========================================================
+
+**Optic Disc**
+
+.. list-table::
+   :header-rows: 2
+
+   * -
+     - :py:mod:`driu <bob.ip.binseg.configs.models.driu>`
+     - :py:mod:`hed <bob.ip.binseg.configs.models.hed>`
+     - :py:mod:`m2unet <bob.ip.binseg.configs.models.m2unet>`
+     - :py:mod:`unet <bob.ip.binseg.configs.models.unet>`
+     - :py:mod:`lwnet <bob.ip.binseg.configs.models.lwnet>`
+     - :py:mod:`driu-od <bob.ip.binseg.configs.models.driu_od>`
+   * - Dataset
+     - 15M
+     - 14.7M
+     - 550k
+     - 25.8M
+     - 68k
+     - 15.2M
+   * - :py:mod:`drionsdb <bob.ip.binseg.configs.datasets.drionsdb.expert1_512>`
+     - 0.958
+     - 0.961
+     - 0.960
+     - 0.961
+     - 0.922
+     - 0.960
+   * - :py:mod:`drishtigs1-disc <bob.ip.binseg.configs.datasets.drishtigs1.disc_all_512>`
+     - 0.973
+     - 0.975
+     - 0.974
+     - 0.975
+     - 0.965
+     - 0.972
+   * - :py:mod:`iostar-disc <bob.ip.binseg.configs.datasets.iostar.optic_disc_512>`
+     - 0.894
+     - 0.922
+     - 0.913
+     - 0.921
+     - 0.893
+     - 0.921
+   * - :py:mod:`refuge-disc <bob.ip.binseg.configs.datasets.refuge.disc_512>`
+     - 0.921
+     - 0.939
+     - 0.942
+     - 0.945
+     - 0.894
+     - 0.941
+   * - :py:mod:`rimoner3-disc <bob.ip.binseg.configs.datasets.rimoner3.disc_exp1_512>`
+     - 0.950
+     - 0.955
+     - 0.953
+     - 0.956
+     - 0.939
+     - 0.954
+
+Notes
+-----
+
+* The following table shows the recommended batch size for each model when
+  training on a GPU card with 24Gb of RAM:
+
+
+.. list-table::
+   :header-rows: 1
+
+   * -
+     - :py:mod:`driu <bob.ip.binseg.configs.models.driu>`
+     - :py:mod:`hed <bob.ip.binseg.configs.models.hed>`
+     - :py:mod:`m2unet <bob.ip.binseg.configs.models.m2unet>`
+     - :py:mod:`unet <bob.ip.binseg.configs.models.unet>`
+     - :py:mod:`lwnet <bob.ip.binseg.configs.models.lwnet>`
+     - :py:mod:`driu-od <bob.ip.binseg.configs.models.driu_od>`
+   * - :py:mod:`drionsdb <bob.ip.binseg.configs.datasets.drionsdb.expert1_512>`
+     - 4
+     - 4
+     - 6
+     - 2
+     - 6
+     - 4
+   * - :py:mod:`drishtigs1-disc <bob.ip.binseg.configs.datasets.drishtigs1.disc_all_512>`
+     - 4
+     - 4
+     - 5
+     - 2
+     - 5
+     - 4
+   * - :py:mod:`iostar-disc <bob.ip.binseg.configs.datasets.iostar.optic_disc_512>`
+     - 4
+     - 4
+     - 6
+     - 4
+     - 6
+     - 4
+   * - :py:mod:`refuge-disc <bob.ip.binseg.configs.datasets.refuge.disc_512>`
+     - 5
+     - 5
+     - 10
+     - 5
+     - 20
+     - 5
+   * - :py:mod:`rimoner3-disc <bob.ip.binseg.configs.datasets.rimoner3.disc_exp1_512>`
+     - 4
+     - 4
+     - 5
+     - 2
+     - 5
+     - 4
+
+**Optic Cup**
+
+.. list-table::
+   :header-rows: 2
+
+   * -
+     - :py:mod:`driu <bob.ip.binseg.configs.models.driu>`
+     - :py:mod:`hed <bob.ip.binseg.configs.models.hed>`
+     - :py:mod:`m2unet <bob.ip.binseg.configs.models.m2unet>`
+     - :py:mod:`unet <bob.ip.binseg.configs.models.unet>`
+     - :py:mod:`lwnet <bob.ip.binseg.configs.models.lwnet>`
+     - :py:mod:`driu-od <bob.ip.binseg.configs.models.driu_od>`
+   * - Dataset
+     - 15M
+     - 14.7M
+     - 550k
+     - 25.8M
+     - 68k
+     - 15.2M
+   * - :py:mod:`drishtigs1-cup <bob.ip.binseg.configs.datasets.drishtigs1.cup_all_512>`
+     - 0.903
+     - 0.910
+     - 0.912
+     - 0.913
+     - 0.877
+     - 0.913
+   * - :py:mod:`refuge-cup <bob.ip.binseg.configs.datasets.refuge.cup_512>`
+     - 0.861
+     - 0.853
+     - 0.831
+     - 0.863
+     - 0.700
+     - 0.854
+   * - :py:mod:`rimoner3-cup <bob.ip.binseg.configs.datasets.rimoner3.cup_exp1_512>`
+     - 0.799
+     - 0.819
+     - 0.829
+     - 0.819
+     - 0.736
+     - 0.822
+
+Notes
+-----
+
+* The following table shows the recommended batch size for each model when
+  training on a GPU card with 24Gb of RAM:
+
+
+.. list-table::
+   :header-rows: 1
+
+   * -
+     - :py:mod:`driu <bob.ip.binseg.configs.models.driu>`
+     - :py:mod:`hed <bob.ip.binseg.configs.models.hed>`
+     - :py:mod:`m2unet <bob.ip.binseg.configs.models.m2unet>`
+     - :py:mod:`unet <bob.ip.binseg.configs.models.unet>`
+     - :py:mod:`lwnet <bob.ip.binseg.configs.models.lwnet>`
+     - :py:mod:`driu-od <bob.ip.binseg.configs.models.driu_od>`
+   * - :py:mod:`drishtigs1-cup <bob.ip.binseg.configs.datasets.drishtigs1.cup_all_512>`
+     - 4
+     - 4
+     - 5
+     - 2
+     - 5
+     - 4
+   * - :py:mod:`refuge-cup <bob.ip.binseg.configs.datasets.refuge.cup_512>`
+     - 5
+     - 5
+     - 10
+     - 5
+     - 20
+     - 5
+   * - :py:mod:`rimoner3-cup <bob.ip.binseg.configs.datasets.rimoner3.cup_exp1_512>`
+     - 4
+     - 4
+     - 5
+     - 2
+     - 5
+     - 4
+
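+* The sketch below is an illustration only and not the package's training
+  procedure: it shows how one of the batch sizes recommended above could be
+  applied when iterating a dataset configuration directly, assuming the
+  ``train`` split can be consumed by PyTorch's default collate function:
+
+.. code-block:: python
+
+   from torch.utils.data import DataLoader
+
+   from bob.ip.binseg.configs.datasets.drishtigs1.cup_all_512 import dataset
+
+   # batch size recommended above for driu on drishtigs1-cup (512 x 512)
+   loader = DataLoader(dataset["train"], batch_size=4, shuffle=True)
+
+   for batch in loader:
+       ...  # feed the batch to the model of your choice
+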
+.. include:: ../../links.rst
diff --git a/doc/results/baselines/vessel.rst b/doc/results/baselines/vessel.rst
index d7456e21569e3c3beff133a228efc54e2c7484b0..80875b64cda8bc8d007f3e75a15850dfcf587816 100644
--- a/doc/results/baselines/vessel.rst
+++ b/doc/results/baselines/vessel.rst
@@ -75,38 +75,146 @@ Notes
 * The following table describes recommended batch sizes for 24Gb of RAM GPU
   card:
 
-  .. list-table::
-
-    * - **Models / Datasets**
-      - :py:mod:`drive <bob.ip.binseg.configs.datasets.drive.default>`
-      - :py:mod:`stare <bob.ip.binseg.configs.datasets.stare.ah>`
-      - :py:mod:`chasedb1 <bob.ip.binseg.configs.datasets.chasedb1.first_annotator>`
-      - :py:mod:`iostar-vessel <bob.ip.binseg.configs.datasets.iostar.vessel>`
-      - :py:mod:`hrf <bob.ip.binseg.configs.datasets.hrf.default>`
-    * - :py:mod:`unet <bob.ip.binseg.configs.models.unet>`
-      - 4
-      - 2
-      - 2
-      - 2
-      - 1
-    * - :py:mod:`hed <bob.ip.binseg.configs.models.hed>`
-      - 8
-      - 4
-      - 4
-      - 4
-      - 1
-    * - :py:mod:`driu <bob.ip.binseg.configs.models.driu>` / :py:mod:`driu-bn <bob.ip.binseg.configs.models.driu_bn>`
-      - 8
-      - 5
-      - 4
-      - 4
-      - 1
-    * - :py:mod:`m2unet <bob.ip.binseg.configs.models.m2unet>`
-      - 16
-      - 6
-      - 6
-      - 6
-      - 1
+.. list-table::
+   :header-rows: 1
+
+   * -
+     - :py:mod:`driu <bob.ip.binseg.configs.models.driu>`
+     - :py:mod:`hed <bob.ip.binseg.configs.models.hed>`
+     - :py:mod:`m2unet <bob.ip.binseg.configs.models.m2unet>`
+     - :py:mod:`unet <bob.ip.binseg.configs.models.unet>`
+     - :py:mod:`lwnet <bob.ip.binseg.configs.models.lwnet>`
+   * - :py:mod:`drive <bob.ip.binseg.configs.datasets.drive.default>`
+     - 8
+     - 8
+     - 16
+     - 4
+     - 4
+   * - :py:mod:`stare <bob.ip.binseg.configs.datasets.stare.ah>`
+     - 5
+     - 4
+     - 6
+     - 2
+     - 4
+   * - :py:mod:`chasedb1 <bob.ip.binseg.configs.datasets.chasedb1.first_annotator>`
+     - 4
+     - 4
+     - 6
+     - 2
+     - 4
+   * - :py:mod:`hrf <bob.ip.binseg.configs.datasets.hrf.default>`
+     - 1
+     - 1
+     - 1
+     - 1
+     - 4
+   * - :py:mod:`iostar-vessel <bob.ip.binseg.configs.datasets.iostar.vessel>`
+     - 4
+     - 4
+     - 6
+     - 2
+     - 4
+
+Results for the same datasets at a resolution of 768 x 768:
 
+.. list-table::
+   :header-rows: 2
+
+   * -
+     -
+     - :py:mod:`driu <bob.ip.binseg.configs.models.driu>`
+     - :py:mod:`hed <bob.ip.binseg.configs.models.hed>`
+     - :py:mod:`m2unet <bob.ip.binseg.configs.models.m2unet>`
+     - :py:mod:`unet <bob.ip.binseg.configs.models.unet>`
+     - :py:mod:`lwnet <bob.ip.binseg.configs.models.lwnet>`
+   * - Dataset
+     - 2nd. Annot.
+     - 15M
+     - 14.7M
+     - 550k
+     - 25.8M
+     - 68k
+   * - :py:mod:`drive <bob.ip.binseg.configs.datasets.drive.default_768>`
+     -
+     - 0.813
+     - 0.803
+     - 0.799
+     - 0.816
+     - 0.807
+   * - :py:mod:`stare <bob.ip.binseg.configs.datasets.stare.ah_768>`
+     -
+     - 0.823
+     - 0.817
+     - 0.813
+     - 0.824
+     - 0.815
+   * - :py:mod:`chasedb1 <bob.ip.binseg.configs.datasets.chasedb1.first_annotator_768>`
+     -
+     - 0.812
+     - 0.807
+     - 0.794
+     - 0.801
+     - 0.804
+   * - :py:mod:`hrf <bob.ip.binseg.configs.datasets.hrf.default_768>`
+     -
+     - 0.803
+     - 0.786
+     - 0.785
+     - 0.802
+     - 0.804
+   * - :py:mod:`iostar-vessel <bob.ip.binseg.configs.datasets.iostar.vessel_768>`
+     -
+     - 0.824
+     - 0.821
+     - 0.812
+     - 0.820
+     - 0.820
+
+
+Notes
+-----
+
+* The following table shows the recommended batch size for each model when
+  training on a GPU card with 24Gb of RAM:
+
+.. list-table::
+   :header-rows: 1
+
+   * -
+     - :py:mod:`driu <bob.ip.binseg.configs.models.driu>`
+     - :py:mod:`hed <bob.ip.binseg.configs.models.hed>`
+     - :py:mod:`m2unet <bob.ip.binseg.configs.models.m2unet>`
+     - :py:mod:`unet <bob.ip.binseg.configs.models.unet>`
+     - :py:mod:`lwnet <bob.ip.binseg.configs.models.lwnet>`
+   * - :py:mod:`drive <bob.ip.binseg.configs.datasets.drive.default_768>`
+     - 5
+     - 10
+     - 10
+     - 4
+     - 10
+   * - :py:mod:`stare <bob.ip.binseg.configs.datasets.stare.ah_768>`
+     - 5
+     - 4
+     - 6
+     - 2
+     - 4
+   * - :py:mod:`chasedb1 <bob.ip.binseg.configs.datasets.chasedb1.first_annotator_768>`
+     - 4
+     - 4
+     - 6
+     - 2
+     - 4
+   * - :py:mod:`hrf <bob.ip.binseg.configs.datasets.hrf.default_768>`
+     - 3
+     - 3
+     - 3
+     - 3
+     - 5
+   * - :py:mod:`iostar-vessel <bob.ip.binseg.configs.datasets.iostar.vessel_768>`
+     - 4
+     - 4
+     - 6
+     - 4
+     - 6
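+
+* As a back-of-the-envelope illustration of what these batch sizes imply:
+  the number of gradient steps per epoch is the training-split size divided
+  by the batch size (rounded up, if the last smaller batch is kept).  The
+  sketch below assumes the standard 20-image DRIVE training split and the
+  batch size recommended above for driu:
+
+.. code-block:: python
+
+   import math
+
+   train_size = 20  # standard DRIVE training split
+   batch_size = 5   # recommended above for driu on drive at 768 x 768
+
+   steps_per_epoch = math.ceil(train_size / batch_size)
+   print(steps_per_epoch)  # 4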
 
 .. include:: ../../links.rst
diff --git a/setup.py b/setup.py
index 50310bf76c1f77d0b2a6a606e9c58d104a913e4d..734ce482ec154ca1a48abedd28fd074b9adfc727 100644
--- a/setup.py
+++ b/setup.py
@@ -61,6 +61,7 @@ setup(
             # drive dataset - retinography
             "drive = bob.ip.binseg.configs.datasets.drive.default",
             "drive-768 = bob.ip.binseg.configs.datasets.drive.default_768",
+            "drive-1024 = bob.ip.binseg.configs.datasets.drive.default_1024",
             "drive-2nd = bob.ip.binseg.configs.datasets.drive.second_annotator",
             "drive-xtest = bob.ip.binseg.configs.datasets.drive.xtest",
             "drive-mtest = bob.ip.binseg.configs.datasets.drive.mtest",
@@ -71,6 +72,7 @@ setup(
             # stare dataset - retinography
             "stare = bob.ip.binseg.configs.datasets.stare.ah",
             "stare-768 = bob.ip.binseg.configs.datasets.stare.ah_768",
+            "stare-1024 = bob.ip.binseg.configs.datasets.stare.ah_1024",
             "stare-2nd = bob.ip.binseg.configs.datasets.stare.vk",
             "stare-xtest = bob.ip.binseg.configs.datasets.stare.xtest",
             "stare-mtest = bob.ip.binseg.configs.datasets.stare.mtest",
@@ -83,11 +85,13 @@ setup(
             "iostar-vessel-mtest = bob.ip.binseg.configs.datasets.iostar.vessel_mtest",
             "iostar-disc = bob.ip.binseg.configs.datasets.iostar.optic_disc",
             "iostar-disc-512 = bob.ip.binseg.configs.datasets.iostar.optic_disc_512",
+            "iostar-disc-768 = bob.ip.binseg.configs.datasets.iostar.optic_disc_768",
             "iostar-vessel-covd = bob.ip.binseg.configs.datasets.iostar.covd",
             "iostar-vessel-ssl = bob.ip.binseg.configs.datasets.iostar.ssl",
             # hrf - retinography
             "hrf = bob.ip.binseg.configs.datasets.hrf.default",
             "hrf-768 = bob.ip.binseg.configs.datasets.hrf.default_768",
+            "hrf-1024 = bob.ip.binseg.configs.datasets.hrf.default_1024",
             "hrf-xtest = bob.ip.binseg.configs.datasets.hrf.xtest",
             "hrf-mtest = bob.ip.binseg.configs.datasets.hrf.mtest",
             "hrf-highres = bob.ip.binseg.configs.datasets.hrf.default_fullres",
@@ -96,6 +100,7 @@ setup(
             # chase-db1 - retinography
             "chasedb1 = bob.ip.binseg.configs.datasets.chasedb1.first_annotator",
             "chasedb1-768 = bob.ip.binseg.configs.datasets.chasedb1.first_annotator_768",
+            "chasedb1-1024 = bob.ip.binseg.configs.datasets.chasedb1.first_annotator_1024",
             "chasedb1-2nd = bob.ip.binseg.configs.datasets.chasedb1.second_annotator",
             "chasedb1-xtest = bob.ip.binseg.configs.datasets.chasedb1.xtest",
             "chasedb1-mtest = bob.ip.binseg.configs.datasets.chasedb1.mtest",
@@ -104,25 +109,32 @@ setup(
             # drionsdb - retinography
             "drionsdb = bob.ip.binseg.configs.datasets.drionsdb.expert1",
             "drionsdb-512 = bob.ip.binseg.configs.datasets.drionsdb.expert1_512",
+            "drionsdb-768 = bob.ip.binseg.configs.datasets.drionsdb.expert1_768",
             "drionsdb-2nd = bob.ip.binseg.configs.datasets.drionsdb.expert2",
             "drionsdb-2nd-512 = bob.ip.binseg.configs.datasets.drionsdb.expert2_512",
             # drishti-gs1 - retinography
             "drishtigs1-disc = bob.ip.binseg.configs.datasets.drishtigs1.disc_all",
             "drishtigs1-disc-512 = bob.ip.binseg.configs.datasets.drishtigs1.disc_all_512",
+            "drishtigs1-disc-768 = bob.ip.binseg.configs.datasets.drishtigs1.disc_all_768",
             "drishtigs1-cup = bob.ip.binseg.configs.datasets.drishtigs1.cup_all",
             "drishtigs1-cup-512 = bob.ip.binseg.configs.datasets.drishtigs1.cup_all_512",
+            "drishtigs1-cup-768 = bob.ip.binseg.configs.datasets.drishtigs1.cup_all_768",
             "drishtigs1-disc-any = bob.ip.binseg.configs.datasets.drishtigs1.disc_any",
             "drishtigs1-cup-any = bob.ip.binseg.configs.datasets.drishtigs1.cup_any",
             # refuge - retinography
             "refuge-cup = bob.ip.binseg.configs.datasets.refuge.cup",
             "refuge-cup-512 = bob.ip.binseg.configs.datasets.refuge.cup_512",
+            "refuge-cup-768 = bob.ip.binseg.configs.datasets.refuge.cup_768",
             "refuge-disc = bob.ip.binseg.configs.datasets.refuge.disc",
             "refuge-disc-512 = bob.ip.binseg.configs.datasets.refuge.disc_512",
+            "refuge-disc-768 = bob.ip.binseg.configs.datasets.refuge.disc_768",
             # rim one r3 - retinography
             "rimoner3-cup = bob.ip.binseg.configs.datasets.rimoner3.cup_exp1",
             "rimoner3-disc = bob.ip.binseg.configs.datasets.rimoner3.disc_exp1",
             "rimoner3-cup-512 = bob.ip.binseg.configs.datasets.rimoner3.cup_exp1_512",
+            "rimoner3-cup-768 = bob.ip.binseg.configs.datasets.rimoner3.cup_exp1_768",
             "rimoner3-disc-512 = bob.ip.binseg.configs.datasets.rimoner3.disc_exp1_512",
+            "rimoner3-disc-768 = bob.ip.binseg.configs.datasets.rimoner3.disc_exp1_768",
             "rimoner3-cup-2nd = bob.ip.binseg.configs.datasets.rimoner3.cup_exp2",
             "rimoner3-disc-2nd = bob.ip.binseg.configs.datasets.rimoner3.disc_exp2",
             # montgomery county - cxr