diff --git a/bob/ip/binseg/configs/datasets/chasedb11024all.py b/bob/ip/binseg/configs/datasets/chasedb11024all.py
new file mode 100644
index 0000000000000000000000000000000000000000..39d585cb513fcf380911180c1a4fe171b0139c6f
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/chasedb11024all.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.chasedb1 import Database as CHASEDB1
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+#### Config ####
+
+transforms = Compose([  
+                        RandomRotation()
+                        ,Crop(0,18,960,960)
+                        ,Resize(1024)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = CHASEDB1(protocol = 'default')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/chasedb11168all.py b/bob/ip/binseg/configs/datasets/chasedb11168all.py
new file mode 100644
index 0000000000000000000000000000000000000000..e30c1ef7016310303435cffebe0cfdad67a066f7
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/chasedb11168all.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.chasedb1 import Database as CHASEDB1
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+#### Config ####
+
+transforms = Compose([  
+                        RandomRotation()
+                        ,Crop(140,18,680,960)
+                        ,Resize(1168)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = CHASEDB1(protocol = 'default')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/chasedb1544.py b/bob/ip/binseg/configs/datasets/chasedb1544.py
index 9d94cd3ca6e9e1504aaba5bbb78422f547e102c2..a1bb3628000620fe765424df4972cbaabb6b46bb 100644
--- a/bob/ip/binseg/configs/datasets/chasedb1544.py
+++ b/bob/ip/binseg/configs/datasets/chasedb1544.py
@@ -8,11 +8,11 @@ from bob.ip.binseg.data.binsegdataset import BinSegDataset
 #### Config ####
 
 transforms = Compose([  
-                        RandomRotation()
-                        ,Resize(544)
+                        Resize(544)
                         ,Crop(0,12,544,544)
                         ,RandomHFlip()
                         ,RandomVFlip()
+                        ,RandomRotation()
                         ,ColorJitter()
                         ,ToTensor()
                     ])
diff --git a/bob/ip/binseg/configs/datasets/chasedb1608.py b/bob/ip/binseg/configs/datasets/chasedb1608.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a475cae5d7fb5be49a28ae6e479dd3fe41760ed
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/chasedb1608.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.chasedb1 import Database as CHASEDB1
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        RandomRotation()
+                        ,CenterCrop((829,960))                    
+                        ,Resize(608)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = CHASEDB1(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drive1024all.py b/bob/ip/binseg/configs/datasets/drive1024all.py
new file mode 100644
index 0000000000000000000000000000000000000000..26d82bc8652ec5d8c474e859bea67d100d571112
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drive1024all.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.drive import Database as DRIVE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+#### Config ####
+
+transforms = Compose([  
+                        RandomRotation()
+                        ,CenterCrop((540,540))
+                        ,Resize(1024)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = DRIVE(protocol = 'default')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drive1168all.py b/bob/ip/binseg/configs/datasets/drive1168all.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1e9d8562460970bd6c55222db7658c5d6fd02a4
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drive1168all.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.drive import Database as DRIVE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+#### Config ####
+
+transforms = Compose([  
+                        RandomRotation()
+                        ,Crop(75,10,416,544)
+                        ,Pad((21,0,22,0))
+                        ,Resize(1168)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = DRIVE(protocol = 'default')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drive960all.py b/bob/ip/binseg/configs/datasets/drive960all.py
new file mode 100644
index 0000000000000000000000000000000000000000..1242574ba3eef4935cbc6b3f22e9ddf8ed3a1ef0
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drive960all.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.drive import Database as DRIVE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+#### Config ####
+
+transforms = Compose([  
+                        RandomRotation()
+                        ,CenterCrop((544,544))
+                        ,Resize(960)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = DRIVE(protocol = 'default')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivechasedb1iostarhrf608.py b/bob/ip/binseg/configs/datasets/drivechasedb1iostarhrf608.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ab57b8ad4e34d700e4667138b6111f19b4a488a
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivechasedb1iostarhrf608.py
@@ -0,0 +1,10 @@
+from bob.ip.binseg.configs.datasets.drive608 import dataset as drive
+from bob.ip.binseg.configs.datasets.chasedb1608 import dataset as chase
+from bob.ip.binseg.configs.datasets.iostarvessel608 import dataset as iostar
+from bob.ip.binseg.configs.datasets.hrf608 import dataset as hrf
+import torch
+
+#### Config ####
+
+# PyTorch dataset
+dataset = torch.utils.data.ConcatDataset([drive,chase,iostar,hrf])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivechasedb1iostarhrf608sslstare.py b/bob/ip/binseg/configs/datasets/drivechasedb1iostarhrf608sslstare.py
new file mode 100644
index 0000000000000000000000000000000000000000..928452f4209d4526e66028457a3ababcd04fda8b
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivechasedb1iostarhrf608sslstare.py
@@ -0,0 +1,34 @@
+from bob.ip.binseg.configs.datasets.drive608 import dataset as drive
+from bob.ip.binseg.configs.datasets.chasedb1608 import dataset as chase
+from bob.ip.binseg.configs.datasets.iostarvessel608 import dataset as iostar
+from bob.ip.binseg.configs.datasets.hrf608 import dataset as hrf
+from bob.db.stare import Database as STARE
+from bob.ip.binseg.data.transforms import *
+import torch
+from bob.ip.binseg.data.binsegdataset import BinSegDataset, SSLBinSegDataset, UnLabeledBinSegDataset
+
+
+#### Config ####
+
+# PyTorch dataset
+labeled_dataset = torch.utils.data.ConcatDataset([drive,chase,iostar,hrf])
+
+#### Unlabeled STARE TRAIN ####
+unlabeled_transforms = Compose([  
+                        Pad((2,1,2,2))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+starebobdb = STARE(protocol = 'default')
+
+# PyTorch dataset
+unlabeled_dataset = UnLabeledBinSegDataset(starebobdb, split='train', transform=unlabeled_transforms)
+
+# SSL Dataset
+
+dataset = SSLBinSegDataset(labeled_dataset, unlabeled_dataset)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivestarechasedb11168.py b/bob/ip/binseg/configs/datasets/drivestarechasedb11168.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e36eff7195de38bd2122a0bb75942a210fba1db
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivestarechasedb11168.py
@@ -0,0 +1,9 @@
+from bob.ip.binseg.configs.datasets.drive1168 import dataset as drive
+from bob.ip.binseg.configs.datasets.stare1168 import dataset as stare
+from bob.ip.binseg.configs.datasets.chasedb11168 import dataset as chase
+import torch
+
+#### Config ####
+
+# PyTorch dataset
+dataset = torch.utils.data.ConcatDataset([drive,stare,chase])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivestarechasedb1hrf1024.py b/bob/ip/binseg/configs/datasets/drivestarechasedb1hrf1024.py
index 267fc45cde1cb4f1fe56bb0a0422800ec6177bcc..7dc1d487fef29c8cd31c611d86067fe2a9d61209 100644
--- a/bob/ip/binseg/configs/datasets/drivestarechasedb1hrf1024.py
+++ b/bob/ip/binseg/configs/datasets/drivestarechasedb1hrf1024.py
@@ -7,4 +7,4 @@ import torch
 #### Config ####
 
 # PyTorch dataset
-dataset = torch.utils.data.ConcatDataset([drive,stare,hrf,chase])
\ No newline at end of file
+dataset = torch.utils.data.ConcatDataset([drive,stare,hrf,chase])
diff --git a/bob/ip/binseg/configs/datasets/drivestarechasedb1hrf1024ssliostar.py b/bob/ip/binseg/configs/datasets/drivestarechasedb1hrf1024ssliostar.py
new file mode 100644
index 0000000000000000000000000000000000000000..dd9016b7c076b08cc99e875d4f5cc9dcdd3babd0
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivestarechasedb1hrf1024ssliostar.py
@@ -0,0 +1,33 @@
+from bob.ip.binseg.configs.datasets.drive1024 import dataset as drive
+from bob.ip.binseg.configs.datasets.stare1024 import dataset as stare
+from bob.ip.binseg.configs.datasets.hrf1024 import dataset as hrf
+from bob.ip.binseg.configs.datasets.chasedb11024 import dataset as chasedb
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+import torch
+from bob.ip.binseg.data.binsegdataset import BinSegDataset, SSLBinSegDataset, UnLabeledBinSegDataset
+
+
+#### Config ####
+
+# PyTorch dataset
+labeled_dataset = torch.utils.data.ConcatDataset([drive,stare,hrf,chasedb])
+
+#### Unlabeled IOSTAR Train ####
+unlabeled_transforms = Compose([  
+                        RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+iostarbobdb = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+unlabeled_dataset = UnLabeledBinSegDataset(iostarbobdb, split='train', transform=unlabeled_transforms)
+
+# SSL Dataset
+
+dataset = SSLBinSegDataset(labeled_dataset, unlabeled_dataset)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivestarechasedb1hrfl1024_all.py b/bob/ip/binseg/configs/datasets/drivestarechasedb1hrfl1024_all.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d2b345669dbc334e061780a349af69ce4fe3fe3
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivestarechasedb1hrfl1024_all.py
@@ -0,0 +1,14 @@
+from bob.ip.binseg.configs.datasets.drive1024all import dataset as drive
+from bob.ip.binseg.configs.datasets.stare1024all import dataset as stare
+from bob.ip.binseg.configs.datasets.hrf1024all import dataset as hrf
+from bob.ip.binseg.configs.datasets.chasedb11024all import dataset as chasedb
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+import torch
+
+
+#### Config ####
+
+# PyTorch dataset
+dataset = torch.utils.data.ConcatDataset([drive,stare,hrf,chasedb])
+
diff --git a/bob/ip/binseg/configs/datasets/drivestarechasedb1hrfl1024_all_ssliostar.py b/bob/ip/binseg/configs/datasets/drivestarechasedb1hrfl1024_all_ssliostar.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e68cb52cc45a477cfb33abe8b8efbd019e4c8cd
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivestarechasedb1hrfl1024_all_ssliostar.py
@@ -0,0 +1,33 @@
+from bob.ip.binseg.configs.datasets.drive1024all import dataset as drive
+from bob.ip.binseg.configs.datasets.stare1024all import dataset as stare
+from bob.ip.binseg.configs.datasets.hrf1024all import dataset as hrf
+from bob.ip.binseg.configs.datasets.chasedb11024all import dataset as chasedb
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+import torch
+from bob.ip.binseg.data.binsegdataset import BinSegDataset, SSLBinSegDataset, UnLabeledBinSegDataset
+
+
+#### Config ####
+
+# PyTorch dataset
+labeled_dataset = torch.utils.data.ConcatDataset([drive,stare,hrf,chasedb])
+
+#### Unlabeled IOSTAR Train ####
+unlabeled_transforms = Compose([  
+                        RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+iostarbobdb = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+unlabeled_dataset = UnLabeledBinSegDataset(iostarbobdb, split='train', transform=unlabeled_transforms)
+
+# SSL Dataset
+
+dataset = SSLBinSegDataset(labeled_dataset, unlabeled_dataset)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivestarechasedb1iostar1168_all.py b/bob/ip/binseg/configs/datasets/drivestarechasedb1iostar1168_all.py
new file mode 100644
index 0000000000000000000000000000000000000000..dcf35d87f4bbe9048b82b611e2014e80cbe01d66
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivestarechasedb1iostar1168_all.py
@@ -0,0 +1,13 @@
+from bob.ip.binseg.configs.datasets.drive1168all import dataset as drive
+from bob.ip.binseg.configs.datasets.stare1168all import dataset as stare
+from bob.ip.binseg.configs.datasets.chasedb11168all import dataset as chasedb
+from bob.ip.binseg.configs.datasets.iostarvessel1168all import dataset as iostar
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+import torch
+
+
+#### Config ####
+
+# PyTorch dataset
+dataset = torch.utils.data.ConcatDataset([drive,stare,iostar,chasedb])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivestarechasedb1iostar1168_all_sslhrf.py b/bob/ip/binseg/configs/datasets/drivestarechasedb1iostar1168_all_sslhrf.py
new file mode 100644
index 0000000000000000000000000000000000000000..fde74696dc7be96eb77c0870fa9475755b181e3b
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivestarechasedb1iostar1168_all_sslhrf.py
@@ -0,0 +1,35 @@
+from bob.ip.binseg.configs.datasets.drive1168all import dataset as drive
+from bob.ip.binseg.configs.datasets.stare1168all import dataset as stare
+from bob.ip.binseg.configs.datasets.chasedb11168all import dataset as chasedb
+from bob.ip.binseg.configs.datasets.iostarvessel1168all import dataset as iostar
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+import torch
+from bob.ip.binseg.data.binsegdataset import BinSegDataset, SSLBinSegDataset, UnLabeledBinSegDataset
+
+
+#### Config ####
+
+# PyTorch dataset
+labeled_dataset = torch.utils.data.ConcatDataset([drive,stare,iostar,chasedb])
+
+#### Unlabeled HRF TRAIN ####
+unlabeled_transforms = Compose([  
+                        RandomRotation()
+                        ,Crop(0,108,2336,3296)
+                        ,Resize((1168))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+hrfbobdb = HRF(protocol='default')
+
+# PyTorch dataset
+unlabeled_dataset = UnLabeledBinSegDataset(hrfbobdb, split='train', transform=unlabeled_transforms)
+
+# SSL Dataset
+
+dataset = SSLBinSegDataset(labeled_dataset, unlabeled_dataset)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivestarechasedb1iostar1168sslhrf.py b/bob/ip/binseg/configs/datasets/drivestarechasedb1iostar1168sslhrf.py
new file mode 100644
index 0000000000000000000000000000000000000000..01705e15d8752b628ed6b150cf095db08ca4eed6
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivestarechasedb1iostar1168sslhrf.py
@@ -0,0 +1,35 @@
+from bob.ip.binseg.configs.datasets.drive1168 import dataset as drive
+from bob.ip.binseg.configs.datasets.stare1168 import dataset as stare
+from bob.ip.binseg.configs.datasets.chasedb11168 import dataset as chasedb
+from bob.ip.binseg.configs.datasets.iostarvessel1168 import dataset as iostar
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+import torch
+from bob.ip.binseg.data.binsegdataset import BinSegDataset, SSLBinSegDataset, UnLabeledBinSegDataset
+
+
+#### Config ####
+
+# PyTorch dataset
+labeled_dataset = torch.utils.data.ConcatDataset([drive,stare,iostar,chasedb])
+
+#### Unlabeled HRF TRAIN ####
+unlabeled_transforms = Compose([  
+                        RandomRotation()
+                        ,Crop(0,108,2336,3296)
+                        ,Resize((1168))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+hrfbobdb = HRF(protocol='default')
+
+# PyTorch dataset
+unlabeled_dataset = UnLabeledBinSegDataset(hrfbobdb, split='train', transform=unlabeled_transforms)
+
+# SSL Dataset
+
+dataset = SSLBinSegDataset(labeled_dataset, unlabeled_dataset)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivestareiostarhrf960.py b/bob/ip/binseg/configs/datasets/drivestareiostarhrf960.py
index 911a718dcd6ff604e9acf2dc03fa3dfcf8ced254..52b6b1dd773677d611ca90608620a93d6cb5f781 100644
--- a/bob/ip/binseg/configs/datasets/drivestareiostarhrf960.py
+++ b/bob/ip/binseg/configs/datasets/drivestareiostarhrf960.py
@@ -7,4 +7,4 @@ import torch
 #### Config ####
 
 # PyTorch dataset
-dataset = torch.utils.data.ConcatDataset([drive,stare,hrf,iostar])
\ No newline at end of file
+dataset = torch.utils.data.ConcatDataset([drive,stare,hrf,iostar])
diff --git a/bob/ip/binseg/configs/datasets/drivestareiostarhrf960_all.py b/bob/ip/binseg/configs/datasets/drivestareiostarhrf960_all.py
new file mode 100644
index 0000000000000000000000000000000000000000..cedbea744f1c0843a8d010c99d2a5f7c9692928c
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivestareiostarhrf960_all.py
@@ -0,0 +1,14 @@
+from bob.ip.binseg.configs.datasets.drive960all import dataset as drive
+from bob.ip.binseg.configs.datasets.stare960all import dataset as stare
+from bob.ip.binseg.configs.datasets.hrf960all import dataset as hrf
+from bob.ip.binseg.configs.datasets.iostarvessel960all import dataset as iostar
+from bob.db.chasedb1 import Database as CHASE
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+import torch
+
+
+#### Config ####
+
+# PyTorch dataset
+dataset = torch.utils.data.ConcatDataset([drive,stare,hrf,iostar])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivestareiostarhrf960_all_sslchase.py b/bob/ip/binseg/configs/datasets/drivestareiostarhrf960_all_sslchase.py
new file mode 100644
index 0000000000000000000000000000000000000000..5653591b5b02c5e6af6c92156e9dffb9c00011a2
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivestareiostarhrf960_all_sslchase.py
@@ -0,0 +1,35 @@
+from bob.ip.binseg.configs.datasets.drive960all import dataset as drive
+from bob.ip.binseg.configs.datasets.stare960all import dataset as stare
+from bob.ip.binseg.configs.datasets.hrf960all import dataset as hrf
+from bob.ip.binseg.configs.datasets.iostarvessel960all import dataset as iostar
+from bob.db.chasedb1 import Database as CHASE
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+import torch
+from bob.ip.binseg.data.binsegdataset import BinSegDataset, SSLBinSegDataset, UnLabeledBinSegDataset
+
+
+#### Config ####
+
+# PyTorch dataset
+labeled_dataset = torch.utils.data.ConcatDataset([drive,stare,hrf,iostar])
+
+#### Unlabeled CHASE TRAIN ####
+unlabeled_transforms = Compose([  
+                        Crop(0,18,960,960)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+chasebobdb = CHASE(protocol = 'default')
+
+# PyTorch dataset
+unlabeled_dataset = UnLabeledBinSegDataset(chasebobdb, split='train', transform=unlabeled_transforms)
+
+# SSL Dataset
+
+dataset = SSLBinSegDataset(labeled_dataset, unlabeled_dataset)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivestareiostarhrf960sslchase.py b/bob/ip/binseg/configs/datasets/drivestareiostarhrf960sslchase.py
new file mode 100644
index 0000000000000000000000000000000000000000..a7bd4576766259e83ec633bfac15b4dc46f0da9d
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drivestareiostarhrf960sslchase.py
@@ -0,0 +1,35 @@
+from bob.ip.binseg.configs.datasets.drive960 import dataset as drive
+from bob.ip.binseg.configs.datasets.stare960 import dataset as stare
+from bob.ip.binseg.configs.datasets.hrf960 import dataset as hrf
+from bob.ip.binseg.configs.datasets.iostarvessel960 import dataset as iostar
+from bob.db.chasedb1 import Database as CHASE
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+import torch
+from bob.ip.binseg.data.binsegdataset import BinSegDataset, SSLBinSegDataset, UnLabeledBinSegDataset
+
+
+#### Config ####
+
+# PyTorch dataset
+labeled_dataset = torch.utils.data.ConcatDataset([drive,stare,hrf,iostar])
+
+#### Unlabeled CHASE TRAIN ####
+unlabeled_transforms = Compose([  
+                        Crop(0,18,960,960)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+chasebobdb = CHASE(protocol = 'default')
+
+# PyTorch dataset
+unlabeled_dataset = UnLabeledBinSegDataset(chasebobdb, split='train', transform=unlabeled_transforms)
+
+# SSL Dataset
+
+dataset = SSLBinSegDataset(labeled_dataset, unlabeled_dataset)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/hrf1024.py b/bob/ip/binseg/configs/datasets/hrf1024.py
index 2574901abeb109f8a9b28f038d21552cec77c276..d19f10bd8754f2d76672062d84c30ea806cf6dc1 100644
--- a/bob/ip/binseg/configs/datasets/hrf1024.py
+++ b/bob/ip/binseg/configs/datasets/hrf1024.py
@@ -21,4 +21,4 @@ transforms = Compose([
 bobdb = HRF(protocol = 'default')
 
 # PyTorch dataset
-dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
diff --git a/bob/ip/binseg/configs/datasets/hrf1024all.py b/bob/ip/binseg/configs/datasets/hrf1024all.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5fc4de232e87a8b77f979335058c64ff62ce7c5
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/hrf1024all.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((0,584,0,584))                    
+                        ,Resize((1024))
+                        ,RandomRotation()
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = HRF(protocol = 'default')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
diff --git a/bob/ip/binseg/configs/datasets/hrf1168test.py b/bob/ip/binseg/configs/datasets/hrf1168test.py
new file mode 100644
index 0000000000000000000000000000000000000000..86014b75bd7ea428a5f48f85776189d6eeccb619
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/hrf1168test.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Crop(0,108,2336,3296)
+                        ,Resize((1168))
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = HRF(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/hrf544.py b/bob/ip/binseg/configs/datasets/hrf544.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e2cc05152aa20e4c75ffcb464cd8fd90f89e7bd
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/hrf544.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Resize((363))
+                        ,Pad((0,90,0,91))
+                        ,RandomRotation()
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = HRF(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
diff --git a/bob/ip/binseg/configs/datasets/hrf608.py b/bob/ip/binseg/configs/datasets/hrf608.py
new file mode 100644
index 0000000000000000000000000000000000000000..b26e772a0793d043af6e59f18caab5943805d929
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/hrf608.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((0,345,0,345))
+                        ,Resize(608)
+                        ,RandomRotation()
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = HRF(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
diff --git a/bob/ip/binseg/configs/datasets/hrf960.py b/bob/ip/binseg/configs/datasets/hrf960.py
index 94412e7059b24a8e6a3ebd71b4ce06a7d524ed64..f67c4ba629a14ce0051c4f6c03d2d63e800fc824 100644
--- a/bob/ip/binseg/configs/datasets/hrf960.py
+++ b/bob/ip/binseg/configs/datasets/hrf960.py
@@ -21,4 +21,4 @@ transforms = Compose([
 bobdb = HRF(protocol = 'default')
 
 # PyTorch dataset
-dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
diff --git a/bob/ip/binseg/configs/datasets/hrf960all.py b/bob/ip/binseg/configs/datasets/hrf960all.py
new file mode 100644
index 0000000000000000000000000000000000000000..a34b3090520e94393c7fcbb8185bd62246954733
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/hrf960all.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((0,584,0,584))                    
+                        ,Resize((960))
+                        ,RandomRotation()
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = HRF(protocol = 'default')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
diff --git a/bob/ip/binseg/configs/datasets/iostarvessel1168all.py b/bob/ip/binseg/configs/datasets/iostarvessel1168all.py
new file mode 100644
index 0000000000000000000000000000000000000000..b97d1ed0491458eef7b9c3c36dd472c1d4445b2c
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/iostarvessel1168all.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+#### Config ####
+
+transforms = Compose([  
+                        RandomRotation()
+                        ,Crop(144,0,768,1024)
+                        ,Pad((30,0,30,0))
+                        ,Resize(1168)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/iostarvessel544.py b/bob/ip/binseg/configs/datasets/iostarvessel544.py
new file mode 100644
index 0000000000000000000000000000000000000000..aa03abe2feb64a084fb76e6f60c69afc7499961f
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/iostarvessel544.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Resize(544)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/iostarvessel608.py b/bob/ip/binseg/configs/datasets/iostarvessel608.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fce4507cd090555a2e0bac7f575dfd9d9f85c3d
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/iostarvessel608.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((81,0,81,0))
+                        ,Resize(608)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/iostarvessel960all.py b/bob/ip/binseg/configs/datasets/iostarvessel960all.py
new file mode 100644
index 0000000000000000000000000000000000000000..1338dc225b9cb1fc4f702fafdf8ed991b9dec5fd
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/iostarvessel960all.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+#### Config ####
+
+transforms = Compose([  
+                        Resize(960)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/stare1024all.py b/bob/ip/binseg/configs/datasets/stare1024all.py
new file mode 100644
index 0000000000000000000000000000000000000000..46d8084985c90debdc39ae392c9e358aa11f19fd
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/stare1024all.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.stare import Database as STARE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+#### Config ####
+
+transforms = Compose([  
+                        RandomRotation()
+                        ,Pad((0,32,0,32))
+                        ,Resize(1024)
+                        ,CenterCrop(1024)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = STARE(protocol = 'default')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/stare1168all.py b/bob/ip/binseg/configs/datasets/stare1168all.py
new file mode 100644
index 0000000000000000000000000000000000000000..d2384a0970c9384d494c20e969a1bc0ec916b1b9
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/stare1168all.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.stare import Database as STARE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+#### Config ####
+
+transforms = Compose([  
+                        RandomRotation()
+                        ,Crop(50,0,500,705)
+                        ,Resize(1168)
+                        ,Pad((1,0,1,0))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = STARE(protocol = 'default')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/stare544.py b/bob/ip/binseg/configs/datasets/stare544.py~HEAD
similarity index 100%
rename from bob/ip/binseg/configs/datasets/stare544.py
rename to bob/ip/binseg/configs/datasets/stare544.py~HEAD
diff --git a/bob/ip/binseg/configs/datasets/stare544.py~ssl b/bob/ip/binseg/configs/datasets/stare544.py~ssl
new file mode 100644
index 0000000000000000000000000000000000000000..f03fcefbe6fef80ef1e6a7ec97d2b6c1df221024
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/stare544.py~ssl
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.stare import Database as STARE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  RandomRotation()
+                        ,Resize(471)
+                        ,Pad((0,37,0,36))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = STARE(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/stare960all.py b/bob/ip/binseg/configs/datasets/stare960all.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b7f134c3110b49ca6f41820453e8e360a6b0d2b
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/stare960all.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.stare import Database as STARE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+#### Config ####
+
+transforms = Compose([  
+                        RandomRotation()
+                        ,Pad((0,32,0,32))
+                        ,Resize(960)
+                        ,CenterCrop(960)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = STARE(protocol = 'default')
+
+# PyTorch dataset
+train = BinSegDataset(bobdb, split='train', transform=transforms)
+test = BinSegDataset(bobdb, split='test', transform=transforms)
+
+dataset = torch.utils.data.ConcatDataset([train,test])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/starechasedb1iostarhrf544.py b/bob/ip/binseg/configs/datasets/starechasedb1iostarhrf544.py
new file mode 100644
index 0000000000000000000000000000000000000000..72349b3bd6370c0b996edf90e96403df54ec27af
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/starechasedb1iostarhrf544.py
@@ -0,0 +1,10 @@
+from bob.ip.binseg.configs.datasets.stare544 import dataset as stare
+from bob.ip.binseg.configs.datasets.chasedb1544 import dataset as chase
+from bob.ip.binseg.configs.datasets.iostarvessel544 import dataset as iostar
+from bob.ip.binseg.configs.datasets.hrf544 import dataset as hrf
+import torch
+
+#### Config ####
+
+# PyTorch dataset
+dataset = torch.utils.data.ConcatDataset([stare,chase,hrf,iostar])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/starechasedb1iostarhrf544ssldrive.py b/bob/ip/binseg/configs/datasets/starechasedb1iostarhrf544ssldrive.py
new file mode 100644
index 0000000000000000000000000000000000000000..3a5e3008f73cea8d1163030783d70736fad6ef9f
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/starechasedb1iostarhrf544ssldrive.py
@@ -0,0 +1,34 @@
+from bob.ip.binseg.configs.datasets.stare544 import dataset as stare
+from bob.ip.binseg.configs.datasets.chasedb1544 import dataset as chase
+from bob.ip.binseg.configs.datasets.iostarvessel544 import dataset as iostar
+from bob.ip.binseg.configs.datasets.hrf544 import dataset as hrf
+from bob.db.drive import Database as DRIVE
+from bob.ip.binseg.data.transforms import *
+import torch
+from bob.ip.binseg.data.binsegdataset import BinSegDataset, SSLBinSegDataset, UnLabeledBinSegDataset
+
+
+#### Config ####
+
+# PyTorch dataset
+labeled_dataset = torch.utils.data.ConcatDataset([stare,chase,iostar,hrf])
+
+#### Unlabeled STARE TRAIN ####
+unlabeled_transforms = Compose([  
+                        CenterCrop((544,544))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+drivebobdb = DRIVE(protocol = 'default')
+
+# PyTorch dataset
+unlabeled_dataset = UnLabeledBinSegDataset(drivebobdb, split='train', transform=unlabeled_transforms)
+
+# SSL Dataset
+
+dataset = SSLBinSegDataset(labeled_dataset, unlabeled_dataset)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/models/driussl.py b/bob/ip/binseg/configs/models/driussl.py
new file mode 100644
index 0000000000000000000000000000000000000000..39afd4a03f956b24aeeb078c297fb026cdc369b5
--- /dev/null
+++ b/bob/ip/binseg/configs/models/driussl.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from torch.optim.lr_scheduler import MultiStepLR
+from bob.ip.binseg.modeling.driu import build_driu
+import torch.optim as optim
+from torch.nn import BCEWithLogitsLoss
+from bob.ip.binseg.utils.model_zoo import modelurls
+from bob.ip.binseg.modeling.losses import MixJacLoss
+from bob.ip.binseg.engine.adabound import AdaBound
+
+##### Config #####
+lr = 0.001
+betas = (0.9, 0.999)
+eps = 1e-08
+weight_decay = 0
+final_lr = 0.1
+gamma = 1e-3
+eps = 1e-8
+amsbound = False
+
+scheduler_milestones = [900]
+scheduler_gamma = 0.1
+
+# model
+model = build_driu()
+
+# pretrained backbone
+pretrained_backbone = modelurls['vgg16']
+
+# optimizer
+optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, gamma=gamma,
+                 eps=eps, weight_decay=weight_decay, amsbound=amsbound) 
+
+# criterion
+criterion = MixJacLoss(lambda_u=0.05, jacalpha=0.7)
+
+# scheduler
+scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma)
diff --git a/bob/ip/binseg/configs/models/m2unetssl.py b/bob/ip/binseg/configs/models/m2unetssl.py
new file mode 100644
index 0000000000000000000000000000000000000000..3497cea26b03c44b682cfd3beb167afce3015e43
--- /dev/null
+++ b/bob/ip/binseg/configs/models/m2unetssl.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from torch.optim.lr_scheduler import MultiStepLR
+from bob.ip.binseg.modeling.m2u import build_m2unet
+import torch.optim as optim
+from torch.nn import BCEWithLogitsLoss
+from bob.ip.binseg.utils.model_zoo import modelurls
+from bob.ip.binseg.modeling.losses import MixJacLoss
+from bob.ip.binseg.engine.adabound import AdaBound
+
+##### Config #####
+lr = 0.001
+betas = (0.9, 0.999)
+eps = 1e-08
+weight_decay = 0
+final_lr = 0.1
+gamma = 1e-3
+eps = 1e-8
+amsbound = False
+
+scheduler_milestones = [900]
+scheduler_gamma = 0.1
+
+# model
+model = build_m2unet()
+
+# pretrained backbone
+pretrained_backbone = modelurls['mobilenetv2']
+
+# optimizer
+optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, gamma=gamma,
+                 eps=eps, weight_decay=weight_decay, amsbound=amsbound) 
+    
+# criterion
+criterion = MixJacLoss(lambda_u=0.05, jacalpha=0.7)
+
+# scheduler
+scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma)
diff --git a/bob/ip/binseg/data/binsegdataset.py b/bob/ip/binseg/data/binsegdataset.py
index 86fc97df482707a9715b38790741aff65bfe2b52..2917203c7b530bee796431e0dbe5e7af1f85a2b9 100644
--- a/bob/ip/binseg/data/binsegdataset.py
+++ b/bob/ip/binseg/data/binsegdataset.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 from torch.utils.data import Dataset
+import random
 
 class BinSegDataset(Dataset):
     """PyTorch dataset wrapper around bob.db binary segmentation datasets. 
@@ -18,8 +19,11 @@ class BinSegDataset(Dataset):
     mask : bool
         whether dataset contains masks or not
     """
-    def __init__(self, bobdb, split = 'train', transform = None):
-        self.database = bobdb.samples(split)
+    def __init__(self, bobdb, split = 'train', transform = None,index_to = None):
+        if index_to:
+            self.database = bobdb.samples(split)[:index_to]
+        else:
+            self.database = bobdb.samples(split)
         self.transform = transform
         self.split = split
     
@@ -46,15 +50,12 @@ class BinSegDataset(Dataset):
         Returns
         -------
         list
-            dataitem [img_name, img, gt, mask]
+            dataitem [img_name, img, gt]
         """
         img = self.database[index].img.pil_image()
         gt = self.database[index].gt.pil_image()
         img_name = self.database[index].img.basename
         sample = [img, gt]
-        if self.mask:
-            mask = self.database[index].mask.pil_image()
-            sample.append(mask)
         
         if self.transform :
             sample = self.transform(*sample)
@@ -62,3 +63,101 @@ class BinSegDataset(Dataset):
         sample.insert(0,img_name)
         
         return sample
+
+
+class SSLBinSegDataset(Dataset):
+    """PyTorch dataset wrapper around bob.db binary segmentation datasets. 
+    A transform object can be passed that will be applied to the image, ground truth and mask (if present). 
+    It supports indexing such that dataset[i] can be used to get ith sample.
+    
+    Parameters
+    ---------- 
+    labeled_dataset : :py:class:`torch.utils.data.Dataset`
+        BinSegDataset with labeled samples
+    unlabeled_dataset : :py:class:`torch.utils.data.Dataset`
+        UnLabeledBinSegDataset with unlabeled data
+    """
+    def __init__(self, labeled_dataset, unlabeled_dataset):
+        self.labeled_dataset = labeled_dataset
+        self.unlabeled_dataset = unlabeled_dataset
+    
+
+    def __len__(self):
+        """
+        Returns
+        -------
+        int
+            size of the dataset
+        """
+        return len(self.labeled_dataset)
+    
+    def __getitem__(self,index):
+        """
+        Parameters
+        ----------
+        index : int
+        
+        Returns
+        -------
+        list
+            dataitem [img_name, img, gt, unlabeled_img_name, unlabeled_img]
+        """
+        sample = self.labeled_dataset[index]
+        unlabeled_img_name, unlabeled_img = self.unlabeled_dataset[0]
+        sample.extend([unlabeled_img_name, unlabeled_img])
+        return sample
+
+
+class UnLabeledBinSegDataset(Dataset):
+    # TODO: if switch to handle case were not a bob.db object but a path to a directory is used
+    """PyTorch dataset wrapper around bob.db binary segmentation datasets. 
+    A transform object can be passed that will be applied to the image, ground truth and mask (if present). 
+    It supports indexing such that dataset[i] can be used to get ith sample.
+    
+    Parameters
+    ---------- 
+    dv : :py:mod:`bob.db.base` or str
+        Binary segmentation bob database (e.g. bob.db.drive) or path to folder containing unlabeled images
+    split : str 
+        ``'train'`` or ``'test'``. Defaults to ``'train'``
+    transform : :py:mod:`bob.ip.binseg.data.transforms`, optional
+        A transform or composition of transforms. Defaults to ``None``.
+    """
+    def __init__(self, db, split = 'train', transform = None,index_from= None):
+        if index_from:
+            self.database = db.samples(split)[index_from:]
+        else:
+            self.database = db.samples(split)
+        self.transform = transform
+        self.split = split   
+
+    def __len__(self):
+        """
+        Returns
+        -------
+        int
+            size of the dataset
+        """
+        return len(self.database)
+    
+    def __getitem__(self,index):
+        """
+        Parameters
+        ----------
+        index : int
+        
+        Returns
+        -------
+        list
+            dataitem [img_name, img]
+        """
+        random.shuffle(self.database)
+        img = self.database[index].img.pil_image()
+        img_name = self.database[index].img.basename
+        sample = [img]
+        if self.transform :
+            sample = self.transform(img)
+        
+        sample.insert(0,img_name)
+        
+        return sample
\ No newline at end of file
diff --git a/bob/ip/binseg/engine/ssltrainer.py b/bob/ip/binseg/engine/ssltrainer.py
new file mode 100644
index 0000000000000000000000000000000000000000..382431176fc33e4bf98e3cc38d4440c6f532c30d
--- /dev/null
+++ b/bob/ip/binseg/engine/ssltrainer.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os 
+import logging
+import time
+import datetime
+import torch
+import pandas as pd
+from tqdm import tqdm
+import numpy as np
+
+from bob.ip.binseg.utils.metric import SmoothedValue
+from bob.ip.binseg.utils.plot import loss_curve
+
+def sharpen(x, T):
+    temp = x**(1/T)
+    return temp / temp.sum(dim=1, keepdim=True)
+
+def mix_up(alpha, input, target, unlabeled_input, unlabled_target):
+    """Applies mix up as described in [MIXMATCH_19].
+    
+    Parameters
+    ----------
+    alpha : float
+    input : :py:class:`torch.Tensor`
+    target : :py:class:`torch.Tensor`
+    unlabeled_input : :py:class:`torch.Tensor`
+    unlabled_target : :py:class:`torch.Tensor`
+    
+    Returns
+    -------
+    list
+    """
+    # TODO: 
+    with torch.no_grad():
+        l = np.random.beta(alpha, alpha) # Eq (8)
+        l = max(l, 1 - l) # Eq (9)
+        # Shuffle and concat. Alg. 1 Line: 12
+        w_inputs = torch.cat([input,unlabeled_input],0)
+        w_targets = torch.cat([target,unlabled_target],0)
+        idx = torch.randperm(w_inputs.size(0)) # get random index 
+        
+        # Apply MixUp to labeled data and entries from W. Alg. 1 Line: 13
+        input_mixedup = l * input + (1 - l) * w_inputs[idx[len(input):]] 
+        target_mixedup = l * target + (1 - l) * w_targets[idx[len(target):]]
+        
+        # Apply MixUp to unlabeled data and entries from W. Alg. 1 Line: 14
+        unlabeled_input_mixedup = l * unlabeled_input + (1 - l) * w_inputs[idx[:len(unlabeled_input)]]
+        unlabled_target_mixedup =  l * unlabled_target + (1 - l) * w_targets[idx[:len(unlabled_target)]]
+        return input_mixedup, target_mixedup, unlabeled_input_mixedup, unlabled_target_mixedup
+
+
+def square_rampup(current, rampup_length=16):
+    """slowly ramp-up ``lambda_u``
+    
+    Parameters
+    ----------
+    current : int
+        current epoch
+    rampup_length : int, optional
+        how long to ramp up, by default 16
+    
+    Returns
+    -------
+    float
+        ramp up factor
+    """
+    if rampup_length == 0:
+        return 1.0
+    else:
+        current = np.clip((current/ float(rampup_length))**2, 0.0, 1.0)
+    return float(current)
+
+def linear_rampup(current, rampup_length=16):
+    """slowly ramp-up ``lambda_u``
+    
+    Parameters
+    ----------
+    current : int
+        current epoch
+    rampup_length : int, optional
+        how long to ramp up, by default 16
+    
+    Returns
+    -------
+    float
+        ramp up factor
+    """
+    if rampup_length == 0:
+        return 1.0
+    else:
+        current = np.clip(current / rampup_length, 0.0, 1.0)
+    return float(current)
+
+def guess_labels(unlabeled_images, model):
+    """
+    Calculate the average predictions by 2 augmentations: horizontal and vertical flips
+    Parameters
+    ----------
+    unlabeled_images : :py:class:`torch.Tensor`
+        shape: ``[n,c,h,w]``
+    model : :py:class:`torch.nn.Module`
+    
+    Returns
+    -------
+    :py:class:`torch.Tensor`
+        shape: ``[n,c,h,w]``.
+    """
+    with torch.no_grad():
+        guess1 = torch.sigmoid(model(unlabeled_images)).unsqueeze(0)
+        # Horizontal flip and unsqueeze to work with batches (increase flip dimension by 1)
+        hflip = torch.sigmoid(model(unlabeled_images.flip(2))).unsqueeze(0)
+        guess2  = hflip.flip(3)
+        # Vertical flip and unsqueeze to work with batches (increase flip dimension by 1)
+        vflip = torch.sigmoid(model(unlabeled_images.flip(3))).unsqueeze(0)
+        guess3 = vflip.flip(4)
+        # Concat
+        concat = torch.cat([guess1,guess2,guess3],0)
+        avg_guess = torch.mean(concat,0)
+        return avg_guess
+
+def do_ssltrain(
+    model,
+    data_loader,
+    optimizer,
+    criterion,
+    scheduler,
+    checkpointer,
+    checkpoint_period,
+    device,
+    arguments,
+    output_folder,
+    rampup_length
+):
+    """ 
+    Train model and save to disk.
+    
+    Parameters
+    ----------
+    model : :py:class:`torch.nn.Module` 
+        Network (e.g. DRIU, HED, UNet)
+    data_loader : :py:class:`torch.utils.data.DataLoader`
+    optimizer : :py:mod:`torch.optim`
+    criterion : :py:class:`torch.nn.modules.loss._Loss`
+        loss function
+    scheduler : :py:mod:`torch.optim`
+        learning rate scheduler
+    checkpointer : :py:class:`bob.ip.binseg.utils.checkpointer.DetectronCheckpointer`
+        checkpointer
+    checkpoint_period : int
+        save a checkpoint every n epochs
+    device : str  
+        device to use ``'cpu'`` or ``'cuda'``
+    arguments : dict
+        start and end epochs
+    output_folder : str 
+        output path
+    rampup_length : int
+        rampup epochs
+    """
+    logger = logging.getLogger("bob.ip.binseg.engine.trainer")
+    logger.info("Start training")
+    start_epoch = arguments["epoch"]
+    max_epoch = arguments["max_epoch"]
+
+    # Log to file
+    with open (os.path.join(output_folder,"{}_trainlog.csv".format(model.name)), "a+",1) as outfile:
+        for state in optimizer.state.values():
+            for k, v in state.items():
+                if isinstance(v, torch.Tensor):
+                    state[k] = v.to(device)
+
+        model.train().to(device)
+        # Total training timer
+        start_training_time = time.time()
+        for epoch in range(start_epoch, max_epoch):
+            scheduler.step()
+            losses = SmoothedValue(len(data_loader))
+            labeled_loss = SmoothedValue(len(data_loader))
+            unlabeled_loss = SmoothedValue(len(data_loader))
+            epoch = epoch + 1
+            arguments["epoch"] = epoch
+            
+            # Epoch time
+            start_epoch_time = time.time()
+
+            for samples in tqdm(data_loader):
+                # labeled
+                images = samples[1].to(device)
+                ground_truths = samples[2].to(device)
+                unlabeled_images = samples[4].to(device)
+                # labeled outputs
+                outputs = model(images)
+                unlabeled_outputs = model(unlabeled_images)
+                # guessed unlabeled outputs
+                unlabeled_ground_truths = guess_labels(unlabeled_images, model)
+                #unlabeled_ground_truths = sharpen(unlabeled_ground_truths,0.5)
+                #images, ground_truths, unlabeled_images, unlabeled_ground_truths = mix_up(0.75, images, ground_truths, unlabeled_images, unlabeled_ground_truths)
+                ramp_up_factor = square_rampup(epoch,rampup_length=rampup_length)
+
+                loss, ll, ul = criterion(outputs, ground_truths, unlabeled_outputs, unlabeled_ground_truths, ramp_up_factor)
+                optimizer.zero_grad()
+                loss.backward()
+                optimizer.step()
+                losses.update(loss)
+                labeled_loss.update(ll)
+                unlabeled_loss.update(ul)
+                logger.debug("batch loss: {}".format(loss.item()))
+
+            if epoch % checkpoint_period == 0:
+                checkpointer.save("model_{:03d}".format(epoch), **arguments)
+
+            if epoch == max_epoch:
+                checkpointer.save("model_final", **arguments)
+
+            epoch_time = time.time() - start_epoch_time
+
+
+            eta_seconds = epoch_time * (max_epoch - epoch)
+            eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))
+
+            outfile.write(("{epoch}, "
+                        "{avg_loss:.6f}, "
+                        "{median_loss:.6f}, "
+                        "{median_labeled_loss},"
+                        "{median_unlabeled_loss},"
+                        "{lr:.6f}, "
+                        "{memory:.0f}"
+                        "\n"
+                        ).format(
+                    eta=eta_string,
+                    epoch=epoch,
+                    avg_loss=losses.avg,
+                    median_loss=losses.median,
+                    median_labeled_loss = labeled_loss.median,
+                    median_unlabeled_loss = unlabeled_loss.median,
+                    lr=optimizer.param_groups[0]["lr"],
+                    memory = (torch.cuda.max_memory_allocated() / 1024.0 / 1024.0) if torch.cuda.is_available() else .0,
+                    )
+                )  
+            logger.info(("eta: {eta}, " 
+                        "epoch: {epoch}, "
+                        "avg. loss: {avg_loss:.6f}, "
+                        "median loss: {median_loss:.6f}, "
+                        "labeled loss: {median_labeled_loss}, "
+                        "unlabeled loss: {median_unlabeled_loss}, "
+                        "lr: {lr:.6f}, "
+                        "max mem: {memory:.0f}"
+                        ).format(
+                    eta=eta_string,
+                    epoch=epoch,
+                    avg_loss=losses.avg,
+                    median_loss=losses.median,
+                    median_labeled_loss = labeled_loss.median,
+                    median_unlabeled_loss = unlabeled_loss.median,
+                    lr=optimizer.param_groups[0]["lr"],
+                    memory = (torch.cuda.max_memory_allocated() / 1024.0 / 1024.0) if torch.cuda.is_available() else .0
+                    )
+                )
+
+
+        total_training_time = time.time() - start_training_time
+        total_time_str = str(datetime.timedelta(seconds=total_training_time))
+        logger.info(
+            "Total training time: {} ({:.4f} s / epoch)".format(
+                total_time_str, total_training_time / (max_epoch)
+            ))
+        
+    log_plot_file = os.path.join(output_folder,"{}_trainlog.pdf".format(model.name))
+    logdf = pd.read_csv(os.path.join(output_folder,"{}_trainlog.csv".format(model.name)),header=None, names=["avg. loss", "median loss", "labeled loss", "unlabeled loss", "lr","max memory"])
+    fig = loss_curve(logdf,output_folder)
+    logger.info("saving {}".format(log_plot_file))
+    fig.savefig(log_plot_file)
+  
\ No newline at end of file
diff --git a/bob/ip/binseg/engine/trainer.py b/bob/ip/binseg/engine/trainer.py
index f06fb8d8e9ed396d24c1b810c46e1ac362056e9c..c8146cb922f8a9a93ff11e6fc155de5b2cb87da9 100644
--- a/bob/ip/binseg/engine/trainer.py
+++ b/bob/ip/binseg/engine/trainer.py
@@ -58,6 +58,10 @@ def do_train(
     with open (os.path.join(output_folder,"{}_trainlog.csv".format(model.name)), "a+", 1) as outfile:
         
         model.train().to(device)
+        for state in optimizer.state.values():
+            for k, v in state.items():
+                if isinstance(v, torch.Tensor):
+                    state[k] = v.to(device)
         # Total training timer
         start_training_time = time.time()
 
diff --git a/bob/ip/binseg/modeling/losses.py b/bob/ip/binseg/modeling/losses.py
index 5eeb7950ebe344dadc08fcc8b21c057a081c08ed..7bcde8503fdb2334fea0a2f2ce9cc1b0c8253343 100644
--- a/bob/ip/binseg/modeling/losses.py
+++ b/bob/ip/binseg/modeling/losses.py
@@ -49,9 +49,9 @@ class SoftJaccardBCELogitsLoss(_Loss):
     Attributes
     ----------
     alpha : float
-        determines the weighting of SoftJaccard and BCE. Default: ``0.3``
+        determines the weighting of SoftJaccard and BCE. Default: ``0.7``
     """
-    def __init__(self, alpha=0.3, size_average=None, reduce=None, reduction='mean', pos_weight=None):
+    def __init__(self, alpha=0.7, size_average=None, reduce=None, reduction='mean', pos_weight=None):
         super(SoftJaccardBCELogitsLoss, self).__init__(size_average, reduce, reduction) 
         self.alpha = alpha   
 
@@ -160,4 +160,41 @@ class HEDSoftJaccardBCELogitsLoss(_Loss):
             loss = self.alpha * bceloss + (1 - self.alpha) * (1-softjaccard)
             loss_over_all_inputs.append(loss.unsqueeze(0))
         final_loss = torch.cat(loss_over_all_inputs).mean()
-        return loss
\ No newline at end of file
+        return loss
+
+
+
+class MixJacLoss(_Loss):
+    """ 
+    Attributes
+    ----------
+    lambda_u : int
+        weighting factor applied to the unlabeled (consistency) loss term.
+    """
+    def __init__(self, lambda_u=100, jacalpha=0.7, size_average=None, reduce=None, reduction='mean', pos_weight=None):
+        super(MixJacLoss, self).__init__(size_average, reduce, reduction)
+        self.lambda_u = lambda_u
+        self.labeled_loss = SoftJaccardBCELogitsLoss(alpha=jacalpha)
+        self.unlabeled_loss = torch.nn.BCEWithLogitsLoss()
+
+
+    @weak_script_method
+    def forward(self, input, target, unlabeled_input, unlabeled_traget, ramp_up_factor):
+        """
+        Parameters
+        ----------
+        input : :py:class:`torch.Tensor`
+        target : :py:class:`torch.Tensor`
+        unlabeled_input : :py:class:`torch.Tensor`
+        unlabeled_traget : :py:class:`torch.Tensor`
+        ramp_up_factor : float
+        
+        Returns
+        -------
+        list
+        """
+        ll = self.labeled_loss(input,target)
+        ul = self.unlabeled_loss(unlabeled_input, unlabeled_traget)
+        
+        loss = ll + self.lambda_u * ramp_up_factor * ul
+        return loss, ll, ul
\ No newline at end of file
diff --git a/bob/ip/binseg/script/binseg.py b/bob/ip/binseg/script/binseg.py
index c439b93c0fc2ed4d7062f7514a5a316f927ffa42..884852e48e374b39de2227dccaeb29610afd67bf 100644
--- a/bob/ip/binseg/script/binseg.py
+++ b/bob/ip/binseg/script/binseg.py
@@ -24,7 +24,8 @@ from bob.extension.scripts.click_helper import (verbosity_option,
 from bob.ip.binseg.utils.checkpointer import DetectronCheckpointer
 from torch.utils.data import DataLoader
 from bob.ip.binseg.engine.trainer import do_train
 from bob.ip.binseg.engine.valtrainer import do_valtrain
+from bob.ip.binseg.engine.ssltrainer import do_ssltrain
 from bob.ip.binseg.engine.inferencer import do_inference
 from bob.ip.binseg.utils.plot import plot_overview
 from bob.ip.binseg.utils.click import OptionEatAll
@@ -96,7 +100,7 @@ def binseg():
     help='Number of epochs used for training',
     show_default=True,
     required=True,
-    default=6,
+    default=1000,
     cls=ResourceOption)
 @click.option(
     '--checkpoint-period',
@@ -104,7 +108,7 @@ def binseg():
     help='Number of epochs after which a checkpoint is saved',
     show_default=True,
     required=True,
-    default=2,
+    default=100,
     cls=ResourceOption)
 @click.option(
     '--device',
@@ -400,7 +404,11 @@ def visualize(dataset, output_path, **kwargs):
     overlay(dataset=dataset, output_path=output_path)
 
 
+<<<<<<< HEAD
 # Validation Train
+=======
+# SSLTrain
+>>>>>>> ssl
 @binseg.command(entry_point_group='bob.ip.binseg.config', cls=ConfigCommand)
 @click.option(
     '--output-path',
@@ -454,7 +462,11 @@ def visualize(dataset, output_path, **kwargs):
     help='Number of epochs used for training',
     show_default=True,
     required=True,
+<<<<<<< HEAD
     default=6,
+=======
+    default=1000,
+>>>>>>> ssl
     cls=ResourceOption)
 @click.option(
     '--checkpoint-period',
@@ -462,7 +474,11 @@ def visualize(dataset, output_path, **kwargs):
     help='Number of epochs after which a checkpoint is saved',
     show_default=True,
     required=True,
+<<<<<<< HEAD
     default=2,
+=======
+    default=100,
+>>>>>>> ssl
     cls=ResourceOption)
 @click.option(
     '--device',
@@ -473,6 +489,7 @@ def visualize(dataset, output_path, **kwargs):
     default='cpu',
     cls=ResourceOption)
 @click.option(
+<<<<<<< HEAD
     '--valsize',
     '-a',
     help='Size of validation set',
@@ -482,6 +499,18 @@ def visualize(dataset, output_path, **kwargs):
     cls=ResourceOption)
 @verbosity_option(cls=ResourceOption)
 def valtrain(model
+=======
+    '--rampup',
+    '-r',
+    help='Ramp-up length in epochs',
+    show_default=True,
+    required=True,
+    default=900,
+    cls=ResourceOption)
+
+@verbosity_option(cls=ResourceOption)
+def ssltrain(model
+>>>>>>> ssl
         ,optimizer
         ,scheduler
         ,output_path
@@ -492,12 +521,17 @@ def valtrain(model
         ,dataset
         ,checkpoint_period
         ,device
+<<<<<<< HEAD
         ,valsize
+=======
+        ,rampup
+>>>>>>> ssl
         ,**kwargs):
     """ Train a model """
     
     if not os.path.exists(output_path): os.makedirs(output_path)
     
+<<<<<<< HEAD
 
     # Validation and training set size
     train_size = len(dataset) - valsize 
@@ -512,6 +546,15 @@ def valtrain(model
 
     valid_loader = torch.utils.data.DataLoader(dataset, pin_memory=torch.cuda.is_available(), batch_size=batch_size,
                                                    sampler=SubsetRandomSampler(valid_indices))
+=======
+    # PyTorch dataloader
+    data_loader = DataLoader(
+        dataset = dataset
+        ,batch_size = batch_size
+        ,shuffle= True
+        ,pin_memory = torch.cuda.is_available()
+        )
+>>>>>>> ssl
 
     # Checkpointer
     checkpointer = DetectronCheckpointer(model, optimizer, scheduler,save_dir = output_path, save_to_disk=True)
@@ -524,8 +567,13 @@ def valtrain(model
     # Train
     logger.info("Training for {} epochs".format(arguments["max_epoch"]))
     logger.info("Continuing from epoch {}".format(arguments["epoch"]))
+<<<<<<< HEAD
     do_valtrain(model
             , train_loader
+=======
+    do_ssltrain(model
+            , data_loader
+>>>>>>> ssl
             , optimizer
             , criterion
             , scheduler
@@ -534,5 +582,9 @@ def valtrain(model
             , device
             , arguments
             , output_path
+<<<<<<< HEAD
             , valid_loader
+=======
+            , rampup
+>>>>>>> ssl
             )
\ No newline at end of file
diff --git a/bob/ip/binseg/utils/plot.py b/bob/ip/binseg/utils/plot.py
index 9c07902c67ce5140199ff043f59f1874bca7425e..3cf05c363cea6bef6dc21c10bd7c42cd07ee4345 100644
--- a/bob/ip/binseg/utils/plot.py
+++ b/bob/ip/binseg/utils/plot.py
@@ -190,10 +190,14 @@ def plot_overview(outputfolders,title):
           rows = outfile.readlines()
           lastrow = rows[-1]
           parameter = int(lastrow.split()[1].replace(',',''))
+<<<<<<< HEAD
         name = '[P={:.2f}M] {}'.format(parameter/100**3, modelname)
         #name = '[P={:.2f}M] {} {}'.format(parameter/100**3, modelname, datasetname)
+=======
+        name = '[P={:.2f}M] {} {}'.format(parameter/100**3, modelname, datasetname)
+>>>>>>> ssl
         names.append(name)
-    title = folder.split('/')[-2]
+    #title = folder.split('/')[-4]
     fig = precision_recall_f1iso(precisions,recalls,names,title)
     #fig = precision_recall_f1iso(precisions,recalls,names,title)
     return fig
@@ -291,4 +295,4 @@ def overlay(dataset, output_path):
         # save to disk
         overlayed_path = os.path.join(output_path,'overlayed')
         if not os.path.exists(overlayed_path): os.makedirs(overlayed_path)
-        overlayed.save(os.path.join(overlayed_path,name))
\ No newline at end of file
+        overlayed.save(os.path.join(overlayed_path,name))
diff --git a/doc/benchmarkresults.rst b/doc/benchmarkresults.rst
new file mode 100644
index 0000000000000000000000000000000000000000..fb30961cb203ea3f2fe1f83f96c34d99e26d2403
--- /dev/null
+++ b/doc/benchmarkresults.rst
@@ -0,0 +1,25 @@
+.. -*- coding: utf-8 -*-
+.. _bob.ip.binseg.benchmarkresults:
+
+
+==================
+Benchmark Results
+==================
+
+Dice Scores
+===========
+
+* Benchmark results for models: DRIU, HED, M2UNet and U-Net.
+* Train-Test split as indicated in :ref:`bob.ip.binseg.datasets`
+
++--------+----------+--------+---------+--------+--------+
+|        | CHASEDB1 | DRIVE  | HRF1168 | IOSTAR | STARE  |
++--------+----------+--------+---------+--------+--------+
+| DRIU   | 0.8114   | 0.8226 | 0.7865  | 0.8273 | 0.8286 |
++--------+----------+--------+---------+--------+--------+
+| HED    | 0.8111   | 0.8192 | 0.7868  | 0.8275 | 0.8250 |
++--------+----------+--------+---------+--------+--------+
+| M2UNet | 0.8035   | 0.8051 | 0.7838  | 0.8194 | 0.8174 |
++--------+----------+--------+---------+--------+--------+
+| UNet   | 0.8136   | 0.8237 | 0.7941  | 0.8203 | 0.8306 |
++--------+----------+--------+---------+--------+--------+
diff --git a/doc/covdresults.rst b/doc/covdresults.rst
new file mode 100644
index 0000000000000000000000000000000000000000..6dca5a6d7cf58178821fb3b3b681f79e7a5061e6
--- /dev/null
+++ b/doc/covdresults.rst
@@ -0,0 +1,35 @@
+.. -*- coding: utf-8 -*-
+.. _bob.ip.binseg.covdresults:
+
+
+==========================
+COVD- and COVD-SSL Results
+==========================
+
+Dice Scores
+===========
+
++-------------------+---------------+---------+
+|                   | DRIU          | M2U-Net |
++-------------------+---------------+---------+
+| COVD-DRIVE        | 0.7896        | 0.7906  |
++-------------------+---------------+---------+
+| COVD-DRIVE SSL    | 0.7870        | 0.7938  |
++-------------------+---------------+---------+
+| COVD-STARE        | 0.7979        | 0.8120  |
++-------------------+---------------+---------+
+| COVD-STARE SSL    | 0.8062        | 0.8222  |
++-------------------+---------------+---------+
+| COVD-CHASEDB1     | 0.7979        | 0.7898  |
++-------------------+---------------+---------+
+| COVD-CHASEDB1 SSL | 0.7976        | 0.8000  |
++-------------------+---------------+---------+
+| COVD-HRF          | 0.8013        | 0.8036  |
++-------------------+---------------+---------+
+| COVD-HRF SSL      | still running | 0.7999  |
++-------------------+---------------+---------+
+| COVD-IOSTAR       | 0.7934        | 0.7953  |
++-------------------+---------------+---------+
+| COVD-IOSTAR SSL   | 0.7995        | 0.7868  |
++-------------------+---------------+---------+
+
diff --git a/doc/datasets.rst b/doc/datasets.rst
index 67939144b975ed25b1b01d996b656d9aa4c2fe6e..2543b80163bc863e7a5762b85c20f151e16240b4 100644
--- a/doc/datasets.rst
+++ b/doc/datasets.rst
@@ -1,31 +1,41 @@
 .. -*- coding: utf-8 -*-
 .. _bob.ip.binseg.datasets:
 
-
 ==================
-Supported Datasets 
+Supported Datasets
 ==================
 
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
-| # | Name        | H x W              | # imgs | Train | Test | Mask | Vessel | OD | Cup | Ethnicity                 | 
-+===+=============+====================+========+=======+======+======+========+====+=====+===========================+
-| 1 | DRIVE       | 584 x 565          | 40     |  20   | 20   |   x  |    x   |    |     |   Dutch (adult)           |
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
-| 2 | STARE       | 605 x 700          | 20     |  10   | 10   |      |    x   |    |     |   White American (adult)  |
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
-| 3 | CHASE_DB1   | 960 x 999          | 28     |   8   | 20   |      |    x   |    |     |   British (child)         |
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
-| 4 | HRF         | 2336 x 3504        | 45     |  15   | 30   |   x  |    x   |    |     |                           |
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
-| 5 | IOSTAR      | 1024 x 1024        | 30     |   20  | 10   |   x  |    x   |  x |     |                           |
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
-| 6 | DRIONS-DB   | 400 x 600          | 110    |   60  | 50   |      |        |  x |     |                           |
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
-| 7 | RIM-ONE r3  | 1424 x 1072        | 159    |   99  | 60   |      |        |  x |  x  |                           |
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
-| 8 | Drishti-GS1 | varying            | 101    |  50   |   51 |      |        |  x |  x  |                           |
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
-| 9 | REFUGE train| 2056 x 2124        | 400    | 400   |      |      |        |  x |  x  |                           |
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
-| 9 | REFUGE val  | 1634 x 1634        | 400    |       | 400  |      |        |  x |  x  |                           |
-+---+-------------+--------------------+--------+-------+------+------+--------+----+-----+---------------------------+
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+|  #  |     Name      |    H x W    | # imgs | Train | Test | Mask | Vessel | OD  | Cup | Train-Test split reference |
++=====+===============+=============+========+=======+======+======+========+=====+=====+============================+
+| 1   | Drive_        | 584 x 565   | 40     | 20    | 20   | x    | x      |     |     | `Staal et al. (2004)`_     |
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+| 2   | STARE_        | 605 x 700   | 20     | 10    | 10   |      | x      |     |     | `Maninis et al. (2016)`_   |
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+| 3   | CHASEDB1_     | 960 x 999   | 28     | 8     | 20   |      | x      |     |     | `Fraz et al. (2012)`_      |
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+| 4   | HRF_          | 2336 x 3504 | 45     | 15    | 30   | x    | x      |     |     | `Orlando et al. (2016)`_   |
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+| 5   | IOSTAR_       | 1024 x 1024 | 30     | 20    | 10   | x    | x      | x   |     | `Meyer et al. (2017)`_     |
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+| 6   | DRIONS-DB_    | 400 x 600   | 110    | 60    | 50   |      |        | x   |     | `Maninis et al. (2016)`_   |
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+| 7   | RIM-ONEr3_    | 1424 x 1072 | 159    | 99    | 60   |      |        | x   | x   | `Maninis et al. (2016)`_   |
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+| 8   | Drishti-GS1_  | varying     | 101    | 50    | 51   |      |        | x   | x   | `Sivaswamy et al. (2014)`_ |
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+| 9   | REFUGE_ train | 2056 x 2124 | 400    | 400   |      |      |        | x   | x   | REFUGE_                    |
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+| 9   | REFUGE_ val   | 1634 x 1634 | 400    |       | 400  |      |        | x   | x   | REFUGE_                    |
++-----+---------------+-------------+--------+-------+------+------+--------+-----+-----+----------------------------+
+
+
+Add-on: Folder-based Dataset
+============================
+
+For quick experimentation we also provide a PyTorch class that works with a 
+dataset folder structure.
+
+
+
+.. include:: links.rst
\ No newline at end of file
diff --git a/doc/evaluation.rst b/doc/evaluation.rst
new file mode 100644
index 0000000000000000000000000000000000000000..0f0df559ce53b8003088e5d87397d42eafdda2f9
--- /dev/null
+++ b/doc/evaluation.rst
@@ -0,0 +1,82 @@
+.. -*- coding: utf-8 -*-
+.. _bob.ip.binseg.evaluation:
+
+==========
+Evaluation
+==========
+
+To evaluate trained models use ``bob binseg test`` followed by
+the model config, the dataset config and the path to the pretrained
+model via the argument ``-w``.
+
+Alternatively point to the output folder used during training via
+the ``-o`` argument. The Checkpointer will load the model as indicated
+in the file: ``last_checkpoint``.
+
+Use ``bob binseg test --help`` for more information.
+
+E.g. run inference on model M2U-Net on the DRIVE test set:
+
+.. code-block:: bash
+
+    # Point directly to saved model via -w argument:
+    bob binseg test M2UNet DRIVETEST -o /outputfolder/for/results -w /direct/path/to/weight/model_final.pth
+
+    # Use training output path (requires last_checkpoint file to be present)
+    # The evaluation results will be stored in the same folder
+    bob binseg test M2UNet DRIVETEST -o /DRIVE/M2UNet/output
+
+
+Pretrained Models
+=================
+
+Due to storage limitations we only provide weights of a subset
+of all evaluated models, namely all DRIU and M2U-Net variants:
+
+
+
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+|                    | DRIU                                                                                                 | M2UNet                                                                                                             |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| DRIVE              | `DRIU_DRIVE.pth <https://dl.dropboxusercontent.com/s/rggn9ebj38c06uf/DRIU_DRIVE.pth>`_               | `M2UNet_DRIVE.pth <https://dl.dropboxusercontent.com/s/55xply8jm0g2skp/M2UNet_DRIVE.pth>`_                         |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| COVD-DRIVE         |                                                                                                      | `M2UNet_COVD-DRIVE.pth <https://dl.dropboxusercontent.com/s/x5wb84uao8nlx44/M2UNet_COVD-DRIVE.pth>`_               |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| COVD-DRIVE SSL     |                                                                                                      | `M2UNet_COVD-DRIVE_SSL.pth <https://dl.dropboxusercontent.com/s/hp7fg6bct0i3awr/M2UNet_COVD-DRIVE_SSL.pth>`_       |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| STARE              | `DRIU_STARE.pth <https://dl.dropboxusercontent.com/s/sw5ivfzgz5djirc/DRIU_STARE.pth>`_               | `M2UNet_STARE.pth <https://dl.dropboxusercontent.com/s/pc9wb8r7tjvg06p/M2UNet_STARE.pth>`_                         |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| COVD-STARE         |                                                                                                      | `M2UNet_COVD-STARE.pth <https://dl.dropboxusercontent.com/s/vh1trws2nxqt65y/M2UNet_COVD-STARE.pth>`_               |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| COVD-STARE SSL     |                                                                                                      | `M2UNet_COVD-STARE_SSL.pth <https://dl.dropboxusercontent.com/s/slcvfgf1saf7t19/M2UNet_COVD-STARE_SSL.pth>`_       |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| CHASE_DB1          | `DRIU_CHASEDB1.pth <https://dl.dropboxusercontent.com/s/15gxvhdtq0gw074/DRIU_CHASEDB1.pth>`_         | `M2UNet_CHASEDB1.pth <https://dl.dropboxusercontent.com/s/jqq0z9boi17nhqf/M2UNet_CHASEDB1.pth>`_                   |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| COVD-CHASE_DB1     |                                                                                                      | `M2UNet_COVD-CHASEDB1.pth <https://dl.dropboxusercontent.com/s/pvbp0qky13q5o11/M2UNet_COVD-CHASEDB1.pth>`_         |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| COVD-CHASE_DB1 SSL |                                                                                                      | `M2UNet_COVD-CHASEDB1_SSL.pth <https://dl.dropboxusercontent.com/s/qx7mm5h8ywm98fi/M2UNet_COVD-CHASEDB1_SSL.pth>`_ |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| IOSTARVESSEL       | `DRIU_IOSTARVESSEL.pth <https://dl.dropboxusercontent.com/s/dx1dp8g4nct5r2z/DRIU_IOSTARVESSEL.pth>`_ | `M2UNet_IOSTARVESSEL.pth <https://dl.dropboxusercontent.com/s/g9jyvar9x8vvihr/M2UNet_IOSTARVESSEL.pth>`_           |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| COVD-IOSTAR        |                                                                                                      | `M2UNet_COVD-IOSTAR.pth <https://dl.dropboxusercontent.com/s/t5b2qomq6ey8i9t/M2UNet_COVD-IOSTAR.pth>`_             |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| COVD-IOSTAR SSL    |                                                                                                      | `M2UNet_COVD-IOSTAR_SSL.pth <https://dl.dropboxusercontent.com/s/70ynm2k3bpkj4mq/M2UNet_COVD-IOSTAR_SSL.pth>`_     |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| HRF                | `DRIU_HRF1168.pth <https://dl.dropboxusercontent.com/s/c02m2zyby1zndqx/DRIU_HRF1168.pth>`_           | `M2UNet_HRF1168.pth <https://dl.dropboxusercontent.com/s/g34g6nai1rsgbsc/M2UNet_HRF1168.pth>`_                     |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| COVD-HRF           |                                                                                                      | `M2UNet_COVD-HRF.pth <https://dl.dropboxusercontent.com/s/o3edhljeidl6fvi/M2UNet_COVD-HRF.pth>`_                   |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+| COVD-HRF SSL       |                                                                                                      | `M2UNet_COVD-HRF_SSL.pth <https://dl.dropboxusercontent.com/s/2e0aq8a5vbop2yx/M2UNet_COVD-HRF_SSL.pth>`_           |
++--------------------+------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------+
+
+
+
+To run evaluation of pretrained models pass url as ``-w`` argument. E.g.:
+
+.. code-block:: bash
+
+    bob binseg test DRIU DRIVETEST -o Evaluation_DRIU_DRIVE -w https://dl.dropboxusercontent.com/s/rggn9ebj38c06uf/DRIU_DRIVE.pth
+    bob binseg test M2UNet DRIVETEST -o Evaluation_M2UNet_DRIVE -w https://dl.dropboxusercontent.com/s/55xply8jm0g2skp/M2UNet_DRIVE.pth
+
+
+
diff --git a/doc/index.rst b/doc/index.rst
index fe6cf753734f6167476572120efa5ad3283073f7..d38e7a4e60c83c1bc4703071740adb9add84011f 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -13,11 +13,16 @@ Users Guide
 
 .. toctree::
    :maxdepth: 2
- 
+
+   setup
    datasets
+   training
+   evaluation
+   benchmarkresults
+   covdresults
    api
    references
 
 .. todolist::
 
-.. include:: links.rst
\ No newline at end of file
+.. include:: links.rst
diff --git a/doc/links.rst b/doc/links.rst
index d8a696c481b35c7360d2da0cf30ab2f392fada20..7b94ab0645774c0e00d9c1f15065d4f093fb9be0 100644
--- a/doc/links.rst
+++ b/doc/links.rst
@@ -4,6 +4,50 @@
 
 .. _idiap: http://www.idiap.ch
 .. _bob: http://www.idiap.ch/software/bob
-.. _installation: https://www.idiap.ch/software/bob/install
+.. _installation: https://www.idiap.ch/software/bob/docs/bob/docs/stable/bob/bob/doc/install.html
 .. _mailing list: https://www.idiap.ch/software/bob/discuss
-.. _torchvision package: https://github.com/pytorch/vision
\ No newline at end of file
+.. _torchvision package: https://github.com/pytorch/vision
+
+.. DRIVE
+
+.. _drive: https://doi.org/10.1109/TMI.2004.825627
+.. _staal et al. (2004): https://doi.org/10.1109/TMI.2004.825627
+
+.. STARE
+
+.. _stare: https://doi.org/10.1109/42.845178
+.. _maninis et al. (2016): https://doi.org/10.1007/978-3-319-46723-8_17
+
+.. HRF
+
+.. _hrf: http://dx.doi.org/10.1155/2013/154860
+.. _orlando et al. (2016): https://doi.org/10.1109/TBME.2016.2535311
+
+.. IOSTAR
+
+.. _iostar: https://doi.org/10.1109/TMI.2016.2587062
+.. _meyer et al. (2017): https://doi.org/10.1007/978-3-319-59876-5_56
+
+.. CHASEDB1
+
+.. _chasedb1: https://doi.org/10.1109/TBME.2012.2205687
+.. _fraz et al. (2012): https://doi.org/10.1109/TBME.2012.2205687
+
+.. DRIONSDB
+
+.. _drions-db: http://dx.doi.org/10.1016/j.artmed.2008.04.005
+.. _maninis et al. (2016): https://doi.org/10.1007/978-3-319-46723-8_17
+
+.. RIM-ONE r3
+
+.. _rim-oner3: https://dspace5.zcu.cz/bitstream/11025/29670/1/Fumero.pdf
+.. _maninis et al. (2016): https://doi.org/10.1007/978-3-319-46723-8_17
+
+.. Drishti-GS1
+
+.. _drishti-gs1: https://doi.org/10.1109/ISBI.2014.6867807
+.. _sivaswamy et al. (2014): https://doi.org/10.1109/ISBI.2014.6867807
+
+.. REFUGE
+
+.. _refuge: http://ai.baidu.com/broad/download?dataset=gon
diff --git a/doc/setup.rst b/doc/setup.rst
new file mode 100644
index 0000000000000000000000000000000000000000..5b5de78fa7bdd1fd30192f9d14b2df4ab2bd92b9
--- /dev/null
+++ b/doc/setup.rst
@@ -0,0 +1,91 @@
+.. -*- coding: utf-8 -*-
+.. _bob.ip.binseg.setup:
+
+=========
+Setup
+=========
+
+Bob.ip.binseg
+=============
+
+Complete bob's `installation`_ instructions. Then, to install this
+package
+
+.. code-block:: bash
+
+    conda install bob.ip.binseg
+
+Dataset Links
+=============
+
++------------+----------------------------------------------------------------------+
+| Dataset    | Website                                                              |
++------------+----------------------------------------------------------------------+
+| STARE      | http://cecas.clemson.edu/~ahoover/stare/                             |
++------------+----------------------------------------------------------------------+
+| DRIVE      | https://www.isi.uu.nl/Research/Databases/DRIVE/                      |
++------------+----------------------------------------------------------------------+
+| DRIONS     | http://www.ia.uned.es/~ejcarmona/DRIONS-DB.html                      |
++------------+----------------------------------------------------------------------+
+| RIM-ONE    | http://medimrg.webs.ull.es/research/downloads/                       |
++------------+----------------------------------------------------------------------+
+| CHASE-DB1  | https://blogs.kingston.ac.uk/retinal/chasedb1/                       |
++------------+----------------------------------------------------------------------+
+| HRF        | https://www5.cs.fau.de/research/data/fundus-images/                  |
++------------+----------------------------------------------------------------------+
+| Drishti-GS | http://cvit.iiit.ac.in/projects/mip/drishti-gs/mip-dataset2/Home.php |
++------------+----------------------------------------------------------------------+
+| IOSTAR     | http://www.retinacheck.org/datasets                                  |
++------------+----------------------------------------------------------------------+
+| REFUGE     | https://refuge.grand-challenge.org/Details/                          |
++------------+----------------------------------------------------------------------+
+
+Setting up dataset paths
+========================
+
+For each dataset that you are planning to use, set the datadir to
+the path where it is stored. E.g.:
+
+.. code-block:: bash
+
+    bob config set bob.db.drive.datadir "/path/to/drivedataset/"
+
+To check your current setup
+
+.. code-block:: bash
+
+    bob config show
+
+This should result in an output similar to the following:
+
+.. code-block:: bash
+
+    {
+        "bob.db.chasedb1.datadir": "/idiap/resource/database/CHASE-DB11/",
+        "bob.db.drionsdb.datadir": "/idiap/resource/database/DRIONS",
+        "bob.db.drishtigs1.datadir": "/idiap/resource/database/Drishti-GS1/",
+        "bob.db.drive.datadir": "/idiap/resource/database/DRIVE",
+        "bob.db.hrf.datadir": "/idiap/resource/database/HRF",
+        "bob.db.iostar.datadir": "/idiap/resource/database/IOSTAR/IOSTAR Vessel Segmentation Dataset/",
+        "bob.db.refuge.datadir": "/idiap/resource/database/REFUGE",
+        "bob.db.rimoner3.datadir": "/idiap/resource/database/RIM-ONE/RIM-ONE r3",
+        "bob.db.stare.datadir": "/idiap/resource/database/STARE"
+    }
+
+
+
+Testing dataset consistency
+===========================
+
+To check whether the downloaded version is consistent with
+the structure that is expected by our ``bob.db`` packages
+run ``bob_dbmanage.py datasettocheck checkfiles``
+E.g.:
+
+.. code-block:: sh
+
+    conda activate your-conda-env-with-bob.ip.binseg
+    bob_dbmanage.py drive checkfiles
+    > checkfiles completed successfully
+
+.. include:: links.rst
diff --git a/doc/training.rst b/doc/training.rst
new file mode 100644
index 0000000000000000000000000000000000000000..ce9fbc2136a493d807dcb92e4a710ab5fcd14f5b
--- /dev/null
+++ b/doc/training.rst
@@ -0,0 +1,322 @@
+.. -*- coding: utf-8 -*-
+.. _bob.ip.binseg.training:
+
+
+========
+Training
+========
+
+To replicate our results use ``bob binseg train`` followed by the model config
+and the dataset config. Use ``bob binseg train --help`` for more information.
+
+.. note::
+
+   We strongly advise training with a GPU (using ``-d cuda``). Depending on the available GPU
+   memory you might have to adjust your batch size (``-b``).
+
+Default Dataset configs
+=======================
+
+1. Vessel:
+
+* CHASEDB1
+* CHASEDB1TEST
+* COVD-DRIVE
+* COVD-DRIVE_SSL
+* COVD-STARE
+* COVD-STARE_SSL
+* COVD-IOSTARVESSEL
+* COVD-IOSTARVESSEL_SSL
+* COVD-HRF
+* COVD-HRF_SSL
+* COVD-CHASEDB1
+* COVD-CHASEDB1_SSL
+* DRIVE
+* DRIVETEST
+* HRF
+* HRFTEST
+* IOSTARVESSEL
+* IOSTARVESSELTEST
+* STARE
+* STARETEST
+
+2. Optic Disc and Cup
+
+* DRIONSDB
+* DRIONSDBTEST
+* DRISHTIGS1OD
+* DRISHTIGS1ODTEST
+* DRISHTIGS1CUP
+* DRISHTIGS1CUPTEST
+* IOSTAROD
+* IOSTARODTEST
+* REFUGECUP
+* REFUGECUPTEST
+* REFUGEOD
+* REFUGEODTEST
+* RIMONER3CUP
+* RIMONER3CUPTEST
+* RIMONER3OD
+* RIMONER3ODTEST
+
+Default Model configs
+=====================
+
+* DRIU
+* DRIUSSL
+* DRIUOD
+* HED
+* M2UNet
+* M2UNetSSL
+* UNet
+
+
+Baseline Benchmarks
+===================
+
+.. code-block:: bash
+
+    #!/bin/bash
+    # set output directory
+    outputroot=`pwd`"/output"
+    mkdir -p $outputroot
+
+    #### Global config ####
+    m2u=M2UNet
+    hed=HED
+    driu=DRIU
+    unet=UNet
+    m2ussl=M2UNetSSL
+    driussl=DRIUSSL
+
+    #### CHASE_DB 1 ####
+    dataset=CHASEDB1
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # batch sizes
+    b_m2u=6
+    b_hed=4
+    b_driu=4
+    b_unet=2
+    # Train
+    bob binseg train $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+    bob binseg train $hed $dataset -b $b_hed -d cuda -o $output"/"$hed -vv
+    bob binseg train $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg train $unet $dataset -b $b_unet -d cuda -o $output"/"$unet -vv
+
+    #### DRIVE ####
+    dataset=DRIVE
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    b_m2u=16
+    b_hed=8
+    b_driu=8
+    b_unet=4
+    # Train
+    bob binseg train $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+    bob binseg train $hed $dataset -b $b_hed -d cuda -o $output"/"$hed -vv
+    bob binseg train $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg train $unet $dataset -b $b_unet -d cuda -o $output"/"$unet -vv
+
+    #### HRF ####
+    dataset=HRF
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    b_m2u=1
+    b_hed=1
+    b_driu=1
+    b_unet=1
+    # Train
+    bob binseg train $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+    bob binseg train $hed $dataset -b $b_hed -d cuda -o $output"/"$hed -vv
+    bob binseg train $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg train $unet $dataset -b $b_unet -d cuda -o $output"/"$unet -vv
+
+    #### IOSTAR VESSEL ####
+    dataset=IOSTARVESSEL
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    b_m2u=6
+    b_hed=4
+    b_driu=4
+    b_unet=2
+    # Train
+    bob binseg train $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+    bob binseg train $hed $dataset -b $b_hed -d cuda -o $output"/"$hed -vv
+    bob binseg train $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg train $unet $dataset -b $b_unet -d cuda -o $output"/"$unet -vv
+
+    #### STARE ####
+    dataset=STARE
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    b_m2u=6
+    b_hed=4
+    b_driu=5
+    b_unet=2
+    # Train
+    bob binseg train $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+    bob binseg train $hed $dataset -b $b_hed -d cuda -o $output"/"$hed -vv
+    bob binseg train $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg train $unet $dataset -b $b_unet -d cuda -o $output"/"$unet -vv
+
+
+Combined Vessel Dataset (COVD) and Semi-Supervised Learning (SSL)
+=================================================================
+
+COVD-:
+
+.. code-block:: bash
+
+    ### COVD-DRIVE ####
+    dataset=COVD-DRIVE
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    driu=DRIU
+    m2u=M2UNet
+    b_driu=4
+    b_m2u=8
+    # Train
+    bob binseg train $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg train $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+
+    ### COVD-STARE ####
+    dataset=COVD-STARE
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    driu=DRIU
+    m2u=M2UNet
+    b_driu=4
+    b_m2u=4
+    # Train
+    bob binseg train $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg train $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+
+    ### COVD-IOSTAR ####
+    dataset=COVD-IOSTARVESSEL
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    driu=DRIU
+    m2u=M2UNet
+    b_driu=4
+    b_m2u=4
+    # Train
+    bob binseg train $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg train $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+
+    ### COVD-CHASEDB1 ####
+    dataset=COVD-CHASEDB1
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    driu=DRIU
+    m2u=M2UNet
+    b_driu=4
+    b_m2u=4
+    # Train
+    bob binseg train $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg train $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+
+    ### COVD-HRF ####
+    dataset=COVD-HRF
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    driu=DRIU
+    m2u=M2UNet
+    b_driu=2
+    b_m2u=4
+    # Train
+    bob binseg train $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg train $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+
+
+COVD-SSL:
+
+.. code-block:: bash
+
+    ### COVD-DRIVE_SSL ####
+    dataset=COVD-DRIVE_SSL
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    driu=DRIUSSL
+    m2u=M2UNetSSL
+    b_driu=4
+    b_m2u=4
+    # Train
+    bob binseg ssltrain $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg ssltrain $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+
+    ### COVD-STARE_SSL ####
+    dataset=COVD-STARE_SSL
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    driu=DRIUSSL
+    m2u=M2UNetSSL
+    b_driu=4
+    b_m2u=4
+    # Train
+    bob binseg ssltrain $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg ssltrain $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+
+    ### COVD-IOSTAR_SSL ####
+    dataset=COVD-IOSTARVESSEL_SSL
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    driu=DRIUSSL
+    m2u=M2UNetSSL
+    b_driu=1
+    b_m2u=2
+    # Train
+    bob binseg ssltrain $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg ssltrain $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+
+    ### COVD-CHASEDB1_SSL ####
+    dataset=COVD-CHASEDB1_SSL
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    driu=DRIUSSL
+    m2u=M2UNetSSL
+    b_driu=1
+    b_m2u=2
+    # Train
+    bob binseg ssltrain $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg ssltrain $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+
+
+    ### COVD-HRF_SSL ####
+    dataset=COVD-HRF_SSL
+    output=$outputroot"/"$dataset
+    mkdir -p $output
+    # model configs
+    driu=DRIUSSL
+    m2u=M2UNetSSL
+    b_driu=1
+    b_m2u=2
+    # Train
+    bob binseg ssltrain $driu $dataset -b $b_driu -d cuda -o $output"/"$driu -vv
+    bob binseg ssltrain $m2u $dataset -b $b_m2u -d cuda -o $output"/"$m2u -vv
+
+Using your own configs
+======================
+
+Instead of the default configs you can pass the full path of your
+customized dataset and model config.
+
+.. code-block:: bash
+
+    bob binseg train /path/to/model/config.py /path/to/dataset/config.py
+
+
+
diff --git a/setup.py b/setup.py
index a7b02f034066be20e5c0928f085fe0f382ed96ed..861a18ef2a034b6acc8c23b69eafd0f10ccb49a8 100644
--- a/setup.py
+++ b/setup.py
@@ -55,18 +55,30 @@ setup(
          'visualize = bob.ip.binseg.script.binseg:visualize',
        ],
 
-         #bob hed train configurations
+         #bob train configurations
        'bob.ip.binseg.config': [
          'DRIU = bob.ip.binseg.configs.models.driu',
+          'DRIUSSL = bob.ip.binseg.configs.models.driussl',
          'DRIUOD = bob.ip.binseg.configs.models.driuod',
          'HED = bob.ip.binseg.configs.models.hed',
          'M2UNet = bob.ip.binseg.configs.models.m2unet',
+          'M2UNetSSL = bob.ip.binseg.configs.models.m2unetssl',
          'UNet = bob.ip.binseg.configs.models.unet',
          'ResUNet = bob.ip.binseg.configs.models.resunet',
          'CHASEDB1 = bob.ip.binseg.configs.datasets.chasedb1',
          'CHASEDB11024 = bob.ip.binseg.configs.datasets.chasedb11024',
          'CHASEDB11168 = bob.ip.binseg.configs.datasets.chasedb11168',
          'CHASEDB1TEST = bob.ip.binseg.configs.datasets.chasedb1test',
+          'COVD-DRIVE = bob.ip.binseg.configs.datasets.starechasedb1iostarhrf544',
+          'COVD-DRIVE_SSL = bob.ip.binseg.configs.datasets.starechasedb1iostarhrf544ssldrive',
+          'COVD-STARE = bob.ip.binseg.configs.datasets.drivechasedb1iostarhrf608',
+          'COVD-STARE_SSL = bob.ip.binseg.configs.datasets.drivechasedb1iostarhrf608sslstare',
+          'COVD-IOSTARVESSEL = bob.ip.binseg.configs.datasets.drivestarechasedb1hrf1024',
+          'COVD-IOSTARVESSEL_SSL = bob.ip.binseg.configs.datasets.drivestarechasedb1hrf1024ssliostar',
+          'COVD-HRF = bob.ip.binseg.configs.datasets.drivestarechasedb1iostar1168',
+          'COVD-HRF_SSL = bob.ip.binseg.configs.datasets.drivestarechasedb1iostar1168sslhrf',
+          'COVD-CHASEDB1 = bob.ip.binseg.configs.datasets.drivestareiostarhrf960',
+          'COVD-CHASEDB1_SSL = bob.ip.binseg.configs.datasets.drivestareiostarhrf960sslchase',
          'DRIONSDB = bob.ip.binseg.configs.datasets.drionsdb',
          'DRIONSDBTEST = bob.ip.binseg.configs.datasets.drionsdbtest',
          'DRISHTIGS1OD = bob.ip.binseg.configs.datasets.dristhigs1od',
@@ -78,6 +90,6 @@ setup(
          'DRIVE1024 = bob.ip.binseg.configs.datasets.drive1024',
          'DRIVE1168 = bob.ip.binseg.configs.datasets.drive1168',
          'DRIVETEST = bob.ip.binseg.configs.datasets.drivetest',
          'DRIVESTARECHASEDB1HRF1024 = bob.ip.binseg.configs.datasets.drivestarechasedb1hrf1024',
          'DRIVESTARECHASEDB1IOSTAR1168 = bob.ip.binseg.configs.datasets.drivestarechasedb1iostar1168',
          'DRIVESTAREIOSTARHRF960 = bob.ip.binseg.configs.datasets.drivestareiostarhrf960',
@@ -85,6 +97,7 @@ setup(
          'HRF960 = bob.ip.binseg.configs.datasets.hrf960',
          'HRF1024 = bob.ip.binseg.configs.datasets.hrf1024',
          'HRF1168 = bob.ip.binseg.configs.datasets.hrf1168',
+          'HRF = bob.ip.binseg.configs.datasets.hrf1168',
          'HRFTEST = bob.ip.binseg.configs.datasets.hrftest',
          'IOSTAROD = bob.ip.binseg.configs.datasets.iostarod',
          'IOSTARODTEST = bob.ip.binseg.configs.datasets.iostarodtest',