diff --git a/.gitignore b/.gitignore
index 69dac1eb1eab1fd596c3a6674f8d2393850669bb..fcc2be3d0eb5cbef10d057c51da065a6f947ddfd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,6 +22,7 @@ record.txt
 core
 output_temp
 output
+*.DS_Store
 
 
 ### JupyterNotebook ###
diff --git a/README.rst b/README.rst
index 2121792a0ecaeae8bc1e59296dc3287edfc9dc77..a413a5c2350cc07affec4b5dd3ef9473b2dd5110 100644
--- a/README.rst
+++ b/README.rst
@@ -3,7 +3,7 @@
 .. image:: https://img.shields.io/badge/docs-stable-yellow.svg
    :target: https://www.idiap.ch/software/bob/docs/bob/bob.ip.binseg/stable/index.html
 .. image:: https://img.shields.io/badge/docs-latest-orange.svg
-   :target: https://www.idiap.ch/software/bob/docs/bob/bob.ip.binseg/master/index.html
+   :target: http://beatubulatest.lab.idiap.ch/private/docs/bob/bob.ip.binseg/master/index.html
 .. image:: https://gitlab.idiap.ch/bob/bob.ip.binseg/badges/master/build.svg
    :target: https://gitlab.idiap.ch/bob/bob.ip.binseg/commits/master
 .. image:: https://gitlab.idiap.ch/bob/bob.ip.binseg/badges/master/coverage.svg
diff --git a/bob/ip/binseg/configs/datasets/allvessel544.py b/bob/ip/binseg/configs/datasets/allvessel544.py
new file mode 100644
index 0000000000000000000000000000000000000000..9044cebccd11a8ffd1873fa58d19123d7baefe43
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/allvessel544.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from bob.db.drive import Database as DRIVE
+from bob.db.stare import Database as STARE
+from bob.db.chasedb1 import Database as CHASEDB1
+from bob.db.iostar import Database as IOSTAR
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+# Target size: 544x544 (DRIVE)
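+# Each source below is resized, cropped and/or padded so that all images end up at (roughly) 544x544 before being merged into a single training set.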
+
+defaulttransforms = [RandomHFlip()
+                    ,RandomVFlip()
+                    ,RandomRotation()
+                    ,ColorJitter()
+                    ,ToTensor()]
+
+
+# DRIVE
+transforms_drive = Compose([  
+                        CenterCrop((544,544))
+                        ,*defaulttransforms
+                    ])
+
+# bob.db.dataset init
+bobdb_drive = DRIVE(protocol = 'default')
+
+# PyTorch dataset
+torch_drive = BinSegDataset(bobdb_drive, split='train', transform=transforms_drive)
+
+
+# CHASE_DB1 
+transforms_chase = Compose([      
+                        Resize(544)
+                        ,Crop(0,12,544,544)
+                        ,*defaulttransforms
+                    ])
+
+# bob.db.dataset init
+bobdb_chase = CHASEDB1(protocol = 'default')
+
+# PyTorch dataset
+torch_chase = BinSegDataset(bobdb_chase, split='train', transform=transforms_chase)
+
+
+# IOSTAR VESSEL
+transforms_iostar = Compose([  
+                        Resize(544)
+                        ,*defaulttransforms
+                    ])
+
+# bob.db.dataset init
+bobdb_iostar = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+torch_iostar = BinSegDataset(bobdb_iostar, split='train', transform=transforms_iostar)
+
+# STARE
+transforms_stare = Compose([
+                        Resize(471)
+                        ,Pad((0,37,0,36))
+                        ,*defaulttransforms
+                    ])
+
+# bob.db.dataset init
+bobdb_stare = STARE(protocol = 'default')
+
+# PyTorch dataset
+torch_stare = BinSegDataset(bobdb_stare, split='train', transform=transforms_stare)
+
+
+# HRF
+transforms_hrf = Compose([  
+                        Resize(363)
+                        ,Pad((0,90,0,91))
+                        ,*defaulttransforms
+                    ])
+
+# bob.db.dataset init
+bobdb_hrf = HRF(protocol = 'default')
+
+# PyTorch dataset
+torch_hrf = BinSegDataset(bobdb_hrf, split='train', transform=transforms_hrf)
+
+
+
+# Merge
+dataset = torch.utils.data.ConcatDataset([torch_drive, torch_stare, torch_chase, torch_iostar, torch_hrf])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/allvessel544test.py b/bob/ip/binseg/configs/datasets/allvessel544test.py
new file mode 100644
index 0000000000000000000000000000000000000000..55994e9e045311db031290d63c45f6326eb4436a
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/allvessel544test.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from bob.db.drive import Database as DRIVE
+from bob.db.stare import Database as STARE
+from bob.db.chasedb1 import Database as CHASEDB1
+from bob.db.iostar import Database as IOSTAR
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+import torch
+
+# Target size: 544x544 (DRIVE)
+
+defaulttransforms = [ToTensor()]
+
+
+# DRIVE
+transforms_drive = Compose([  
+                        CenterCrop((544,544))
+                        ,*defaulttransforms
+                    ])
+
+# bob.db.dataset init
+bobdb_drive = DRIVE(protocol = 'default')
+
+# PyTorch dataset
+torch_drive = BinSegDataset(bobdb_drive, split='test', transform=transforms_drive)
+
+
+# CHASE_DB1 
+transforms_chase = Compose([      
+                        Resize(544)
+                        ,Crop(0,12,544,544)
+                        ,*defaulttransforms
+                    ])
+
+# bob.db.dataset init
+bobdb_chase = CHASEDB1(protocol = 'default')
+
+# PyTorch dataset
+torch_chase = BinSegDataset(bobdb_chase, split='test', transform=transforms_chase)
+
+
+# IOSTAR VESSEL
+transforms_iostar = Compose([  
+                        Resize(544)
+                        ,*defaulttransforms
+                    ])
+
+# bob.db.dataset init
+bobdb_iostar = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+torch_iostar = BinSegDataset(bobdb_iostar, split='test', transform=transforms_iostar)
+
+# STARE
+transforms_stare = Compose([
+                        Resize(471)
+                        ,Pad((0,37,0,36))
+                        ,*defaulttransforms
+                    ])
+
+# bob.db.dataset init
+bobdb_stare = STARE(protocol = 'default')
+
+# PyTorch dataset
+torch_stare = BinSegDataset(bobdb_stare, split='test', transform=transforms_stare)
+
+
+# HRF
+transforms_hrf = Compose([  
+                        Resize(363)
+                        ,Pad((0,90,0,91))
+                        ,*defaulttransforms
+                    ])
+
+# bob.db.dataset init
+bobdb_hrf = HRF(protocol = 'default')
+
+# PyTorch dataset
+torch_hrf = BinSegDataset(bobdb_hrf, split='test', transform=transforms_hrf)
+
+
+
+# Merge
+dataset = torch.utils.data.ConcatDataset([torch_drive, torch_stare, torch_chase, torch_iostar, torch_hrf])
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/chasedb1.py b/bob/ip/binseg/configs/datasets/chasedb1.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fa0dc096aa55a8ab19665af45c9f2e1f8368a0c
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/chasedb1.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.chasedb1 import Database as CHASEDB1
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Crop(0,18,960,960)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = CHASEDB1(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/chasedb1544test.py b/bob/ip/binseg/configs/datasets/chasedb1544test.py
new file mode 100644
index 0000000000000000000000000000000000000000..aa5d00a55e68bf8d025ba338cbeb59289ce3c70a
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/chasedb1544test.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.chasedb1 import Database as CHASEDB1
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([      
+                        Resize(544)
+                        ,Crop(0,12,544,544)
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = CHASEDB1(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/chasedb1test.py b/bob/ip/binseg/configs/datasets/chasedb1test.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b267b0f022030d512197f89401f74e3373df3cb
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/chasedb1test.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.chasedb1 import Database as CHASEDB1
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Crop(0,18,960,960)
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = CHASEDB1(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drionsdb.py b/bob/ip/binseg/configs/datasets/drionsdb.py
new file mode 100644
index 0000000000000000000000000000000000000000..cd33c1d246755f11c217d7cbf7fcdab35b2144e6
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/drionsdb.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.drionsdb import Database as DRIONS
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((4,8,4,8))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = DRIONS(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivecroptest.py b/bob/ip/binseg/configs/datasets/drionsdbtest.py
similarity index 74%
rename from bob/ip/binseg/configs/datasets/drivecroptest.py
rename to bob/ip/binseg/configs/datasets/drionsdbtest.py
index 230598dce92a39276e05dd4b4f842643428546b4..b65100a6c21eaa0a48f2ff2c33e05e7a17c8825c 100644
--- a/bob/ip/binseg/configs/datasets/drivecroptest.py
+++ b/bob/ip/binseg/configs/datasets/drionsdbtest.py
@@ -1,19 +1,19 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from bob.db.drive import Database as DRIVE
+from bob.db.drionsdb import Database as DRIONS
 from bob.ip.binseg.data.transforms import *
 from bob.ip.binseg.data.binsegdataset import BinSegDataset
 
 #### Config ####
 
 transforms = Compose([  
-                        CenterCrop((544,544))
+                        Pad((4,8,4,8))
                         ,ToTensor()
                     ])
 
 # bob.db.dataset init
-bobdb = DRIVE(protocol = 'default')
+bobdb = DRIONS(protocol = 'default')
 
 # PyTorch dataset
 dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1cup.py b/bob/ip/binseg/configs/datasets/dristhigs1cup.py
new file mode 100644
index 0000000000000000000000000000000000000000..f7a69dadcb4010198025e7d4d44fac8aa1b06918
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/dristhigs1cup.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.drishtigs1 import Database as DRISHTI
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        CenterCrop((1760,2048))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = DRISHTI(protocol = 'default_cup')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1cuptest.py b/bob/ip/binseg/configs/datasets/dristhigs1cuptest.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c2b634e1ba402ed8af801ad7203d6585ecb6b96
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/dristhigs1cuptest.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+from bob.db.drishtigs1 import Database as DRISHTI
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        CenterCrop((1760,2048))
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = DRISHTI(protocol = 'default_cup')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1od.py b/bob/ip/binseg/configs/datasets/dristhigs1od.py
new file mode 100644
index 0000000000000000000000000000000000000000..0bd483c1a03b00e15bd90a32b8ec369b766605ae
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/dristhigs1od.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.drishtigs1 import Database as DRISHTI
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        CenterCrop((1760,2048))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = DRISHTI(protocol = 'default_od')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/dristhigs1odtest.py b/bob/ip/binseg/configs/datasets/dristhigs1odtest.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab1edd6546dac52dd235369b0126e973e2b8611a
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/dristhigs1odtest.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.drishtigs1 import Database as DRISHTI
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        CenterCrop((1760,2048))
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = DRISHTI(protocol = 'default_od')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivecroptrain.py b/bob/ip/binseg/configs/datasets/drive.py
similarity index 100%
rename from bob/ip/binseg/configs/datasets/drivecroptrain.py
rename to bob/ip/binseg/configs/datasets/drive.py
diff --git a/bob/ip/binseg/configs/datasets/hrf.py b/bob/ip/binseg/configs/datasets/hrf.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb008f7da1736ef66085ddfb4de0335695c28779
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/hrf.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Crop(0,108,2336,3296)
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = HRF(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/hrf544test.py b/bob/ip/binseg/configs/datasets/hrf544test.py
new file mode 100644
index 0000000000000000000000000000000000000000..86da428b8f6bd0220d0b311b53ddcba098177a70
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/hrf544test.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Resize(363)
+                        ,Pad((0,90,0,91))
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = HRF(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/hrftest.py b/bob/ip/binseg/configs/datasets/hrftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..45f952728f8e5ff4e335c8ea6a6d79762c166b8e
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/hrftest.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.hrf import Database as HRF
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Crop(0,108,2336,3296)
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = HRF(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/drivetrain.py b/bob/ip/binseg/configs/datasets/iostarod.py
similarity index 82%
rename from bob/ip/binseg/configs/datasets/drivetrain.py
rename to bob/ip/binseg/configs/datasets/iostarod.py
index 6662e1fcfbcaff3e1782b0bded13fe7e990be0cd..334df2a4ba402f879a436ce0ee3bcc07ca4ff49f 100644
--- a/bob/ip/binseg/configs/datasets/drivetrain.py
+++ b/bob/ip/binseg/configs/datasets/iostarod.py
@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from bob.db.drive import Database as DRIVE
+from bob.db.iostar import Database as IOSTAR
 from bob.ip.binseg.data.transforms import *
 from bob.ip.binseg.data.binsegdataset import BinSegDataset
 
 #### Config ####
 
-transforms = Compose([
+transforms = Compose([  
                         RandomHFlip()
                         ,RandomVFlip()
                         ,RandomRotation()
@@ -16,7 +16,7 @@ transforms = Compose([
                     ])
 
 # bob.db.dataset init
-bobdb = DRIVE(protocol = 'default')
+bobdb = IOSTAR(protocol='default_od')
 
 # PyTorch dataset
 dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/iostarodtest.py b/bob/ip/binseg/configs/datasets/iostarodtest.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba06450781bc03c765504e98fa715a4b15b1e774
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/iostarodtest.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = IOSTAR(protocol='default_od')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/iostarvessel.py b/bob/ip/binseg/configs/datasets/iostarvessel.py
new file mode 100644
index 0000000000000000000000000000000000000000..ded01bb45820f9402fce9a5c6dc15c14908220eb
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/iostarvessel.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/iostarvessel544test.py b/bob/ip/binseg/configs/datasets/iostarvessel544test.py
new file mode 100644
index 0000000000000000000000000000000000000000..e3ccd854079e57c642669aa33f403d4ba28d4700
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/iostarvessel544test.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Resize(544)
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/iostarvesseltest.py b/bob/ip/binseg/configs/datasets/iostarvesseltest.py
new file mode 100644
index 0000000000000000000000000000000000000000..d8fe13718be5c4517c69683696965cbbe5a9abdf
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/iostarvesseltest.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.iostar import Database as IOSTAR
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = IOSTAR(protocol='default_vessel')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/refugecup.py b/bob/ip/binseg/configs/datasets/refugecup.py
new file mode 100644
index 0000000000000000000000000000000000000000..9efac5293295bc0f0eb2a03a78ebacdb4e1615c2
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/refugecup.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.refuge import Database as REFUGE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Resize(1539)
+                        ,Pad((21,46,22,47))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = REFUGE(protocol = 'default_cup')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/refugecuptest.py b/bob/ip/binseg/configs/datasets/refugecuptest.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ff916e30cc60e24e505ccb6c0b3455e97d7a9a9
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/refugecuptest.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.refuge import Database as REFUGE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        CenterCrop(1632)
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = REFUGE(protocol = 'default_cup')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/refugeod.py b/bob/ip/binseg/configs/datasets/refugeod.py
new file mode 100644
index 0000000000000000000000000000000000000000..5faaf05a9edcd7fcb4c3353dd6f9a17478233038
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/refugeod.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.refuge import Database as REFUGE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Resize(1539)
+                        ,Pad((21,46,22,47))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = REFUGE(protocol = 'default_od')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/refugeodtest.py b/bob/ip/binseg/configs/datasets/refugeodtest.py
new file mode 100644
index 0000000000000000000000000000000000000000..30085a2f5450eefb12300b27677a15bf27baa8d8
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/refugeodtest.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.refuge import Database as REFUGE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        CenterCrop(1632)
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = REFUGE(protocol = 'default_od')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/rimoner3cup.py b/bob/ip/binseg/configs/datasets/rimoner3cup.py
new file mode 100644
index 0000000000000000000000000000000000000000..47b62ba0c521d4f4209fb6026c7aae184228fdb2
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/rimoner3cup.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.rimoner3 import Database as RIMONER3
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((8,8,8,8))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = RIMONER3(protocol = 'default_cup')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/rimoner3cuptest.py b/bob/ip/binseg/configs/datasets/rimoner3cuptest.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f227be81289e12f4cfc8e63b1a76cbc1251c614
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/rimoner3cuptest.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.rimoner3 import Database as RIMONER3
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((8,8,8,8))
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = RIMONER3(protocol = 'default_cup')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/rimoner3od.py b/bob/ip/binseg/configs/datasets/rimoner3od.py
new file mode 100644
index 0000000000000000000000000000000000000000..4905bec3cb663faed12cd85edc099f7987834657
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/rimoner3od.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.rimoner3 import Database as RIMONER3
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((8,8,8,8))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = RIMONER3(protocol = 'default_od')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/rimoner3odtest.py b/bob/ip/binseg/configs/datasets/rimoner3odtest.py
new file mode 100644
index 0000000000000000000000000000000000000000..390f20d795323082c3aca3f6f0a2c81a5b144ba1
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/rimoner3odtest.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.rimoner3 import Database as RIMONER3
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((8,8,8,8))
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = RIMONER3(protocol = 'default_od')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/stare.py b/bob/ip/binseg/configs/datasets/stare.py
new file mode 100644
index 0000000000000000000000000000000000000000..f2c784a9ab7a75c82c11ffc5dbb32d6390fb93be
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/stare.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.stare import Database as STARE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((2,1,2,2))
+                        ,RandomHFlip()
+                        ,RandomVFlip()
+                        ,RandomRotation()
+                        ,ColorJitter()
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = STARE(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='train', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/stare544test.py b/bob/ip/binseg/configs/datasets/stare544test.py
new file mode 100644
index 0000000000000000000000000000000000000000..09b268737d08be21890ddde620769ec4e37bc874
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/stare544test.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.stare import Database as STARE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Resize(471)
+                        ,Pad((0,37,0,36))
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = STARE(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/datasets/staretest.py b/bob/ip/binseg/configs/datasets/staretest.py
new file mode 100644
index 0000000000000000000000000000000000000000..aab80b9bea6339bff87c5cc12d7375ce6216bc60
--- /dev/null
+++ b/bob/ip/binseg/configs/datasets/staretest.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from bob.db.stare import Database as STARE
+from bob.ip.binseg.data.transforms import *
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+
+#### Config ####
+
+transforms = Compose([  
+                        Pad((2,1,2,2))
+                        ,ToTensor()
+                    ])
+
+# bob.db.dataset init
+bobdb = STARE(protocol = 'default')
+
+# PyTorch dataset
+dataset = BinSegDataset(bobdb, split='test', transform=transforms)
\ No newline at end of file
diff --git a/bob/ip/binseg/configs/models/driu.py b/bob/ip/binseg/configs/models/driu.py
index deb789d00dd7a4c44124713ac6f8dc59f3b5bbba..1bc10ef053de55522058198ea36267c4b8c60ec5 100644
--- a/bob/ip/binseg/configs/models/driu.py
+++ b/bob/ip/binseg/configs/models/driu.py
@@ -6,7 +6,7 @@ from bob.ip.binseg.modeling.driu import build_driu
 import torch.optim as optim
 from torch.nn import BCEWithLogitsLoss
 from bob.ip.binseg.utils.model_zoo import modelurls
-from bob.ip.binseg.modeling.losses import WeightedBCELogitsLoss
+from bob.ip.binseg.modeling.losses import SoftJaccardBCELogitsLoss
 from bob.ip.binseg.engine.adabound import AdaBound
 
 ##### Config #####
@@ -19,7 +19,7 @@ gamma = 1e-3
 eps = 1e-8
 amsbound = False
 
-scheduler_milestones = [200]
+scheduler_milestones = [800]
 scheduler_gamma = 0.1
 
 # model
@@ -31,9 +31,8 @@ pretrained_backbone = modelurls['vgg16']
 # optimizer
 optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, gamma=gamma,
                  eps=eps, weight_decay=weight_decay, amsbound=amsbound) 
-    
 # criterion
-criterion = WeightedBCELogitsLoss(reduction='mean')
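+# combined BCE and soft Jaccard objective; alpha presumably weights the BCE term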
+criterion = SoftJaccardBCELogitsLoss(alpha=0.7)
 
 # scheduler
 scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma)
diff --git a/bob/ip/binseg/configs/models/driuadam.py b/bob/ip/binseg/configs/models/driuadam.py
deleted file mode 100644
index c801206b9c4538fa4ce08d75d7451ed4a73556db..0000000000000000000000000000000000000000
--- a/bob/ip/binseg/configs/models/driuadam.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-from torch.optim.lr_scheduler import MultiStepLR
-from bob.ip.binseg.modeling.driu import build_driu
-import torch.optim as optim
-from bob.ip.binseg.modeling.losses import WeightedBCELogitsLoss
-from bob.ip.binseg.utils.model_zoo import modelurls
-
-##### Config #####
-lr = 0.001
-betas = (0.9, 0.999)
-eps = 1e-08
-weight_decay = 0
-amsgrad = False
-
-scheduler_milestones = [150]
-scheduler_gamma = 0.1
-
-# model
-model = build_driu()
-
-# pretrained backbone
-pretrained_backbone = modelurls['vgg16']
-
-# optimizer
-optimizer = optim.Adam(model.parameters(), lr=lr, betas=betas, eps=eps, weight_decay=weight_decay, amsgrad=amsgrad)
-    
-# criterion
-criterion = WeightedBCELogitsLoss(reduction='mean')
-
-# scheduler
-scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma)
diff --git a/bob/ip/binseg/configs/models/unetj01.py b/bob/ip/binseg/configs/models/driuod.py
similarity index 84%
rename from bob/ip/binseg/configs/models/unetj01.py
rename to bob/ip/binseg/configs/models/driuod.py
index da1096cd34bf8f3319d90f3bf8992b4235ae9919..199a249a59f25c13d6f987c3f784822d905887f7 100644
--- a/bob/ip/binseg/configs/models/unetj01.py
+++ b/bob/ip/binseg/configs/models/driuod.py
@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-
 
 from torch.optim.lr_scheduler import MultiStepLR
-from bob.ip.binseg.modeling.unet import build_unet
+from bob.ip.binseg.modeling.driuod import build_driuod
 import torch.optim as optim
 from torch.nn import BCEWithLogitsLoss
 from bob.ip.binseg.utils.model_zoo import modelurls
@@ -19,11 +19,11 @@ gamma = 1e-3
 eps = 1e-8
 amsbound = False
 
-scheduler_milestones = [200]
+scheduler_milestones = [800]
 scheduler_gamma = 0.1
 
 # model
-model = build_unet()
+model = build_driuod()
 
 # pretrained backbone
 pretrained_backbone = modelurls['vgg16']
@@ -31,9 +31,8 @@ pretrained_backbone = modelurls['vgg16']
 # optimizer
 optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, gamma=gamma,
                  eps=eps, weight_decay=weight_decay, amsbound=amsbound) 
-    
 # criterion
-criterion = SoftJaccardBCELogitsLoss(alpha=0.1)
+criterion = SoftJaccardBCELogitsLoss(alpha=0.7)
 
 # scheduler
 scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma)
diff --git a/bob/ip/binseg/configs/models/hed.py b/bob/ip/binseg/configs/models/hed.py
index a586e86e44f0048c7ce28df00f882eadcd98a2c9..306ecb80fecb8cbc17623aad22efd48375798397 100644
--- a/bob/ip/binseg/configs/models/hed.py
+++ b/bob/ip/binseg/configs/models/hed.py
@@ -4,7 +4,7 @@
 from torch.optim.lr_scheduler import MultiStepLR
 from bob.ip.binseg.modeling.hed import build_hed
 import torch.optim as optim
-from bob.ip.binseg.modeling.losses import HEDWeightedBCELogitsLoss
+from bob.ip.binseg.modeling.losses import HEDSoftJaccardBCELogitsLoss
 from bob.ip.binseg.utils.model_zoo import modelurls
 from bob.ip.binseg.engine.adabound import AdaBound
 
@@ -19,7 +19,7 @@ gamma = 1e-3
 eps = 1e-8
 amsbound = False
 
-scheduler_milestones = [200]
+scheduler_milestones = [800]
 scheduler_gamma = 0.1
 
 
@@ -33,7 +33,7 @@ pretrained_backbone = modelurls['vgg16']
 optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, gamma=gamma,
                  eps=eps, weight_decay=weight_decay, amsbound=amsbound) 
 # criterion
-criterion = HEDWeightedBCELogitsLoss(reduction='mean')
+criterion = HEDSoftJaccardBCELogitsLoss(alpha=0.7)
 
 # scheduler
 scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma)
diff --git a/bob/ip/binseg/configs/models/m2unet.py b/bob/ip/binseg/configs/models/m2unet.py
index e97ae62dba55805bae1d2130075d754098b9cb9d..cb422dca122bda790376589f2989acebca76dd6f 100644
--- a/bob/ip/binseg/configs/models/m2unet.py
+++ b/bob/ip/binseg/configs/models/m2unet.py
@@ -6,7 +6,7 @@ from bob.ip.binseg.modeling.m2u import build_m2unet
 import torch.optim as optim
 from torch.nn import BCEWithLogitsLoss
 from bob.ip.binseg.utils.model_zoo import modelurls
-from bob.ip.binseg.modeling.losses import WeightedBCELogitsLoss
+from bob.ip.binseg.modeling.losses import SoftJaccardBCELogitsLoss
 from bob.ip.binseg.engine.adabound import AdaBound
 
 ##### Config #####
@@ -19,7 +19,7 @@ gamma = 1e-3
 eps = 1e-8
 amsbound = False
 
-scheduler_milestones = [200]
+scheduler_milestones = [800]
 scheduler_gamma = 0.1
 
 # model
@@ -33,7 +33,7 @@ optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr,
                  eps=eps, weight_decay=weight_decay, amsbound=amsbound) 
     
 # criterion
-criterion = WeightedBCELogitsLoss(reduction='mean')
+criterion = SoftJaccardBCELogitsLoss(alpha=0.7)
 
 # scheduler
 scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma)
diff --git a/bob/ip/binseg/configs/models/resunet.py b/bob/ip/binseg/configs/models/resunet.py
index 17184497d375a8750b933c1ff5ebbc7dba3ec3bf..1a049f14cc813a465ade9859ed7c17799e258e24 100644
--- a/bob/ip/binseg/configs/models/resunet.py
+++ b/bob/ip/binseg/configs/models/resunet.py
@@ -6,7 +6,7 @@ from bob.ip.binseg.modeling.resunet import build_res50unet
 import torch.optim as optim
 from torch.nn import BCEWithLogitsLoss
 from bob.ip.binseg.utils.model_zoo import modelurls
-from bob.ip.binseg.modeling.losses import WeightedBCELogitsLoss
+from bob.ip.binseg.modeling.losses import SoftJaccardBCELogitsLoss
 from bob.ip.binseg.engine.adabound import AdaBound
 
 ##### Config #####
@@ -19,7 +19,7 @@ gamma = 1e-3
 eps = 1e-8
 amsbound = False
 
-scheduler_milestones = [200]
+scheduler_milestones = [800]
 scheduler_gamma = 0.1
 
 # model
@@ -33,7 +33,7 @@ optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr,
                  eps=eps, weight_decay=weight_decay, amsbound=amsbound) 
     
 # criterion
-criterion = WeightedBCELogitsLoss(reduction='mean')
+criterion = SoftJaccardBCELogitsLoss(alpha=0.7)
 
 # scheduler
 scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma)
diff --git a/bob/ip/binseg/configs/models/unet.py b/bob/ip/binseg/configs/models/unet.py
index f034d94db6420a33a29a54c78293c3e9f8eff7cf..7c21b042dc0b1c4c76a78fa5c7a93a8d5d5b2eee 100644
--- a/bob/ip/binseg/configs/models/unet.py
+++ b/bob/ip/binseg/configs/models/unet.py
@@ -6,7 +6,7 @@ from bob.ip.binseg.modeling.unet import build_unet
 import torch.optim as optim
 from torch.nn import BCEWithLogitsLoss
 from bob.ip.binseg.utils.model_zoo import modelurls
-from bob.ip.binseg.modeling.losses import WeightedBCELogitsLoss
+from bob.ip.binseg.modeling.losses import SoftJaccardBCELogitsLoss
 from bob.ip.binseg.engine.adabound import AdaBound
 
 ##### Config #####
@@ -19,7 +19,7 @@ gamma = 1e-3
 eps = 1e-8
 amsbound = False
 
-scheduler_milestones = [200]
+scheduler_milestones = [800]
 scheduler_gamma = 0.1
 
 # model
@@ -33,7 +33,7 @@ optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr,
                  eps=eps, weight_decay=weight_decay, amsbound=amsbound) 
     
 # criterion
-criterion = WeightedBCELogitsLoss(reduction='mean')
+criterion = SoftJaccardBCELogitsLoss(alpha=0.7)
 
 # scheduler
 scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma)
diff --git a/bob/ip/binseg/data/binsegdataset.py b/bob/ip/binseg/data/binsegdataset.py
index f212c4fe680969aaef27a300d08ca6f6ec35a50c..86fc97df482707a9715b38790741aff65bfe2b52 100644
--- a/bob/ip/binseg/data/binsegdataset.py
+++ b/bob/ip/binseg/data/binsegdataset.py
@@ -7,7 +7,7 @@ class BinSegDataset(Dataset):
     A transform object can be passed that will be applied to the image, ground truth and mask (if present). 
     It supports indexing such that dataset[i] can be used to get ith sample.
     
-    Attributes
+    Parameters
     ---------- 
     bobdb : :py:mod:`bob.db.base`
         Binary segmentation bob database (e.g. bob.db.drive) 
@@ -15,6 +15,8 @@ class BinSegDataset(Dataset):
         ``'train'`` or ``'test'``. Defaults to ``'train'``
     transform : :py:mod:`bob.ip.binseg.data.transforms`, optional
         A transform or composition of transforms. Defaults to ``None``.
+    mask : bool
+        Whether the dataset contains masks or not.
     """
     def __init__(self, bobdb, split = 'train', transform = None):
         self.database = bobdb.samples(split)
diff --git a/bob/ip/binseg/engine/inferencer.py b/bob/ip/binseg/engine/inferencer.py
index 00f60d7c07909fdd18b29b6cd2ee3dce9048a5ed..a1017ed5d192980b431a846c18567a40a4159906 100644
--- a/bob/ip/binseg/engine/inferencer.py
+++ b/bob/ip/binseg/engine/inferencer.py
@@ -134,9 +134,9 @@ def do_inference(
     """
     logger = logging.getLogger("bob.ip.binseg.engine.inference")
     logger.info("Start evaluation")
-    logger.info("Split: {}, Output folder: {}, Device: {}".format(data_loader.dataset.split, output_folder, device))
+    logger.info("Output folder: {}, Device: {}".format(output_folder, device))
     results_subfolder = os.path.join(output_folder,'results') 
-    if not os.path.exists(results_subfolder): os.makedirs(results_subfolder)
+    os.makedirs(results_subfolder,exist_ok=True)
     
     model.eval().to(device)
     # Sigmoid for probabilities 
@@ -206,7 +206,7 @@ def do_inference(
     np_avg_metrics = avg_metrics.to_numpy().T
     fig_name = "precision_recall.pdf"
     logger.info("saving {}".format(fig_name))
-    fig = precision_recall_f1iso([np_avg_metrics[0]],[np_avg_metrics[1]], [model.name,None], title=output_folder)
+    fig = precision_recall_f1iso([np_avg_metrics[0]],[np_avg_metrics[1]], [model.name,None], title=output_folder.split('/')[-2:])
     fig_filename = os.path.join(results_subfolder, fig_name)
     fig.savefig(fig_filename)
     
diff --git a/bob/ip/binseg/modeling/driupix.py b/bob/ip/binseg/modeling/driupix.py
new file mode 100644
index 0000000000000000000000000000000000000000..00e40932afc0c04f52925783fe3595fa22a7d098
--- /dev/null
+++ b/bob/ip/binseg/modeling/driupix.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import torch
+from torch import nn
+from collections import OrderedDict
+from bob.ip.binseg.modeling.backbones.vgg import vgg16
+from bob.ip.binseg.modeling.make_layers import conv_with_kaiming_uniform,convtrans_with_kaiming_uniform, UpsampleCropBlock
+
+class ConcatFuseBlock(nn.Module):
+    """ 
+    Takes in four feature maps with 16 channels each, concatenates them 
+    and applies a 1x1 convolution with 1 output channel. 
+    """
+    def __init__(self):
+        super().__init__()
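+        # 1x1 convolution fusing the 4*16 = 64 concatenated channels into a single output channel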
+        self.conv = conv_with_kaiming_uniform(4*16,1,1,1,0)
+    
+    def forward(self,x1,x2,x3,x4):
+        x_cat = torch.cat([x1,x2,x3,x4],dim=1)
+        x = self.conv(x_cat)
+        return x 
+            
+class DRIUPIX(nn.Module):
+    """
+    DRIUPIX head module. DRIU with PixelShuffle upsampling instead of ConvTranspose2d.
+    
+    Parameters
+    ----------
+    in_channels_list : list
+        number of channels for each feature map returned by the backbone
+    """
+    def __init__(self, in_channels_list=None):
+        super(DRIUPIX, self).__init__()
+        in_conv_1_2_16, in_upsample2, in_upsample_4, in_upsample_8 = in_channels_list
+
+        self.conv1_2_16 = nn.Conv2d(in_conv_1_2_16, 16, 3, 1, 1)
+        # Upsample layers
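+        # (2x, 4x and 8x upsampling via PixelShuffle, each cropped back to the input size)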
+        self.upsample2 = UpsampleCropBlock(in_upsample2, 16, 4, 2, 0, pixelshuffle=True)
+        self.upsample4 = UpsampleCropBlock(in_upsample_4, 16, 8, 4, 0, pixelshuffle=True)
+        self.upsample8 = UpsampleCropBlock(in_upsample_8, 16, 16, 8, 0, pixelshuffle=True)
+        
+        # Concat and Fuse
+        self.concatfuse = ConcatFuseBlock()
+
+    def forward(self,x):
+        """
+        Parameters
+        ----------
+        x : list
+            list of tensors as returned from the backbone network.
+            First element: height and width of input image. 
+            Remaining elements: feature maps for each feature level.
+
+        Returns
+        -------
+        :py:class:`torch.Tensor`
+        """
+        hw = x[0]
+        conv1_2_16 = self.conv1_2_16(x[1])  # conv1_2_16   
+        upsample2 = self.upsample2(x[2], hw) # side-multi2-up
+        upsample4 = self.upsample4(x[3], hw) # side-multi3-up
+        upsample8 = self.upsample8(x[4], hw) # side-multi4-up
+        out = self.concatfuse(conv1_2_16, upsample2, upsample4, upsample8)
+        return out
+
+def build_driupix():
+    """ 
+    Adds the VGG-16 backbone and the DRIUPIX head together
+
+    Returns
+    -------
+    :py:class:`torch.nn.Module`
+    """
+    backbone = vgg16(pretrained=False, return_features = [3, 8, 14, 22])
+    driu_head = DRIUPIX([64, 128, 256, 512])
+
+    model = nn.Sequential(OrderedDict([("backbone", backbone), ("head", driu_head)]))
+    model.name = "DRIUPIX"
+    return model
\ No newline at end of file
diff --git a/bob/ip/binseg/script/binseg.py b/bob/ip/binseg/script/binseg.py
index e56aa2fe9435dadcde25e1661e7b559ed2292dc5..2a90d5ed652dcab7779a97770caf02fec1f39622 100644
--- a/bob/ip/binseg/script/binseg.py
+++ b/bob/ip/binseg/script/binseg.py
@@ -197,12 +197,21 @@ def train(model
     required=True,
     default='cpu',
     cls=ResourceOption)
+@click.option(
+    '--weight',
+    '-w',
+    help='Path or URL to pretrained model',
+    required=False,
+    default=None,
+    cls=ResourceOption
+    )
 @verbosity_option(cls=ResourceOption)
 def test(model
         ,output_path
         ,device
         ,batch_size
         ,dataset
+        ,weight
         , **kwargs):
     """ Run inference and evalaute the model performance """
 
@@ -216,7 +225,7 @@ def test(model
     
     # checkpointer, load last model in dir
     checkpointer = DetectronCheckpointer(model, save_dir = output_path, save_to_disk=False)
-    checkpointer.load()
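+    # load the given weight (path or URL) if provided; otherwise fall back to the last checkpoint in output_path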
+    checkpointer.load(weight)
     do_inference(model, data_loader, device, output_path)
 
 
diff --git a/setup.py b/setup.py
index 2b70354fd849718f12ac18b2c246088c336b70bb..8b82e9b20a4badd7c8af70a37dea541e41d9fb56 100644
--- a/setup.py
+++ b/setup.py
@@ -58,25 +58,17 @@ setup(
          #bob hed train configurations
         'bob.ip.binseg.config': [
           'DRIU = bob.ip.binseg.configs.models.driu',
-          'DRIUJ = bob.ip.binseg.configs.models.driuj',
-          'DRIUJ7 = bob.ip.binseg.configs.models.driuj7',
-          'DRIUODJ = bob.ip.binseg.configs.models.driuodj',
+          'DRIUOD = bob.ip.binseg.configs.models.driuod',
           'HED = bob.ip.binseg.configs.models.hed',
-          'HEDJ = bob.ip.binseg.configs.models.hedj',
-          'HEDJ7 = bob.ip.binseg.configs.models.hedj7',
-          'HEDODJ = bob.ip.binseg.configs.models.hedodj',
           'M2UNet = bob.ip.binseg.configs.models.m2unet',
-          'M2UNetJ = bob.ip.binseg.configs.models.m2unetj',
-          'M2UNetJ7 = bob.ip.binseg.configs.models.m2unetj7',
           'UNet = bob.ip.binseg.configs.models.unet',
-          'UNetJ = bob.ip.binseg.configs.models.unetj',
-          'UNetJ7 = bob.ip.binseg.configs.models.unetj7',
-          'UNetODJ = bob.ip.binseg.configs.models.unetodj',
           'ResUNet = bob.ip.binseg.configs.models.resunet',
-          'ResUNetJ = bob.ip.binseg.configs.models.resunetj',
           'ShapeResUNet = bob.ip.binseg.configs.models.shaperesunet',
+          'ALLVESSEL544 = bob.ip.binseg.configs.datasets.allvessel544',
+          'ALLVESSEL544TEST = bob.ip.binseg.configs.datasets.allvessel544test',
           'CHASEDB1 = bob.ip.binseg.configs.datasets.chasedb1',
           'CHASEDB1TEST = bob.ip.binseg.configs.datasets.chasedb1test',
+          'CHASEDB1544TEST = bob.ip.binseg.configs.datasets.chasedb1544test',
           'DRIONSDB = bob.ip.binseg.configs.datasets.drionsdb',
           'DRIONSDBTEST = bob.ip.binseg.configs.datasets.drionsdbtest',
           'DRISHTIGS1OD = bob.ip.binseg.configs.datasets.dristhigs1od',
@@ -87,10 +79,12 @@ setup(
           'DRIVETEST = bob.ip.binseg.configs.datasets.drivetest',
           'HRF = bob.ip.binseg.configs.datasets.hrf',
           'HRFTEST = bob.ip.binseg.configs.datasets.hrftest',
+          'HRF544TEST = bob.ip.binseg.configs.datasets.hrf544test',
           'IOSTAROD = bob.ip.binseg.configs.datasets.iostarod',
           'IOSTARODTEST = bob.ip.binseg.configs.datasets.iostarodtest',
           'IOSTARVESSEL = bob.ip.binseg.configs.datasets.iostarvessel',
           'IOSTARVESSELTEST = bob.ip.binseg.configs.datasets.iostarvesseltest',
+          'IOSTARVESSEL544TEST = bob.ip.binseg.configs.datasets.iostarvessel544test',
           'REFUGECUP = bob.ip.binseg.configs.datasets.refugecup',
           'REFUGECUPTEST = bob.ip.binseg.configs.datasets.refugecuptest',
           'REFUGEOD = bob.ip.binseg.configs.datasets.refugeod',
@@ -101,6 +95,7 @@ setup(
           'RIMONER3ODTEST = bob.ip.binseg.configs.datasets.rimoner3odtest',
           'STARE = bob.ip.binseg.configs.datasets.stare',
           'STARETEST = bob.ip.binseg.configs.datasets.staretest',
+          'STARE544TEST = bob.ip.binseg.configs.datasets.stare544test',
           ]
     },