From a99b34a6f7be2a874250479b241c89769811a677 Mon Sep 17 00:00:00 2001 From: Tim Laibacher <tim.laibacher@idiap.ch> Date: Wed, 22 May 2019 11:00:01 +0200 Subject: [PATCH] Add configs --- .gitignore | 1 + README.rst | 2 +- .../binseg/configs/datasets/allvessel544.py | 90 +++++++++++++++++++ .../configs/datasets/allvessel544test.py | 86 ++++++++++++++++++ bob/ip/binseg/configs/datasets/chasedb1.py | 23 +++++ .../configs/datasets/chasedb1544test.py | 20 +++++ .../binseg/configs/datasets/chasedb1test.py | 19 ++++ bob/ip/binseg/configs/datasets/drionsdb.py | 23 +++++ .../{drivecroptest.py => drionsdbtest.py} | 6 +- .../binseg/configs/datasets/dristhigs1cup.py | 23 +++++ .../configs/datasets/dristhigs1cuptest.py | 18 ++++ .../binseg/configs/datasets/dristhigs1od.py | 23 +++++ .../configs/datasets/dristhigs1odtest.py | 19 ++++ .../datasets/{drivecroptrain.py => drive.py} | 0 bob/ip/binseg/configs/datasets/hrf.py | 23 +++++ bob/ip/binseg/configs/datasets/hrf544test.py | 20 +++++ bob/ip/binseg/configs/datasets/hrftest.py | 19 ++++ .../datasets/{drivetrain.py => iostarod.py} | 6 +- .../binseg/configs/datasets/iostarodtest.py | 18 ++++ .../binseg/configs/datasets/iostarvessel.py | 22 +++++ .../configs/datasets/iostarvessel544test.py | 19 ++++ .../configs/datasets/iostarvesseltest.py | 18 ++++ bob/ip/binseg/configs/datasets/refugecup.py | 24 +++++ .../binseg/configs/datasets/refugecuptest.py | 19 ++++ bob/ip/binseg/configs/datasets/refugeod.py | 24 +++++ .../binseg/configs/datasets/refugeodtest.py | 19 ++++ bob/ip/binseg/configs/datasets/rimoner3cup.py | 23 +++++ .../configs/datasets/rimoner3cuptest.py | 19 ++++ bob/ip/binseg/configs/datasets/rimoner3od.py | 23 +++++ .../binseg/configs/datasets/rimoner3odtest.py | 19 ++++ bob/ip/binseg/configs/datasets/stare.py | 23 +++++ .../binseg/configs/datasets/stare544test.py | 20 +++++ bob/ip/binseg/configs/datasets/staretest.py | 19 ++++ bob/ip/binseg/configs/models/driu.py | 7 +- bob/ip/binseg/configs/models/driuadam.py | 33 ------- .../configs/models/{unetj01.py => driuod.py} | 9 +- bob/ip/binseg/configs/models/hed.py | 6 +- bob/ip/binseg/configs/models/m2unet.py | 6 +- bob/ip/binseg/configs/models/resunet.py | 6 +- bob/ip/binseg/configs/models/unet.py | 6 +- bob/ip/binseg/data/binsegdataset.py | 4 +- bob/ip/binseg/engine/inferencer.py | 6 +- bob/ip/binseg/modeling/driupix.py | 80 +++++++++++++++++ bob/ip/binseg/script/binseg.py | 11 ++- setup.py | 19 ++-- 45 files changed, 845 insertions(+), 78 deletions(-) create mode 100644 bob/ip/binseg/configs/datasets/allvessel544.py create mode 100644 bob/ip/binseg/configs/datasets/allvessel544test.py create mode 100644 bob/ip/binseg/configs/datasets/chasedb1.py create mode 100644 bob/ip/binseg/configs/datasets/chasedb1544test.py create mode 100644 bob/ip/binseg/configs/datasets/chasedb1test.py create mode 100644 bob/ip/binseg/configs/datasets/drionsdb.py rename bob/ip/binseg/configs/datasets/{drivecroptest.py => drionsdbtest.py} (74%) create mode 100644 bob/ip/binseg/configs/datasets/dristhigs1cup.py create mode 100644 bob/ip/binseg/configs/datasets/dristhigs1cuptest.py create mode 100644 bob/ip/binseg/configs/datasets/dristhigs1od.py create mode 100644 bob/ip/binseg/configs/datasets/dristhigs1odtest.py rename bob/ip/binseg/configs/datasets/{drivecroptrain.py => drive.py} (100%) create mode 100644 bob/ip/binseg/configs/datasets/hrf.py create mode 100644 bob/ip/binseg/configs/datasets/hrf544test.py create mode 100644 bob/ip/binseg/configs/datasets/hrftest.py rename bob/ip/binseg/configs/datasets/{drivetrain.py => 
iostarod.py} (82%) create mode 100644 bob/ip/binseg/configs/datasets/iostarodtest.py create mode 100644 bob/ip/binseg/configs/datasets/iostarvessel.py create mode 100644 bob/ip/binseg/configs/datasets/iostarvessel544test.py create mode 100644 bob/ip/binseg/configs/datasets/iostarvesseltest.py create mode 100644 bob/ip/binseg/configs/datasets/refugecup.py create mode 100644 bob/ip/binseg/configs/datasets/refugecuptest.py create mode 100644 bob/ip/binseg/configs/datasets/refugeod.py create mode 100644 bob/ip/binseg/configs/datasets/refugeodtest.py create mode 100644 bob/ip/binseg/configs/datasets/rimoner3cup.py create mode 100644 bob/ip/binseg/configs/datasets/rimoner3cuptest.py create mode 100644 bob/ip/binseg/configs/datasets/rimoner3od.py create mode 100644 bob/ip/binseg/configs/datasets/rimoner3odtest.py create mode 100644 bob/ip/binseg/configs/datasets/stare.py create mode 100644 bob/ip/binseg/configs/datasets/stare544test.py create mode 100644 bob/ip/binseg/configs/datasets/staretest.py delete mode 100644 bob/ip/binseg/configs/models/driuadam.py rename bob/ip/binseg/configs/models/{unetj01.py => driuod.py} (84%) create mode 100644 bob/ip/binseg/modeling/driupix.py diff --git a/.gitignore b/.gitignore index 69dac1eb..fcc2be3d 100644 --- a/.gitignore +++ b/.gitignore @@ -22,6 +22,7 @@ record.txt core output_temp output +*.DS_Store ### JupyterNotebook ### diff --git a/README.rst b/README.rst index 2121792a..a413a5c2 100644 --- a/README.rst +++ b/README.rst @@ -3,7 +3,7 @@ .. image:: https://img.shields.io/badge/docs-stable-yellow.svg :target: https://www.idiap.ch/software/bob/docs/bob/bob.ip.binseg/stable/index.html .. image:: https://img.shields.io/badge/docs-latest-orange.svg - :target: https://www.idiap.ch/software/bob/docs/bob/bob.ip.binseg/master/index.html + :target: http://beatubulatest.lab.idiap.ch/private/docs/bob/bob.ip.binseg/master/index.html .. image:: https://gitlab.idiap.ch/bob/bob.ip.binseg/badges/master/build.svg :target: https://gitlab.idiap.ch/bob/bob.ip.binseg/commits/master .. 
image:: https://gitlab.idiap.ch/bob/bob.ip.binseg/badges/master/coverage.svg diff --git a/bob/ip/binseg/configs/datasets/allvessel544.py b/bob/ip/binseg/configs/datasets/allvessel544.py new file mode 100644 index 00000000..9044cebc --- /dev/null +++ b/bob/ip/binseg/configs/datasets/allvessel544.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from bob.db.drive import Database as DRIVE +from bob.db.stare import Database as STARE +from bob.db.chasedb1 import Database as CHASEDB1 +from bob.db.iostar import Database as IOSTAR +from bob.db.hrf import Database as HRF +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset +import torch + +# Target size: 544x544 (DRIVE) + +defaulttransforms = [RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor()] + + +# DRIVE +transforms_drive = Compose([ + CenterCrop((544,544)) + ,*defaulttransforms + ]) + +# bob.db.dataset init +bobdb_drive = DRIVE(protocol = 'default') + +# PyTorch dataset +torch_drive = BinSegDataset(bobdb_drive, split='train', transform=transforms_drive) + + +# CHASE_DB1 +transforms_chase = Compose([ + Resize(544) + ,Crop(0,12,544,544) + ,*defaulttransforms + ]) + +# bob.db.dataset init +bobdb_chase = CHASEDB1(protocol = 'default') + +# PyTorch dataset +torch_chase = BinSegDataset(bobdb_chase, split='train', transform=transforms_chase) + + +# IOSTAR VESSEL +transforms_iostar = Compose([ + Resize(544) + ,*defaulttransforms + ]) + +# bob.db.dataset init +bobdb_iostar = IOSTAR(protocol='default_vessel') + +# PyTorch dataset +torch_iostar = BinSegDataset(bobdb_iostar, split='train', transform=transforms_iostar) + +# STARE +transforms = Compose([ + Resize(471) + ,Pad((0,37,0,36)) + ,*defaulttransforms + ]) + +# bob.db.dataset init +bobdb_stare = STARE(protocol = 'default') + +# PyTorch dataset +torch_stare = BinSegDataset(bobdb_stare, split='train', transform=transforms) + + +# HRF +transforms_hrf = Compose([ + Resize((363)) + ,Pad((0,90,0,91)) + ,*defaulttransforms + ]) + +# bob.db.dataset init +bobdb_hrf = HRF(protocol = 'default') + +# PyTorch dataset +torch_hrf = BinSegDataset(bobdb_hrf, split='train', transform=transforms_hrf) + + + +# Merge +dataset = torch.utils.data.ConcatDataset([torch_drive,torch_stare, torch_chase, torch_iostar, torch_hrf]) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/allvessel544test.py b/bob/ip/binseg/configs/datasets/allvessel544test.py new file mode 100644 index 00000000..55994e9e --- /dev/null +++ b/bob/ip/binseg/configs/datasets/allvessel544test.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from bob.db.drive import Database as DRIVE +from bob.db.stare import Database as STARE +from bob.db.chasedb1 import Database as CHASEDB1 +from bob.db.iostar import Database as IOSTAR +from bob.db.hrf import Database as HRF +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset +import torch + +# Target size: 544x544 (DRIVE) + +defaulttransforms = [ToTensor()] + + +# DRIVE +transforms_drive = Compose([ + CenterCrop((544,544)) + ,*defaulttransforms + ]) + +# bob.db.dataset init +bobdb_drive = DRIVE(protocol = 'default') + +# PyTorch dataset +torch_drive = BinSegDataset(bobdb_drive, split='test', transform=transforms_drive) + + +# CHASE_DB1 +transforms_chase = Compose([ + Resize(544) + ,Crop(0,12,544,544) + ,*defaulttransforms + ]) + +# bob.db.dataset init +bobdb_chase = CHASEDB1(protocol = 'default') + +# PyTorch dataset +torch_chase 
= BinSegDataset(bobdb_chase, split='test', transform=transforms_chase) + + +# IOSTAR VESSEL +transforms_iostar = Compose([ + Resize(544) + ,*defaulttransforms + ]) + +# bob.db.dataset init +bobdb_iostar = IOSTAR(protocol='default_vessel') + +# PyTorch dataset +torch_iostar = BinSegDataset(bobdb_iostar, split='test', transform=transforms_iostar) + +# STARE +transforms = Compose([ + Resize(471) + ,Pad((0,37,0,36)) + ,*defaulttransforms + ]) + +# bob.db.dataset init +bobdb_stare = STARE(protocol = 'default') + +# PyTorch dataset +torch_stare = BinSegDataset(bobdb_stare, split='test', transform=transforms) + + +# HRF +transforms_hrf = Compose([ + Resize((363)) + ,Pad((0,90,0,91)) + ,*defaulttransforms + ]) + +# bob.db.dataset init +bobdb_hrf = HRF(protocol = 'default') + +# PyTorch dataset +torch_hrf = BinSegDataset(bobdb_hrf, split='test', transform=transforms_hrf) + + + +# Merge +dataset = torch.utils.data.ConcatDataset([torch_drive,torch_stare, torch_chase, torch_iostar, torch_hrf]) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/chasedb1.py b/bob/ip/binseg/configs/datasets/chasedb1.py new file mode 100644 index 00000000..7fa0dc09 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/chasedb1.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.chasedb1 import Database as CHASEDB1 +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Crop(0,18,960,960) + ,RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = CHASEDB1(protocol = 'default') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/chasedb1544test.py b/bob/ip/binseg/configs/datasets/chasedb1544test.py new file mode 100644 index 00000000..aa5d00a5 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/chasedb1544test.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.chasedb1 import Database as CHASEDB1 +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Resize(544) + ,Crop(0,12,544,544) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = CHASEDB1(protocol = 'default') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/chasedb1test.py b/bob/ip/binseg/configs/datasets/chasedb1test.py new file mode 100644 index 00000000..4b267b0f --- /dev/null +++ b/bob/ip/binseg/configs/datasets/chasedb1test.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.chasedb1 import Database as CHASEDB1 +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Crop(0,18,960,960) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = CHASEDB1(protocol = 'default') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/drionsdb.py b/bob/ip/binseg/configs/datasets/drionsdb.py new file mode 100644 index 00000000..cd33c1d2 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/drionsdb.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.drionsdb import 
Database as DRIONS +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Pad((4,8,4,8)) + ,RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = DRIONS(protocol = 'default') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/drivecroptest.py b/bob/ip/binseg/configs/datasets/drionsdbtest.py similarity index 74% rename from bob/ip/binseg/configs/datasets/drivecroptest.py rename to bob/ip/binseg/configs/datasets/drionsdbtest.py index 230598dc..b65100a6 100644 --- a/bob/ip/binseg/configs/datasets/drivecroptest.py +++ b/bob/ip/binseg/configs/datasets/drionsdbtest.py @@ -1,19 +1,19 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -from bob.db.drive import Database as DRIVE +from bob.db.drionsdb import Database as DRIONS from bob.ip.binseg.data.transforms import * from bob.ip.binseg.data.binsegdataset import BinSegDataset #### Config #### transforms = Compose([ - CenterCrop((544,544)) + Pad((4,8,4,8)) ,ToTensor() ]) # bob.db.dataset init -bobdb = DRIVE(protocol = 'default') +bobdb = DRIONS(protocol = 'default') # PyTorch dataset dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/dristhigs1cup.py b/bob/ip/binseg/configs/datasets/dristhigs1cup.py new file mode 100644 index 00000000..f7a69dad --- /dev/null +++ b/bob/ip/binseg/configs/datasets/dristhigs1cup.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.drishtigs1 import Database as DRISHTI +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + CenterCrop((1760,2048)) + ,RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = DRISHTI(protocol = 'default_cup') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/dristhigs1cuptest.py b/bob/ip/binseg/configs/datasets/dristhigs1cuptest.py new file mode 100644 index 00000000..5c2b634e --- /dev/null +++ b/bob/ip/binseg/configs/datasets/dristhigs1cuptest.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from bob.db.drishtigs1 import Database as DRISHTI +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + CenterCrop((1760,2048)) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = DRISHTI(protocol = 'default_cup') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/dristhigs1od.py b/bob/ip/binseg/configs/datasets/dristhigs1od.py new file mode 100644 index 00000000..0bd483c1 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/dristhigs1od.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.drishtigs1 import Database as DRISHTI +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + CenterCrop((1760,2048)) + ,RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# 
bob.db.dataset init +bobdb = DRISHTI(protocol = 'default_od') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/dristhigs1odtest.py b/bob/ip/binseg/configs/datasets/dristhigs1odtest.py new file mode 100644 index 00000000..ab1edd65 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/dristhigs1odtest.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.drishtigs1 import Database as DRISHTI +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + CenterCrop((1760,2048)) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = DRISHTI(protocol = 'default_od') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/drivecroptrain.py b/bob/ip/binseg/configs/datasets/drive.py similarity index 100% rename from bob/ip/binseg/configs/datasets/drivecroptrain.py rename to bob/ip/binseg/configs/datasets/drive.py diff --git a/bob/ip/binseg/configs/datasets/hrf.py b/bob/ip/binseg/configs/datasets/hrf.py new file mode 100644 index 00000000..cb008f7d --- /dev/null +++ b/bob/ip/binseg/configs/datasets/hrf.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.hrf import Database as HRF +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Crop(0,108,2336,3296) + ,RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = HRF(protocol = 'default') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/hrf544test.py b/bob/ip/binseg/configs/datasets/hrf544test.py new file mode 100644 index 00000000..86da428b --- /dev/null +++ b/bob/ip/binseg/configs/datasets/hrf544test.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.hrf import Database as HRF +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Resize((363)) + ,Pad((0,90,0,91)) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = HRF(protocol = 'default') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/hrftest.py b/bob/ip/binseg/configs/datasets/hrftest.py new file mode 100644 index 00000000..45f95272 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/hrftest.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.hrf import Database as HRF +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Crop(0,108,2336,3296) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = HRF(protocol = 'default') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/drivetrain.py b/bob/ip/binseg/configs/datasets/iostarod.py similarity index 82% rename from bob/ip/binseg/configs/datasets/drivetrain.py rename to bob/ip/binseg/configs/datasets/iostarod.py index 6662e1fc..334df2a4 100644 --- 
a/bob/ip/binseg/configs/datasets/drivetrain.py +++ b/bob/ip/binseg/configs/datasets/iostarod.py @@ -1,13 +1,13 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -from bob.db.drive import Database as DRIVE +from bob.db.iostar import Database as IOSTAR from bob.ip.binseg.data.transforms import * from bob.ip.binseg.data.binsegdataset import BinSegDataset #### Config #### -transforms = Compose([ +transforms = Compose([ RandomHFlip() ,RandomVFlip() ,RandomRotation() @@ -16,7 +16,7 @@ transforms = Compose([ ]) # bob.db.dataset init -bobdb = DRIVE(protocol = 'default') +bobdb = IOSTAR(protocol='default_od') # PyTorch dataset dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/iostarodtest.py b/bob/ip/binseg/configs/datasets/iostarodtest.py new file mode 100644 index 00000000..ba064507 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/iostarodtest.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.iostar import Database as IOSTAR +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + ToTensor() + ]) + +# bob.db.dataset init +bobdb = IOSTAR(protocol='default_od') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/iostarvessel.py b/bob/ip/binseg/configs/datasets/iostarvessel.py new file mode 100644 index 00000000..ded01bb4 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/iostarvessel.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.iostar import Database as IOSTAR +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = IOSTAR(protocol='default_vessel') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/iostarvessel544test.py b/bob/ip/binseg/configs/datasets/iostarvessel544test.py new file mode 100644 index 00000000..e3ccd854 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/iostarvessel544test.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.iostar import Database as IOSTAR +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Resize(544) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = IOSTAR(protocol='default_vessel') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/iostarvesseltest.py b/bob/ip/binseg/configs/datasets/iostarvesseltest.py new file mode 100644 index 00000000..d8fe1371 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/iostarvesseltest.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.iostar import Database as IOSTAR +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + ToTensor() + ]) + +# bob.db.dataset init +bobdb = IOSTAR(protocol='default_vessel') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', 
transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/refugecup.py b/bob/ip/binseg/configs/datasets/refugecup.py new file mode 100644 index 00000000..9efac529 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/refugecup.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.refuge import Database as REFUGE +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Resize((1539)) + ,Pad((21,46,22,47)) + ,RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = REFUGE(protocol = 'default_cup') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/refugecuptest.py b/bob/ip/binseg/configs/datasets/refugecuptest.py new file mode 100644 index 00000000..8ff916e3 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/refugecuptest.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.refuge import Database as REFUGE +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + CenterCrop(1632) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = REFUGE(protocol = 'default_cup') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/refugeod.py b/bob/ip/binseg/configs/datasets/refugeod.py new file mode 100644 index 00000000..5faaf05a --- /dev/null +++ b/bob/ip/binseg/configs/datasets/refugeod.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.refuge import Database as REFUGE +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Resize((1539)) + ,Pad((21,46,22,47)) + ,RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = REFUGE(protocol = 'default_od') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/refugeodtest.py b/bob/ip/binseg/configs/datasets/refugeodtest.py new file mode 100644 index 00000000..30085a2f --- /dev/null +++ b/bob/ip/binseg/configs/datasets/refugeodtest.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.refuge import Database as REFUGE +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + CenterCrop(1632) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = REFUGE(protocol = 'default_od') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/rimoner3cup.py b/bob/ip/binseg/configs/datasets/rimoner3cup.py new file mode 100644 index 00000000..47b62ba0 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/rimoner3cup.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.rimoner3 import Database as RIMONER3 +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Pad((8,8,8,8)) + 
,RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = RIMONER3(protocol = 'default_cup') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/rimoner3cuptest.py b/bob/ip/binseg/configs/datasets/rimoner3cuptest.py new file mode 100644 index 00000000..9f227be8 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/rimoner3cuptest.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.rimoner3 import Database as RIMONER3 +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Pad((8,8,8,8)) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = RIMONER3(protocol = 'default_cup') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/rimoner3od.py b/bob/ip/binseg/configs/datasets/rimoner3od.py new file mode 100644 index 00000000..4905bec3 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/rimoner3od.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.rimoner3 import Database as RIMONER3 +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Pad((8,8,8,8)) + ,RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = RIMONER3(protocol = 'default_od') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/rimoner3odtest.py b/bob/ip/binseg/configs/datasets/rimoner3odtest.py new file mode 100644 index 00000000..390f20d7 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/rimoner3odtest.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.rimoner3 import Database as RIMONER3 +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Pad((8,8,8,8)) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = RIMONER3(protocol = 'default_od') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/stare.py b/bob/ip/binseg/configs/datasets/stare.py new file mode 100644 index 00000000..f2c784a9 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/stare.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.stare import Database as STARE +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Pad((2,1,2,2)) + ,RandomHFlip() + ,RandomVFlip() + ,RandomRotation() + ,ColorJitter() + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = STARE(protocol = 'default') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='train', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/stare544test.py b/bob/ip/binseg/configs/datasets/stare544test.py new file mode 100644 index 00000000..09b26873 --- /dev/null +++ b/bob/ip/binseg/configs/datasets/stare544test.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.stare import 
Database as STARE +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Resize(471) + ,Pad((0,37,0,36)) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = STARE(protocol = 'default') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/datasets/staretest.py b/bob/ip/binseg/configs/datasets/staretest.py new file mode 100644 index 00000000..aab80b9b --- /dev/null +++ b/bob/ip/binseg/configs/datasets/staretest.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from bob.db.stare import Database as STARE +from bob.ip.binseg.data.transforms import * +from bob.ip.binseg.data.binsegdataset import BinSegDataset + +#### Config #### + +transforms = Compose([ + Pad((2,1,2,2)) + ,ToTensor() + ]) + +# bob.db.dataset init +bobdb = STARE(protocol = 'default') + +# PyTorch dataset +dataset = BinSegDataset(bobdb, split='test', transform=transforms) \ No newline at end of file diff --git a/bob/ip/binseg/configs/models/driu.py b/bob/ip/binseg/configs/models/driu.py index deb789d0..1bc10ef0 100644 --- a/bob/ip/binseg/configs/models/driu.py +++ b/bob/ip/binseg/configs/models/driu.py @@ -6,7 +6,7 @@ from bob.ip.binseg.modeling.driu import build_driu import torch.optim as optim from torch.nn import BCEWithLogitsLoss from bob.ip.binseg.utils.model_zoo import modelurls -from bob.ip.binseg.modeling.losses import WeightedBCELogitsLoss +from bob.ip.binseg.modeling.losses import SoftJaccardBCELogitsLoss from bob.ip.binseg.engine.adabound import AdaBound ##### Config ##### @@ -19,7 +19,7 @@ gamma = 1e-3 eps = 1e-8 amsbound = False -scheduler_milestones = [200] +scheduler_milestones = [800] scheduler_gamma = 0.1 # model @@ -31,9 +31,8 @@ pretrained_backbone = modelurls['vgg16'] # optimizer optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, gamma=gamma, eps=eps, weight_decay=weight_decay, amsbound=amsbound) - # criterion -criterion = WeightedBCELogitsLoss(reduction='mean') +criterion = SoftJaccardBCELogitsLoss(alpha=0.7) # scheduler scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma) diff --git a/bob/ip/binseg/configs/models/driuadam.py b/bob/ip/binseg/configs/models/driuadam.py deleted file mode 100644 index c801206b..00000000 --- a/bob/ip/binseg/configs/models/driuadam.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from torch.optim.lr_scheduler import MultiStepLR -from bob.ip.binseg.modeling.driu import build_driu -import torch.optim as optim -from bob.ip.binseg.modeling.losses import WeightedBCELogitsLoss -from bob.ip.binseg.utils.model_zoo import modelurls - -##### Config ##### -lr = 0.001 -betas = (0.9, 0.999) -eps = 1e-08 -weight_decay = 0 -amsgrad = False - -scheduler_milestones = [150] -scheduler_gamma = 0.1 - -# model -model = build_driu() - -# pretrained backbone -pretrained_backbone = modelurls['vgg16'] - -# optimizer -optimizer = optim.Adam(model.parameters(), lr=lr, betas=betas, eps=eps, weight_decay=weight_decay, amsgrad=amsgrad) - -# criterion -criterion = WeightedBCELogitsLoss(reduction='mean') - -# scheduler -scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma) diff --git a/bob/ip/binseg/configs/models/unetj01.py b/bob/ip/binseg/configs/models/driuod.py similarity index 84% rename from bob/ip/binseg/configs/models/unetj01.py rename to 
bob/ip/binseg/configs/models/driuod.py index da1096cd..199a249a 100644 --- a/bob/ip/binseg/configs/models/unetj01.py +++ b/bob/ip/binseg/configs/models/driuod.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- from torch.optim.lr_scheduler import MultiStepLR -from bob.ip.binseg.modeling.unet import build_unet +from bob.ip.binseg.modeling.driuod import build_driuod import torch.optim as optim from torch.nn import BCEWithLogitsLoss from bob.ip.binseg.utils.model_zoo import modelurls @@ -19,11 +19,11 @@ gamma = 1e-3 eps = 1e-8 amsbound = False -scheduler_milestones = [200] +scheduler_milestones = [800] scheduler_gamma = 0.1 # model -model = build_unet() +model = build_driuod() # pretrained backbone pretrained_backbone = modelurls['vgg16'] @@ -31,9 +31,8 @@ pretrained_backbone = modelurls['vgg16'] # optimizer optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, gamma=gamma, eps=eps, weight_decay=weight_decay, amsbound=amsbound) - # criterion -criterion = SoftJaccardBCELogitsLoss(alpha=0.1) +criterion = SoftJaccardBCELogitsLoss(alpha=0.7) # scheduler scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma) diff --git a/bob/ip/binseg/configs/models/hed.py b/bob/ip/binseg/configs/models/hed.py index a586e86e..306ecb80 100644 --- a/bob/ip/binseg/configs/models/hed.py +++ b/bob/ip/binseg/configs/models/hed.py @@ -4,7 +4,7 @@ from torch.optim.lr_scheduler import MultiStepLR from bob.ip.binseg.modeling.hed import build_hed import torch.optim as optim -from bob.ip.binseg.modeling.losses import HEDWeightedBCELogitsLoss +from bob.ip.binseg.modeling.losses import HEDSoftJaccardBCELogitsLoss from bob.ip.binseg.utils.model_zoo import modelurls from bob.ip.binseg.engine.adabound import AdaBound @@ -19,7 +19,7 @@ gamma = 1e-3 eps = 1e-8 amsbound = False -scheduler_milestones = [200] +scheduler_milestones = [800] scheduler_gamma = 0.1 @@ -33,7 +33,7 @@ pretrained_backbone = modelurls['vgg16'] optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, gamma=gamma, eps=eps, weight_decay=weight_decay, amsbound=amsbound) # criterion -criterion = HEDWeightedBCELogitsLoss(reduction='mean') +criterion = HEDSoftJaccardBCELogitsLoss(alpha=0.7) # scheduler scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma) diff --git a/bob/ip/binseg/configs/models/m2unet.py b/bob/ip/binseg/configs/models/m2unet.py index e97ae62d..cb422dca 100644 --- a/bob/ip/binseg/configs/models/m2unet.py +++ b/bob/ip/binseg/configs/models/m2unet.py @@ -6,7 +6,7 @@ from bob.ip.binseg.modeling.m2u import build_m2unet import torch.optim as optim from torch.nn import BCEWithLogitsLoss from bob.ip.binseg.utils.model_zoo import modelurls -from bob.ip.binseg.modeling.losses import WeightedBCELogitsLoss +from bob.ip.binseg.modeling.losses import SoftJaccardBCELogitsLoss from bob.ip.binseg.engine.adabound import AdaBound ##### Config ##### @@ -19,7 +19,7 @@ gamma = 1e-3 eps = 1e-8 amsbound = False -scheduler_milestones = [200] +scheduler_milestones = [800] scheduler_gamma = 0.1 # model @@ -33,7 +33,7 @@ optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, eps=eps, weight_decay=weight_decay, amsbound=amsbound) # criterion -criterion = WeightedBCELogitsLoss(reduction='mean') +criterion = SoftJaccardBCELogitsLoss(alpha=0.7) # scheduler scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma) diff --git a/bob/ip/binseg/configs/models/resunet.py b/bob/ip/binseg/configs/models/resunet.py index 
17184497..1a049f14 100644 --- a/bob/ip/binseg/configs/models/resunet.py +++ b/bob/ip/binseg/configs/models/resunet.py @@ -6,7 +6,7 @@ from bob.ip.binseg.modeling.resunet import build_res50unet import torch.optim as optim from torch.nn import BCEWithLogitsLoss from bob.ip.binseg.utils.model_zoo import modelurls -from bob.ip.binseg.modeling.losses import WeightedBCELogitsLoss +from bob.ip.binseg.modeling.losses import SoftJaccardBCELogitsLoss from bob.ip.binseg.engine.adabound import AdaBound ##### Config ##### @@ -19,7 +19,7 @@ gamma = 1e-3 eps = 1e-8 amsbound = False -scheduler_milestones = [200] +scheduler_milestones = [800] scheduler_gamma = 0.1 # model @@ -33,7 +33,7 @@ optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, eps=eps, weight_decay=weight_decay, amsbound=amsbound) # criterion -criterion = WeightedBCELogitsLoss(reduction='mean') +criterion = SoftJaccardBCELogitsLoss(alpha=0.7) # scheduler scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma) diff --git a/bob/ip/binseg/configs/models/unet.py b/bob/ip/binseg/configs/models/unet.py index f034d94d..7c21b042 100644 --- a/bob/ip/binseg/configs/models/unet.py +++ b/bob/ip/binseg/configs/models/unet.py @@ -6,7 +6,7 @@ from bob.ip.binseg.modeling.unet import build_unet import torch.optim as optim from torch.nn import BCEWithLogitsLoss from bob.ip.binseg.utils.model_zoo import modelurls -from bob.ip.binseg.modeling.losses import WeightedBCELogitsLoss +from bob.ip.binseg.modeling.losses import SoftJaccardBCELogitsLoss from bob.ip.binseg.engine.adabound import AdaBound ##### Config ##### @@ -19,7 +19,7 @@ gamma = 1e-3 eps = 1e-8 amsbound = False -scheduler_milestones = [200] +scheduler_milestones = [800] scheduler_gamma = 0.1 # model @@ -33,7 +33,7 @@ optimizer = AdaBound(model.parameters(), lr=lr, betas=betas, final_lr=final_lr, eps=eps, weight_decay=weight_decay, amsbound=amsbound) # criterion -criterion = WeightedBCELogitsLoss(reduction='mean') +criterion = SoftJaccardBCELogitsLoss(alpha=0.7) # scheduler scheduler = MultiStepLR(optimizer, milestones=scheduler_milestones, gamma=scheduler_gamma) diff --git a/bob/ip/binseg/data/binsegdataset.py b/bob/ip/binseg/data/binsegdataset.py index f212c4fe..86fc97df 100644 --- a/bob/ip/binseg/data/binsegdataset.py +++ b/bob/ip/binseg/data/binsegdataset.py @@ -7,7 +7,7 @@ class BinSegDataset(Dataset): A transform object can be passed that will be applied to the image, ground truth and mask (if present). It supports indexing such that dataset[i] can be used to get ith sample. - Attributes + Parameters ---------- bobdb : :py:mod:`bob.db.base` Binary segmentation bob database (e.g. bob.db.drive) @@ -15,6 +15,8 @@ class BinSegDataset(Dataset): ``'train'`` or ``'test'``. Defaults to ``'train'`` transform : :py:mod:`bob.ip.binseg.data.transforms`, optional A transform or composition of transfroms. Defaults to ``None``. 
+ mask : bool + whether dataset contains masks or not """ def __init__(self, bobdb, split = 'train', transform = None): self.database = bobdb.samples(split) diff --git a/bob/ip/binseg/engine/inferencer.py b/bob/ip/binseg/engine/inferencer.py index 00f60d7c..a1017ed5 100644 --- a/bob/ip/binseg/engine/inferencer.py +++ b/bob/ip/binseg/engine/inferencer.py @@ -134,9 +134,9 @@ def do_inference( """ logger = logging.getLogger("bob.ip.binseg.engine.inference") logger.info("Start evaluation") - logger.info("Split: {}, Output folder: {}, Device: {}".format(data_loader.dataset.split, output_folder, device)) + logger.info("Output folder: {}, Device: {}".format(output_folder, device)) results_subfolder = os.path.join(output_folder,'results') - if not os.path.exists(results_subfolder): os.makedirs(results_subfolder) + os.makedirs(results_subfolder,exist_ok=True) model.eval().to(device) # Sigmoid for probabilities @@ -206,7 +206,7 @@ def do_inference( np_avg_metrics = avg_metrics.to_numpy().T fig_name = "precision_recall.pdf" logger.info("saving {}".format(fig_name)) - fig = precision_recall_f1iso([np_avg_metrics[0]],[np_avg_metrics[1]], [model.name,None], title=output_folder) + fig = precision_recall_f1iso([np_avg_metrics[0]],[np_avg_metrics[1]], [model.name,None], title=output_folder.split('/')[-2:]) fig_filename = os.path.join(results_subfolder, fig_name) fig.savefig(fig_filename) diff --git a/bob/ip/binseg/modeling/driupix.py b/bob/ip/binseg/modeling/driupix.py new file mode 100644 index 00000000..00e40932 --- /dev/null +++ b/bob/ip/binseg/modeling/driupix.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import torch +from torch import nn +from collections import OrderedDict +from bob.ip.binseg.modeling.backbones.vgg import vgg16 +from bob.ip.binseg.modeling.make_layers import conv_with_kaiming_uniform,convtrans_with_kaiming_uniform, UpsampleCropBlock + +class ConcatFuseBlock(nn.Module): + """ + Takes in four feature maps with 16 channels each, concatenates them + and applies a 1x1 convolution with 1 output channel. + """ + def __init__(self): + super().__init__() + self.conv = conv_with_kaiming_uniform(4*16,1,1,1,0) + + def forward(self,x1,x2,x3,x4): + x_cat = torch.cat([x1,x2,x3,x4],dim=1) + x = self.conv(x_cat) + return x + +class DRIUPIX(nn.Module): + """ + DRIUPIX head module. DRIU with pixelshuffle instead of ConvTrans2D + + Parameters + ---------- + in_channels_list : list + number of channels for each feature map that is returned from backbone + """ + def __init__(self, in_channels_list=None): + super(DRIUPIX, self).__init__() + in_conv_1_2_16, in_upsample2, in_upsample_4, in_upsample_8 = in_channels_list + + self.conv1_2_16 = nn.Conv2d(in_conv_1_2_16, 16, 3, 1, 1) + # Upsample layers + self.upsample2 = UpsampleCropBlock(in_upsample2, 16, 4, 2, 0, pixelshuffle=True) + self.upsample4 = UpsampleCropBlock(in_upsample_4, 16, 8, 4, 0, pixelshuffle=True) + self.upsample8 = UpsampleCropBlock(in_upsample_8, 16, 16, 8, 0, pixelshuffle=True) + + # Concat and Fuse + self.concatfuse = ConcatFuseBlock() + + def forward(self,x): + """ + Parameters + ---------- + x : list + list of tensors as returned from the backbone network. + First element: height and width of input image. + Remaining elements: feature maps for each feature level. 
+ + Returns + ------- + :py:class:`torch.Tensor` + """ + hw = x[0] + conv1_2_16 = self.conv1_2_16(x[1]) # conv1_2_16 + upsample2 = self.upsample2(x[2], hw) # side-multi2-up + upsample4 = self.upsample4(x[3], hw) # side-multi3-up + upsample8 = self.upsample8(x[4], hw) # side-multi4-up + out = self.concatfuse(conv1_2_16, upsample2, upsample4, upsample8) + return out + +def build_driupix(): + """ + Adds backbone and head together + + Returns + ------- + :py:class:torch.nn.Module + """ + backbone = vgg16(pretrained=False, return_features = [3, 8, 14, 22]) + driu_head = DRIUPIX([64, 128, 256, 512]) + + model = nn.Sequential(OrderedDict([("backbone", backbone), ("head", driu_head)])) + model.name = "DRIUPIX" + return model \ No newline at end of file diff --git a/bob/ip/binseg/script/binseg.py b/bob/ip/binseg/script/binseg.py index e56aa2fe..2a90d5ed 100644 --- a/bob/ip/binseg/script/binseg.py +++ b/bob/ip/binseg/script/binseg.py @@ -197,12 +197,21 @@ def train(model required=True, default='cpu', cls=ResourceOption) +@click.option( + '--weight', + '-w', + help='Path or URL to pretrained model', + required=False, + default=None, + cls=ResourceOption + ) @verbosity_option(cls=ResourceOption) def test(model ,output_path ,device ,batch_size ,dataset + ,weight , **kwargs): """ Run inference and evalaute the model performance """ @@ -216,7 +225,7 @@ def test(model # checkpointer, load last model in dir checkpointer = DetectronCheckpointer(model, save_dir = output_path, save_to_disk=False) - checkpointer.load() + checkpointer.load(weight) do_inference(model, data_loader, device, output_path) diff --git a/setup.py b/setup.py index 2b70354f..8b82e9b2 100644 --- a/setup.py +++ b/setup.py @@ -58,25 +58,17 @@ setup( #bob hed train configurations 'bob.ip.binseg.config': [ 'DRIU = bob.ip.binseg.configs.models.driu', - 'DRIUJ = bob.ip.binseg.configs.models.driuj', - 'DRIUJ7 = bob.ip.binseg.configs.models.driuj7', - 'DRIUODJ = bob.ip.binseg.configs.models.driuodj', + 'DRIUOD = bob.ip.binseg.configs.models.driuod', 'HED = bob.ip.binseg.configs.models.hed', - 'HEDJ = bob.ip.binseg.configs.models.hedj', - 'HEDJ7 = bob.ip.binseg.configs.models.hedj7', - 'HEDODJ = bob.ip.binseg.configs.models.hedodj', 'M2UNet = bob.ip.binseg.configs.models.m2unet', - 'M2UNetJ = bob.ip.binseg.configs.models.m2unetj', - 'M2UNetJ7 = bob.ip.binseg.configs.models.m2unetj7', 'UNet = bob.ip.binseg.configs.models.unet', - 'UNetJ = bob.ip.binseg.configs.models.unetj', - 'UNetJ7 = bob.ip.binseg.configs.models.unetj7', - 'UNetODJ = bob.ip.binseg.configs.models.unetodj', 'ResUNet = bob.ip.binseg.configs.models.resunet', - 'ResUNetJ = bob.ip.binseg.configs.models.resunetj', 'ShapeResUNet = bob.ip.binseg.configs.models.shaperesunet', + 'ALLVESSEL544 = bob.ip.binseg.configs.datasets.allvessel544', + 'ALLVESSEL544TEST = bob.ip.binseg.configs.datasets.allvessel544test', 'CHASEDB1 = bob.ip.binseg.configs.datasets.chasedb1', 'CHASEDB1TEST = bob.ip.binseg.configs.datasets.chasedb1test', + 'CHASEDB1544TEST = bob.ip.binseg.configs.datasets.chasedb1544test', 'DRIONSDB = bob.ip.binseg.configs.datasets.drionsdb', 'DRIONSDBTEST = bob.ip.binseg.configs.datasets.drionsdbtest', 'DRISHTIGS1OD = bob.ip.binseg.configs.datasets.dristhigs1od', @@ -87,10 +79,12 @@ setup( 'DRIVETEST = bob.ip.binseg.configs.datasets.drivetest', 'HRF = bob.ip.binseg.configs.datasets.hrf', 'HRFTEST = bob.ip.binseg.configs.datasets.hrftest', + 'HRF544TEST = bob.ip.binseg.configs.datasets.hrf544test', 'IOSTAROD = bob.ip.binseg.configs.datasets.iostarod', 'IOSTARODTEST = 
bob.ip.binseg.configs.datasets.iostarodtest', 'IOSTARVESSEL = bob.ip.binseg.configs.datasets.iostarvessel', 'IOSTARVESSELTEST = bob.ip.binseg.configs.datasets.iostarvesseltest', + 'IOSTARVESSEL544TEST = bob.ip.binseg.configs.datasets.iostarvessel544test', 'REFUGECUP = bob.ip.binseg.configs.datasets.refugecup', 'REFUGECUPTEST = bob.ip.binseg.configs.datasets.refugecuptest', 'REFUGEOD = bob.ip.binseg.configs.datasets.refugeod', @@ -101,6 +95,7 @@ setup( 'RIMONER3ODTEST = bob.ip.binseg.configs.datasets.rimoner3odtest', 'STARE = bob.ip.binseg.configs.datasets.stare', 'STARETEST = bob.ip.binseg.configs.datasets.staretest', + 'STARE544TEST = bob.ip.binseg.configs.datasets.stare544test', ] }, -- GitLab
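A minimal usage sketch of the new dataset configs, assuming bob.db.stare is installed and its data location configured: each config module added in this patch exposes a module-level `dataset` object (a BinSegDataset, or a ConcatDataset for the combined allvessel544 variants) that plugs straight into a standard PyTorch DataLoader. The import path below comes from this patch; the batch size and shuffle flag are illustrative assumptions, not values fixed anywhere in the configs.

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Minimal sketch: load one of the new dataset configs and wrap it in a DataLoader.
from torch.utils.data import DataLoader

# Import path taken from this patch; requires bob.db.stare to be installed
# and configured so the Database object can list its samples.
from bob.ip.binseg.configs.datasets.stare544test import dataset

# batch_size and shuffle are illustrative assumptions; the test configs above
# only define the dataset and its transforms, not how it is loaded.
data_loader = DataLoader(dataset, batch_size=1, shuffle=False)

print(len(dataset))  # number of test samples in the STARE 'default' protocol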
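A similar sketch for the new DRIUPIX model: build_driupix() and the model name come from bob/ip/binseg/modeling/driupix.py added above, while the 544x544 dummy input merely mirrors the resolution targeted by the *544* dataset configs and is an assumption about what the VGG16 backbone accepts.

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Minimal sketch of the new DRIUPIX head (DRIU with PixelShuffle upsampling).
import torch
from bob.ip.binseg.modeling.driupix import build_driupix

model = build_driupix()   # VGG16 backbone + DRIUPIX head, randomly initialized
print(model.name)         # "DRIUPIX"

# Dummy forward pass; the 544x544 resolution is only an assumption here.  The
# head fuses four 16-channel side outputs into a single-channel logit map
# cropped back to the input size by the UpsampleCropBlocks.
with torch.no_grad():
    logits = model(torch.rand(1, 3, 544, 544))
print(logits.shape)

On the evaluation side, the reworked test subcommand now accepts --weight/-w, which is handed to DetectronCheckpointer.load(); a specific checkpoint path or URL can therefore be evaluated directly, instead of whatever checkpoint was last saved to the output directory.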