Commit 643c74fe authored by André Anjos

Major module renaming (for consistency); DRY when possible

parent 83216033
Tags: v2.2.7b0
1 merge request: !12 Streamlining
Pipeline #38889 passed
Showing 91 additions and 175 deletions
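Every configuration file touched below follows the same rewrite: the Compose([...]) wrapper and the trailing ToTensor() are dropped in favour of a plain list of transforms, DelayedSample2TorchDataset is renamed to SampleList2TorchDataset, and the keyword argument changes from transform= to transforms=. The REFUGE and RIM-ONE r3 files keep their BinSegDataset wrapper but receive the same list-of-transforms and keyword treatment. A minimal before/after sketch of the pattern (the transform list is abbreviated and the subset name is illustrative):

    # old style (removed in this commit)
    from bob.ip.binseg.data.transforms import *
    _transforms = Compose([RandomHFlip(), ColorJitter(), ToTensor()])
    from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
    from bob.ip.binseg.data.iostar import dataset as iostar
    dataset = DelayedSample2TorchDataset(iostar.subsets("vessel")["train"],
                                         transform=_transforms)

    # new style (introduced in this commit)
    from bob.ip.binseg.data.transforms import *
    _transforms = [RandomHFlip(), ColorJitter()]
    from bob.ip.binseg.data.utils import SampleList2TorchDataset
    from bob.ip.binseg.data.iostar import dataset as iostar
    dataset = SampleList2TorchDataset(iostar.subsets("vessel")["train"],
                                      transforms=_transforms)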
@@ -15,10 +15,6 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 * Split reference: [MEYER-2017]_
 """
-from bob.ip.binseg.data.transforms import *
-_transforms = Compose([ToTensor()])
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = DelayedSample2TorchDataset(iostar.subsets("optic-disc")["test"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(iostar.subsets("optic-disc")["test"])

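In configurations like the one above the whole pipeline used to be Compose([ToTensor()]) and is now gone entirely: tensor conversion is presumably handled inside the new SampleList2TorchDataset wrapper rather than by an explicit ToTensor() step. An illustrative sanity check, assuming samples come back as (image, ground-truth) pairs:

    import torch

    # "dataset" is the object defined by the configuration above; the tuple
    # layout of a sample is an assumption made for illustration purposes.
    image, target = dataset[0][:2]
    assert isinstance(image, torch.Tensor)
    assert isinstance(target, torch.Tensor)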
@@ -16,17 +16,14 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 """
 from bob.ip.binseg.data.transforms import *
-_transforms = Compose(
-    [
+_transforms = [
     RandomHFlip(),
     RandomVFlip(),
     RandomRotation(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+    ]
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = DelayedSample2TorchDataset(iostar.subsets("vessel")["train"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(iostar.subsets("vessel")["train"],
+    transforms=_transforms)

@@ -15,10 +15,6 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 * Split reference: [MEYER-2017]_
 """
-from bob.ip.binseg.data.transforms import *
-_transforms = Compose([ToTensor()])
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = DelayedSample2TorchDataset(iostar.subsets("vessel")["test"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(iostar.subsets("vessel")["test"])

@@ -16,8 +16,7 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 """
 from bob.ip.binseg.data.transforms import *
-_transforms = Compose(
-    [
+_transforms = [
     RandomRotation(),
     Crop(144, 0, 768, 1024),
     Pad((30, 0, 30, 0)),
@@ -25,11 +24,9 @@ _transforms = Compose(
     RandomHFlip(),
     RandomVFlip(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+    ]
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = DelayedSample2TorchDataset(iostar.subsets("vessel")["train"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(iostar.subsets("vessel")["train"],
+    transforms=_transforms)

@@ -16,18 +16,15 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 """
 from bob.ip.binseg.data.transforms import *
-_transforms = Compose(
-    [
+_transforms = [
     Resize(544),
     RandomHFlip(),
     RandomVFlip(),
     RandomRotation(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+    ]
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = DelayedSample2TorchDataset(iostar.subsets("vessel")["train"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(iostar.subsets("vessel")["train"],
+    transforms=_transforms)

@@ -16,19 +16,16 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 """
 from bob.ip.binseg.data.transforms import *
-_transforms = Compose(
-    [
+_transforms = [
     Pad((81, 0, 81, 0)),
     Resize(608),
     RandomHFlip(),
     RandomVFlip(),
     RandomRotation(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+    ]
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = DelayedSample2TorchDataset(iostar.subsets("vessel")["train"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(iostar.subsets("vessel")["train"],
+    transforms=_transforms)

@@ -16,18 +16,15 @@ dataset includes annotations for the optic disc and the artery/vein ratio.
 """
 from bob.ip.binseg.data.transforms import *
-_transforms = Compose(
-    [
+_transforms = [
     Resize(960),
     RandomHFlip(),
     RandomVFlip(),
     RandomRotation(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+    ]
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.iostar import dataset as iostar
-dataset = DelayedSample2TorchDataset(iostar.subsets("vessel")["train"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(iostar.subsets("vessel")["train"],
+    transforms=_transforms)

@@ -27,20 +27,17 @@ from bob.ip.binseg.data.binsegdataset import BinSegDataset
 #### Config ####
-transforms = Compose(
-    [
-    Resize((1539)),
+_transforms = [
+    Resize(1539),
     Pad((21, 46, 22, 47)), #(left, top, right, bottom)
     RandomHFlip(),
     RandomVFlip(),
     RandomRotation(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
+    ]
 # bob.db.dataset init
 bobdb = REFUGE(protocol="default_cup")
 # PyTorch dataset
-dataset = BinSegDataset(bobdb, split="train", transform=transforms)
+dataset = BinSegDataset(bobdb, split="train", transforms=_transforms)

@@ -26,16 +26,11 @@ dataset of retinal fundus images.
 """
-from bob.db.refuge import Database as REFUGE
-from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.data.binsegdataset import BinSegDataset
-#### Config ####
-transforms = Compose([CenterCrop(1632), ToTensor()])
-# bob.db.dataset init
+from bob.ip.binseg.data.transforms import CenterCrop
+_transforms = [CenterCrop(1632)]
+from bob.db.refuge import Database as REFUGE
 bobdb = REFUGE(protocol="default_cup")
-# PyTorch dataset
-dataset = BinSegDataset(bobdb, split="test", transform=transforms)
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+dataset = BinSegDataset(bobdb, split="test", transforms=_transforms)

@@ -21,26 +21,18 @@ dataset of retinal fundus images.
 """
-from bob.db.refuge import Database as REFUGE
 from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.data.binsegdataset import BinSegDataset
-#### Config ####
-transforms = Compose(
-    [
+_transforms = [
     Resize((1539)),
     Pad((21, 46, 22, 47)),
     RandomHFlip(),
     RandomVFlip(),
     RandomRotation(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-# bob.db.dataset init
+    ]
+from bob.db.refuge import Database as REFUGE
 bobdb = REFUGE(protocol="default_od")
-# PyTorch dataset
-dataset = BinSegDataset(bobdb, split="train", transform=transforms)
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+dataset = BinSegDataset(bobdb, split="train", transforms=_transforms)

@@ -26,16 +26,11 @@ dataset of retinal fundus images.
 """
-from bob.db.refuge import Database as REFUGE
-from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.data.binsegdataset import BinSegDataset
-#### Config ####
-transforms = Compose([CenterCrop(1632), ToTensor()])
-# bob.db.dataset init
+from bob.ip.binseg.data.transforms import CenterCrop
+_transforms = [CenterCrop(1632)]
+from bob.db.refuge import Database as REFUGE
 bobdb = REFUGE(protocol="default_od")
-# PyTorch dataset
-dataset = BinSegDataset(bobdb, split="test", transform=transforms)
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+dataset = BinSegDataset(bobdb, split="test", transforms=_transforms)

@@ -16,25 +16,17 @@ baseline.
 * Split reference: [MANINIS-2016]_
 """
-from bob.db.rimoner3 import Database as RIMONER3
 from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.data.binsegdataset import BinSegDataset
-#### Config ####
-transforms = Compose(
-    [
+_transforms = [
     Pad((8, 8, 8, 8)),
     RandomHFlip(),
     RandomVFlip(),
     RandomRotation(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-# bob.db.dataset init
+    ]
+from bob.db.rimoner3 import Database as RIMONER3
 bobdb = RIMONER3(protocol="default_cup")
-# PyTorch dataset
-dataset = BinSegDataset(bobdb, split="train", transform=transforms)
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+dataset = BinSegDataset(bobdb, split="train", transforms=_transforms)

@@ -16,16 +16,12 @@ baseline.
 * Split reference: [MANINIS-2016]_
 """
-from bob.db.rimoner3 import Database as RIMONER3
-from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.data.binsegdataset import BinSegDataset
-#### Config ####
-transforms = Compose([Pad((8, 8, 8, 8)), ToTensor()])
-# bob.db.dataset init
+from bob.ip.binseg.data.transforms import Pad
+_transforms = [Pad((8, 8, 8, 8))]
+from bob.db.rimoner3 import Database as RIMONER3
 bobdb = RIMONER3(protocol="default_cup")
-# PyTorch dataset
-dataset = BinSegDataset(bobdb, split="test", transform=transforms)
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+dataset = BinSegDataset(bobdb, split="test", transforms=_transforms)

@@ -16,25 +16,17 @@ baseline.
 * Split reference: [MANINIS-2016]_
 """
-from bob.db.rimoner3 import Database as RIMONER3
 from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.data.binsegdataset import BinSegDataset
-#### Config ####
-transforms = Compose(
-    [
+_transforms = [
     Pad((8, 8, 8, 8)),
     RandomHFlip(),
     RandomVFlip(),
     RandomRotation(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-# bob.db.dataset init
+    ]
+from bob.db.rimoner3 import Database as RIMONER3
 bobdb = RIMONER3(protocol="default_od")
-# PyTorch dataset
-dataset = BinSegDataset(bobdb, split="train", transform=transforms)
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+dataset = BinSegDataset(bobdb, split="train", transforms=_transforms)

@@ -16,16 +16,11 @@ baseline.
 * Split reference: [MANINIS-2016]_
 """
-from bob.db.rimoner3 import Database as RIMONER3
-from bob.ip.binseg.data.transforms import *
-from bob.ip.binseg.data.binsegdataset import BinSegDataset
-#### Config ####
-transforms = Compose([Pad((8, 8, 8, 8)), ToTensor()])
-# bob.db.dataset init
+from bob.ip.binseg.data.transforms import Pad
+_transforms = [Pad((8, 8, 8, 8))]
+from bob.db.rimoner3 import Database as RIMONER3
 bobdb = RIMONER3(protocol="default_od")
-# PyTorch dataset
-dataset = BinSegDataset(bobdb, split="test", transform=transforms)
+from bob.ip.binseg.data.binsegdataset import BinSegDataset
+dataset = BinSegDataset(bobdb, split="test", transforms=_transforms)

@@ -17,18 +17,15 @@ for training and testing. The second set by Valentina Kouznetsova acts as a
 """
 from bob.ip.binseg.data.transforms import *
-_transforms = Compose(
-    [
+_transforms = [
     Pad((2, 1, 2, 2)), #(left, top, right, bottom)
     RandomHFlip(),
     RandomVFlip(),
     RandomRotation(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+    ]
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.stare import dataset as stare
-dataset = DelayedSample2TorchDataset(stare.subsets("default")["train"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(stare.subsets("default")["train"],
+    transforms=_transforms)

@@ -17,8 +17,7 @@ for training and testing. The second set by Valentina Kouznetsova acts as a
 """
 from bob.ip.binseg.data.transforms import *
-_transforms = Compose(
-    [
+_transforms = [
     RandomRotation(),
     Pad((0, 32, 0, 32)),
     Resize(1024),
@@ -26,11 +25,9 @@ _transforms = Compose(
     RandomHFlip(),
     RandomVFlip(),
     ColorJitter(),
-    ToTensor(),
     ]
-)
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.stare import dataset as stare
-dataset = DelayedSample2TorchDataset(stare.subsets("default")["train"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(stare.subsets("default")["train"],
+    transforms=_transforms)

@@ -14,8 +14,7 @@ for training and testing. The second set by Valentina Kouznetsova acts as a
 """
 from bob.ip.binseg.data.transforms import *
-_transforms = Compose(
-    [
+_transforms = [
     RandomRotation(),
     Crop(50, 0, 500, 705),
     Resize(1168),
@@ -23,11 +22,9 @@ _transforms = Compose(
     RandomHFlip(),
     RandomVFlip(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+    ]
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.stare import dataset as stare
-dataset = DelayedSample2TorchDataset(stare.subsets("default")["train"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(stare.subsets("default")["train"],
+    transforms=_transforms)

@@ -17,19 +17,16 @@ for training and testing. The second set by Valentina Kouznetsova acts as a
 """
 from bob.ip.binseg.data.transforms import *
-_transforms = Compose(
-    [
+_transforms = [
     RandomRotation(),
     Resize(471),
     Pad((0, 37, 0, 36)),
     RandomHFlip(),
     RandomVFlip(),
     ColorJitter(),
-    ToTensor(),
-    ]
-)
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+    ]
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.stare import dataset as stare
-dataset = DelayedSample2TorchDataset(stare.subsets("default")["train"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(stare.subsets("default")["train"],
+    transforms=_transforms)

@@ -17,8 +17,7 @@ for training and testing. The second set by Valentina Kouznetsova acts as a
 """
 from bob.ip.binseg.data.transforms import *
-_transforms = Compose(
-    [
+_transforms = [
     RandomRotation(),
     Pad((0, 32, 0, 32)),
     Resize(960),
@@ -26,11 +25,9 @@ _transforms = Compose(
     RandomHFlip(),
     RandomVFlip(),
     ColorJitter(),
-    ToTensor(),
     ]
-)
-from bob.ip.binseg.data.utils import DelayedSample2TorchDataset
+from bob.ip.binseg.data.utils import SampleList2TorchDataset
 from bob.ip.binseg.data.stare import dataset as stare
-dataset = DelayedSample2TorchDataset(stare.subsets("default")["train"],
-    transform=_transforms)
+dataset = SampleList2TorchDataset(stare.subsets("default")["train"],
+    transforms=_transforms)

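All of these modules ultimately expose a torch-compatible dataset object named dataset, intended to be fed to PyTorch's standard data-loading machinery. A minimal usage sketch (batch size and worker count are illustrative, and default collation is assumed to work for the samples produced by these wrappers):

    from torch.utils.data import DataLoader

    # "dataset" comes from any of the configuration modules above, e.g. the
    # IOSTAR vessel training set wrapped by SampleList2TorchDataset.
    loader = DataLoader(dataset, batch_size=2, shuffle=True, num_workers=2)

    for batch in loader:
        # each iteration yields one collated batch of samples
        pass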