Skip to content
Snippets Groups Projects
Commit a37f6baf authored by Daniel CARRON's avatar Daniel CARRON :b: Committed by André Anjos
Browse files

[segmentation.models] Set default resize to 512 for all models

parent 4db9f0a7
No related branches found
No related tags found
1 merge request!46Create common library
Showing
with 16 additions and 18 deletions
......@@ -39,5 +39,5 @@ model = DRIU(
amsbound=amsbound,
),
augmentation_transforms=[],
crop_size=1024,
crop_size=512,
)
......@@ -39,5 +39,5 @@ model = DRIUBN(
amsbound=amsbound,
),
augmentation_transforms=[],
crop_size=1024,
crop_size=512,
)
......@@ -39,5 +39,5 @@ model = DRIUOD(
amsbound=amsbound,
),
augmentation_transforms=[],
crop_size=1024,
crop_size=512,
)
......@@ -39,5 +39,5 @@ model = DRIUPix(
amsbound=amsbound,
),
augmentation_transforms=[],
crop_size=1024,
crop_size=512,
)
......@@ -15,7 +15,6 @@ final_lr = 0.1
gamma = 1e-3
eps = 1e-8
amsbound = False
crop_size = 544
model = HED(
loss_type=MultiSoftJaccardBCELogitsLoss,
......@@ -31,5 +30,5 @@ model = HED(
amsbound=amsbound,
),
augmentation_transforms=[],
crop_size=crop_size,
crop_size=512,
)
......@@ -22,5 +22,5 @@ model = LittleWNet(
optimizer_type=Adam,
optimizer_arguments=dict(lr=max_lr),
augmentation_transforms=[],
crop_size=544,
crop_size=512,
)
......@@ -28,7 +28,6 @@ final_lr = 0.1
gamma = 1e-3
eps = 1e-8
amsbound = False
crop_size = 544
model = M2UNET(
loss_type=SoftJaccardBCELogitsLoss,
......@@ -44,5 +43,5 @@ model = M2UNET(
amsbound=amsbound,
),
augmentation_transforms=[],
crop_size=crop_size,
crop_size=512,
)
......@@ -41,5 +41,5 @@ model = Unet(
amsbound=amsbound,
),
augmentation_transforms=[],
crop_size=1024,
crop_size=512,
)
......@@ -110,7 +110,7 @@ class DRIU(Model):
augmentation_transforms: TransformSequence = [],
num_classes: int = 1,
pretrained: bool = False,
crop_size: int = 1024,
crop_size: int = 512,
):
super().__init__(
loss_type,
......
......@@ -113,7 +113,7 @@ class DRIUBN(Model):
augmentation_transforms: TransformSequence = [],
num_classes: int = 1,
pretrained: bool = False,
crop_size: int = 544,
crop_size: int = 512,
):
super().__init__(
loss_type,
......
......@@ -95,7 +95,7 @@ class DRIUOD(Model):
augmentation_transforms: TransformSequence = [],
num_classes: int = 1,
pretrained: bool = False,
crop_size: int = 544,
crop_size: int = 512,
):
super().__init__(
loss_type,
......
......@@ -99,7 +99,7 @@ class DRIUPix(Model):
augmentation_transforms: TransformSequence = [],
num_classes: int = 1,
pretrained: bool = False,
crop_size: int = 544,
crop_size: int = 512,
):
super().__init__(
loss_type,
......
......@@ -113,7 +113,7 @@ class HED(Model):
augmentation_transforms: TransformSequence = [],
num_classes: int = 1,
pretrained: bool = False,
crop_size: int = 544,
crop_size: int = 512,
):
super().__init__(
loss_type,
......
......@@ -310,7 +310,7 @@ class LittleWNet(Model):
optimizer_arguments: dict[str, typing.Any] = {},
augmentation_transforms: TransformSequence = [],
num_classes: int = 1,
crop_size: int = 544,
crop_size: int = 512,
):
super().__init__(
loss_type,
......
......@@ -161,7 +161,7 @@ class M2UNET(Model):
augmentation_transforms: TransformSequence = [],
num_classes: int = 1,
pretrained: bool = False,
crop_size: int = 544,
crop_size: int = 512,
):
super().__init__(
loss_type,
......
......@@ -102,7 +102,7 @@ class Unet(Model):
augmentation_transforms: TransformSequence = [],
num_classes: int = 1,
pretrained: bool = False,
crop_size: int = 544,
crop_size: int = 512,
):
super().__init__(
loss_type,
......
0% Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment