Skip to content
Snippets Groups Projects
Commit 8d9cf6c1 authored by Tim Laibacher's avatar Tim Laibacher
Browse files

Remove unused parts of MobileNetV2

parent 18dc9c0a
Branches
Tags
No related merge requests found
Pipeline #30728 failed
......@@ -80,14 +80,14 @@ class MobileNetV2(nn.Module):
[6, 32, 3, 2],
[6, 64, 4, 2],
[6, 96, 3, 1],
[6, 160, 3, 2],
[6, 320, 1, 1],
#[6, 160, 3, 2],
#[6, 320, 1, 1],
]
# building first layer
assert input_size % 32 == 0
input_channel = int(input_channel * width_mult)
self.last_channel = int(last_channel * width_mult) if width_mult > 1.0 else last_channel
#self.last_channel = int(last_channel * width_mult) if width_mult > 1.0 else last_channel
self.features = [conv_bn(3, input_channel, 2)]
# building inverted residual blocks
for t, c, n, s in interverted_residual_setting:
......@@ -99,15 +99,15 @@ class MobileNetV2(nn.Module):
self.features.append(block(input_channel, output_channel, 1, expand_ratio=t))
input_channel = output_channel
# building last several layers
self.features.append(conv_1x1_bn(input_channel, self.last_channel))
#self.features.append(conv_1x1_bn(input_channel, self.last_channel))
# make it nn.Sequential
self.features = nn.Sequential(*self.features)
# building classifier
self.classifier = nn.Sequential(
nn.Dropout(0.2),
nn.Linear(self.last_channel, n_class),
)
#self.classifier = nn.Sequential(
# nn.Dropout(0.2),
# nn.Linear(self.last_channel, n_class),
#)
self._initialize_weights()
......
0% Loading, or reload the page.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment