Commit e08a1fb7 authored by André Anjos

[modelling,utils] Avoid method names with underscore to avoid sphinx errors

parent ba9be5b4
Merge request !12 (Streamlining)
Pipeline #38679 passed
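
The rename matters because Sphinx's autodoc extension treats names with a leading underscore as private and skips them by default, so API pages and cross-references that point at helpers such as _conv3x3 or _BasicBlock can fail to build. A minimal sketch of the documentation side, assuming a standard autodoc setup (this conf.py fragment is hypothetical, not the project's actual configuration):

# conf.py -- hypothetical Sphinx configuration fragment, for illustration only
extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon"]

# autodoc documents public members only by default; the alternative to renaming
# would be to opt in to private members explicitly, which exposes every
# underscore-prefixed helper in the API reference:
autodoc_default_options = {
    "members": True,
    # "private-members": True,
}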
@@ -15,27 +15,27 @@ model_urls = {
 }
-def _conv3x3(in_planes, out_planes, stride=1):
+def conv3x3(in_planes, out_planes, stride=1):
     """3x3 convolution with padding"""
     return nn.Conv2d(
         in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False,
     )
-def _conv1x1(in_planes, out_planes, stride=1):
+def conv1x1(in_planes, out_planes, stride=1):
     """1x1 convolution"""
     return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
-class _BasicBlock(nn.Module):
+class BasicBlock(nn.Module):
     expansion = 1
     def __init__(self, inplanes, planes, stride=1, downsample=None):
-        super(_BasicBlock, self).__init__()
-        self.conv1 = _conv3x3(inplanes, planes, stride)
+        super(BasicBlock, self).__init__()
+        self.conv1 = conv3x3(inplanes, planes, stride)
         self.bn1 = nn.BatchNorm2d(planes)
         self.relu = nn.ReLU(inplace=True)
-        self.conv2 = _conv3x3(planes, planes)
+        self.conv2 = conv3x3(planes, planes)
         self.bn2 = nn.BatchNorm2d(planes)
         self.downsample = downsample
         self.stride = stride
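
The renamed helpers are thin wrappers around nn.Conv2d: conv3x3 preserves the spatial size at stride 1 thanks to padding=1, while conv1x1 only changes the channel count. A runnable sketch (not part of the commit) illustrating the output shapes:

# Sketch: shape behaviour of the two convolution helpers defined above.
import torch
import torch.nn as nn

def conv3x3(in_planes, out_planes, stride=1):
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                     padding=1, bias=False)

def conv1x1(in_planes, out_planes, stride=1):
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)

x = torch.randn(1, 64, 56, 56)
print(conv3x3(64, 64)(x).shape)    # torch.Size([1, 64, 56, 56])
print(conv1x1(64, 256)(x).shape)   # torch.Size([1, 256, 56, 56])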
@@ -59,16 +59,16 @@ class _BasicBlock(nn.Module):
         return out
-class _Bottleneck(nn.Module):
+class Bottleneck(nn.Module):
     expansion = 4
     def __init__(self, inplanes, planes, stride=1, downsample=None):
-        super(_Bottleneck, self).__init__()
-        self.conv1 = _conv1x1(inplanes, planes)
+        super(Bottleneck, self).__init__()
+        self.conv1 = conv1x1(inplanes, planes)
         self.bn1 = nn.BatchNorm2d(planes)
-        self.conv2 = _conv3x3(planes, planes, stride)
+        self.conv2 = conv3x3(planes, planes, stride)
         self.bn2 = nn.BatchNorm2d(planes)
-        self.conv3 = _conv1x1(planes, planes * self.expansion)
+        self.conv3 = conv1x1(planes, planes * self.expansion)
         self.bn3 = nn.BatchNorm2d(planes * self.expansion)
         self.relu = nn.ReLU(inplace=True)
         self.downsample = downsample
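
The forward pass of the block is untouched by this commit; it follows the standard residual pattern used in torchvision-style implementations. A sketch of that pattern, using the attribute names from the __init__ shown in the hunk above:

# Sketch of the (unchanged) Bottleneck.forward residual pattern.
def forward(self, x):
    identity = x

    out = self.relu(self.bn1(self.conv1(x)))    # 1x1: reduce channels
    out = self.relu(self.bn2(self.conv2(out)))  # 3x3: spatial processing (may stride)
    out = self.bn3(self.conv3(out))             # 1x1: expand channels (x4)

    if self.downsample is not None:
        identity = self.downsample(x)           # project shortcut to matching shape

    out += identity                             # residual connection
    return self.relu(out)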
@@ -141,16 +141,16 @@ class ResNet(nn.Module):
         # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
         if zero_init_residual:
             for m in self.modules():
-                if isinstance(m, _Bottleneck):
+                if isinstance(m, Bottleneck):
                     nn.init.constant_(m.bn3.weight, 0)
-                elif isinstance(m, _BasicBlock):
+                elif isinstance(m, BasicBlock):
                     nn.init.constant_(m.bn2.weight, 0)
     def _make_layer(self, block, planes, blocks, stride=1):
         downsample = None
         if stride != 1 or self.inplanes != planes * block.expansion:
             downsample = nn.Sequential(
-                _conv1x1(self.inplanes, planes * block.expansion, stride),
+                conv1x1(self.inplanes, planes * block.expansion, stride),
                 nn.BatchNorm2d(planes * block.expansion),
             )
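
Note that _make_layer keeps its leading underscore: it is a genuinely private method of ResNet, and the hunk only updates its call to the renamed conv1x1. The projection shortcut is built whenever a stage changes resolution or width; the remainder of the method (outside this hunk) then stacks the requested number of blocks, roughly as in this sketch:

# Sketch of the rest of _make_layer (not shown in the hunk above).
layers = [block(self.inplanes, planes, stride, downsample)]
self.inplanes = planes * block.expansion
for _ in range(1, blocks):
    layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)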
@@ -179,7 +179,7 @@ def resnet18(pretrained=False, **kwargs):
     Args:
         pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
-    model = ResNet(_BasicBlock, [2, 2, 2, 2], **kwargs)
+    model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["resnet18"]))
     return model
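
The remaining constructors below follow the same pattern, differing only in the block type (BasicBlock vs. Bottleneck) and the per-stage block counts. A usage sketch, assuming resnet18 from this file is importable (the actual module path is not shown in the diff):

# Usage sketch; assumes resnet18 as defined above is in scope.
import torch

model = resnet18(pretrained=False)         # BasicBlock, [2, 2, 2, 2]
out = model(torch.randn(1, 3, 224, 224))   # standard ImageNet-sized input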
@@ -190,7 +190,7 @@ def resnet34(pretrained=False, **kwargs):
     Args:
         pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
-    model = ResNet(_BasicBlock, [3, 4, 6, 3], **kwargs)
+    model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["resnet34"]))
     return model
@@ -201,7 +201,7 @@ def resnet50(pretrained=False, **kwargs):
     Args:
         pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
-    model = ResNet(_Bottleneck, [3, 4, 6, 3], **kwargs)
+    model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["resnet50"]))
     return model
@@ -212,7 +212,7 @@ def shaperesnet50(pretrained=False, **kwargs):
     Args:
         pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
-    model = ResNet(_Bottleneck, [3, 4, 6, 3], **kwargs)
+    model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
     if pretrained:
         model.load_state_dict(
             model_zoo.load_url(
@@ -227,7 +227,7 @@ def resnet101(pretrained=False, **kwargs):
     Args:
         pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
-    model = ResNet(_Bottleneck, [3, 4, 23, 3], **kwargs)
+    model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["resnet101"]))
     return model
@@ -238,7 +238,7 @@ def resnet152(pretrained=False, **kwargs):
     Args:
         pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
-    model = ResNet(_Bottleneck, [3, 8, 36, 3], **kwargs)
+    model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["resnet152"]))
     return model
@@ -52,7 +52,7 @@ class VGG(nn.Module):
                 nn.init.constant_(m.bias, 0)
-def _make_layers(cfg, batch_norm=False):
+def make_layers(cfg, batch_norm=False):
     layers = []
     in_channels = 3
     for v in cfg:
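
make_layers walks a configuration list in which integers denote the output channels of a 3x3 convolution and "M" denotes a max-pooling step; only the function name changes in this commit. The loop body (outside this hunk) behaves roughly like the following sketch, which mirrors the standard torchvision VGG recipe:

# Sketch (not part of the commit) of how make_layers typically expands a cfg list.
import torch.nn as nn

def make_layers(cfg, batch_norm=False):
    layers = []
    in_channels = 3
    for v in cfg:
        if v == "M":
            layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
        else:
            conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1)
            if batch_norm:
                layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)]
            else:
                layers += [conv2d, nn.ReLU(inplace=True)]
            in_channels = v
    return nn.Sequential(*layers)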
@@ -124,7 +124,7 @@ def vgg11(pretrained=False, **kwargs):
     """
     if pretrained:
         kwargs["init_weights"] = False
-    model = VGG(_make_layers(_cfg["A"]), **kwargs)
+    model = VGG(make_layers(_cfg["A"]), **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["vgg11"]))
     return model
@@ -137,7 +137,7 @@ def vgg11_bn(pretrained=False, **kwargs):
     """
     if pretrained:
         kwargs["init_weights"] = False
-    model = VGG(_make_layers(_cfg["A"], batch_norm=True), **kwargs)
+    model = VGG(make_layers(_cfg["A"], batch_norm=True), **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["vgg11_bn"]))
     return model
@@ -150,7 +150,7 @@ def vgg13(pretrained=False, **kwargs):
     """
     if pretrained:
         kwargs["init_weights"] = False
-    model = VGG(_make_layers(_cfg["B"]), **kwargs)
+    model = VGG(make_layers(_cfg["B"]), **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["vgg13"]))
     return model
@@ -163,7 +163,7 @@ def vgg13_bn(pretrained=False, **kwargs):
     """
     if pretrained:
         kwargs["init_weights"] = False
-    model = VGG(_make_layers(_cfg["B"], batch_norm=True), **kwargs)
+    model = VGG(make_layers(_cfg["B"], batch_norm=True), **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["vgg13_bn"]))
     return model
@@ -176,7 +176,7 @@ def vgg16(pretrained=False, **kwargs):
     """
     if pretrained:
         kwargs["init_weights"] = False
-    model = VGG(_make_layers(_cfg["D"]), **kwargs)
+    model = VGG(make_layers(_cfg["D"]), **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["vgg16"]), strict=False)
     return model
@@ -189,7 +189,7 @@ def vgg16_bn(pretrained=False, **kwargs):
     """
     if pretrained:
         kwargs["init_weights"] = False
-    model = VGG(_make_layers(_cfg["D"], batch_norm=True), **kwargs)
+    model = VGG(make_layers(_cfg["D"], batch_norm=True), **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["vgg16_bn"]))
     return model
@@ -202,7 +202,7 @@ def vgg19(pretrained=False, **kwargs):
     """
     if pretrained:
         kwargs["init_weights"] = False
-    model = VGG(_make_layers(_cfg["E"]), **kwargs)
+    model = VGG(make_layers(_cfg["E"]), **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["vgg19"]))
     return model
@@ -215,7 +215,7 @@ def vgg19_bn(pretrained=False, **kwargs):
     """
     if pretrained:
         kwargs["init_weights"] = False
-    model = VGG(_make_layers(_cfg["E"], batch_norm=True), **kwargs)
+    model = VGG(make_layers(_cfg["E"], batch_norm=True), **kwargs)
     if pretrained:
         model.load_state_dict(model_zoo.load_url(model_urls["vgg19_bn"]))
     return model
@@ -35,7 +35,7 @@ modelurls = {
 """URLs of pre-trained models (backbones)"""
-def _download_url_to_file(url, dst, hash_prefix, progress):
+def download_url_to_file(url, dst, hash_prefix, progress):
     file_size = None
     u = urlopen(url)
     meta = u.info()
@@ -109,6 +109,6 @@ def cache_url(url, model_dir=None, progress=True):
     hash_prefix = HASH_REGEX.search(filename)
     if hash_prefix is not None:
         hash_prefix = hash_prefix.group(1)
-    _download_url_to_file(url, cached_file, hash_prefix, progress=progress)
+    download_url_to_file(url, cached_file, hash_prefix, progress=progress)
     return cached_file
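
cache_url, whose signature appears in the hunk header, stays public: it derives a filename from the URL, extracts the optional hash prefix via HASH_REGEX, and delegates the transfer to the renamed download_url_to_file. A usage sketch, assuming cache_url from the utils module above is in scope:

# Usage sketch: download (or reuse a cached copy of) a checkpoint and get its path.
path = cache_url(
    "https://download.pytorch.org/models/resnet18-5c106cde.pth",
    model_dir=None,   # None typically falls back to an environment-defined cache dir
    progress=True,
)
print(path)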