
MLP class and config to train it

Merged Olegs NIKISINS requested to merge mlp_train into master
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
@author: Olegs Nikisins
"""
#==============================================================================
# Import here:
import torch
from torch import nn
import torch.nn.functional as F
#==============================================================================
# Define the network:
class TwoLayerMLP(nn.Module):
    """
    A simple two-layer MLP for binary classification. The output activation
    function is a sigmoid.

    Attributes
    ----------
    in_features : int
        Dimensionality of the input feature vectors.
    n_hidden_relu : int
        Number of ReLU units in the hidden layer of the MLP.
    apply_sigmoid : bool
        If set to ``True``, the sigmoid is applied to the output of the
        hidden FC layer. If ``False``, the sigmoid is not applied.
    """
    def __init__(self, in_features, n_hidden_relu, apply_sigmoid=True):
        """
        Init method.

        Parameters
        ----------
        in_features : int
            Dimensionality of the input feature vectors.
        n_hidden_relu : int
            Number of ReLU units in the hidden layer of the MLP.
        apply_sigmoid : bool
            If set to ``True``, the sigmoid is applied to the output of the
            hidden FC layer. If ``False``, the sigmoid is not applied.
            Default: ``True``.
        """
        super(TwoLayerMLP, self).__init__()

        self.in_features = in_features
        self.n_hidden_relu = n_hidden_relu
        self.apply_sigmoid = apply_sigmoid

        # two fully connected layers: in_features -> n_hidden_relu -> 1
        self.fc1 = nn.Linear(in_features=self.in_features, out_features=self.n_hidden_relu, bias=True)
        self.fc2 = nn.Linear(in_features=self.n_hidden_relu, out_features=1, bias=True)
    def forward(self, x):
        """
        The forward method.

        Parameters
        ----------
        x : :py:class:`torch.Tensor`
            The batch to forward through the network. The size of the input
            batch is [batch_size, 1, self.in_features].

        Returns
        -------
        x : :py:class:`torch.Tensor`
            Output of the MLP: the class probability if ``apply_sigmoid`` is
            ``True``, otherwise the raw logit.
        """
        # the input batch has the size [batch_size, 1, self.in_features];
        # drop the singleton dimension (dim=1, so a batch of size 1 is not
        # collapsed) to get the size [batch_size, self.in_features] expected
        # by the FC layer:
        x = x.squeeze(1)

        # first fully connected layer, activated by ReLU:
        x = self.fc1(x)
        x = F.relu(x)

        # second fully connected layer, optionally activated by a sigmoid:
        x = self.fc2(x)

        if not self.apply_sigmoid:
            return x

        x = torch.sigmoid(x)

        return x
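
A minimal usage sketch for the class above (not part of the merge request): the input dimensionality, hidden size, batch size, and optimizer settings are illustrative assumptions, and random data stands in for real features.

# Illustrative only: train TwoLayerMLP on random data (all sizes are assumptions).
import torch
from torch import nn

model = TwoLayerMLP(in_features=128, n_hidden_relu=10)
criterion = nn.BCELoss()  # expects probabilities, i.e. apply_sigmoid=True
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

features = torch.randn(32, 1, 128)             # [batch_size, 1, in_features]
labels = torch.randint(0, 2, (32, 1)).float()  # binary targets, [batch_size, 1]

for epoch in range(10):
    optimizer.zero_grad()
    probabilities = model(features)            # [batch_size, 1], values in (0, 1)
    loss = criterion(probabilities, labels)
    loss.backward()
    optimizer.step()

With ``apply_sigmoid=False`` the model returns raw logits, in which case ``nn.BCEWithLogitsLoss`` is the numerically more stable pairing.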