TwoLayerMLP.py 2.57 KB
Newer Older
Olegs NIKISINS's avatar
Olegs NIKISINS committed
1
2
3
4
5
6
7
8
9
10
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
@author: Olegs Nikisins
"""
#==============================================================================
# Import here:

from torch import nn

11
12
13
import torch.nn.functional as F


Olegs NIKISINS's avatar
Olegs NIKISINS committed
14
15
16
17
18
#==============================================================================
# Define the network:

class TwoLayerMLP(nn.Module):
    """
19
20
    A simple two-layer MLP for binary classification. The output activation
    function is sigmoid.
Olegs NIKISINS's avatar
Olegs NIKISINS committed
21
22
23
24
25
26
27

    Attributes
    ----------
    in_features : int
        Dimensionality of the input feature vectors.

    n_hidden_relu : int
28
        Number of ReLU units in the hidden layer of the MLP.
29
30
31
32

    apply_sigmoid : bool
        If set to ``True`` the sigmoid will be applied to the output of the
        hidden FC layer. If ``False`` the sigmoid is not applied.
Olegs NIKISINS's avatar
Olegs NIKISINS committed
33
34
    """

35
    def __init__(self, in_features, n_hidden_relu, apply_sigmoid = True):
Olegs NIKISINS's avatar
Olegs NIKISINS committed
36
37
38
        super(TwoLayerMLP, self).__init__()
        """
        Init method.
39
40
41
42
43
44
45
46
47
48
49
50
51

        Parameters
        ----------
        in_features : int
            Dimensionality of the input feature vectors.

        n_hidden_relu : int
            Number of ReLU units in the hidden layer of the MLP.

        apply_sigmoid : bool
            If set to ``True`` the sigmoid will be applied to the output of the
            hidden FC layer. If ``False`` the sigmoid is not applied.
            Default: ``True``.
Olegs NIKISINS's avatar
Olegs NIKISINS committed
52
53
54
55
56
57
        """

        self.in_features = in_features

        self.n_hidden_relu = n_hidden_relu

58
59
        self.apply_sigmoid = apply_sigmoid

Olegs NIKISINS's avatar
Olegs NIKISINS committed
60
61
62
63
        self.fc1 = nn.Linear(in_features = self.in_features, out_features = self.n_hidden_relu, bias=True)

        self.fc2 = nn.Linear(in_features = self.n_hidden_relu, out_features = 1, bias=True)

64

Olegs NIKISINS's avatar
Olegs NIKISINS committed
65
66
67
    def forward(self, x):
        """
        The forward method.
68
69
70
71
72
73
74
75
76
77
78

        Parameters
        ----------
        x : :py:class:`torch.Tensor`
            The batch to forward through the network. Size of the input batch
            is [batch_size, 1, self.in_features].

        Returns
        -------
        x : :py:class:`torch.Tensor`
            Output of the MLP, class probability.
Olegs NIKISINS's avatar
Olegs NIKISINS committed
79
80
81
82
83
84
85
86
87
88
89
90
        """

        # input is a batch of the size: [batch_size, 1, self.in_features],
        # convert it to the size [batch_size, self.in_features] as expected by FC layer:
        x = x.squeeze()

        # first fully connected activated by ReLu:
        x = self.fc1(x)
        x = F.relu(x)

        # second fully connected activated by sigmoid:
        x = self.fc2(x)
91
92
93
94
95

        if not self.apply_sigmoid:

            return x

Olegs NIKISINS's avatar
Olegs NIKISINS committed
96
97
98
99
100
        x = F.sigmoid(x)

        return x