neural_filter_2R.py
"""
NeuralFilter2R
**************

This module implements a trainable all-pole second-order filter with real poles using PyTorch.


Copyright (c) 2018 Idiap Research Institute, http://www.idiap.ch/

Written by Francois Marelli <Francois.Marelli@idiap.ch>

This file is part of neural_filters.

neural_filters is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License version 3 as
published by the Free Software Foundation.

neural_filters is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with neural_filters. If not, see <http://www.gnu.org/licenses/>.

"""

from . import NeuralFilter

import torch
import numpy as np

from . import RANDOM_STD


class NeuralFilter2R(torch.nn.Module):
    """
        A trainable second-order all-(real)pole filter :math:`\\frac{1}{1 - P_{1} z^{-1}} \\frac{1}{1 - P_{2} z^{-1}}`

        * **hidden_size** (int) - the size of the data vector
        """

    def __init__(self, hidden_size):
        super(NeuralFilter2R, self).__init__()

        self.hidden_size = hidden_size

        self.first_cell = NeuralFilter(self.hidden_size)
        self.second_cell = NeuralFilter(self.hidden_size)

        self.reset_parameters()

    def reset_parameters(self, init=None):
        if init is None:
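            # Default: initialise the two cells' forget biases around -0.5 and +0.5
            # (+/- RANDOM_STD), presumably so the two cells start from distinct poles.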
            self.first_cell.bias_forget.data.uniform_(-0.5 - RANDOM_STD, -0.5 + RANDOM_STD)
            self.second_cell.bias_forget.data.uniform_(0.5 - RANDOM_STD, 0.5 + RANDOM_STD)
        elif isinstance(init, tuple):
            self.first_cell.reset_parameters(init[0])
            self.second_cell.reset_parameters(init[1])
        else:
            self.first_cell.reset_parameters(init)
            self.second_cell.reset_parameters(init)

    def __repr__(self):
        s = '{name}({hidden_size})'
        return s.format(name=self.__class__.__name__, **self.__dict__)

    def forward(self, input_var, hx=None):
        if hx is None:
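            # No initial state given: start from a zero state with one row per
            # batch element (dimension 1 of the input).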
            hx = torch.autograd.Variable(input_var.data.new(input_var.size(1),
                                                            self.hidden_size
                                                            ).zero_(), requires_grad=False)

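        # Two first-order all-pole cells in series realise the second-order
        # transfer function from the class docstring; the first cell's final
        # state (interm_hidden) is not propagated further.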
        interm, interm_hidden = self.first_cell(input_var, hx)
        output, hidden = self.second_cell(interm)

        return output, hidden

    @property
    def denominator(self):
        first = self.first_cell.denominator
        second = self.second_cell.denominator
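        # (1 - P1 z^-1)(1 - P2 z^-1) = 1 - (P1 + P2) z^-1 + P1 P2 z^-2,
        # hence 3 denominator coefficients per channel.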
        denom = np.zeros((first.shape[0], 3))
        for i in range(self.hidden_size):
            denom[i] = np.polymul(first[i], second[i])
        return denom

    @property
    def gradients(self):
        first = self.first_cell.gradients
        second = self.second_cell.gradients
        return np.concatenate((first, second), axis=1)
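

# --- Usage sketch (illustrative, not part of the original module) -----------
# A minimal example of driving the cascade, assuming the (seq_len, batch,
# hidden_size) input layout implied by the zero-state initialisation in
# forward(). Shapes and values below are assumptions, not package guarantees.
# Because of the relative imports, run it as a module, e.g.
# ``python -m neural_filters.neural_filter_2R``.
if __name__ == '__main__':
    seq_len, batch, hidden = 100, 4, 8
    cell = NeuralFilter2R(hidden)
    x = torch.randn(seq_len, batch, hidden)
    y, h = cell(x)
    print(y.shape)                 # filtered sequence (expected: same layout as x)
    print(cell.denominator.shape)  # expected: (hidden, 3), one quadratic per channel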