"""
NeuralFilter2CD
***************
This module implements a trainable critically damped all-pole second-order filter with real poles using PyTorch
Copyright (c) 2018 Idiap Research Institute, http://www.idiap.ch/
Written by Francois Marelli
This file is part of neural_filters.
neural_filters is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License version 3 as
published by the Free Software Foundation.
neural_filters is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with neural_filters. If not, see <http://www.gnu.org/licenses/>.
"""
from . import NeuralFilterCell
import torch
import numpy as np
class NeuralFilter2CD (torch.nn.Module):
    """
    A trainable second-order critically damped all-pole filter :math:`\\frac{1}{(1 - P z^{-1})^{2}}`

    Implemented by applying one shared first-order cell (``NeuralFilterCell``)
    twice in cascade, which squares its transfer function and yields a double
    real pole P (critical damping).

    * **hidden_size** (int) - the size of data vector
    """

    def __init__(self, hidden_size):
        super(NeuralFilter2CD, self).__init__()
        self.hidden_size = hidden_size
        # One shared cell applied twice: both stages use the same pole P,
        # which is exactly the critically damped (double-pole) configuration.
        self.cell = NeuralFilterCell(self.hidden_size)

    def __repr__(self):
        s = '{name}({hidden_size})'
        return s.format(name=self.__class__.__name__, **self.__dict__)

    def forward(self, input, hx=None):
        """
        Run the input sequence through both filtering stages.

        :param input: input sequence tensor; assumed shape
            (seq_len, batch, hidden_size) based on the ``input.size(1)``
            state initialisation — TODO confirm against NeuralFilterCell
        :param hx: optional initial hidden state for the first stage;
            defaults to zeros (no gradient)
        :return: tuple ``(output, hidden)`` from the second stage
        """
        if hx is None:
            # NOTE(review): torch.autograd.Variable is deprecated (a no-op
            # since torch 0.4); kept as-is for compatibility with the rest
            # of this codebase's API usage.
            hx = torch.autograd.Variable(input.data.new(input.size(1),
                                                        self.hidden_size
                                                        ).zero_(), requires_grad=False)
        # First stage: its final hidden state is not used downstream,
        # so discard it instead of binding an unused name.
        interm, _ = self.cell(input, hx)
        # Second stage starts from the cell's own default (zero) state.
        output, hidden = self.cell(interm)
        return output, hidden

    @property
    def denominator(self):
        """
        Denominator coefficients of the full second-order transfer function.

        Squares each channel's first-order denominator via polynomial
        multiplication, giving 3 coefficients per channel.

        :return: numpy array of shape (hidden_size, 3)
        """
        first = self.cell.denominator
        denom = np.zeros((first.shape[0], 3))
        # Iterate over first.shape[0] for consistency with the allocation
        # above (it equals self.hidden_size for this module's cell).
        for i in range(first.shape[0]):
            denom[i] = np.polymul(first[i], first[i])
        return denom

    @property
    def gradients(self):
        """Gradient statistics collected by the underlying first-order cell."""
        return self.cell.gradients