Skip to content
Snippets Groups Projects
Commit 82217ab9 authored by M. François's avatar M. François
Browse files

deprecated torch functions

v 1.2
parent b00df262
No related branches found
No related tags found
No related merge requests found
......@@ -28,7 +28,6 @@ along with neural_filters. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import torch
from torch.nn import Parameter
from torch.nn import functional as F
from torch.nn._functions.rnn import Recurrent, VariableRecurrent
from torch.nn.utils.rnn import PackedSequence
......@@ -97,7 +96,7 @@ class NeuralFilter(torch.nn.Module):
def step(self, input_var, hidden, a=None):
    """One recurrence step of the first-order neural filter.

    Computes ``next_state = a * hidden + input_var`` where ``a`` is the
    forget gate ``sigmoid(self.bias_forget)``.

    Parameters
    ----------
    input_var : torch.Tensor
        Input for the current time step.
    hidden : torch.Tensor
        Hidden state from the previous time step.
    a : torch.Tensor, optional
        Precomputed forget gate. Callers that iterate over many steps
        pass it in once for efficiency; when None it is recomputed here.

    Returns
    -------
    torch.Tensor
        The next hidden state.
    """
    if a is None:
        # torch.sigmoid replaces the deprecated F.sigmoid; the stale
        # pre-deprecation line (which referenced the removed `F` import)
        # has been dropped.
        a = torch.sigmoid(self.bias_forget)
    next_state = (a * hidden) + input_var
    return next_state
......@@ -117,7 +116,7 @@ class NeuralFilter(torch.nn.Module):
self.check_forward_args(input_var, hidden, batch_sizes)
# compute this once for all steps for efficiency
a = F.sigmoid(self.bias_forget)
a = torch.sigmoid(self.bias_forget)
func = Recurrent(self.step) if batch_sizes is None else VariableRecurrent(self.step)
nexth, output = func(input_var, hidden, (a,), batch_sizes)
......@@ -138,7 +137,7 @@ class NeuralFilter(torch.nn.Module):
@property
def denominator(self):
forgetgate = F.sigmoid(self.bias_forget).detach().cpu().numpy()
forgetgate = torch.sigmoid(self.bias_forget).detach().cpu().numpy()
forgetgate = forgetgate.reshape((forgetgate.size, 1))
one = np.ones(forgetgate.shape)
denom = np.concatenate((one, -forgetgate), axis=1)
......
......@@ -114,7 +114,7 @@ class NeuralFilter2CC(torch.nn.Module):
def step(self, input_var, hidden, a=None, b=None):
if a is None or b is None:
modulus = F.sigmoid(self.bias_modulus)
modulus = torch.sigmoid(self.bias_modulus)
cosangle = F.tanh(self.bias_theta)
a = 2 * cosangle * modulus
b = - modulus.pow(2)
......@@ -140,7 +140,7 @@ class NeuralFilter2CC(torch.nn.Module):
self.check_forward_args(input_var, hidden, batch_sizes)
# do not recompute this at each step to gain efficiency
modulus = F.sigmoid(self.bias_modulus)
modulus = torch.sigmoid(self.bias_modulus)
cosangle = F.tanh(self.bias_theta)
a = 2 * cosangle * modulus
b = - modulus.pow(2)
......@@ -154,7 +154,7 @@ class NeuralFilter2CC(torch.nn.Module):
return output, nexth, modulus
def print_param(self):
modulus = F.sigmoid(self.bias_modulus)
modulus = torch.sigmoid(self.bias_modulus)
cosangle = F.tanh(self.bias_theta)
p1 = -2 * cosangle * modulus
p2 = modulus.pow(2)
......@@ -162,7 +162,7 @@ class NeuralFilter2CC(torch.nn.Module):
@property
def denominator(self):
modulus = F.sigmoid(self.bias_modulus)
modulus = torch.sigmoid(self.bias_modulus)
cosangle = F.tanh(self.bias_theta)
p1 = -2 * cosangle * modulus
p2 = modulus.pow(2)
......
......@@ -25,10 +25,10 @@ along with neural_filters. If not, see <http://www.gnu.org/licenses/>.
"""
from . import NeuralFilter
import torch
import numpy as np
import torch
from . import NeuralFilter
class NeuralFilter2R(torch.nn.Module):
......
......@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup(
name='neural-filters',
version='1.1',
version='1.2',
description='Linear filters for neural networks in pyTorch',
author='Idiap research institute - Francois Marelli',
author_email='francois.marelli@idiap.ch',
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment