Commit c4ad480f
authored Jun 27, 2018 by M. François
modulus output in forward

parent cf0fc7d9
Showing 5 changed files with 12 additions and 12 deletions (+12 -12)
neural_filters/neural_filter.py      +2 -2
neural_filters/neural_filter_2CC.py  +3 -3
neural_filters/neural_filter_2CD.py  +3 -3
neural_filters/neural_filter_2R.py   +3 -3
setup.py                             +1 -1
neural_filters/neural_filter.py
@@ -125,7 +125,7 @@ class NeuralFilter(torch.nn.Module):
         if is_packed:
             output = PackedSequence(output, batch_sizes)

-        return output, nexth
+        return output, nexth, a

     @property
     def gradients(self):
...
@@ -138,7 +138,7 @@ class NeuralFilter(torch.nn.Module):
     @property
     def denominator(self):
-        forgetgate = F.sigmoid(self.bias_forget).data.numpy()
+        forgetgate = F.sigmoid(self.bias_forget).detach().cpu().numpy()
         forgetgate = forgetgate.reshape((forgetgate.size, 1))
         one = np.ones(forgetgate.shape)
         denom = np.concatenate((one, -forgetgate), axis=1)
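Note on the conversion change above: .data.numpy() bypasses autograd bookkeeping and fails outright when the parameter is stored on a GPU, whereas .detach().cpu().numpy() first detaches the tensor from the graph and moves it to host memory before converting. A standalone sketch of the new pattern (not repository code; the parameter name and size below are illustrative):

# Standalone sketch, not repository code: converting a parameter-derived
# tensor to NumPy safely. Parameter name and size are illustrative.
import torch

bias_forget = torch.nn.Parameter(torch.zeros(4))

# Old pattern: .data skips autograd bookkeeping, and .numpy() raises an
# error if the tensor lives on a CUDA device.
#   forgetgate = torch.sigmoid(bias_forget).data.numpy()

# New pattern: detach from the autograd graph, move to host memory, convert.
forgetgate = torch.sigmoid(bias_forget).detach().cpu().numpy()
print(forgetgate.shape)  # (4,)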
neural_filters/neural_filter_2CC.py
@@ -151,7 +151,7 @@ class NeuralFilter2CC(torch.nn.Module):
         if is_packed:
             output = PackedSequence(output, batch_sizes)

-        return output, nexth
+        return output, nexth, modulus

     def print_param(self):
         modulus = F.sigmoid(self.bias_modulus)
...
@@ -166,8 +166,8 @@ class NeuralFilter2CC(torch.nn.Module):
         cosangle = F.tanh(self.bias_theta)
         p1 = -2 * cosangle * modulus
         p2 = modulus.pow(2)
-        p1 = p1.data.numpy()
-        p2 = p2.data.numpy()
+        p1 = p1.detach().cpu().numpy()
+        p2 = p2.detach().cpu().numpy()
         p1 = p1.reshape(p1.size, 1)
         p2 = p2.reshape(p2.size, 1)
         one = np.ones(p1.shape)
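The p1 and p2 values computed above appear to be the denominator coefficients of a second-order section whose complex-conjugate pole pair sits at radius modulus and angle theta (with cosangle = cos theta): the polynomial 1 + p1*z^-1 + p2*z^-2 factors into (1 - r*e^{i*theta} z^-1)(1 - r*e^{-i*theta} z^-1). A standalone NumPy check of that relationship (the r and theta values are arbitrary, not taken from the repository):

# Standalone check, not repository code: coefficients [1, -2*r*cos(theta), r**2]
# place a complex-conjugate pole pair at radius r and angle +/- theta.
import numpy as np

r, theta = 0.9, 0.3                                   # arbitrary example values
coeffs = [1.0, -2.0 * r * np.cos(theta), r ** 2]      # [1, p1, p2] as in print_param
poles = np.roots(coeffs)

print(poles)          # approximately 0.9*exp(+0.3j) and 0.9*exp(-0.3j)
print(np.abs(poles))  # [0.9, 0.9] -- the radius matching the modulus value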
neural_filters/neural_filter_2CD.py
@@ -53,10 +53,10 @@ class NeuralFilter2CD(torch.nn.Module):
         return s.format(name=self.__class__.__name__, **self.__dict__)

     def forward(self, input_var, hx=(None, None)):
-        inter, inter_hidden = self.cell(input_var, hx[0])
-        output, hidden = self.cell(inter, hx[1])
+        inter, inter_hidden, modulus = self.cell(input_var, hx[0])
+        output, hidden, modulus = self.cell(inter, hx[1])

-        return output, (inter_hidden, hidden)
+        return output, (inter_hidden, hidden), modulus

     @property
     def denominator(self):
neural_filters/neural_filter_2R.py
@@ -65,10 +65,10 @@ class NeuralFilter2R(torch.nn.Module):
         return s.format(name=self.__class__.__name__, **self.__dict__)

     def forward(self, input_var, hx=(None, None)):
-        interm, interm_hidden = self.first_cell(input_var, hx[0])
-        output, hidden = self.second_cell(interm, hx[1])
+        interm, interm_hidden, first_modulus = self.first_cell(input_var, hx[0])
+        output, hidden, second_modulus = self.second_cell(interm, hx[1])

-        return output, (interm_hidden, hidden)
+        return output, (interm_hidden, hidden), (first_modulus, second_modulus)

     @property
     def denominator(self):
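With this commit the wrapper classes (NeuralFilter2CD, NeuralFilter2R) pass the moduli of their cells through to the caller, so the return arity of forward changes from two to three. A hypothetical call site illustrating the new unpacking (the constructor argument and the input shape are assumptions, not taken from the repository):

# Hypothetical usage sketch: the constructor argument (hidden size) and the
# (seq_len, batch, features) input shape are assumptions, not repository code.
import torch
from neural_filters.neural_filter_2R import NeuralFilter2R

filt = NeuralFilter2R(8)
x = torch.randn(100, 4, 8)

# Before c4ad480f:  output, (h1, h2) = filt(x)
output, (h1, h2), (first_modulus, second_modulus) = filt(x)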
setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 setup(
     name='neural-filters',
-    version='1.0',
+    version='1.1',
     description='Linear filters for neural networks in pyTorch',
     author='Idiap research institute - Francois Marelli',
     author_email='francois.marelli@idiap.ch',