Commit f8248a9e authored Oct 31, 2016 by Tiago de Freitas Pereira

Implementing batch normalization

parent ecd82cb4

Showing 3 changed files with 15 additions and 11 deletions (+15 -11):

bob/learn/tensorflow/layers/Layer.py             +8 -8
bob/learn/tensorflow/network/SequenceNetwork.py  +3 -1
bob/learn/tensorflow/test/test_cnn_scratch.py    +4 -2
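The Layer.py change below replaces tf.Variable with tf.get_variable for the batch-normalization parameters. In the graph-mode TensorFlow API this project targets, tf.get_variable looks a variable up by name inside the current tf.variable_scope, so the same parameters can be shared when the network graph is built more than once (for example a training and a validation copy), whereas tf.Variable always creates a fresh variable. A minimal illustration of that difference (scope and shape names are made up for the example):

import tensorflow as tf

with tf.variable_scope('conv1'):
    # First construction: the variable 'conv1/beta' is created.
    beta_train = tf.get_variable('beta', initializer=tf.constant(0.0, shape=[64]))

with tf.variable_scope('conv1', reuse=True):
    # Second construction: the existing 'conv1/beta' is looked up, not recreated.
    beta_valid = tf.get_variable('beta')

# Both graph copies share the same parameter object.
assert beta_train is beta_valid
# tf.Variable, by contrast, would silently create 'conv1/beta_1' the second time.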
bob/learn/tensorflow/layers/Layer.py

@@ -69,12 +69,12 @@ class Layer(object):
         #with tf.variable_scope(name):
         phase_train = tf.convert_to_tensor(phase_train, dtype=tf.bool)
         n_out = int(x.get_shape()[-1])
-        self.beta = tf.Variable(tf.constant(0.0, shape=[n_out], dtype=x.dtype), name=name + '_beta',
+        self.beta = tf.get_variable(name + '_beta', initializer=tf.constant(0.0, shape=[n_out], dtype=x.dtype),
                                     trainable=True, dtype=x.dtype)
-        self.gamma = tf.Variable(tf.constant(1.0, shape=[n_out], dtype=x.dtype), name=name + '_gamma',
+        self.gamma = tf.get_variable(name + '_gamma', initializer=tf.constant(1.0, shape=[n_out], dtype=x.dtype),
                                      trainable=True, dtype=x.dtype)
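This hunk only creates the offset (beta) and scale (gamma) parameters; the normalization itself is not shown. For context, the usual way such a layer consumes them in the TensorFlow API of that era is the moving-average pattern below. This is a generic sketch, not the project's actual code: it assumes batch statistics are computed with tf.nn.moments and that the phase_train flag switches between batch statistics (training) and their exponential moving averages (testing).

import tensorflow as tf

def batch_norm(x, beta, gamma, phase_train, decay=0.9, epsilon=1e-3):
    # Mean/variance of the current mini-batch over every axis except the channel axis.
    axes = list(range(len(x.get_shape()) - 1))
    batch_mean, batch_var = tf.nn.moments(x, axes)

    # Moving averages of the batch statistics, used when phase_train is False.
    ema = tf.train.ExponentialMovingAverage(decay=decay)

    def mean_var_with_update():
        # Update the moving averages, then return the batch statistics.
        ema_apply_op = ema.apply([batch_mean, batch_var])
        with tf.control_dependencies([ema_apply_op]):
            return tf.identity(batch_mean), tf.identity(batch_var)

    mean, var = tf.cond(phase_train,
                        mean_var_with_update,
                        lambda: (ema.average(batch_mean), ema.average(batch_var)))

    # Normalize, then scale by gamma and shift by beta.
    return tf.nn.batch_normalization(x, mean, var, beta, gamma, epsilon)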
bob/learn/tensorflow/network/SequenceNetwork.py

@@ -244,7 +244,9 @@ class SequenceNetwork(six.with_metaclass(abc.ABCMeta, object)):
             self.sequence_net[k].b.assign(hdf5.read(self.sequence_net[k].b.name)).eval(session=session)
             session.run(self.sequence_net[k].b)
+            if self.sequence_net[k].batch_norm:
+                self.sequence_net[k].beta.assign(hdf5.read(self.sequence_net[k].beta.name)).eval(session=session)
+                self.sequence_net[k].gamma.assign(hdf5.read(self.sequence_net[k].gamma.name)).eval(session=session)
             hdf5.cd("..")
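The assign(...).eval(session=session) calls above build an assignment op from the value read out of the HDF5 file and execute it immediately, which is how the new beta/gamma parameters get restored. Stripped of the project's classes, restoring a single parameter boils down to the following sketch (a NumPy array stands in for the value returned by hdf5.read, and the variable name is illustrative):

import numpy as np
import tensorflow as tf

session = tf.Session()

# A parameter as it exists in the graph, e.g. a layer's beta created with tf.get_variable.
beta = tf.get_variable('conv1_beta', initializer=tf.constant(0.0, shape=[64]))
session.run(tf.initialize_all_variables())

# Value previously written to disk; here a plain array replaces hdf5.read(beta.name).
stored_beta = np.ones(64, dtype=np.float32)

# Build the assign op and run it; beta.assign(stored_beta).eval(session=session) is equivalent.
session.run(beta.assign(stored_beta))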
bob/learn/tensorflow/test/test_cnn_scratch.py

@@ -54,7 +54,7 @@ def validate_network(validation_data, validation_labels, directory):
     path = os.path.join(directory, "model.hdf5")
     #path = os.path.join(directory, "model.ckp")
     #scratch = SequenceNetwork(default_feature_layer="fc1")
-    scratch = SequenceNetwork()
+    scratch = SequenceNetwork(default_feature_layer="fc1")
     #scratch.load_original(session, os.path.join(directory, "model.ckp"))
     scratch.load(bob.io.base.HDF5File(path), shape=validation_shape, session=session)

@@ -94,8 +94,10 @@ def test_cnn_trainer_scratch():
                       prefetch=False,
                       temp_dir=directory)
     trainer.train(train_data_shuffler)
+    del trainer
+    del scratch
-    import ipdb; ipdb.set_trace();
+    # import ipdb; ipdb.set_trace();
     accuracy = validate_network(validation_data, validation_labels, directory)
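validate_network itself is unchanged below this point; judging by the assignment `accuracy = validate_network(...)`, its result is presumably the fraction of validation samples classified correctly. A generic, self-contained version of that final computation (illustrative only, not the project's helper):

import numpy as np

def accuracy(predicted_labels, true_labels):
    # Fraction of samples whose predicted class matches the ground truth.
    predicted_labels = np.asarray(predicted_labels)
    true_labels = np.asarray(true_labels)
    return float(np.mean(predicted_labels == true_labels))

print(accuracy([0, 1, 2, 2], [0, 1, 2, 1]))  # 3 of 4 correct -> 0.75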