Commit 21229e01 authored by Tiago de Freitas Pereira

Debugging

parent 9ff69c1f
@@ -52,10 +52,10 @@ class ExperimentAnalizer:
     def __call__(self, data_shuffler, network, session):
-        if self.data_shuffler is None:
-            self.data_shuffler = data_shuffler
-            self.network = network
-            self.session = session
+        #if self.data_shuffler is None:
+        #    self.data_shuffler = data_shuffler
+        #    self.network = network
+        #    self.session = session
         # Getting the base class. Recipe extracted from
         # http://stackoverflow.com/questions/5516263/creating-an-object-from-a-base-class-object-in-python/5516330#5516330
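Note: with the caching block commented out above, the analyzer no longer stores the shuffler, network and session on self; every call must receive the current objects explicitly. A minimal usage sketch under that assumption (the loop, step size and variable names below are illustrative, not part of this commit):

# Illustrative only: the analyzer is now stateless with respect to the
# shuffler, network and session, so they are supplied on every call.
analyzer = ExperimentAnalizer()
for step in range(0, iterations, snapshot):  # hypothetical evaluation loop
    analyzer(validation_data_shuffler, network, session)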
@@ -68,18 +68,18 @@ class ExperimentAnalizer:
         # Extracting features for enrollment
         enroll_data, enroll_labels = base_data_shuffler.get_batch()
-        enroll_features = self.network(enroll_data, session=self.session)
+        enroll_features = network(enroll_data, session=session)
         del enroll_data
         # Extracting features for probing
         probe_data, probe_labels = base_data_shuffler.get_batch()
-        probe_features = self.network(probe_data, session=self.session)
+        probe_features = network(probe_data, session=session)
         del probe_data
         # Creating models
         models = []
         for i in range(len(base_data_shuffler.possible_labels)):
-            indexes_model = numpy.where(enroll_labels == self.data_shuffler.possible_labels[i])[0]
+            indexes_model = numpy.where(enroll_labels == data_shuffler.possible_labels[i])[0]
             models.append(numpy.mean(enroll_features[indexes_model, :], axis=0))
         # Probing
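The hunk is truncated at the probing step. For orientation only, here is a hedged sketch of how probes are typically scored against the class-mean models built above; it reuses names from the hunk, but the cosine distance and the score lists are assumptions, not the file's (truncated) code:

import scipy.spatial.distance

# Sketch only: compare every probe feature against every enrolled model
# (the per-class mean of the enrollment features) and split the scores
# into genuine and impostor sets.
positive_scores, negative_scores = [], []
for i, model in enumerate(models):
    model_label = base_data_shuffler.possible_labels[i]
    for feature, probe_label in zip(probe_features, probe_labels):
        score = -scipy.spatial.distance.cosine(model, feature)
        if probe_label == model_label:
            positive_scores.append(score)
        else:
            negative_scores.append(score)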
@@ -83,20 +83,20 @@ class Chopra(SequenceNetwork):
         self.add(Conv2D(name="conv1", kernel_size=conv1_kernel_size,
                         filters=conv1_output,
-                        activation=tf.nn.relu,
+                        activation=None,
                         weights_initialization=Xavier(seed=seed, use_gpu=self.use_gpu),
                         bias_initialization=Constant(use_gpu=self.use_gpu),
                         batch_norm=batch_norm
                         ))
-        self.add(MaxPooling(name="pooling1", shape=pooling1_size, activation=tf.nn.relu, batch_norm=False))
+        self.add(MaxPooling(name="pooling1", shape=pooling1_size, activation=tf.nn.tanh, batch_norm=False))
         self.add(Conv2D(name="conv2", kernel_size=conv2_kernel_size,
                         filters=conv2_output,
-                        activation=tf.nn.relu,
+                        activation=None,
                         weights_initialization=Xavier(seed=seed, use_gpu=self.use_gpu),
                         bias_initialization=Constant(use_gpu=self.use_gpu),
                         batch_norm=batch_norm))
-        self.add(MaxPooling(name="pooling2", shape=pooling2_size, activation=tf.nn.relu, batch_norm=False))
+        self.add(MaxPooling(name="pooling2", shape=pooling2_size, activation=tf.nn.tanh, batch_norm=False))
         self.add(FullyConnected(name="fc1", output_dim=fc1_output,
                                 activation=None,
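In effect, this hunk moves the non-linearity: the convolutions become linear (activation=None) and the pooling layers now apply tf.nn.tanh instead of tf.nn.relu. A standalone sketch of the resulting branch in plain TensorFlow, assuming the library applies a layer's activation after its core operation; the shapes, strides and padding below are illustrative, not taken from the repository:

import tensorflow as tf

def chopra_branch_sketch(x, w1, b1, w2, b2):
    # conv1: linear convolution (activation=None in the diff)
    h = tf.nn.conv2d(x, w1, strides=[1, 1, 1, 1], padding="SAME") + b1
    # pooling1: max-pooling followed by tanh (activation=tf.nn.tanh in the diff)
    h = tf.nn.tanh(tf.nn.max_pool(h, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding="SAME"))
    # conv2 / pooling2: same pattern as the first pair
    h = tf.nn.conv2d(h, w2, strides=[1, 1, 1, 1], padding="SAME") + b2
    h = tf.nn.tanh(tf.nn.max_pool(h, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding="SAME"))
    return h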
@@ -67,7 +67,7 @@ def main():
     trainer = Trainer(architecture=architecture,
                       loss=loss,
                       iterations=ITERATIONS,
-                      prefetch=False, temp_dir="./temp/cnn/no-batch-norm-all-relu")
+                      prefetch=False, temp_dir="./temp/cnn/no-batch-norm")
                       #prefetch = False, temp_dir = "./temp/cnn/batch-norm-2convs-all-relu")
@@ -55,7 +55,6 @@ def test_dnn_trainer():
     trainer.train(train_data_shuffler)
     del trainer  # Just to clean the variables
-    import ipdb; ipdb.set_trace();
     with tf.Session() as session:
         # Testing
         mlp = MLP(10, hidden_layers=[15, 20])
@@ -252,6 +252,12 @@ class Trainer(object):
             session.run(self.enqueue_op, feed_dict=feed_dict)
     def create_graphs(self, train_data_shuffler, validation_data_shuffler):
+        """
+        Build the computation graphs used by the trainer.
+
+        :param train_data_shuffler: data shuffler that feeds the training graph
+        :param validation_data_shuffler: data shuffler used to build the validation graph
+        :return:
+        """
         # Creating train graph
         self.training_graph = self.compute_graph(train_data_shuffler, prefetch=self.prefetch, name="train")
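The hunk is cut off after the training graph. Presumably the method builds a validation graph in the same way when a validation shuffler is supplied; a sketch of that symmetric step, with the guard, the attribute name and the call arguments assumed rather than taken from the truncated code:

# Assumed continuation, not the repository's actual code: mirror the visible
# training-graph line for the validation shuffler, if one was given.
if validation_data_shuffler is not None:
    self.validation_graph = self.compute_graph(validation_data_shuffler, name="validation")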