Created some unit tests

parent b132e782
# see https://docs.python.org/3/library/pkgutil.html
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# @author: Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
# @date: Thu 13 Oct 2016 13:35 CEST
import numpy
from bob.learn.tensorflow.datashuffler import Memory, SiameseMemory, TripletMemory, Disk, SiameseDisk, TripletDisk
import pkg_resources
from ..util import load_mnist
import os
"""
Some unit tests for the datashuffler
"""

def get_dummy_files():
    base_path = pkg_resources.resource_filename(__name__, 'data/dummy_database')
    files = []
    clients = []
    for f in os.listdir(base_path):
        if f.endswith(".hdf5"):
            files.append(os.path.join(base_path, f))
            clients.append(int(f[1:4]))
    return files, clients
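
# NOTE: ``int(f[1:4])`` assumes the dummy file names encode a zero-padded
# client id in characters 1-3 (e.g. a hypothetical "m001_01.hdf5" would map
# to client 1):
#
#   >>> int("m001_01.hdf5"[1:4])
#   1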

def test_memory_shuffler():
    train_data, train_labels, validation_data, validation_labels = load_mnist()
    train_data = numpy.reshape(train_data, (train_data.shape[0], 28, 28, 1))

    batch_shape = [16, 28, 28, 1]
    data_shuffler = Memory(train_data, train_labels,
                           input_shape=batch_shape[1:],
                           scale=True,
                           batch_size=batch_shape[0])

    batch = data_shuffler.get_batch()
    assert len(batch) == 2
    assert batch[0].shape == tuple(batch_shape)
    assert batch[1].shape[0] == batch_shape[0]

    placeholders = data_shuffler.get_placeholders(name="train")
    assert placeholders[0].get_shape().as_list() == batch_shape
    assert placeholders[1].get_shape().as_list()[0] == batch_shape[0]

def test_siamesememory_shuffler():
    train_data, train_labels, validation_data, validation_labels = load_mnist()
    train_data = numpy.reshape(train_data, (train_data.shape[0], 28, 28, 1))

    batch_shape = [16, 28, 28, 1]
    data_shuffler = SiameseMemory(train_data, train_labels,
                                  input_shape=batch_shape[1:],
                                  scale=True,
                                  batch_size=batch_shape[0])

    batch = data_shuffler.get_batch()
    assert len(batch) == 3
    assert batch[0].shape == tuple(batch_shape)
    assert batch[1].shape == tuple(batch_shape)
    assert batch[2].shape[0] == batch_shape[0]

    placeholders = data_shuffler.get_placeholders(name="train")
    assert placeholders[0].get_shape().as_list() == batch_shape
    assert placeholders[1].get_shape().as_list() == batch_shape
    assert placeholders[2].get_shape().as_list()[0] == batch_shape[0]

def test_tripletmemory_shuffler():
    train_data, train_labels, validation_data, validation_labels = load_mnist()
    train_data = numpy.reshape(train_data, (train_data.shape[0], 28, 28, 1))

    batch_shape = [16, 28, 28, 1]
    data_shuffler = TripletMemory(train_data, train_labels,
                                  input_shape=batch_shape[1:],
                                  scale=True,
                                  batch_size=batch_shape[0])

    batch = data_shuffler.get_batch()
    assert len(batch) == 3
    assert batch[0].shape == tuple(batch_shape)
    assert batch[1].shape == tuple(batch_shape)
    assert batch[2].shape == tuple(batch_shape)

    placeholders = data_shuffler.get_placeholders(name="train")
    assert placeholders[0].get_shape().as_list() == batch_shape
    assert placeholders[1].get_shape().as_list() == batch_shape
    assert placeholders[2].get_shape().as_list() == batch_shape

def test_disk_shuffler():
    train_data, train_labels = get_dummy_files()

    batch_shape = [2, 125, 125, 3]
    data_shuffler = Disk(train_data, train_labels,
                         input_shape=batch_shape[1:],
                         scale=True,
                         batch_size=batch_shape[0])

    batch = data_shuffler.get_batch()
    assert len(batch) == 2
    assert batch[0].shape == tuple(batch_shape)
    assert batch[1].shape[0] == batch_shape[0]

    placeholders = data_shuffler.get_placeholders(name="train")
    assert placeholders[0].get_shape().as_list() == batch_shape
    assert placeholders[1].get_shape().as_list()[0] == batch_shape[0]

def test_siamesedisk_shuffler():
    train_data, train_labels = get_dummy_files()

    batch_shape = [2, 125, 125, 3]
    data_shuffler = SiameseDisk(train_data, train_labels,
                                input_shape=batch_shape[1:],
                                scale=True,
                                batch_size=batch_shape[0])

    batch = data_shuffler.get_batch()
    assert len(batch) == 3
    assert batch[0].shape == tuple(batch_shape)
    assert batch[1].shape == tuple(batch_shape)
    assert batch[2].shape[0] == batch_shape[0]

    placeholders = data_shuffler.get_placeholders(name="train")
    assert placeholders[0].get_shape().as_list() == batch_shape
    assert placeholders[1].get_shape().as_list() == batch_shape
    assert placeholders[2].get_shape().as_list()[0] == batch_shape[0]

def test_tripletdisk_shuffler():
    train_data, train_labels = get_dummy_files()

    batch_shape = [1, 125, 125, 3]
    data_shuffler = TripletDisk(train_data, train_labels,
                                input_shape=batch_shape[1:],
                                scale=True,
                                batch_size=batch_shape[0])

    batch = data_shuffler.get_batch()
    assert len(batch) == 3
    assert batch[0].shape == tuple(batch_shape)
    assert batch[1].shape == tuple(batch_shape)
    assert batch[2].shape == tuple(batch_shape)

    placeholders = data_shuffler.get_placeholders(name="train")
    assert placeholders[0].get_shape().as_list() == batch_shape
    assert placeholders[1].get_shape().as_list() == batch_shape
    assert placeholders[2].get_shape().as_list() == batch_shape
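
# ---------------------------------------------------------------------------
# The six tests above all repeat the same batch/placeholder shape checks. A
# minimal sketch of a shared helper (not part of the original commit; the
# names ``n_data_outputs`` and ``has_labels`` are my own) that would cover
# every case:
def assert_shuffler_shapes(data_shuffler, batch_shape, n_data_outputs, has_labels):
    batch = data_shuffler.get_batch()
    placeholders = data_shuffler.get_placeholders(name="train")
    # one entry per image output, plus an optional trailing label vector
    assert len(batch) == n_data_outputs + int(has_labels)
    for i in range(n_data_outputs):
        assert batch[i].shape == tuple(batch_shape)
        assert placeholders[i].get_shape().as_list() == batch_shape
    if has_labels:
        assert batch[n_data_outputs].shape[0] == batch_shape[0]
        assert placeholders[n_data_outputs].get_shape().as_list()[0] == batch_shape[0]
# e.g. assert_shuffler_shapes(data_shuffler, [16, 28, 28, 1], 2, True) would
# replace the body of test_siamesememory_shuffler above.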
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# @author: Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
# @date: Thu 13 Oct 2016 13:35 CEST
import numpy
from bob.learn.tensorflow.datashuffler import Memory, SiameseMemory, TripletMemory, Disk, SiameseDisk, TripletDisk
from bob.learn.tensorflow.network import Chopra
from bob.learn.tensorflow.loss import BaseLoss
from bob.learn.tensorflow.trainers import Trainer, SiameseTrainer, TripletTrainer
#from ..analyzers import ExperimentAnalizer, SoftmaxAnalizer
from bob.learn.tensorflow.util import load_mnist
import tensorflow as tf
import bob.io.base
"""
Some unit tests for the datashuffler
"""
batch_size = 16
validation_batch_size = 400
iterations = 50
seed = 10

def test_cnn_trainer():
    train_data, train_labels, validation_data, validation_labels = load_mnist()
    train_data = numpy.reshape(train_data, (train_data.shape[0], 28, 28, 1))
    validation_data = numpy.reshape(validation_data, (validation_data.shape[0], 28, 28, 1))

    # Creating datashufflers
    train_data_shuffler = Memory(train_data, train_labels,
                                 input_shape=[28, 28, 1],
                                 batch_size=batch_size)

    with tf.Session() as session:
        # Preparing the architecture
        architecture = Chopra(seed=seed, fc1_output=10)

        # Loss for the softmax
        loss = BaseLoss(tf.nn.sparse_softmax_cross_entropy_with_logits, tf.reduce_mean)

        # One graph trainer
        trainer = Trainer(architecture=architecture,
                          loss=loss,
                          iterations=iterations,
                          analizer=None,
                          prefetch=False,
                          temp_dir="./temp/cnn")
        trainer.train(train_data_shuffler)

        # Testing
        validation_shape = [400, 28, 28, 1]
        chopra = Chopra(seed=seed, fc1_output=10, shape=validation_shape)
        chopra.load(bob.io.base.HDF5File("./temp/cnn/model.hdf5"))

        validation_data_shuffler = Memory(validation_data, validation_labels,
                                          input_shape=[28, 28, 1],
                                          batch_size=validation_batch_size)

        [data, labels] = validation_data_shuffler.get_batch()
        predictions = chopra(data, session=session)
        accuracy = 100. * numpy.sum(numpy.argmax(predictions, 1) == labels) / predictions.shape[0]
        assert accuracy > 80.
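
# A quick, self-contained check of the accuracy formula used above
# (pure numpy, no model involved):
#
#   >>> preds = numpy.array([[0.9, 0.1], [0.2, 0.8], [0.7, 0.3]])
#   >>> labels = numpy.array([0, 1, 1])
#   >>> 100. * numpy.sum(numpy.argmax(preds, 1) == labels) / preds.shape[0]
#   66.66666666666667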