Commit b1666c83 authored by Amir MOHAMMADI

pep8 formatting done by yapf -ri .

parent 69f0e7b0
Pipeline #14772 failed with stages in 0 seconds
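For context, `yapf -ri .` rewrites every file in place according to a PEP 8 oriented style; nothing functional changes in the hunks below. A minimal sketch of the same reformatting through yapf's Python API (illustrative only, assuming the yapf package is installed):

# --- Illustrative sketch (not part of this commit) ---
# Reformat a snippet with yapf's Python API; recent yapf versions return
# a (formatted_code, changed) tuple from FormatCode.
from yapf.yapflib.yapf_api import FormatCode

ugly = "def add( a,b ):\n  return a+ b\n"
pretty, changed = FormatCode(ugly, style_config='pep8')
print(pretty)   # def add(a, b):\n    return a + b
print(changed)  # True when yapf modified the input
# --- end of sketch ---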
@@ -2,7 +2,6 @@
# vim: set fileencoding=utf-8 :
# @author: Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
# @date: Tue 09 Aug 2016 15:33 CEST
"""
Neural network error rates analyzer
"""
@@ -25,7 +24,8 @@ class ExperimentAnalizer:
"""
def __init__(self, convergence_threshold=0.01, convergence_reference='eer'):
def __init__(self, convergence_threshold=0.01,
convergence_reference='eer'):
"""
Use the CNN as feature extractor for an n-class classification
@@ -76,8 +76,10 @@ class ExperimentAnalizer:
# Creating models
models = []
for i in range(len(base_data_shuffler.possible_labels)):
indexes_model = numpy.where(enroll_labels == data_shuffler.possible_labels[i])[0]
models.append(numpy.mean(enroll_features[indexes_model, :], axis=0))
indexes_model = numpy.where(
enroll_labels == data_shuffler.possible_labels[i])[0]
models.append(
numpy.mean(enroll_features[indexes_model, :], axis=0))
# Probing
positive_scores = numpy.zeros(shape=0)
@@ -87,16 +89,23 @@ class ExperimentAnalizer:
# Positive scoring
indexes = probe_labels == base_data_shuffler.possible_labels[i]
positive_data = probe_features[indexes, :]
p = [cosine(models[i], positive_data[j]) for j in range(positive_data.shape[0])]
p = [
cosine(models[i], positive_data[j])
for j in range(positive_data.shape[0])
]
positive_scores = numpy.hstack((positive_scores, p))
# negative scoring
indexes = probe_labels != base_data_shuffler.possible_labels[i]
negative_data = probe_features[indexes, :]
n = [cosine(models[i], negative_data[j]) for j in range(negative_data.shape[0])]
n = [
cosine(models[i], negative_data[j])
for j in range(negative_data.shape[0])
]
negative_scores = numpy.hstack((negative_scores, n))
return self.__compute_tensorflow_summary((-1)*negative_scores, (-1) * positive_scores)
return self.__compute_tensorflow_summary((-1) * negative_scores,
(-1) * positive_scores)
def __compute_tensorflow_summary(self, negative_scores, positive_scores):
"""
@@ -119,27 +128,38 @@ class ExperimentAnalizer:
# Compute EER
threshold = bob.measure.eer_threshold(negative_scores, positive_scores)
far, frr = bob.measure.farfrr(negative_scores, positive_scores, threshold)
far, frr = bob.measure.farfrr(negative_scores, positive_scores,
threshold)
eer = (far + frr) / 2.
summaries.append(summary_pb2.Summary.Value(tag="EER", simple_value=eer))
summaries.append(
summary_pb2.Summary.Value(tag="EER", simple_value=eer))
self.eer.append(eer)
# Computing FAR 10
threshold = bob.measure.far_threshold(negative_scores, positive_scores, far_value=0.1)
far, frr = bob.measure.farfrr(negative_scores, positive_scores, threshold)
summaries.append(summary_pb2.Summary.Value(tag="FAR 10", simple_value=frr))
threshold = bob.measure.far_threshold(
negative_scores, positive_scores, far_value=0.1)
far, frr = bob.measure.farfrr(negative_scores, positive_scores,
threshold)
summaries.append(
summary_pb2.Summary.Value(tag="FAR 10", simple_value=frr))
self.far10.append(frr)
# Computing FAR 100
threshold = bob.measure.far_threshold(negative_scores, positive_scores, far_value=0.01)
far, frr = bob.measure.farfrr(negative_scores, positive_scores, threshold)
summaries.append(summary_pb2.Summary.Value(tag="FAR 100", simple_value=frr))
threshold = bob.measure.far_threshold(
negative_scores, positive_scores, far_value=0.01)
far, frr = bob.measure.farfrr(negative_scores, positive_scores,
threshold)
summaries.append(
summary_pb2.Summary.Value(tag="FAR 100", simple_value=frr))
self.far100.append(frr)
# Computing FAR 1000
threshold = bob.measure.far_threshold(negative_scores, positive_scores, far_value=0.001)
far, frr = bob.measure.farfrr(negative_scores, positive_scores, threshold)
summaries.append(summary_pb2.Summary.Value(tag="FAR 1000", simple_value=frr))
threshold = bob.measure.far_threshold(
negative_scores, positive_scores, far_value=0.001)
far, frr = bob.measure.farfrr(negative_scores, positive_scores,
threshold)
summaries.append(
summary_pb2.Summary.Value(tag="FAR 1000", simple_value=frr))
self.far1000.append(frr)
return summary_pb2.Summary(value=summaries)
\ No newline at end of file
return summary_pb2.Summary(value=summaries)
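Each block in the hunk above follows the same pattern: pick a threshold with bob.measure, evaluate FAR/FRR at that threshold, and wrap the result in a TensorFlow Summary value. A condensed sketch of that pattern, reusing the same bob.measure calls on made-up score arrays:

# --- Illustrative sketch (not part of this commit) ---
import numpy
import bob.measure
from tensorflow.core.framework import summary_pb2

negatives = numpy.random.normal(-1.0, 1.0, 500)  # impostor scores
positives = numpy.random.normal(+1.0, 1.0, 500)  # genuine scores

summaries = []

# EER: threshold where FAR == FRR, report their average
threshold = bob.measure.eer_threshold(negatives, positives)
far, frr = bob.measure.farfrr(negatives, positives, threshold)
summaries.append(
    summary_pb2.Summary.Value(tag="EER", simple_value=(far + frr) / 2.))

# FRR at fixed FAR operating points (10%, 1%, 0.1%)
for tag, far_value in [("FAR 10", 0.1), ("FAR 100", 0.01), ("FAR 1000", 0.001)]:
    threshold = bob.measure.far_threshold(
        negatives, positives, far_value=far_value)
    far, frr = bob.measure.farfrr(negatives, positives, threshold)
    summaries.append(summary_pb2.Summary.Value(tag=tag, simple_value=frr))

summary = summary_pb2.Summary(value=summaries)  # usable with a summary writer
# --- end of sketch ---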
@@ -2,7 +2,6 @@
# vim: set fileencoding=utf-8 :
# @author: Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
# @date: Tue 09 Aug 2016 15:33 CEST
"""
Neural network error rates analyzer
"""
@@ -24,8 +23,15 @@ class SoftmaxAnalizer(object):
def __call__(self, data_shuffler, network, session):
data, labels = data_shuffler.get_batch()
predictions = numpy.argmax(session.run(network.inference_graph, feed_dict={network.inference_placeholder: data[:]}), 1)
accuracy = 100. * numpy.sum(predictions == labels) / predictions.shape[0]
predictions = numpy.argmax(
session.run(
network.inference_graph,
feed_dict={
network.inference_placeholder: data[:]
}), 1)
accuracy = 100. * numpy.sum(
predictions == labels) / predictions.shape[0]
summaries = [(summary_pb2.Summary.Value(tag="accuracy_validation", simple_value=float(accuracy)))]
summaries = [(summary_pb2.Summary.Value(
tag="accuracy_validation", simple_value=float(accuracy)))]
return summary_pb2.Summary(value=summaries)
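The SoftmaxAnalizer hunk above computes validation accuracy as the fraction of argmax predictions that match the labels, times 100. The same computation in plain numpy, with a small logits array standing in for the session output:

# --- Illustrative sketch (not part of this commit) ---
import numpy

logits = numpy.array([[2.0, 0.1, 0.3],   # stand-in for the inference_graph output
                      [0.2, 1.5, 0.1],
                      [0.1, 0.2, 3.0]])
labels = numpy.array([0, 1, 1])

predictions = numpy.argmax(logits, 1)
accuracy = 100. * numpy.sum(predictions == labels) / predictions.shape[0]
print(accuracy)  # 66.66...: two of the three predictions match
# --- end of sketch ---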
@@ -4,7 +4,7 @@ from .SoftmaxAnalizer import SoftmaxAnalizer
# gets sphinx autodoc done right - don't remove it
def __appropriate__(*args):
"""Says object was actually declared here, an not on the import module.
"""Says object was actually declared here, an not on the import module.
Parameters:
@@ -14,11 +14,12 @@ def __appropriate__(*args):
<https://github.com/sphinx-doc/sphinx/issues/3048>`
"""
for obj in args: obj.__module__ = __name__
for obj in args:
obj.__module__ = __name__
__appropriate__(
ExperimentAnalizer,
SoftmaxAnalizer,
)
)
__all__ = [_ for _ in dir() if not _.startswith('_')]
@@ -3,20 +3,22 @@ import numpy
import os
import bob.io.base
DEFAULT_FEATURE = {'data': tf.FixedLenFeature([], tf.string),
'label': tf.FixedLenFeature([], tf.int64),
'key': tf.FixedLenFeature([], tf.string)}
DEFAULT_FEATURE = {
'data': tf.FixedLenFeature([], tf.string),
'label': tf.FixedLenFeature([], tf.int64),
'key': tf.FixedLenFeature([], tf.string)
}
def from_hdf5file_to_tensor(filename):
import bob.io.image
data = bob.io.image.to_matplotlib(bob.io.base.load(filename))
#reshaping to ndim == 3
#reshaping to ndim == 3
if data.ndim == 2:
data = numpy.reshape(data, (data.shape[0], data.shape[1], 1))
data = data.astype("float32")
return data
@@ -27,14 +29,16 @@ def from_filename_to_tensor(filename, extension=None):
If the file extension is something that tensorflow understands (.jpg, .bmp, .tif,...),
it uses the `tf.image.decode_image` otherwise it uses `bob.io.base.load`
"""
if extension == "hdf5":
return tf.py_func(from_hdf5file_to_tensor, [filename], [tf.float32])
else:
return tf.cast(tf.image.decode_image(tf.read_file(filename)), tf.float32)
return tf.cast(
tf.image.decode_image(tf.read_file(filename)), tf.float32)
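As the docstring above states, `from_filename_to_tensor` decodes standard image formats in-graph with `tf.image.decode_image` and falls back to a Python loader wrapped in `tf.py_func` for HDF5 files. A minimal TensorFlow 1.x sketch of that dispatch, with a hypothetical loader standing in for `from_hdf5file_to_tensor`:

# --- Illustrative sketch (not part of this commit), TensorFlow 1.x style ---
import numpy
import tensorflow as tf

def load_hdf5(filename):
    # hypothetical stand-in for from_hdf5file_to_tensor
    return numpy.zeros((32, 32, 1), dtype="float32")

def to_tensor(filename, extension=None):
    if extension == "hdf5":
        # bridge an arbitrary Python loader into the graph
        return tf.py_func(load_hdf5, [filename], [tf.float32])
    # jpg/png/bmp/gif are decoded directly in the graph
    return tf.cast(tf.image.decode_image(tf.read_file(filename)), tf.float32)
# --- end of sketch ---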
def append_image_augmentation(image, gray_scale=False,
def append_image_augmentation(image,
gray_scale=False,
output_shape=None,
random_flip=False,
random_brightness=False,
@@ -73,8 +77,8 @@ def append_image_augmentation(image, gray_scale=False,
if output_shape is not None:
assert len(output_shape) == 2
image = tf.image.resize_image_with_crop_or_pad(
image, output_shape[0], output_shape[1])
image = tf.image.resize_image_with_crop_or_pad(image, output_shape[0],
output_shape[1])
if random_flip:
image = tf.image.random_flip_left_right(image)
@@ -137,18 +141,20 @@ def triplets_random_generator(input_data, input_labels):
input_labels = numpy.array(input_labels)
total_samples = input_data.shape[0]
indexes_per_labels = arrange_indexes_by_label(
input_labels, possible_labels)
indexes_per_labels = arrange_indexes_by_label(input_labels,
possible_labels)
# searching for random triplets
offset_class = 0
for i in range(total_samples):
anchor_sample = input_data[indexes_per_labels[possible_labels[offset_class]][numpy.random.randint(
len(indexes_per_labels[possible_labels[offset_class]]))], ...]
anchor_sample = input_data[indexes_per_labels[possible_labels[
offset_class]][numpy.random.randint(
len(indexes_per_labels[possible_labels[offset_class]]))], ...]
positive_sample = input_data[indexes_per_labels[possible_labels[offset_class]][numpy.random.randint(
len(indexes_per_labels[possible_labels[offset_class]]))], ...]
positive_sample = input_data[indexes_per_labels[possible_labels[
offset_class]][numpy.random.randint(
len(indexes_per_labels[possible_labels[offset_class]]))], ...]
# Changing the class
offset_class += 1
@@ -156,8 +162,9 @@ def triplets_random_generator(input_data, input_labels):
if offset_class == len(possible_labels):
offset_class = 0
negative_sample = input_data[indexes_per_labels[possible_labels[offset_class]][numpy.random.randint(
len(indexes_per_labels[possible_labels[offset_class]]))], ...]
negative_sample = input_data[indexes_per_labels[possible_labels[
offset_class]][numpy.random.randint(
len(indexes_per_labels[possible_labels[offset_class]]))], ...]
append(str(anchor_sample), str(positive_sample), str(negative_sample))
# yield anchor, positive, negative
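The generator above draws, at every step, an anchor and a positive sample from the current class and then a negative sample from the next class in a round-robin over the labels. A compact numpy sketch of that selection logic (standalone, not the module's code):

# --- Illustrative sketch (not part of this commit) ---
import numpy

rng = numpy.random.RandomState(0)
data = rng.rand(12, 8)                       # 12 samples, 8-d features
labels = numpy.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3])
classes = numpy.unique(labels)
indexes = {c: numpy.where(labels == c)[0] for c in classes}

offset = 0
for _ in range(len(data)):
    anchor = data[rng.choice(indexes[classes[offset]])]
    positive = data[rng.choice(indexes[classes[offset]])]
    offset = (offset + 1) % len(classes)     # next class provides the negative
    negative = data[rng.choice(indexes[classes[offset]])]
    # (anchor, positive, negative) would be yielded / appended here
# --- end of sketch ---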
@@ -199,8 +206,8 @@ def siamease_pairs_generator(input_data, input_labels):
# for l in possible_labels:
# indexes_per_labels[l] = numpy.where(input_labels == l)[0]
# numpy.random.shuffle(indexes_per_labels[l])
indexes_per_labels = arrange_indexes_by_label(
input_labels, possible_labels)
indexes_per_labels = arrange_indexes_by_label(input_labels,
possible_labels)
left_possible_indexes = numpy.random.choice(
possible_labels, total_samples, replace=True)
@@ -215,10 +222,10 @@ def siamease_pairs_generator(input_data, input_labels):
class_index = left_possible_indexes[i]
# Now selecting the samples for the pair
left = input_data[indexes_per_labels[class_index][numpy.random.randint(
len(indexes_per_labels[class_index]))]]
right = input_data[indexes_per_labels[class_index][numpy.random.randint(
len(indexes_per_labels[class_index]))]]
left = input_data[indexes_per_labels[class_index][
numpy.random.randint(len(indexes_per_labels[class_index]))]]
right = input_data[indexes_per_labels[class_index][
numpy.random.randint(len(indexes_per_labels[class_index]))]]
append(left, right, 0)
# yield left, right, 0
else:
@@ -237,10 +244,12 @@ def siamease_pairs_generator(input_data, input_labels):
if j < total_samples:
# Now selecting the samples for the pair
left = input_data[indexes_per_labels[class_index[0]][numpy.random.randint(
len(indexes_per_labels[class_index[0]]))]]
right = input_data[indexes_per_labels[class_index[1]][numpy.random.randint(
len(indexes_per_labels[class_index[1]]))]]
left = input_data[indexes_per_labels[class_index[0]][
numpy.random.randint(
len(indexes_per_labels[class_index[0]]))]]
right = input_data[indexes_per_labels[class_index[1]][
numpy.random.randint(
len(indexes_per_labels[class_index[1]]))]]
append(left, right, 1)
genuine = not genuine
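`siamease_pairs_generator` alternates between genuine pairs (two samples of the same class, label 0) and impostor pairs (samples from two different classes, label 1). A small numpy sketch of that alternation:

# --- Illustrative sketch (not part of this commit) ---
import numpy

rng = numpy.random.RandomState(0)
data = rng.rand(12, 8)
labels = numpy.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3])
indexes = {c: numpy.where(labels == c)[0] for c in numpy.unique(labels)}

pairs, genuine = [], True
for _ in range(len(data)):
    if genuine:
        c = rng.choice(list(indexes))        # same class on both sides -> label 0
        pairs.append((data[rng.choice(indexes[c])],
                      data[rng.choice(indexes[c])], 0))
    else:
        c1, c2 = rng.choice(list(indexes), 2, replace=False)  # two classes -> label 1
        pairs.append((data[rng.choice(indexes[c1])],
                      data[rng.choice(indexes[c2])], 1))
    genuine = not genuine
# --- end of sketch ---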
@@ -274,9 +283,8 @@ def blocks_tensorflow(images, block_size):
# extract image patches for each color space:
output = []
for i in range(3):
blocks = tf.extract_image_patches(
images[:, :, :, i:i + 1], block_size, block_size, [1, 1, 1, 1],
"VALID")
blocks = tf.extract_image_patches(images[:, :, :, i:i + 1], block_size,
block_size, [1, 1, 1, 1], "VALID")
if i == 0:
n_blocks = int(numpy.prod(blocks.shape[1:3]))
blocks = tf.reshape(blocks, output_size)
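`blocks_tensorflow` splits each color channel into non-overlapping patches with `tf.extract_image_patches`, where the block size is used as both kernel size and stride. A TensorFlow 1.x sketch of the per-channel extraction, with illustrative shapes:

# --- Illustrative sketch (not part of this commit), TensorFlow 1.x style ---
import tensorflow as tf

images = tf.zeros([2, 64, 64, 3])            # batch of 2 RGB images
block_size = [1, 32, 32, 1]                  # non-overlapping 32x32 blocks

channel_blocks = []
for i in range(3):
    # ksizes == strides == block_size -> non-overlapping patches, one channel at a time
    blocks = tf.extract_image_patches(images[:, :, :, i:i + 1], block_size,
                                      block_size, [1, 1, 1, 1], "VALID")
    channel_blocks.append(blocks)            # shape: [2, 2, 2, 32*32]
# --- end of sketch ---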
@@ -40,17 +40,23 @@ class BioGenerator(object):
The shapes of the returned samples.
"""
def __init__(self, database, biofiles, load_data=None,
biofile_to_label=None, multiple_samples=False, **kwargs):
def __init__(self,
database,
biofiles,
load_data=None,
biofile_to_label=None,
multiple_samples=False,
**kwargs):
super(BioGenerator, self).__init__(**kwargs)
if load_data is None:
def load_data(database, biofile):
data = read_original_data(
biofile,
database.original_directory,
database.original_extension)
data = read_original_data(biofile, database.original_directory,
database.original_extension)
return data
if biofile_to_label is None:
def biofile_to_label(biofile):
return -1
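`BioGenerator` exposes database files as a Python generator so that `tf.data.Dataset.from_generator` can consume them with the `(data, label, key)` output types and shapes computed in `__init__`. A minimal TensorFlow 1.x sketch of that wiring, using a toy generator instead of real biofiles:

# --- Illustrative sketch (not part of this commit), TensorFlow 1.x style ---
import numpy
import tensorflow as tf

def toy_generator():
    # stand-in for BioGenerator.__call__: yields (data, label, key)
    for i in range(4):
        yield numpy.zeros((16, 16, 1), "float32"), numpy.int64(i), "file_%d" % i

dataset = tf.data.Dataset.from_generator(
    toy_generator,
    output_types=(tf.float32, tf.int64, tf.string),
    output_shapes=(tf.TensorShape([16, 16, 1]), tf.TensorShape([]),
                   tf.TensorShape([])))
data, label, key = dataset.make_one_shot_iterator().get_next()
# --- end of sketch ---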
@@ -71,8 +77,8 @@ class BioGenerator(object):
data = six.next(data)
data = tf.convert_to_tensor(data)
self._output_types = (data.dtype, tf.int64, tf.string)
self._output_shapes = (
data.shape, tf.TensorShape([]), tf.TensorShape([]))
self._output_shapes = (data.shape, tf.TensorShape([]),
tf.TensorShape([]))
logger.info("Initializing a dataset with %d files and %s types "
"and %s shapes", len(self.biofiles), self.output_types,
@@ -107,8 +113,8 @@ class BioGenerator(object):
(data, label, key) : tuple
A tuple containing the data, label, and the key.
"""
for f, label, key in six.moves.zip(
self.biofiles, self.labels, self.keys):
for f, label, key in six.moves.zip(self.biofiles, self.labels,
self.keys):
data = self.load_data(self.database, f)
# labels
if self.multiple_samples:
@@ -7,16 +7,21 @@ from functools import partial
from . import append_image_augmentation, from_filename_to_tensor
def shuffle_data_and_labels_image_augmentation(filenames, labels, data_shape, data_type,
batch_size, epochs=None, buffer_size=10**3,
gray_scale=False,
output_shape=None,
random_flip=False,
random_brightness=False,
random_contrast=False,
random_saturation=False,
per_image_normalization=True,
extension=None):
def shuffle_data_and_labels_image_augmentation(filenames,
labels,
data_shape,
data_type,
batch_size,
epochs=None,
buffer_size=10**3,
gray_scale=False,
output_shape=None,
random_flip=False,
random_brightness=False,
random_contrast=False,
random_saturation=False,
per_image_normalization=True,
extension=None):
"""
Dump random batches from a list of image paths and labels:
@@ -71,28 +76,33 @@ def shuffle_data_and_labels_image_augmentation(filenames, labels, data_shape, da
extension:
If None, will load files using `tf.image.decode..` if set to `hdf5`, will load with `bob.io.base.load`
"""
"""
dataset = create_dataset_from_path_augmentation(
filenames,
labels,
data_shape,
data_type,
gray_scale=gray_scale,
output_shape=output_shape,
random_flip=random_flip,
random_brightness=random_brightness,
random_contrast=random_contrast,
random_saturation=random_saturation,
per_image_normalization=per_image_normalization,
extension=extension)
dataset = create_dataset_from_path_augmentation(filenames, labels, data_shape,
data_type,
gray_scale=gray_scale,
output_shape=output_shape,
random_flip=random_flip,
random_brightness=random_brightness,
random_contrast=random_contrast,
random_saturation=random_saturation,
per_image_normalization=per_image_normalization,
extension=extension)
dataset = dataset.shuffle(buffer_size).batch(batch_size).repeat(epochs)
data, labels = dataset.make_one_shot_iterator().get_next()
return data, labels
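A hedged usage example of the function above: given a list of image paths and integer labels, it returns the `(data, labels)` tensors of a shuffled, batched and augmented input pipeline. The file names and argument values below are hypothetical, and the function is assumed to be imported from this module:

# --- Illustrative sketch (not part of this commit), hypothetical inputs ---
import tensorflow as tf
# shuffle_data_and_labels_image_augmentation is the function defined above

filenames = ['/path/to/img_0.png', '/path/to/img_1.png']   # placeholder paths
labels = [0, 1]

data, labels = shuffle_data_and_labels_image_augmentation(
    filenames, labels,
    data_shape=(112, 112, 3),      # illustrative shape and dtype
    data_type=tf.float32,
    batch_size=2,
    epochs=1,
    random_flip=True,
    per_image_normalization=True)
# `data` and `labels` can now be fed to a TF 1.x training loop or estimator.
# --- end of sketch ---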
def create_dataset_from_path_augmentation(filenames, labels,
data_shape, data_type,
gray_scale=False,
def create_dataset_from_path_augmentation(filenames,
labels,
data_shape,
data_type,
gray_scale=False,
output_shape=None,
random_flip=False,
random_brightness=False,
@@ -120,26 +130,30 @@ def create_dataset_from_path_augmentation(filenames, labels,
feature:
"""
parser = partial(image_augmentation_parser,
data_shape=data_shape,
data_type=data_type,
gray_scale=gray_scale,
output_shape=output_shape,
random_flip=random_flip,
random_brightness=random_brightness,
random_contrast=random_contrast,
random_saturation=random_saturation,
per_image_normalization=per_image_normalization,
extension=extension)
parser = partial(
image_augmentation_parser,
data_shape=data_shape,
data_type=data_type,
gray_scale=gray_scale,
output_shape=output_shape,
random_flip=random_flip,
random_brightness=random_brightness,
random_contrast=random_contrast,
random_saturation=random_saturation,
per_image_normalization=per_image_normalization,
extension=extension)
dataset = tf.contrib.data.Dataset.from_tensor_slices((filenames, labels))
dataset = dataset.map(parser)
return dataset
def image_augmentation_parser(filename, label, data_shape, data_type,
gray_scale=False,
def image_augmentation_parser(filename,
label,
data_shape,
data_type,
gray_scale=False,
output_shape=None,
random_flip=False,
random_brightness=False,
@@ -147,30 +161,30 @@ def image_augmentation_parser(filename, label, data_shape, data_type,
random_saturation=False,
per_image_normalization=True,
extension=None):
"""
Parses a single tf.Example into image and label tensors.
"""
# Convert the image data from string back to the numbers
image = from_filename_to_tensor(filename, extension=extension)
# Reshape image data into the original shape
image = tf.reshape(image, data_shape)
#Applying image augmentation
image = append_image_augmentation(image, gray_scale=gray_scale,
output_shape=output_shape,
random_flip=random_flip,
random_brightness=random_brightness,
random_contrast=random_contrast,
random_saturation=random_saturation,
per_image_normalization=per_image_normalization)
image = append_image_augmentation(
image,
gray_scale=gray_scale,
output_shape=output_shape,
random_flip=random_flip,
random_brightness=random_brightness,
random_contrast=random_contrast,
random_saturation=random_saturation,
per_image_normalization=per_image_normalization)
label = tf.cast(label, tf.int64)
features = dict()
features['data'] = image
features['key'] = filename
return features, label
@@ -7,16 +7,21 @@ from functools import partial
from . import append_image_augmentation, siamease_pairs_generator, from_filename_to_tensor
def shuffle_data_and_labels_image_augmentation(filenames, labels, data_shape, data_type,
batch_size, epochs=None, buffer_size=10**3,
gray_scale=False,
output_shape=None,
random_flip=False,
random_brightness=False,
random_contrast=False,
random_saturation=False,
per_image_normalization=True,
extension=None):
def shuffle_data_and_labels_image_augmentation(filenames,
labels,
data_shape,
data_type,
batch_size,
epochs=None,
buffer_size=10**3,
gray_scale=False,
output_shape=None,
random_flip=False,
random_brightness=False,
random_contrast=False,
random_saturation=False,
per_image_normalization=True,
extension=None):
"""
Dump random batches for siamese networks from a list of image paths and labels:
@@ -76,27 +81,32 @@ def shuffle_data_and_labels_image_augmentation(filenames, labels, data_shape, da
extension:
If None, will load files using `tf.image.decode..` if set to `hdf5`, will load with `bob.io.base.load`
"""
"""
dataset = create_dataset_from_path_augmentation(filenames, labels, data_shape,
data_type,
gray_scale=gray_scale,
output_shape=output_shape,
random_flip=random_flip,
random_brightness=random_brightness,
random_contrast=random_contrast,
random_saturation=random_saturation,
per_image_normalization=per_image_normalization,
extension=extension)
dataset = create_dataset_from_path_augmentation(
filenames,
labels,
data_shape,
data_type,
gray_scale=gray_scale,
output_shape=output_shape,
random_flip=random_flip,
random_brightness=random_brightness,
random_contrast=random_contrast,
random_saturation=random_saturation,
per_image_normalization=per_image_normalization,
extension=extension)