The algorithm used for feature scaling. See :py:class:`bob.learn.tensorflow.datashuffler.ScaleFactor`, :py:class:`bob.learn.tensorflow.datashuffler.Linear` and :py:class:`bob.learn.tensorflow.datashuffler.MeanOffset`.
prefetch:
   If ``True``, mini-batches are prefetched into a background queue.

prefetch_capacity:
   Capacity of the prefetch queue.
"""
def __init__(self, data, labels,
             input_shape=[None, 28, 28, 1],
             input_dtype="float64",
             batch_size=32,
             seed=10,
             data_augmentation=None,
             normalizer=Linear(),
             prefetch=False,
             prefetch_capacity=10):
# Setting the seed for the pseudo random number generator
self.seed = seed
numpy.random.seed(seed)
...
...
# TODO: Check if the batch size is higher than the input data
self.batch_size = batch_size

# Preparing the inputs
self.data = data
self.shape = tuple([batch_size] + input_shape)
self.input_shape = tuple(input_shape)
self.labels = labels
self.possible_labels = list(set(self.labels))
...
...
self.indexes = numpy.array(range(self.n_samples))
numpy.random.shuffle(self.indexes)

# Use data augmentation?
self.data_augmentation = data_augmentation

self.deployment_shape = [None] + list(input_shape)

# Preparing placeholders
self.data_ph = None
self.label_ph = None

# Prefetch variables
self.prefetch = prefetch
self.data_ph_from_queue = None
self.label_ph_from_queue = None
def create_placeholders(self):
    """
    Create placeholder instances.

    :return:
    """
    raise NotImplementedError("Method not implemented in this level. You should use one of the derived classes.")
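
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the library): one way a derived shuffler
# could implement create_placeholders() on top of the data_ph/label_ph
# attributes and the prefetch flag introduced above.  The subclass name
# (PrefetchSketch), the dtypes, the enqueue_op attribute and the exact queue
# wiring are assumptions for illustration only; only Base, Linear and the
# attribute names come from the excerpt (the import path for Base is assumed).
import tensorflow as tf

from bob.learn.tensorflow.datashuffler import Base, Linear


class PrefetchSketch(Base):

    def create_placeholders(self):
        # Feed placeholders shaped like one mini-batch
        # ([None, 28, 28, 1] with the default input_shape).
        self.data_ph = tf.placeholder(tf.float32, shape=self.input_shape, name="data")
        self.label_ph = tf.placeholder(tf.int64, shape=[None], name="label")

        if self.prefetch:
            # Push fed batches into a FIFO queue and let the training graph
            # consume the dequeued tensors instead of the raw placeholders.
            # The constructor's prefetch_capacity would normally size this
            # queue; the excerpt does not show where it is stored, so a
            # literal is used here.
            queue = tf.FIFOQueue(capacity=10,
                                 dtypes=[tf.float32, tf.int64],
                                 shapes=[self.input_shape[1:], []])
            self.enqueue_op = queue.enqueue_many([self.data_ph, self.label_ph])
            self.data_ph_from_queue, self.label_ph_from_queue = \
                queue.dequeue_many(self.batch_size)
        else:
            # Without prefetching, the "from_queue" tensors are plain aliases.
            self.data_ph_from_queue = self.data_ph
            self.label_ph_from_queue = self.label_ph


# Usage sketch, with random arrays standing in for a real dataset:
#
#   shuffler = PrefetchSketch(data=numpy.random.rand(100, 28, 28, 1),
#                             labels=numpy.random.randint(0, 10, size=100),
#                             normalizer=Linear(),
#                             prefetch=True)
#   shuffler.create_placeholders()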