Commit fdb710b5 authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira
Browse files

Removed spaces

Removed dels

Removed extra chars

Removed extra chars
parent 633a2554
Pipeline #19083 passed with stage
in 33 minutes and 32 seconds
......@@ -114,7 +114,6 @@ class GMM (Algorithm):
logger.info(" -> Training K-Means")
# Resetting the pseudo random number generator so we can have the same initialization for serial and parallel execution.
del self.rng
self.rng = bob.core.random.mt19937(self.init_seed)
bob.learn.em.train(self.kmeans_trainer, kmeans, array, self.kmeans_training_iterations, self.training_threshold, rng=self.rng)
......@@ -130,7 +129,6 @@ class GMM (Algorithm):
# Trains the GMM
logger.info(" -> Training GMM")
# Resetting the pseudo random number generator so we can have the same initialization for serial and parallel execution.
del self.rng
self.rng = bob.core.random.mt19937(self.init_seed)
bob.learn.em.train(self.ubm_trainer, self.ubm, array, self.gmm_training_iterations, self.training_threshold, rng=self.rng)
......@@ -205,7 +203,7 @@ class GMM (Algorithm):
logger.debug(" .... Enrolling with %d feature vectors", array.shape[0])
gmm = bob.learn.em.GMMMachine(self.ubm)
gmm.set_variance_thresholds(self.variance_threshold)
gmm.set_variance_thresholds(self.variance_threshold)
bob.learn.em.train(self.enroll_trainer, gmm, array, self.gmm_enroll_iterations, self.training_threshold, rng=self.rng)
return gmm
......
......@@ -65,7 +65,6 @@ class ISV (GMM):
self.isvbase = bob.learn.em.ISVBase(self.ubm, self.subspace_dimension_of_u)
# train ISV model
# Resetting the pseudo random number generator so we can have the same initialization for serial and parallel execution.
del self.rng
self.rng = bob.core.random.mt19937(self.init_seed)
bob.learn.em.train(self.isv_trainer, self.isvbase, data, self.isv_training_iterations, rng=self.rng)
......
......@@ -95,7 +95,6 @@ class IVector (GMM):
self.tv = bob.learn.em.IVectorMachine(self.ubm, self.subspace_dimension_of_t, self.variance_threshold)
# Resetting the pseudo random number generator so we can have the same initialization for serial and parallel execution.
del self.rng
self.rng = bob.core.random.mt19937(self.init_seed)
# train IVector model
......@@ -131,7 +130,6 @@ class IVector (GMM):
input_dim = training_features[0].shape[1]
# Resetting the pseudo random number generator so we can have the same initialization for serial and parallel execution.
del self.rng
self.rng = bob.core.random.mt19937(self.init_seed)
self.plda_base = bob.learn.em.PLDABase(input_dim, self.plda_dim_F, self.plda_dim_G, variance_flooring)
......
......@@ -238,7 +238,7 @@ def test_isv():
# compare model with probe
probe = isv1.read_feature(pkg_resources.resource_filename('bob.bio.gmm.test', 'data/isv_projected.hdf5'))
reference_score = 0.02136783885580
reference_score = 0.02136784
assert abs(isv1.score(model, probe) - reference_score) < 1e-5, "The scores differ: %3.8f, %3.8f" % (isv1.score(model, probe), reference_score)
# assert abs(isv1.score_for_multiple_probes(model, [probe]*4) - reference_score) < 1e-5, isv1.score_for_multiple_probes(model, [probe, probe])
# TODO: Why is the score not identical for multiple copies of the same probe?
......@@ -387,7 +387,7 @@ def test_ivector_cosine():
# compare model with probe
probe = ivec1.read_feature(pkg_resources.resource_filename('bob.bio.gmm.test', 'data/ivector_projected.hdf5'))
reference_score = -0.00187151477
reference_score = -0.00187151
assert abs(ivec1.score(model, probe) - reference_score) < 1e-5, "The scores differ: %3.8f, %3.8f" % (ivec1.score(model, probe), reference_score)
# TODO: implement that
assert abs(ivec1.score_for_multiple_probes(model, [probe, probe]) - reference_score) < 1e-5
......@@ -425,6 +425,7 @@ def test_ivector_plda():
assert os.path.exists(temp_file)
if regenerate_refs: shutil.copy(temp_file, reference_file)
# check projection matrix
ivec1.load_projector(reference_file)
ivec2.load_projector(temp_file)
......@@ -444,13 +445,13 @@ def test_ivector_plda():
random_features = utils.random_training_set((20,45), count=5, minimum=-5., maximum=5.)
enroll_features = [ivec1.project(feature) for feature in random_features]
model = ivec1.enroll(enroll_features)
model = ivec1.enroll(enroll_features)
_compare(model, pkg_resources.resource_filename('bob.bio.gmm.test', 'data/ivector2_model.hdf5'), ivec1.write_model, ivec1.read_model)
# compare model with probe
probe = ivec1.read_feature(pkg_resources.resource_filename('bob.bio.gmm.test', 'data/ivector2_projected.hdf5'))
logger.info("%f" %ivec1.score(model, probe))
reference_score = 1.2187982243
reference_score = 1.21879822
assert abs(ivec1.score(model, probe) - reference_score) < 1e-5, "The scores differ: %3.8f, %3.8f" % (ivec1.score(model, probe), reference_score)
assert abs(ivec1.score_for_multiple_probes(model, [probe, probe]) - reference_score) < 1e-5
......@@ -509,7 +510,7 @@ def test_ivector_lda_wccn_plda():
# enroll model from random features
random_features = utils.random_training_set((20,45), count=5, minimum=-5., maximum=5.)
enroll_features = [ivec1.project(feature) for feature in random_features]
model = ivec1.enroll(enroll_features)
model = ivec1.enroll(enroll_features)
_compare(model, pkg_resources.resource_filename('bob.bio.gmm.test', 'data/ivector3_model.hdf5'), ivec1.write_model, ivec1.read_model)
# compare model with probe
......
......@@ -28,9 +28,6 @@ def _verify(parameters, test_dir, sub_dir, ref_modifier="", score_modifier=('sco
# assert that the score file exists
score_files = [os.path.join(test_dir, sub_dir, 'Default', norm, '%s-dev%s'%score_modifier) for norm in ('nonorm', 'ztnorm')]
#import ipdb; ipdb.set_trace()
assert os.path.exists(score_files[0]), "Score file %s does not exist" % score_files[0]
assert os.path.exists(score_files[1]), "Score file %s does not exist" % score_files[1]
......
......@@ -33,7 +33,6 @@ def kmeans_initialize(algorithm, extractor, limit_data = None, force = False, al
kmeans_machine = bob.learn.em.KMeansMachine(algorithm.gaussians, data.shape[1])
# Creates the KMeansTrainer and call the initialization procedure
# Resetting the pseudo random number generator so we can have the same initialization for serial and parallel execution.
del algorithm.rng
algorithm.rng = bob.core.random.mt19937(algorithm.init_seed)
algorithm.kmeans_trainer.initialize(kmeans_machine, data, algorithm.rng)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment