diff --git a/bob/learn/em/__init__.py b/bob/learn/em/__init__.py
index 32e88b6260e4c319f79f9eba847dc5686a7ffdd7..511980f2a5df956d95a64ad25b5b2907a3a9b8ef 100644
--- a/bob/learn/em/__init__.py
+++ b/bob/learn/em/__init__.py
@@ -1,11 +1,11 @@
 import bob.extension
 
+from .factor_analysis import ISVMachine, JFAMachine
 from .gmm import GMMMachine, GMMStats
 from .k_means import KMeansMachine
 from .linear_scoring import linear_scoring  # noqa: F401
 from .wccn import WCCN
 from .whitening import Whitening
-from .factor_analysis import ISVMachine, JFAMachine
 
 
 def get_config():
diff --git a/bob/learn/em/factor_analysis.py b/bob/learn/em/factor_analysis.py
index 1d0b81edff2f27fdfa0e74ec2d7c05913b6c2896..dc62f4fcac009db2d5aedfd424b71934abe66221 100644
--- a/bob/learn/em/factor_analysis.py
+++ b/bob/learn/em/factor_analysis.py
@@ -6,7 +6,9 @@
 import logging
 
 import numpy as np
+from h5py import File as HDF5File
 from sklearn.base import BaseEstimator
+
 from . import linear_scoring
 
 logger = logging.getLogger(__name__)
diff --git a/bob/learn/em/test/test_jfa.py b/bob/learn/em/test/test_jfa.py
index 23086de322ac17fc7deedc63343bdc934138a3a9..c96f59e1916f3b10b4b41b3a8d61be2d2fdaca73 100644
--- a/bob/learn/em/test/test_jfa.py
+++ b/bob/learn/em/test/test_jfa.py
@@ -7,8 +7,8 @@
 # Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland
 
 import numpy as np
+
 from bob.learn.em import GMMMachine, GMMStats, ISVMachine, JFAMachine
-import copy
 
 
 def test_JFAMachine():
@@ -97,4 +97,3 @@ def test_ISVMachine():
 
     score_ref = -3.280498193082100
     assert abs(score_ref - score) < eps
-    pass
diff --git a/bob/learn/em/test/test_jfa_trainer.py b/bob/learn/em/test/test_jfa_trainer.py
index f9e51e4473da960988edc2c1cfc1237b7e4d2b5c..11e97dafa2fe8516ae03186781b3bdcbe30e8b8a 100644
--- a/bob/learn/em/test/test_jfa_trainer.py
+++ b/bob/learn/em/test/test_jfa_trainer.py
@@ -6,9 +6,11 @@
 #
 # Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland
 
+import copy
+
 import numpy as np
+
 from bob.learn.em import GMMMachine, GMMStats, ISVMachine, JFAMachine
-import copy
 
 # Define Training set and initial values for tests
 F1 = np.array(
@@ -120,12 +122,11 @@ def test_JFATrainAndEnrol():
     ubm.means = UBM_MEAN.reshape((2, 3))
     ubm.variances = UBM_VAR.reshape((2, 3))
     it = JFAMachine(ubm, 2, 2, em_iterations=10)
-    # n_acc, f_acc = it.initialize(TRAINING_STATS_X, TRAINING_STATS_y)
+
     it.U = copy.deepcopy(M_u)
     it.V = copy.deepcopy(M_v)
     it.D = copy.deepcopy(M_d)
     it.fit(TRAINING_STATS_X, TRAINING_STATS_y)
-    # bob.learn.em.train_jfa(t, mb, TRAINING_STATS, initialize=False)
 
     v_ref = np.array(
         [
@@ -211,28 +212,6 @@ def test_JFATrainAndEnrol():
     assert np.allclose(latent_y, y_ref, eps)
     assert np.allclose(latent_z, z_ref, eps)
 
-    # Testing exceptions
-    """
-    nose.tools.assert_raises(RuntimeError, t.initialize, mb, [1, 2, 2])
-    nose.tools.assert_raises(RuntimeError, t.initialize, mb, [[1, 2, 2]])
-    nose.tools.assert_raises(RuntimeError, t.e_step_u, mb, [1, 2, 2])
-    nose.tools.assert_raises(RuntimeError, t.e_step_u, mb, [[1, 2, 2]])
-    nose.tools.assert_raises(RuntimeError, t.m_step_u, mb, [1, 2, 2])
-    nose.tools.assert_raises(RuntimeError, t.m_step_u, mb, [[1, 2, 2]])
-
-    nose.tools.assert_raises(RuntimeError, t.e_step_v, mb, [1, 2, 2])
-    nose.tools.assert_raises(RuntimeError, t.e_step_v, mb, [[1, 2, 2]])
-    nose.tools.assert_raises(RuntimeError, t.m_step_v, mb, [1, 2, 2])
-    nose.tools.assert_raises(RuntimeError, t.m_step_v, mb, [[1, 2, 2]])
-
-    nose.tools.assert_raises(RuntimeError, t.e_step_d, mb, [1, 2, 2])
-    nose.tools.assert_raises(RuntimeError, t.e_step_d, mb, [[1, 2, 2]])
-    nose.tools.assert_raises(RuntimeError, t.m_step_d, mb, [1, 2, 2])
-    nose.tools.assert_raises(RuntimeError, t.m_step_d, mb, [[1, 2, 2]])
-
-    nose.tools.assert_raises(RuntimeError, t.enroll, m, [[1, 2, 2]], 5)
-    """
-
 
 def test_ISVTrainAndEnrol():
     # Train and enroll an 'ISVMachine'
@@ -325,13 +304,6 @@ def test_ISVTrainAndEnrol():
     latent_z = it.enroll(gse, 5)
     assert np.allclose(latent_z, z_ref, eps)
 
-    # Testing exceptions
-    # nose.tools.assert_raises(RuntimeError, t.initialize, mb, [1, 2, 2])
-    # nose.tools.assert_raises(RuntimeError, t.initialize, mb, [[1, 2, 2]])
-    # nose.tools.assert_raises(RuntimeError, t.e_step, mb, [1, 2, 2])
-    # nose.tools.assert_raises(RuntimeError, t.e_step, mb, [[1, 2, 2]])
-    # nose.tools.assert_raises(RuntimeError, t.enroll, m, [[1, 2, 2]], 5)
-
 
 def test_JFATrainInitialize():
     # Check that the initialization is consistent and using the rng (cf. issue #118)
diff --git a/doc/plot/plot_ISV.py b/doc/plot/plot_ISV.py
index 85e88c0b0acd07fc42b9eeef83413139d6e98268..0a9dec10ec494b802d07d592116b509d05becce4 100644
--- a/doc/plot/plot_ISV.py
+++ b/doc/plot/plot_ISV.py
@@ -1,8 +1,9 @@
+import matplotlib.pyplot as plt
+import numpy as np
+
 from sklearn.datasets import load_iris
 
 import bob.learn.em
-import matplotlib.pyplot as plt
-import numpy as np
 
 np.random.seed(2)  # FIXING A SEED
 
diff --git a/doc/plot/plot_JFA.py b/doc/plot/plot_JFA.py
index c67572b095549ecac6e26d3f3eeab78bfc9aa42e..2585d0c6fee64ba3654c4bd61a20c28c0e5201d4 100644
--- a/doc/plot/plot_JFA.py
+++ b/doc/plot/plot_JFA.py
@@ -1,8 +1,9 @@
+import matplotlib.pyplot as plt
+import numpy as np
+
 from sklearn.datasets import load_iris
 
 import bob.learn.em
-import matplotlib.pyplot as plt
-import numpy as np
 
 np.random.seed(2)  # FIXING A SEED
 