diff --git a/bob/learn/misc/__init__.py b/bob/learn/misc/__init__.py index 2c3dd9d94c29e36af8fe91c53e8e4f3151813366..4692c864bd6c1eb1c8a8a167f5d434a47868c6c1 100644 --- a/bob/learn/misc/__init__.py +++ b/bob/learn/misc/__init__.py @@ -16,6 +16,7 @@ from .__ML_gmm_trainer__ import * from .__MAP_gmm_trainer__ import * from .__jfa_trainer__ import * from .__isv_trainer__ import * +from .__ivector_trainer__ import * def ztnorm_same_value(vect_a, vect_b): diff --git a/bob/learn/misc/__isv_trainer__.py b/bob/learn/misc/__isv_trainer__.py index b77e06d63ee3ec8d47b6a6b761cfe10671a142fe..98a5457e3c4a24338881369e7e215d61515b1a01 100644 --- a/bob/learn/misc/__isv_trainer__.py +++ b/bob/learn/misc/__isv_trainer__.py @@ -11,7 +11,7 @@ import numpy # define the class class ISVTrainer (_ISVTrainer): - def __init__(self, max_iterations=10, relevance_factor=4., convergence_threshold = 0.001): + def __init__(self, max_iterations=10, relevance_factor=4.): """ :py:class:`bob.learn.misc.ISVTrainer` constructor @@ -19,7 +19,7 @@ class ISVTrainer (_ISVTrainer): max_iterations Number of maximum iterations """ - _ISVTrainer.__init__(self, relevance_factor, convergence_threshold) + _ISVTrainer.__init__(self, relevance_factor) self._max_iterations = max_iterations diff --git a/bob/learn/misc/__ivector_trainer__.py b/bob/learn/misc/__ivector_trainer__.py new file mode 100644 index 0000000000000000000000000000000000000000..a53a2d452eab0772390fdc4063ba3055690cdca1 --- /dev/null +++ b/bob/learn/misc/__ivector_trainer__.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python +# vim: set fileencoding=utf-8 : +# Tiago de Freitas Pereira <tiago.pereira@idiap.ch> +# Tue Fev 03 13:20:10 2015 +0200 +# +# Copyright (C) 2011-2015 Idiap Research Institute, Martigny, Switzerland + +from ._library import _IVectorTrainer +import numpy + +# define the class +class IVectorTrainer (_IVectorTrainer): + + def __init__(self, max_iterations=10, update_sigma=False): + """ + :py:class:`bob.learn.misc.IVectorTrainer` constructor + 
+ Keyword Parameters: + max_iterations + Number of maximum iterations + update_sigma + + """ + _IVectorTrainer.__init__(self, update_sigma) + self._max_iterations = max_iterations + + + def train(self, ivector_machine, data): + """ + Train the :py:class:`bob.learn.misc.IVectorMachine` using data + + Keyword Parameters: + ivector_machine + The `:py:class:bob.learn.misc.IVectorMachine` class + data + The data to be trained + """ + + #Initialization + self.initialize(ivector_machine, data); + + for i in range(self._max_iterations): + #eStep + self.eStep(ivector_machine, data); + #mStep + self.mStep(ivector_machine); + + + +# copy the documentation from the base class +__doc__ = _IVectorTrainer.__doc__ diff --git a/bob/learn/misc/cpp/ISVTrainer.cpp b/bob/learn/misc/cpp/ISVTrainer.cpp index c7891b9f15dd8fd8ae4ef86e59283e6050849a5e..5ccedb078255554f33e45004323589dfffdbbff0 100644 --- a/bob/learn/misc/cpp/ISVTrainer.cpp +++ b/bob/learn/misc/cpp/ISVTrainer.cpp @@ -19,17 +19,16 @@ //////////////////////////// ISVTrainer /////////////////////////// -bob::learn::misc::ISVTrainer::ISVTrainer(const double relevance_factor, const double convergence_threshold): +bob::learn::misc::ISVTrainer::ISVTrainer(const double relevance_factor): m_relevance_factor(relevance_factor), - m_convergence_threshold(convergence_threshold), m_rng(new boost::mt19937()) {} bob::learn::misc::ISVTrainer::ISVTrainer(const bob::learn::misc::ISVTrainer& other): - m_convergence_threshold(other.m_convergence_threshold), - m_relevance_factor(other.m_relevance_factor), m_rng(other.m_rng) -{} +{ + m_relevance_factor = other.m_relevance_factor; +} bob::learn::misc::ISVTrainer::~ISVTrainer() {} @@ -39,7 +38,6 @@ bob::learn::misc::ISVTrainer& bob::learn::misc::ISVTrainer::operator= { if (this != &other) { - m_convergence_threshold = other.m_convergence_threshold; m_rng = other.m_rng; m_relevance_factor = other.m_relevance_factor; } @@ -48,8 +46,7 @@ bob::learn::misc::ISVTrainer& 
bob::learn::misc::ISVTrainer::operator= bool bob::learn::misc::ISVTrainer::operator==(const bob::learn::misc::ISVTrainer& b) const { - return m_convergence_threshold == b.m_convergence_threshold && - m_rng == b.m_rng && + return m_rng == b.m_rng && m_relevance_factor == b.m_relevance_factor; } @@ -61,9 +58,8 @@ bool bob::learn::misc::ISVTrainer::operator!=(const bob::learn::misc::ISVTrainer bool bob::learn::misc::ISVTrainer::is_similar_to(const bob::learn::misc::ISVTrainer& b, const double r_epsilon, const double a_epsilon) const { - return m_convergence_threshold == b.m_convergence_threshold && - m_rng == b.m_rng && - m_relevance_factor == b.m_relevance_factor; + return m_rng == b.m_rng && + m_relevance_factor == b.m_relevance_factor; } void bob::learn::misc::ISVTrainer::initialize(bob::learn::misc::ISVBase& machine, diff --git a/bob/learn/misc/cpp/IVectorTrainer.cpp b/bob/learn/misc/cpp/IVectorTrainer.cpp index 4f929af95a62fa232f73f6d529e9ca8e8d849c43..32bff8f6b9e9642b9e38fede8f5c3da469595d97 100644 --- a/bob/learn/misc/cpp/IVectorTrainer.cpp +++ b/bob/learn/misc/cpp/IVectorTrainer.cpp @@ -7,30 +7,26 @@ #include <bob.learn.misc/IVectorTrainer.h> -#include <bob.learn.misc/IVectorMachine.h> +#include <bob.core/check.h> #include <bob.core/array_copy.h> #include <bob.core/array_random.h> -#include <bob.core/check.h> #include <bob.math/inv.h> +#include <bob.core/check.h> +#include <bob.core/array_repmat.h> +#include <algorithm> + #include <bob.math/linear.h> #include <bob.math/linsolve.h> -#include <boost/shared_ptr.hpp> - -bob::learn::misc::IVectorTrainer::IVectorTrainer(const bool update_sigma, - const double convergence_threshold, - const size_t max_iterations, bool compute_likelihood): - bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine, - std::vector<bob::learn::misc::GMMStats> >(convergence_threshold, - max_iterations, compute_likelihood), - m_update_sigma(update_sigma) -{ -} + +bob::learn::misc::IVectorTrainer::IVectorTrainer(const bool 
update_sigma): + m_update_sigma(update_sigma), + m_rng(new boost::mt19937()) +{} bob::learn::misc::IVectorTrainer::IVectorTrainer(const bob::learn::misc::IVectorTrainer& other): - bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine, - std::vector<bob::learn::misc::GMMStats> >(other), m_update_sigma(other.m_update_sigma) { + m_rng = other.m_rng; m_acc_Nij_wij2.reference(bob::core::array::ccopy(other.m_acc_Nij_wij2)); m_acc_Fnormij_wij.reference(bob::core::array::ccopy(other.m_acc_Fnormij_wij)); m_acc_Nij.reference(bob::core::array::ccopy(other.m_acc_Nij)); @@ -51,11 +47,10 @@ bob::learn::misc::IVectorTrainer::~IVectorTrainer() } void bob::learn::misc::IVectorTrainer::initialize( - bob::learn::misc::IVectorMachine& machine, - const std::vector<bob::learn::misc::GMMStats>& data) + bob::learn::misc::IVectorMachine& machine) { - const int C = machine.getDimC(); - const int D = machine.getDimD(); + const int C = machine.getNGaussians(); + const int D = machine.getNInputs(); const int Rt = machine.getDimRt(); // Cache @@ -91,7 +86,7 @@ void bob::learn::misc::IVectorTrainer::eStep( const std::vector<bob::learn::misc::GMMStats>& data) { blitz::Range rall = blitz::Range::all(); - const int C = machine.getDimC(); + const int C = machine.getNGaussians(); // Reinitializes accumulators to 0 m_acc_Nij_wij2 = 0.; @@ -144,14 +139,13 @@ void bob::learn::misc::IVectorTrainer::eStep( } void bob::learn::misc::IVectorTrainer::mStep( - bob::learn::misc::IVectorMachine& machine, - const std::vector<bob::learn::misc::GMMStats>& data) + bob::learn::misc::IVectorMachine& machine) { blitz::Range rall = blitz::Range::all(); blitz::Array<double,2>& T = machine.updateT(); blitz::Array<double,1>& sigma = machine.updateSigma(); - const int C = (int)machine.getDimC(); - const int D = (int)machine.getDimD(); + const int C = (int)machine.getNGaussians(); + const int D = (int)machine.getNInputs(); for (int c=0; c<C; ++c) { // Solves linear system A.T = B to update T, based on accumulators of 
@@ -178,26 +172,11 @@ void bob::learn::misc::IVectorTrainer::mStep( } -double bob::learn::misc::IVectorTrainer::computeLikelihood( - bob::learn::misc::IVectorMachine& machine) -{ - // TODO: implementation - return 0; -} - -void bob::learn::misc::IVectorTrainer::finalize( - bob::learn::misc::IVectorMachine& machine, - const std::vector<bob::learn::misc::GMMStats>& data) -{ -} - bob::learn::misc::IVectorTrainer& bob::learn::misc::IVectorTrainer::operator= (const bob::learn::misc::IVectorTrainer &other) { if (this != &other) - { - bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine, - std::vector<bob::learn::misc::GMMStats> >::operator=(other); + { m_update_sigma = other.m_update_sigma; m_acc_Nij_wij2.reference(bob::core::array::ccopy(other.m_acc_Nij_wij2)); @@ -220,13 +199,11 @@ bob::learn::misc::IVectorTrainer& bob::learn::misc::IVectorTrainer::operator= bool bob::learn::misc::IVectorTrainer::operator== (const bob::learn::misc::IVectorTrainer &other) const { - return bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine, - std::vector<bob::learn::misc::GMMStats> >::operator==(other) && - m_update_sigma == other.m_update_sigma && - bob::core::array::isEqual(m_acc_Nij_wij2, other.m_acc_Nij_wij2) && - bob::core::array::isEqual(m_acc_Fnormij_wij, other.m_acc_Fnormij_wij) && - bob::core::array::isEqual(m_acc_Nij, other.m_acc_Nij) && - bob::core::array::isEqual(m_acc_Snormij, other.m_acc_Snormij); + return m_update_sigma == other.m_update_sigma && + bob::core::array::isEqual(m_acc_Nij_wij2, other.m_acc_Nij_wij2) && + bob::core::array::isEqual(m_acc_Fnormij_wij, other.m_acc_Fnormij_wij) && + bob::core::array::isEqual(m_acc_Nij, other.m_acc_Nij) && + bob::core::array::isEqual(m_acc_Snormij, other.m_acc_Snormij); } bool bob::learn::misc::IVectorTrainer::operator!= @@ -239,12 +216,10 @@ bool bob::learn::misc::IVectorTrainer::is_similar_to (const bob::learn::misc::IVectorTrainer &other, const double r_epsilon, const double a_epsilon) const { - return 
bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine, - std::vector<bob::learn::misc::GMMStats> >::is_similar_to(other, r_epsilon, a_epsilon) && - m_update_sigma == other.m_update_sigma && - bob::core::array::isClose(m_acc_Nij_wij2, other.m_acc_Nij_wij2, r_epsilon, a_epsilon) && - bob::core::array::isClose(m_acc_Fnormij_wij, other.m_acc_Fnormij_wij, r_epsilon, a_epsilon) && - bob::core::array::isClose(m_acc_Nij, other.m_acc_Nij, r_epsilon, a_epsilon) && - bob::core::array::isClose(m_acc_Snormij, other.m_acc_Snormij, r_epsilon, a_epsilon); + return m_update_sigma == other.m_update_sigma && + bob::core::array::isClose(m_acc_Nij_wij2, other.m_acc_Nij_wij2, r_epsilon, a_epsilon) && + bob::core::array::isClose(m_acc_Fnormij_wij, other.m_acc_Fnormij_wij, r_epsilon, a_epsilon) && + bob::core::array::isClose(m_acc_Nij, other.m_acc_Nij, r_epsilon, a_epsilon) && + bob::core::array::isClose(m_acc_Snormij, other.m_acc_Snormij, r_epsilon, a_epsilon); } diff --git a/bob/learn/misc/include/bob.learn.misc/ISVTrainer.h b/bob/learn/misc/include/bob.learn.misc/ISVTrainer.h index 1f98e7477960e799979e41982c4651c2b0d17c98..1f041419f40190fab36f614e012bb25be9904cd4 100644 --- a/bob/learn/misc/include/bob.learn.misc/ISVTrainer.h +++ b/bob/learn/misc/include/bob.learn.misc/ISVTrainer.h @@ -31,7 +31,7 @@ class ISVTrainer /** * @brief Constructor */ - ISVTrainer(const double relevance_factor=4., const double convergence_threshold = 0.001); + ISVTrainer(const double relevance_factor=4.); /** * @brief Copy onstructor @@ -146,8 +146,6 @@ class ISVTrainer double m_relevance_factor; - double m_convergence_threshold; ///< convergence threshold - boost::shared_ptr<boost::mt19937> m_rng; ///< The random number generator for the inialization}; }; diff --git a/bob/learn/misc/include/bob.learn.misc/IVectorTrainer.h b/bob/learn/misc/include/bob.learn.misc/IVectorTrainer.h index f3e82254d01e24c67d472280fabd2130752bff3e..4f496d659859f00ff1012b56f8094b4005fbafbb 100644 --- 
a/bob/learn/misc/include/bob.learn.misc/IVectorTrainer.h +++ b/bob/learn/misc/include/bob.learn.misc/IVectorTrainer.h @@ -9,11 +9,14 @@ #define BOB_LEARN_MISC_IVECTOR_TRAINER_H #include <blitz/array.h> -#include <bob.learn.misc/EMTrainer.h> #include <bob.learn.misc/IVectorMachine.h> #include <bob.learn.misc/GMMStats.h> #include <boost/shared_ptr.hpp> #include <vector> +#include <bob.core/array_copy.h> +#include <boost/random.hpp> + +#include <boost/random/mersenne_twister.hpp> namespace bob { namespace learn { namespace misc { @@ -25,15 +28,13 @@ namespace bob { namespace learn { namespace misc { * N. Dehak, P. Kenny, R. Dehak, P. Dumouchel, P. Ouellet, * IEEE Trans. on Audio, Speech and Language Processing */ -class IVectorTrainer: public bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine, std::vector<bob::learn::misc::GMMStats> > +class IVectorTrainer { public: /** * @brief Default constructor. Builds an IVectorTrainer */ - IVectorTrainer(const bool update_sigma=false, - const double convergence_threshold=0.001, - const size_t max_iterations=10, const bool compute_likelihood=false); + IVectorTrainer(const bool update_sigma=false); /** * @brief Copy constructor @@ -48,8 +49,7 @@ class IVectorTrainer: public bob::learn::misc::EMTrainer<bob::learn::misc::IVect /** * @brief Initialization before the EM loop */ - virtual void initialize(bob::learn::misc::IVectorMachine& ivector, - const std::vector<bob::learn::misc::GMMStats>& data); + virtual void initialize(bob::learn::misc::IVectorMachine& ivector); /** * @brief Calculates statistics across the dataset, @@ -68,20 +68,8 @@ class IVectorTrainer: public bob::learn::misc::EMTrainer<bob::learn::misc::IVect * @brief Maximisation step: Update the Total Variability matrix \f$T\f$ * and \f$\Sigma\f$ if update_sigma is enabled. 
*/ - virtual void mStep(bob::learn::misc::IVectorMachine& ivector, - const std::vector<bob::learn::misc::GMMStats>& data); + virtual void mStep(bob::learn::misc::IVectorMachine& ivector); - /** - * @brief Computes the likelihood using current estimates - * @warning (currently unsupported) - */ - virtual double computeLikelihood(bob::learn::misc::IVectorMachine& ivector); - - /** - * @brief Finalization after the EM loop - */ - virtual void finalize(bob::learn::misc::IVectorMachine& ivector, - const std::vector<bob::learn::misc::GMMStats>& data); /** * @brief Assigns from a different IVectorTrainer @@ -152,6 +140,11 @@ class IVectorTrainer: public bob::learn::misc::EMTrainer<bob::learn::misc::IVect mutable blitz::Array<double,2> m_tmp_dt1; mutable blitz::Array<double,2> m_tmp_tt1; mutable blitz::Array<double,2> m_tmp_tt2; + + /** + * @brief The random number generator for the inialization + */ + boost::shared_ptr<boost::mt19937> m_rng; }; } } } // namespaces diff --git a/bob/learn/misc/isv_trainer.cpp b/bob/learn/misc/isv_trainer.cpp index f288fbb557c8c3729f502299ce92631fe9607735..0e91e8b9c06a1be0f5ca5d1ac4648c70f44fbe75 100644 --- a/bob/learn/misc/isv_trainer.cpp +++ b/bob/learn/misc/isv_trainer.cpp @@ -94,7 +94,7 @@ static auto ISVTrainer_doc = bob::extension::ClassDoc( "", true ) - .add_prototype("relevance_factor,convergence_threshold","") + .add_prototype("relevance_factor","") .add_prototype("other","") .add_prototype("","") .add_parameter("other", ":py:class:`bob.learn.misc.ISVTrainer`", "A ISVTrainer object to be copied.") @@ -121,9 +121,9 @@ static int PyBobLearnMiscISVTrainer_init_number(PyBobLearnMiscISVTrainerObject* char** kwlist = ISVTrainer_doc.kwlist(0); double relevance_factor = 4.; - double convergence_threshold = 0.001; + //Parsing the input argments - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "dd", kwlist, &relevance_factor, &convergence_threshold)) + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "d", kwlist, &relevance_factor)) return -1; 
if(relevance_factor < 0){ @@ -131,12 +131,7 @@ static int PyBobLearnMiscISVTrainer_init_number(PyBobLearnMiscISVTrainerObject* return -1; } - if(convergence_threshold < 0){ - PyErr_Format(PyExc_TypeError, "convergence_threshold argument must be greater than zero"); - return -1; - } - - self->cxx.reset(new bob::learn::misc::ISVTrainer(relevance_factor, convergence_threshold)); + self->cxx.reset(new bob::learn::misc::ISVTrainer(relevance_factor)); return 0; } @@ -153,15 +148,25 @@ static int PyBobLearnMiscISVTrainer_init(PyBobLearnMiscISVTrainerObject* self, P return 0; } case 1:{ - // If the constructor input is ISVTrainer object - return PyBobLearnMiscISVTrainer_init_copy(self, args, kwargs); - } - case 2:{ - // If the constructor input is ISVTrainer object - return PyBobLearnMiscISVTrainer_init_number(self, args, kwargs); + //Reading the input argument + PyObject* arg = 0; + if (PyTuple_Size(args)) + arg = PyTuple_GET_ITEM(args, 0); + else { + PyObject* tmp = PyDict_Values(kwargs); + auto tmp_ = make_safe(tmp); + arg = PyList_GET_ITEM(tmp, 0); + } + + if(PyBobLearnMiscISVTrainer_Check(arg)) + // If the constructor input is ISVTrainer object + return PyBobLearnMiscISVTrainer_init_copy(self, args, kwargs); + else + return PyBobLearnMiscISVTrainer_init_number(self, args, kwargs); + } default:{ - PyErr_Format(PyExc_RuntimeError, "number of arguments mismatch - %s requires only 0, 1 or 2 arguments, but you provided %d (see help)", Py_TYPE(self)->tp_name, nargs); + PyErr_Format(PyExc_RuntimeError, "number of arguments mismatch - %s requires only 0 or 1 arguments, but you provided %d (see help)", Py_TYPE(self)->tp_name, nargs); ISVTrainer_doc.print_usage(); return -1; } @@ -330,14 +335,14 @@ static PyGetSetDef PyBobLearnMiscISVTrainer_getseters[] = { { acc_u_a1.name(), (getter)PyBobLearnMiscISVTrainer_get_acc_u_a1, - (setter)PyBobLearnMiscISVTrainer_get_acc_u_a1, + (setter)PyBobLearnMiscISVTrainer_set_acc_u_a1, acc_u_a1.doc(), 0 }, { acc_u_a2.name(), 
(getter)PyBobLearnMiscISVTrainer_get_acc_u_a2, - (setter)PyBobLearnMiscISVTrainer_get_acc_u_a2, + (setter)PyBobLearnMiscISVTrainer_set_acc_u_a2, acc_u_a2.doc(), 0 }, diff --git a/bob/learn/misc/ivector_machine.cpp b/bob/learn/misc/ivector_machine.cpp index 1f058a3ec9c3aceede4bd82793c5a6571802e062..5251905af23293d566b9ed9efb24d9838f90120a 100644 --- a/bob/learn/misc/ivector_machine.cpp +++ b/bob/learn/misc/ivector_machine.cpp @@ -285,6 +285,43 @@ int PyBobLearnMiscIVectorMachine_setVarianceThreshold(PyBobLearnMiscIVectorMachi } +/***** ubm *****/ +static auto ubm = bob::extension::VariableDoc( + "ubm", + ":py:class:`bob.learn.misc.GMMMachine`", + "Returns the UBM (Universal Background Model", + "" +); +PyObject* PyBobLearnMiscIVectorMachine_getUBM(PyBobLearnMiscIVectorMachineObject* self, void*){ + BOB_TRY + + boost::shared_ptr<bob::learn::misc::GMMMachine> ubm_gmmMachine = self->cxx->getUbm(); + + //Allocating the correspondent python object + PyBobLearnMiscGMMMachineObject* retval = + (PyBobLearnMiscGMMMachineObject*)PyBobLearnMiscGMMMachine_Type.tp_alloc(&PyBobLearnMiscGMMMachine_Type, 0); + retval->cxx = ubm_gmmMachine; + + return Py_BuildValue("O",retval); + BOB_CATCH_MEMBER("ubm could not be read", 0) +} +int PyBobLearnMiscIVectorMachine_setUBM(PyBobLearnMiscIVectorMachineObject* self, PyObject* value, void*){ + BOB_TRY + + if (!PyBobLearnMiscGMMMachine_Check(value)){ + PyErr_Format(PyExc_RuntimeError, "%s %s expects a :py:class:`bob.learn.misc.GMMMachine`", Py_TYPE(self)->tp_name, ubm.name()); + return -1; + } + + PyBobLearnMiscGMMMachineObject* ubm_gmmMachine = 0; + PyArg_Parse(value, "O!", &PyBobLearnMiscGMMMachine_Type,&ubm_gmmMachine); + + self->cxx->setUbm(ubm_gmmMachine->cxx); + + return 0; + BOB_CATCH_MEMBER("ubm could not be set", -1) +} + static PyGetSetDef PyBobLearnMiscIVectorMachine_getseters[] = { { @@ -327,6 +364,14 @@ static PyGetSetDef PyBobLearnMiscIVectorMachine_getseters[] = { 0 }, + { + ubm.name(), + 
(getter)PyBobLearnMiscIVectorMachine_getUBM, + (setter)PyBobLearnMiscIVectorMachine_setUBM, + ubm.doc(), + 0 + }, + {0} // Sentinel }; @@ -483,6 +528,64 @@ static PyObject* PyBobLearnMiscIVectorMachine_resize(PyBobLearnMiscIVectorMachin } +/*** __compute_Id_TtSigmaInvT__ ***/ +static auto __compute_Id_TtSigmaInvT__ = bob::extension::FunctionDoc( + "__compute_Id_TtSigmaInvT__", + "", + "", + true +) +.add_prototype("stats") +.add_parameter("stats", ":py:class:`bob.learn.misc.GMMStats`", "Statistics as input"); +static PyObject* PyBobLearnMiscIVectorMachine_compute_Id_TtSigmaInvT__(PyBobLearnMiscIVectorMachineObject* self, PyObject* args, PyObject* kwargs) { + BOB_TRY + + char** kwlist = __compute_Id_TtSigmaInvT__.kwlist(0); + + PyBobLearnMiscGMMStatsObject* stats = 0; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!", kwlist, &PyBobLearnMiscGMMStats_Type, &stats)) + Py_RETURN_NONE; + + + blitz::Array<double,2> output(self->cxx->getDimRt(), self->cxx->getDimRt()); + self->cxx->computeIdTtSigmaInvT(*stats->cxx, output); + return PyBlitzArrayCxx_AsConstNumpy(output); + + BOB_CATCH_MEMBER("cannot __compute_Id_TtSigmaInvT__", 0) +} + + + +/*** __compute_TtSigmaInvFnorm__ ***/ +static auto __compute_TtSigmaInvFnorm__ = bob::extension::FunctionDoc( + "__compute_TtSigmaInvFnorm__", + "", + "", + true +) +.add_prototype("stats") +.add_parameter("stats", ":py:class:`bob.learn.misc.GMMStats`", "Statistics as input"); +static PyObject* PyBobLearnMiscIVectorMachine_compute_TtSigmaInvFnorm__(PyBobLearnMiscIVectorMachineObject* self, PyObject* args, PyObject* kwargs) { + BOB_TRY + + char** kwlist = __compute_TtSigmaInvFnorm__.kwlist(0); + + PyBobLearnMiscGMMStatsObject* stats = 0; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!", kwlist, &PyBobLearnMiscGMMStats_Type, &stats)) + Py_RETURN_NONE; + + + blitz::Array<double,1> output(self->cxx->getDimRt()); + self->cxx->computeTtSigmaInvFnorm(*stats->cxx, output); + return PyBlitzArrayCxx_AsConstNumpy(output); + + 
BOB_CATCH_MEMBER("cannot __compute_TtSigmaInvFnorm__", 0) +} + + + static PyMethodDef PyBobLearnMiscIVectorMachine_methods[] = { { @@ -509,6 +612,18 @@ static PyMethodDef PyBobLearnMiscIVectorMachine_methods[] = { METH_VARARGS|METH_KEYWORDS, resize.doc() }, + { + __compute_Id_TtSigmaInvT__.name(), + (PyCFunction)PyBobLearnMiscIVectorMachine_compute_Id_TtSigmaInvT__, + METH_VARARGS|METH_KEYWORDS, + __compute_Id_TtSigmaInvT__.doc() + }, + { + __compute_TtSigmaInvFnorm__.name(), + (PyCFunction)PyBobLearnMiscIVectorMachine_compute_TtSigmaInvFnorm__, + METH_VARARGS|METH_KEYWORDS, + __compute_TtSigmaInvFnorm__.doc() + }, /* { diff --git a/bob/learn/misc/ivector_trainer.cpp b/bob/learn/misc/ivector_trainer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ebd506737157e8f2f7e041bca09ac00e36020884 --- /dev/null +++ b/bob/learn/misc/ivector_trainer.cpp @@ -0,0 +1,455 @@ +/** + * @author Tiago de Freitas Pereira <tiago.pereira@idiap.ch> + * @date Tue 03 Fev 10:29:00 2015 + * + * @brief Python API for bob::learn::em + * + * Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland + */ + +#include "main.h" +#include <boost/make_shared.hpp> + +/******************************************************************/ +/************ Constructor Section *********************************/ +/******************************************************************/ + +static inline bool f(PyObject* o){return o != 0 && PyObject_IsTrue(o) > 0;} /* converts PyObject to bool and returns false if object is NULL */ + +static int extract_GMMStats_1d(PyObject *list, + std::vector<bob::learn::misc::GMMStats>& training_data) +{ + for (int i=0; i<PyList_GET_SIZE(list); i++){ + + PyBobLearnMiscGMMStatsObject* stats; + if (!PyArg_Parse(PyList_GetItem(list, i), "O!", &PyBobLearnMiscGMMStats_Type, &stats)){ + PyErr_Format(PyExc_RuntimeError, "Expected GMMStats objects"); + return -1; + } + bob::learn::misc::GMMStats *stats_pointer = stats->cxx.get(); + std::cout << " #### 
" << std::endl; + training_data.push_back(*(stats_pointer)); + } + return 0; +} + + +static auto IVectorTrainer_doc = bob::extension::ClassDoc( + BOB_EXT_MODULE_PREFIX ".IVectorTrainer", + "IVectorTrainer" + "An IVectorTrainer to learn a Total Variability subspace :math:`$T$`" + " (and eventually a covariance matrix :math:`$\\Sigma$`).", + " References: [Dehak2010]" +).add_constructor( + bob::extension::FunctionDoc( + "__init__", + "Constructor. Builds a new IVectorTrainer", + "", + true + ) + .add_prototype("update_sigma","") + .add_prototype("other","") + .add_prototype("","") + .add_parameter("other", ":py:class:`bob.learn.misc.IVectorTrainer`", "A IVectorTrainer object to be copied.") + .add_parameter("update_sigma", "bool", "") +); + + +static int PyBobLearnMiscIVectorTrainer_init_copy(PyBobLearnMiscIVectorTrainerObject* self, PyObject* args, PyObject* kwargs) { + + char** kwlist = IVectorTrainer_doc.kwlist(1); + PyBobLearnMiscIVectorTrainerObject* o; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!", kwlist, &PyBobLearnMiscIVectorTrainer_Type, &o)){ + IVectorTrainer_doc.print_usage(); + return -1; + } + + self->cxx.reset(new bob::learn::misc::IVectorTrainer(*o->cxx)); + return 0; +} + + +static int PyBobLearnMiscIVectorTrainer_init_bool(PyBobLearnMiscIVectorTrainerObject* self, PyObject* args, PyObject* kwargs) { + + char** kwlist = IVectorTrainer_doc.kwlist(0); + PyObject* update_sigma = 0; + + //Parsing the input argments + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!", kwlist, &PyBool_Type, &update_sigma)) + return -1; + + self->cxx.reset(new bob::learn::misc::IVectorTrainer(f(update_sigma))); + return 0; +} + + +static int PyBobLearnMiscIVectorTrainer_init(PyBobLearnMiscIVectorTrainerObject* self, PyObject* args, PyObject* kwargs) { + BOB_TRY + + // get the number of command line arguments + int nargs = (args?PyTuple_Size(args):0) + (kwargs?PyDict_Size(kwargs):0); + + switch(nargs){ + case 0:{ + self->cxx.reset(new 
bob::learn::misc::IVectorTrainer()); + return 0; + } + case 1:{ + //Reading the input argument + PyObject* arg = 0; + if (PyTuple_Size(args)) + arg = PyTuple_GET_ITEM(args, 0); + else { + PyObject* tmp = PyDict_Values(kwargs); + auto tmp_ = make_safe(tmp); + arg = PyList_GET_ITEM(tmp, 0); + } + + // If the constructor input is IVectorTrainer object + if(PyBobLearnMiscIVectorTrainer_Check(arg)) + return PyBobLearnMiscIVectorTrainer_init_copy(self, args, kwargs); + else + return PyBobLearnMiscIVectorTrainer_init_bool(self, args, kwargs); + + } + default:{ + PyErr_Format(PyExc_RuntimeError, "number of arguments mismatch - %s requires only 0 or 1 arguments, but you provided %d (see help)", Py_TYPE(self)->tp_name, nargs); + IVectorTrainer_doc.print_usage(); + return -1; + } + } + BOB_CATCH_MEMBER("cannot create IVectorTrainer", 0) + return 0; +} + + +static void PyBobLearnMiscIVectorTrainer_delete(PyBobLearnMiscIVectorTrainerObject* self) { + self->cxx.reset(); + Py_TYPE(self)->tp_free((PyObject*)self); +} + + +int PyBobLearnMiscIVectorTrainer_Check(PyObject* o) { + return PyObject_IsInstance(o, reinterpret_cast<PyObject*>(&PyBobLearnMiscIVectorTrainer_Type)); +} + + +static PyObject* PyBobLearnMiscIVectorTrainer_RichCompare(PyBobLearnMiscIVectorTrainerObject* self, PyObject* other, int op) { + BOB_TRY + + if (!PyBobLearnMiscIVectorTrainer_Check(other)) { + PyErr_Format(PyExc_TypeError, "cannot compare `%s' with `%s'", Py_TYPE(self)->tp_name, Py_TYPE(other)->tp_name); + return 0; + } + auto other_ = reinterpret_cast<PyBobLearnMiscIVectorTrainerObject*>(other); + switch (op) { + case Py_EQ: + if (*self->cxx==*other_->cxx) Py_RETURN_TRUE; else Py_RETURN_FALSE; + case Py_NE: + if (*self->cxx==*other_->cxx) Py_RETURN_FALSE; else Py_RETURN_TRUE; + default: + Py_INCREF(Py_NotImplemented); + return Py_NotImplemented; + } + BOB_CATCH_MEMBER("cannot compare IVectorTrainer objects", 0) +} + + +/******************************************************************/ +/************ 
Variables Section ***********************************/ +/******************************************************************/ + +static auto acc_nij_wij2 = bob::extension::VariableDoc( + "acc_nij_wij2", + "array_like <float, 3D>", + "Accumulator updated during the E-step", + "" +); +PyObject* PyBobLearnMiscIVectorTrainer_get_acc_nij_wij2(PyBobLearnMiscIVectorTrainerObject* self, void*){ + BOB_TRY + return PyBlitzArrayCxx_AsConstNumpy(self->cxx->getAccNijWij2()); + BOB_CATCH_MEMBER("acc_nij_wij2 could not be read", 0) +} +int PyBobLearnMiscIVectorTrainer_set_acc_nij_wij2(PyBobLearnMiscIVectorTrainerObject* self, PyObject* value, void*){ + BOB_TRY + PyBlitzArrayObject* o; + if (!PyBlitzArray_Converter(value, &o)){ + PyErr_Format(PyExc_RuntimeError, "%s %s expects a 3D array of floats", Py_TYPE(self)->tp_name, acc_nij_wij2.name()); + return -1; + } + auto o_ = make_safe(o); + auto b = PyBlitzArrayCxx_AsBlitz<double,3>(o, "acc_nij_wij2"); + if (!b) return -1; + self->cxx->setAccNijWij2(*b); + return 0; + BOB_CATCH_MEMBER("acc_nij_wij2 could not be set", -1) +} + + +static auto acc_fnormij_wij = bob::extension::VariableDoc( + "acc_fnormij_wij", + "array_like <float, 3D>", + "Accumulator updated during the E-step", + "" +); +PyObject* PyBobLearnMiscIVectorTrainer_get_acc_fnormij_wij(PyBobLearnMiscIVectorTrainerObject* self, void*){ + BOB_TRY + return PyBlitzArrayCxx_AsConstNumpy(self->cxx->getAccFnormijWij()); + BOB_CATCH_MEMBER("acc_fnormij_wij could not be read", 0) +} +int PyBobLearnMiscIVectorTrainer_set_acc_fnormij_wij(PyBobLearnMiscIVectorTrainerObject* self, PyObject* value, void*){ + BOB_TRY + PyBlitzArrayObject* o; + if (!PyBlitzArray_Converter(value, &o)){ + PyErr_Format(PyExc_RuntimeError, "%s %s expects a 3D array of floats", Py_TYPE(self)->tp_name, acc_fnormij_wij.name()); + return -1; + } + auto o_ = make_safe(o); + auto b = PyBlitzArrayCxx_AsBlitz<double,3>(o, "acc_fnormij_wij"); + if (!b) return -1; + self->cxx->setAccFnormijWij(*b); + return 0; + 
BOB_CATCH_MEMBER("acc_fnormij_wij could not be set", -1) +} + + +static auto acc_nij = bob::extension::VariableDoc( + "acc_nij", + "array_like <float, 1D>", + "Accumulator updated during the E-step", + "" +); +PyObject* PyBobLearnMiscIVectorTrainer_get_acc_nij(PyBobLearnMiscIVectorTrainerObject* self, void*){ + BOB_TRY + return PyBlitzArrayCxx_AsConstNumpy(self->cxx->getAccNij()); + BOB_CATCH_MEMBER("acc_nij could not be read", 0) +} +int PyBobLearnMiscIVectorTrainer_set_acc_nij(PyBobLearnMiscIVectorTrainerObject* self, PyObject* value, void*){ + BOB_TRY + PyBlitzArrayObject* o; + if (!PyBlitzArray_Converter(value, &o)){ + PyErr_Format(PyExc_RuntimeError, "%s %s expects a 1D array of floats", Py_TYPE(self)->tp_name, acc_nij.name()); + return -1; + } + auto o_ = make_safe(o); + auto b = PyBlitzArrayCxx_AsBlitz<double,1>(o, "acc_nij"); + if (!b) return -1; + self->cxx->setAccNij(*b); + return 0; + BOB_CATCH_MEMBER("acc_nij could not be set", -1) +} + + +static auto acc_snormij = bob::extension::VariableDoc( + "acc_snormij", + "array_like <float, 2D>", + "Accumulator updated during the E-step", + "" +); +PyObject* PyBobLearnMiscIVectorTrainer_get_acc_snormij(PyBobLearnMiscIVectorTrainerObject* self, void*){ + BOB_TRY + return PyBlitzArrayCxx_AsConstNumpy(self->cxx->getAccSnormij()); + BOB_CATCH_MEMBER("acc_snormij could not be read", 0) +} +int PyBobLearnMiscIVectorTrainer_set_acc_snormij(PyBobLearnMiscIVectorTrainerObject* self, PyObject* value, void*){ + BOB_TRY + PyBlitzArrayObject* o; + if (!PyBlitzArray_Converter(value, &o)){ + PyErr_Format(PyExc_RuntimeError, "%s %s expects a 2D array of floats", Py_TYPE(self)->tp_name, acc_snormij.name()); + return -1; + } + auto o_ = make_safe(o); + auto b = PyBlitzArrayCxx_AsBlitz<double,2>(o, "acc_snormij"); + if (!b) return -1; + self->cxx->setAccSnormij(*b); + return 0; + BOB_CATCH_MEMBER("acc_snormij could not be set", -1) +} + + + + +static PyGetSetDef PyBobLearnMiscIVectorTrainer_getseters[] = { + { + 
acc_nij_wij2.name(), + (getter)PyBobLearnMiscIVectorTrainer_get_acc_nij_wij2, + (setter)PyBobLearnMiscIVectorTrainer_set_acc_nij_wij2, + acc_nij_wij2.doc(), + 0 + }, + { + acc_fnormij_wij.name(), + (getter)PyBobLearnMiscIVectorTrainer_get_acc_fnormij_wij, + (setter)PyBobLearnMiscIVectorTrainer_set_acc_fnormij_wij, + acc_fnormij_wij.doc(), + 0 + }, + { + acc_nij.name(), + (getter)PyBobLearnMiscIVectorTrainer_get_acc_nij, + (setter)PyBobLearnMiscIVectorTrainer_set_acc_nij, + acc_nij.doc(), + 0 + }, + { + acc_snormij.name(), + (getter)PyBobLearnMiscIVectorTrainer_get_acc_snormij, + (setter)PyBobLearnMiscIVectorTrainer_set_acc_snormij, + acc_snormij.doc(), + 0 + }, + + {0} // Sentinel +}; + + +/******************************************************************/ +/************ Functions Section ***********************************/ +/******************************************************************/ + +/*** initialize ***/ +static auto initialize = bob::extension::FunctionDoc( + "initialize", + "Initialization before the EM steps", + "", + true +) +.add_prototype("ivector_machine") +.add_parameter("ivector_machine", ":py:class:`bob.learn.misc.ISVBase`", "IVectorMachine Object"); +static PyObject* PyBobLearnMiscIVectorTrainer_initialize(PyBobLearnMiscIVectorTrainerObject* self, PyObject* args, PyObject* kwargs) { + BOB_TRY + + /* Parses input arguments in a single shot */ + char** kwlist = initialize.kwlist(0); + + PyBobLearnMiscIVectorMachineObject* ivector_machine = 0; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!", kwlist, &PyBobLearnMiscIVectorMachine_Type, &ivector_machine)) Py_RETURN_NONE; + + self->cxx->initialize(*ivector_machine->cxx); + + BOB_CATCH_MEMBER("cannot perform the initialize method", 0) + + Py_RETURN_NONE; +} + + +/*** e_step ***/ +static auto e_step = bob::extension::FunctionDoc( + "e_step", + "Call the e-step procedure (for the U subspace).", + "", + true +) +.add_prototype("ivector_machine,stats") +.add_parameter("ivector_machine", 
":py:class:`bob.learn.misc.IVectorMachine`", "IVectorMachine Object") +.add_parameter("stats", ":py:class:`bob.learn.misc.GMMStats`", "GMMStats Object"); +static PyObject* PyBobLearnMiscIVectorTrainer_e_step(PyBobLearnMiscIVectorTrainerObject* self, PyObject* args, PyObject* kwargs) { + BOB_TRY + + // Parses input arguments in a single shot + char** kwlist = e_step.kwlist(0); + + PyBobLearnMiscIVectorMachineObject* ivector_machine = 0; + PyObject* stats = 0; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!O!", kwlist, &PyBobLearnMiscIVectorMachine_Type, &ivector_machine, + &PyList_Type, &stats)) Py_RETURN_NONE; + + std::vector<bob::learn::misc::GMMStats> training_data; + if(extract_GMMStats_1d(stats ,training_data)==0) + self->cxx->eStep(*ivector_machine->cxx, training_data); + + BOB_CATCH_MEMBER("cannot perform the e_step method", 0) + + Py_RETURN_NONE; +} + + +/*** m_step ***/ +static auto m_step = bob::extension::FunctionDoc( + "m_step", + "Call the m-step procedure (for the total variability subspace).", + "", + true +) +.add_prototype("ivector_machine") +.add_parameter("ivector_machine", ":py:class:`bob.learn.misc.IVectorMachine`", "IVectorMachine Object"); +static PyObject* PyBobLearnMiscIVectorTrainer_m_step(PyBobLearnMiscIVectorTrainerObject* self, PyObject* args, PyObject* kwargs) { + BOB_TRY + + // Parses input arguments in a single shot + char** kwlist = m_step.kwlist(0); + + PyBobLearnMiscIVectorMachineObject* ivector_machine = 0; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!", kwlist, &PyBobLearnMiscIVectorMachine_Type, &ivector_machine)) Py_RETURN_NONE; + + self->cxx->mStep(*ivector_machine->cxx); + + BOB_CATCH_MEMBER("cannot perform the m_step method", 0) + + Py_RETURN_NONE; +} + + + +static PyMethodDef PyBobLearnMiscIVectorTrainer_methods[] = { + { + initialize.name(), + (PyCFunction)PyBobLearnMiscIVectorTrainer_initialize, + METH_VARARGS|METH_KEYWORDS, + initialize.doc() + }, + { + e_step.name(), + (PyCFunction)PyBobLearnMiscIVectorTrainer_e_step, + 
METH_VARARGS|METH_KEYWORDS, + e_step.doc() + }, + { + m_step.name(), + (PyCFunction)PyBobLearnMiscIVectorTrainer_m_step, + METH_VARARGS|METH_KEYWORDS, + m_step.doc() + }, + {0} /* Sentinel */ +}; + + +/******************************************************************/ +/************ Module Section **************************************/ +/******************************************************************/ + +// Define the IVectorTrainer type struct; will be initialized later +PyTypeObject PyBobLearnMiscIVectorTrainer_Type = { + PyVarObject_HEAD_INIT(0,0) + 0 +}; + +bool init_BobLearnMiscIVectorTrainer(PyObject* module) +{ + // initialize the type struct + PyBobLearnMiscIVectorTrainer_Type.tp_name = IVectorTrainer_doc.name(); + PyBobLearnMiscIVectorTrainer_Type.tp_basicsize = sizeof(PyBobLearnMiscIVectorTrainerObject); + PyBobLearnMiscIVectorTrainer_Type.tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE;//Enable the class inheritance; + PyBobLearnMiscIVectorTrainer_Type.tp_doc = IVectorTrainer_doc.doc(); + + // set the functions + PyBobLearnMiscIVectorTrainer_Type.tp_new = PyType_GenericNew; + PyBobLearnMiscIVectorTrainer_Type.tp_init = reinterpret_cast<initproc>(PyBobLearnMiscIVectorTrainer_init); + PyBobLearnMiscIVectorTrainer_Type.tp_dealloc = reinterpret_cast<destructor>(PyBobLearnMiscIVectorTrainer_delete); + PyBobLearnMiscIVectorTrainer_Type.tp_richcompare = reinterpret_cast<richcmpfunc>(PyBobLearnMiscIVectorTrainer_RichCompare); + PyBobLearnMiscIVectorTrainer_Type.tp_methods = PyBobLearnMiscIVectorTrainer_methods; + PyBobLearnMiscIVectorTrainer_Type.tp_getset = PyBobLearnMiscIVectorTrainer_getseters; + //PyBobLearnMiscIVectorTrainer_Type.tp_call = reinterpret_cast<ternaryfunc>(PyBobLearnMiscIVectorTrainer_compute_likelihood); + + + // check that everything is fine + if (PyType_Ready(&PyBobLearnMiscIVectorTrainer_Type) < 0) return false; + + // add the type to the module + Py_INCREF(&PyBobLearnMiscIVectorTrainer_Type); + return PyModule_AddObject(module, 
"_IVectorTrainer", (PyObject*)&PyBobLearnMiscIVectorTrainer_Type) >= 0; +} + diff --git a/bob/learn/misc/main.cpp b/bob/learn/misc/main.cpp index 54a5d486de04af187877354d6e3a958f3d596a79..c4875f85832d609974bf2cfc2117116379b7baf0 100644 --- a/bob/learn/misc/main.cpp +++ b/bob/learn/misc/main.cpp @@ -78,6 +78,8 @@ static PyObject* create_module (void) { if (!init_BobLearnMiscISVTrainer(module)) return 0; if (!init_BobLearnMiscIVectorMachine(module)) return 0; + if (!init_BobLearnMiscIVectorTrainer(module)) return 0; + if (!init_BobLearnMiscPLDABase(module)) return 0; if (!init_BobLearnMiscPLDAMachine(module)) return 0; diff --git a/bob/learn/misc/main.h b/bob/learn/misc/main.h index b3a4fb50531de118a146c14769e6157b153936e8..830cd3e70054a7ef0b0a54a95a3fce3a95517f5d 100644 --- a/bob/learn/misc/main.h +++ b/bob/learn/misc/main.h @@ -37,6 +37,8 @@ #include <bob.learn.misc/IVectorMachine.h> +#include <bob.learn.misc/IVectorTrainer.h> + #include <bob.learn.misc/PLDAMachine.h> #include <bob.learn.misc/ZTNorm.h> @@ -244,6 +246,17 @@ bool init_BobLearnMiscIVectorMachine(PyObject* module); int PyBobLearnMiscIVectorMachine_Check(PyObject* o); +// IVectorTrainer +typedef struct { + PyObject_HEAD + boost::shared_ptr<bob::learn::misc::IVectorTrainer> cxx; +} PyBobLearnMiscIVectorTrainerObject; + +extern PyTypeObject PyBobLearnMiscIVectorTrainer_Type; +bool init_BobLearnMiscIVectorTrainer(PyObject* module); +int PyBobLearnMiscIVectorTrainer_Check(PyObject* o); + + // PLDABase typedef struct { PyObject_HEAD diff --git a/bob/learn/misc/test_ivector_trainer.py b/bob/learn/misc/test_ivector_trainer.py index 2e047e320a76e81cc31b7ae09c0ef2aa06a5721f..8f87ead34a28e9f918d08e2743e8606a8d8c463d 100644 --- a/bob/learn/misc/test_ivector_trainer.py +++ b/bob/learn/misc/test_ivector_trainer.py @@ -28,8 +28,8 @@ class IVectorTrainerPy(): def initialize(self, machine, data): ubm = machine.ubm - self.m_dim_c = ubm.dim_c - self.m_dim_d = ubm.dim_d + self.m_dim_c = ubm.shape[0] + self.m_dim_d = 
ubm.shape[1] self.m_dim_t = machine.t.shape[1] self.m_meansupervector = ubm.mean_supervector t = numpy.random.randn(self.m_dim_c*self.m_dim_d, self.m_dim_t) diff --git a/setup.py b/setup.py index 088252fe4b66182f8c24adbed78cb0cee10f3308..209045070d4f56f4ae27545f737c068ddb5a7ccd 100644 --- a/setup.py +++ b/setup.py @@ -75,7 +75,7 @@ setup( #"bob/learn/misc/cpp/EMPCATrainer.cpp", "bob/learn/misc/cpp/GMMBaseTrainer.cpp", - #"bob/learn/misc/cpp/IVectorTrainer.cpp", + "bob/learn/misc/cpp/IVectorTrainer.cpp", "bob/learn/misc/cpp/KMeansTrainer.cpp", "bob/learn/misc/cpp/MAP_GMMTrainer.cpp", "bob/learn/misc/cpp/ML_GMMTrainer.cpp", @@ -126,6 +126,8 @@ setup( "bob/learn/misc/isv_trainer.cpp", "bob/learn/misc/ivector_machine.cpp", + "bob/learn/misc/ivector_trainer.cpp", + "bob/learn/misc/plda_base.cpp", "bob/learn/misc/plda_machine.cpp", "bob/learn/misc/ztnorm.cpp",