Commit ae73d4cd authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira

Binding IVectorTrainer

parent 1b855768
......@@ -16,6 +16,7 @@ from .__ML_gmm_trainer__ import *
from .__MAP_gmm_trainer__ import *
from .__jfa_trainer__ import *
from .__isv_trainer__ import *
from .__ivector_trainer__ import *
def ztnorm_same_value(vect_a, vect_b):
......
......@@ -11,7 +11,7 @@ import numpy
# define the class
class ISVTrainer (_ISVTrainer):
def __init__(self, max_iterations=10, relevance_factor=4., convergence_threshold = 0.001):
def __init__(self, max_iterations=10, relevance_factor=4.):
"""
:py:class:`bob.learn.misc.ISVTrainer` constructor
......@@ -19,7 +19,7 @@ class ISVTrainer (_ISVTrainer):
max_iterations
Maximum number of iterations
"""
_ISVTrainer.__init__(self, relevance_factor, convergence_threshold)
_ISVTrainer.__init__(self, relevance_factor)
self._max_iterations = max_iterations
......
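With convergence_threshold gone from the C++ class, the Python wrapper keeps only max_iterations and relevance_factor. A minimal construction sketch, assuming the wrapper above is exported as bob.learn.misc.ISVTrainer:

import bob.learn.misc

# new signature: no convergence_threshold anymore
trainer = bob.learn.misc.ISVTrainer(max_iterations=10, relevance_factor=4.)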
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
# Tue Feb 03 13:20:10 2015 +0200
#
# Copyright (C) 2011-2015 Idiap Research Institute, Martigny, Switzerland
from ._library import _IVectorTrainer
import numpy
# define the class
class IVectorTrainer (_IVectorTrainer):
def __init__(self, max_iterations=10, update_sigma=False):
"""
:py:class:`bob.learn.misc.IVectorTrainer` constructor
Keyword Parameters:
max_iterations
Maximum number of iterations
update_sigma
If enabled, the variance (sigma) of the :py:class:`bob.learn.misc.IVectorMachine` is also updated during the M-step
"""
_IVectorTrainer.__init__(self, update_sigma)
self._max_iterations = max_iterations
def train(self, ivector_machine, data):
"""
Train the :py:class:`bob.learn.misc.IVectorMachine` using data
Keyword Parameters:
ivector_machine
The :py:class:`bob.learn.misc.IVectorMachine` to be trained
data
The training data (a list of :py:class:`bob.learn.misc.GMMStats`)
"""
# Initialization
self.initialize(ivector_machine, data)
for i in range(self._max_iterations):
# E-step
self.eStep(ivector_machine, data)
# M-step
self.mStep(ivector_machine)
# copy the documentation from the base class
__doc__ = _IVectorTrainer.__doc__
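For context, a short usage sketch of the new IVectorTrainer wrapper. The IVectorMachine constructor arguments and the input variables (ubm, gmm_stats) are assumptions for illustration, not something this commit defines:

import bob.learn.misc

# assumed inputs: `ubm` is a trained bob.learn.misc.GMMMachine and `gmm_stats`
# is a list of bob.learn.misc.GMMStats accumulated against that UBM
machine = bob.learn.misc.IVectorMachine(ubm, 100)  # assumed signature: (ubm, rt)
trainer = bob.learn.misc.IVectorTrainer(max_iterations=10, update_sigma=True)
trainer.train(machine, gmm_stats)  # initialize(), then eStep()/mStep() for max_iterations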
......@@ -19,17 +19,16 @@
//////////////////////////// ISVTrainer ///////////////////////////
bob::learn::misc::ISVTrainer::ISVTrainer(const double relevance_factor, const double convergence_threshold):
bob::learn::misc::ISVTrainer::ISVTrainer(const double relevance_factor):
m_relevance_factor(relevance_factor),
m_convergence_threshold(convergence_threshold),
m_rng(new boost::mt19937())
{}
bob::learn::misc::ISVTrainer::ISVTrainer(const bob::learn::misc::ISVTrainer& other):
m_convergence_threshold(other.m_convergence_threshold),
m_relevance_factor(other.m_relevance_factor),
m_rng(other.m_rng)
{}
{
m_relevance_factor = other.m_relevance_factor;
}
bob::learn::misc::ISVTrainer::~ISVTrainer()
{}
......@@ -39,7 +38,6 @@ bob::learn::misc::ISVTrainer& bob::learn::misc::ISVTrainer::operator=
{
if (this != &other)
{
m_convergence_threshold = other.m_convergence_threshold;
m_rng = other.m_rng;
m_relevance_factor = other.m_relevance_factor;
}
......@@ -48,8 +46,7 @@ bob::learn::misc::ISVTrainer& bob::learn::misc::ISVTrainer::operator=
bool bob::learn::misc::ISVTrainer::operator==(const bob::learn::misc::ISVTrainer& b) const
{
return m_convergence_threshold == b.m_convergence_threshold &&
m_rng == b.m_rng &&
return m_rng == b.m_rng &&
m_relevance_factor == b.m_relevance_factor;
}
......@@ -61,9 +58,8 @@ bool bob::learn::misc::ISVTrainer::operator!=(const bob::learn::misc::ISVTrainer
bool bob::learn::misc::ISVTrainer::is_similar_to(const bob::learn::misc::ISVTrainer& b,
const double r_epsilon, const double a_epsilon) const
{
return m_convergence_threshold == b.m_convergence_threshold &&
m_rng == b.m_rng &&
m_relevance_factor == b.m_relevance_factor;
return m_rng == b.m_rng &&
m_relevance_factor == b.m_relevance_factor;
}
void bob::learn::misc::ISVTrainer::initialize(bob::learn::misc::ISVBase& machine,
......
......@@ -7,30 +7,26 @@
#include <bob.learn.misc/IVectorTrainer.h>
#include <bob.learn.misc/IVectorMachine.h>
#include <bob.core/check.h>
#include <bob.core/array_copy.h>
#include <bob.core/array_random.h>
#include <bob.core/check.h>
#include <bob.math/inv.h>
#include <bob.core/check.h>
#include <bob.core/array_repmat.h>
#include <algorithm>
#include <bob.math/linear.h>
#include <bob.math/linsolve.h>
#include <boost/shared_ptr.hpp>
bob::learn::misc::IVectorTrainer::IVectorTrainer(const bool update_sigma,
const double convergence_threshold,
const size_t max_iterations, bool compute_likelihood):
bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine,
std::vector<bob::learn::misc::GMMStats> >(convergence_threshold,
max_iterations, compute_likelihood),
m_update_sigma(update_sigma)
{
}
bob::learn::misc::IVectorTrainer::IVectorTrainer(const bool update_sigma):
m_update_sigma(update_sigma),
m_rng(new boost::mt19937())
{}
bob::learn::misc::IVectorTrainer::IVectorTrainer(const bob::learn::misc::IVectorTrainer& other):
bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine,
std::vector<bob::learn::misc::GMMStats> >(other),
m_update_sigma(other.m_update_sigma)
{
m_rng = other.m_rng;
m_acc_Nij_wij2.reference(bob::core::array::ccopy(other.m_acc_Nij_wij2));
m_acc_Fnormij_wij.reference(bob::core::array::ccopy(other.m_acc_Fnormij_wij));
m_acc_Nij.reference(bob::core::array::ccopy(other.m_acc_Nij));
......@@ -51,11 +47,10 @@ bob::learn::misc::IVectorTrainer::~IVectorTrainer()
}
void bob::learn::misc::IVectorTrainer::initialize(
bob::learn::misc::IVectorMachine& machine,
const std::vector<bob::learn::misc::GMMStats>& data)
bob::learn::misc::IVectorMachine& machine)
{
const int C = machine.getDimC();
const int D = machine.getDimD();
const int C = machine.getNGaussians();
const int D = machine.getNInputs();
const int Rt = machine.getDimRt();
// Cache
......@@ -91,7 +86,7 @@ void bob::learn::misc::IVectorTrainer::eStep(
const std::vector<bob::learn::misc::GMMStats>& data)
{
blitz::Range rall = blitz::Range::all();
const int C = machine.getDimC();
const int C = machine.getNGaussians();
// Reinitializes accumulators to 0
m_acc_Nij_wij2 = 0.;
......@@ -144,14 +139,13 @@ void bob::learn::misc::IVectorTrainer::eStep(
}
void bob::learn::misc::IVectorTrainer::mStep(
bob::learn::misc::IVectorMachine& machine,
const std::vector<bob::learn::misc::GMMStats>& data)
bob::learn::misc::IVectorMachine& machine)
{
blitz::Range rall = blitz::Range::all();
blitz::Array<double,2>& T = machine.updateT();
blitz::Array<double,1>& sigma = machine.updateSigma();
const int C = (int)machine.getDimC();
const int D = (int)machine.getDimD();
const int C = (int)machine.getNGaussians();
const int D = (int)machine.getNInputs();
for (int c=0; c<C; ++c)
{
// Solves linear system A.T = B to update T, based on accumulators of
......@@ -178,26 +172,11 @@ void bob::learn::misc::IVectorTrainer::mStep(
}
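For reference on the "Solves linear system A.T = B" comment in mStep above: per Gaussian component c, the rows of the total variability matrix T are obtained from the E-step accumulators. A hedged numpy sketch of that idea; the accumulator shapes and the row layout of T are assumptions, not taken from this diff:

import numpy

def update_T_sketch(acc_Nij_wij2, acc_Fnormij_wij):
    # assumed shapes: acc_Nij_wij2 (C, rt, rt), acc_Fnormij_wij (C, D, rt)
    C, _, rt = acc_Nij_wij2.shape
    D = acc_Fnormij_wij.shape[1]
    T = numpy.zeros((C * D, rt))
    for c in range(C):
        A = acc_Nij_wij2[c]              # (rt, rt), symmetric accumulator
        B = acc_Fnormij_wij[c]           # (D, rt)
        # T_c solves T_c A = B, so T_c = solve(A, B.T).T since A is symmetric
        T[c * D:(c + 1) * D, :] = numpy.linalg.solve(A, B.T).T
    return T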
double bob::learn::misc::IVectorTrainer::computeLikelihood(
bob::learn::misc::IVectorMachine& machine)
{
// TODO: implementation
return 0;
}
void bob::learn::misc::IVectorTrainer::finalize(
bob::learn::misc::IVectorMachine& machine,
const std::vector<bob::learn::misc::GMMStats>& data)
{
}
bob::learn::misc::IVectorTrainer& bob::learn::misc::IVectorTrainer::operator=
(const bob::learn::misc::IVectorTrainer &other)
{
if (this != &other)
{
bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine,
std::vector<bob::learn::misc::GMMStats> >::operator=(other);
{
m_update_sigma = other.m_update_sigma;
m_acc_Nij_wij2.reference(bob::core::array::ccopy(other.m_acc_Nij_wij2));
......@@ -220,13 +199,11 @@ bob::learn::misc::IVectorTrainer& bob::learn::misc::IVectorTrainer::operator=
bool bob::learn::misc::IVectorTrainer::operator==
(const bob::learn::misc::IVectorTrainer &other) const
{
return bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine,
std::vector<bob::learn::misc::GMMStats> >::operator==(other) &&
m_update_sigma == other.m_update_sigma &&
bob::core::array::isEqual(m_acc_Nij_wij2, other.m_acc_Nij_wij2) &&
bob::core::array::isEqual(m_acc_Fnormij_wij, other.m_acc_Fnormij_wij) &&
bob::core::array::isEqual(m_acc_Nij, other.m_acc_Nij) &&
bob::core::array::isEqual(m_acc_Snormij, other.m_acc_Snormij);
return m_update_sigma == other.m_update_sigma &&
bob::core::array::isEqual(m_acc_Nij_wij2, other.m_acc_Nij_wij2) &&
bob::core::array::isEqual(m_acc_Fnormij_wij, other.m_acc_Fnormij_wij) &&
bob::core::array::isEqual(m_acc_Nij, other.m_acc_Nij) &&
bob::core::array::isEqual(m_acc_Snormij, other.m_acc_Snormij);
}
bool bob::learn::misc::IVectorTrainer::operator!=
......@@ -239,12 +216,10 @@ bool bob::learn::misc::IVectorTrainer::is_similar_to
(const bob::learn::misc::IVectorTrainer &other, const double r_epsilon,
const double a_epsilon) const
{
return bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine,
std::vector<bob::learn::misc::GMMStats> >::is_similar_to(other, r_epsilon, a_epsilon) &&
m_update_sigma == other.m_update_sigma &&
bob::core::array::isClose(m_acc_Nij_wij2, other.m_acc_Nij_wij2, r_epsilon, a_epsilon) &&
bob::core::array::isClose(m_acc_Fnormij_wij, other.m_acc_Fnormij_wij, r_epsilon, a_epsilon) &&
bob::core::array::isClose(m_acc_Nij, other.m_acc_Nij, r_epsilon, a_epsilon) &&
bob::core::array::isClose(m_acc_Snormij, other.m_acc_Snormij, r_epsilon, a_epsilon);
return m_update_sigma == other.m_update_sigma &&
bob::core::array::isClose(m_acc_Nij_wij2, other.m_acc_Nij_wij2, r_epsilon, a_epsilon) &&
bob::core::array::isClose(m_acc_Fnormij_wij, other.m_acc_Fnormij_wij, r_epsilon, a_epsilon) &&
bob::core::array::isClose(m_acc_Nij, other.m_acc_Nij, r_epsilon, a_epsilon) &&
bob::core::array::isClose(m_acc_Snormij, other.m_acc_Snormij, r_epsilon, a_epsilon);
}
......@@ -31,7 +31,7 @@ class ISVTrainer
/**
* @brief Constructor
*/
ISVTrainer(const double relevance_factor=4., const double convergence_threshold = 0.001);
ISVTrainer(const double relevance_factor=4.);
/**
* @brief Copy constructor
......@@ -146,8 +146,6 @@ class ISVTrainer
double m_relevance_factor;
double m_convergence_threshold; ///< convergence threshold
boost::shared_ptr<boost::mt19937> m_rng; ///< The random number generator for the initialization
};
......
......@@ -9,11 +9,14 @@
#define BOB_LEARN_MISC_IVECTOR_TRAINER_H
#include <blitz/array.h>
#include <bob.learn.misc/EMTrainer.h>
#include <bob.learn.misc/IVectorMachine.h>
#include <bob.learn.misc/GMMStats.h>
#include <boost/shared_ptr.hpp>
#include <vector>
#include <bob.core/array_copy.h>
#include <boost/random.hpp>
#include <boost/random/mersenne_twister.hpp>
namespace bob { namespace learn { namespace misc {
......@@ -25,15 +28,13 @@ namespace bob { namespace learn { namespace misc {
* N. Dehak, P. Kenny, R. Dehak, P. Dumouchel, P. Ouellet,
* IEEE Trans. on Audio, Speech and Language Processing
*/
class IVectorTrainer: public bob::learn::misc::EMTrainer<bob::learn::misc::IVectorMachine, std::vector<bob::learn::misc::GMMStats> >
class IVectorTrainer
{
public:
/**
* @brief Default constructor. Builds an IVectorTrainer
*/
IVectorTrainer(const bool update_sigma=false,
const double convergence_threshold=0.001,
const size_t max_iterations=10, const bool compute_likelihood=false);
IVectorTrainer(const bool update_sigma=false);
/**
* @brief Copy constructor
......@@ -48,8 +49,7 @@ class IVectorTrainer: public bob::learn::misc::EMTrainer<bob::learn::misc::IVect
/**
* @brief Initialization before the EM loop
*/
virtual void initialize(bob::learn::misc::IVectorMachine& ivector,
const std::vector<bob::learn::misc::GMMStats>& data);
virtual void initialize(bob::learn::misc::IVectorMachine& ivector);
/**
* @brief Calculates statistics across the dataset,
......@@ -68,20 +68,8 @@ class IVectorTrainer: public bob::learn::misc::EMTrainer<bob::learn::misc::IVect
* @brief Maximisation step: Update the Total Variability matrix \f$T\f$
* and \f$\Sigma\f$ if update_sigma is enabled.
*/
virtual void mStep(bob::learn::misc::IVectorMachine& ivector,
const std::vector<bob::learn::misc::GMMStats>& data);
virtual void mStep(bob::learn::misc::IVectorMachine& ivector);
/**
* @brief Computes the likelihood using current estimates
* @warning (currently unsupported)
*/
virtual double computeLikelihood(bob::learn::misc::IVectorMachine& ivector);
/**
* @brief Finalization after the EM loop
*/
virtual void finalize(bob::learn::misc::IVectorMachine& ivector,
const std::vector<bob::learn::misc::GMMStats>& data);
/**
* @brief Assigns from a different IVectorTrainer
......@@ -152,6 +140,11 @@ class IVectorTrainer: public bob::learn::misc::EMTrainer<bob::learn::misc::IVect
mutable blitz::Array<double,2> m_tmp_dt1;
mutable blitz::Array<double,2> m_tmp_tt1;
mutable blitz::Array<double,2> m_tmp_tt2;
/**
* @brief The random number generator for the initialization
*/
boost::shared_ptr<boost::mt19937> m_rng;
};
} } } // namespaces
......
......@@ -94,7 +94,7 @@ static auto ISVTrainer_doc = bob::extension::ClassDoc(
"",
true
)
.add_prototype("relevance_factor,convergence_threshold","")
.add_prototype("relevance_factor","")
.add_prototype("other","")
.add_prototype("","")
.add_parameter("other", ":py:class:`bob.learn.misc.ISVTrainer`", "A ISVTrainer object to be copied.")
......@@ -121,9 +121,9 @@ static int PyBobLearnMiscISVTrainer_init_number(PyBobLearnMiscISVTrainerObject*
char** kwlist = ISVTrainer_doc.kwlist(0);
double relevance_factor = 4.;
double convergence_threshold = 0.001;
// Parsing the input arguments
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "dd", kwlist, &relevance_factor, &convergence_threshold))
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "d", kwlist, &relevance_factor))
return -1;
if(relevance_factor < 0){
......@@ -131,12 +131,7 @@ static int PyBobLearnMiscISVTrainer_init_number(PyBobLearnMiscISVTrainerObject*
return -1;
}
if(convergence_threshold < 0){
PyErr_Format(PyExc_TypeError, "convergence_threshold argument must be greater than zero");
return -1;
}
self->cxx.reset(new bob::learn::misc::ISVTrainer(relevance_factor, convergence_threshold));
self->cxx.reset(new bob::learn::misc::ISVTrainer(relevance_factor));
return 0;
}
......@@ -153,15 +148,25 @@ static int PyBobLearnMiscISVTrainer_init(PyBobLearnMiscISVTrainerObject* self, P
return 0;
}
case 1:{
// If the constructor input is ISVTrainer object
return PyBobLearnMiscISVTrainer_init_copy(self, args, kwargs);
}
case 2:{
// If the constructor input is a number
return PyBobLearnMiscISVTrainer_init_number(self, args, kwargs);
//Reading the input argument
PyObject* arg = 0;
if (PyTuple_Size(args))
arg = PyTuple_GET_ITEM(args, 0);
else {
PyObject* tmp = PyDict_Values(kwargs);
auto tmp_ = make_safe(tmp);
arg = PyList_GET_ITEM(tmp, 0);
}
if(PyBobLearnMiscISVTrainer_Check(arg))
// If the constructor input is ISVTrainer object
return PyBobLearnMiscISVTrainer_init_copy(self, args, kwargs);
else
return PyBobLearnMiscISVTrainer_init_number(self, args, kwargs);
}
default:{
PyErr_Format(PyExc_RuntimeError, "number of arguments mismatch - %s requires only 0, 1 or 2 arguments, but you provided %d (see help)", Py_TYPE(self)->tp_name, nargs);
PyErr_Format(PyExc_RuntimeError, "number of arguments mismatch - %s requires only 0 or 1 arguments, but you provided %d (see help)", Py_TYPE(self)->tp_name, nargs);
ISVTrainer_doc.print_usage();
return -1;
}
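The rewritten case 1 above means a single constructor argument is now dispatched by type rather than by count. A hedged sketch at the Python level, assuming the binding is reachable as _ISVTrainer in bob.learn.misc._library (mirroring the _IVectorTrainer import used by the Python wrapper; the exact exported name may differ):

from bob.learn.misc._library import _ISVTrainer

t0 = _ISVTrainer()      # 0 arguments: defaults
t1 = _ISVTrainer(4.)    # 1 number:    taken as relevance_factor
t2 = _ISVTrainer(t1)    # 1 trainer:   copy constructor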
......@@ -330,14 +335,14 @@ static PyGetSetDef PyBobLearnMiscISVTrainer_getseters[] = {
{
acc_u_a1.name(),
(getter)PyBobLearnMiscISVTrainer_get_acc_u_a1,
(setter)PyBobLearnMiscISVTrainer_get_acc_u_a1,
(setter)PyBobLearnMiscISVTrainer_set_acc_u_a1,
acc_u_a1.doc(),
0
},
{
acc_u_a2.name(),
(getter)PyBobLearnMiscISVTrainer_get_acc_u_a2,
(setter)PyBobLearnMiscISVTrainer_get_acc_u_a2,
(setter)PyBobLearnMiscISVTrainer_set_acc_u_a2,
acc_u_a2.doc(),
0
},
......
......@@ -285,6 +285,43 @@ int PyBobLearnMiscIVectorMachine_setVarianceThreshold(PyBobLearnMiscIVectorMachi
}
/***** ubm *****/
static auto ubm = bob::extension::VariableDoc(
"ubm",
":py:class:`bob.learn.misc.GMMMachine`",
"Returns the UBM (Universal Background Model",
""
);
PyObject* PyBobLearnMiscIVectorMachine_getUBM(PyBobLearnMiscIVectorMachineObject* self, void*){
BOB_TRY
boost::shared_ptr<bob::learn::misc::GMMMachine> ubm_gmmMachine = self->cxx->getUbm();
// Allocating the corresponding Python object
PyBobLearnMiscGMMMachineObject* retval =
(PyBobLearnMiscGMMMachineObject*)PyBobLearnMiscGMMMachine_Type.tp_alloc(&PyBobLearnMiscGMMMachine_Type, 0);
retval->cxx = ubm_gmmMachine;
return Py_BuildValue("O",retval);
BOB_CATCH_MEMBER("ubm could not be read", 0)
}
int PyBobLearnMiscIVectorMachine_setUBM(PyBobLearnMiscIVectorMachineObject* self, PyObject* value, void*){
BOB_TRY
if (!PyBobLearnMiscGMMMachine_Check(value)){
PyErr_Format(PyExc_RuntimeError, "%s %s expects a :py:class:`bob.learn.misc.GMMMachine`", Py_TYPE(self)->tp_name, ubm.name());
return -1;
}
PyBobLearnMiscGMMMachineObject* ubm_gmmMachine = 0;
PyArg_Parse(value, "O!", &PyBobLearnMiscGMMMachine_Type,&ubm_gmmMachine);
self->cxx->setUbm(ubm_gmmMachine->cxx);
return 0;
BOB_CATCH_MEMBER("ubm could not be set", -1)
}
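At the Python level the new getter/setter pair surfaces as a plain ubm attribute on the machine (registered in the getseters table below). A minimal sketch, with `machine` assumed to be a bob.learn.misc.IVectorMachine and `new_ubm` a bob.learn.misc.GMMMachine:

machine.ubm = new_ubm   # routed through PyBobLearnMiscIVectorMachine_setUBM
ubm_back = machine.ubm  # routed through PyBobLearnMiscIVectorMachine_getUBM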
static PyGetSetDef PyBobLearnMiscIVectorMachine_getseters[] = {
{
......@@ -327,6 +364,14 @@ static PyGetSetDef PyBobLearnMiscIVectorMachine_getseters[] = {
0
},
{
ubm.name(),
(getter)PyBobLearnMiscIVectorMachine_getUBM,
(setter)PyBobLearnMiscIVectorMachine_setUBM,
ubm.doc(),
0
},
{0} // Sentinel
};
......@@ -483,6 +528,64 @@ static PyObject* PyBobLearnMiscIVectorMachine_resize(PyBobLearnMiscIVectorMachin
}
/*** __compute_Id_TtSigmaInvT__ ***/
static auto __compute_Id_TtSigmaInvT__ = bob::extension::FunctionDoc(
"__compute_Id_TtSigmaInvT__",
"",
"",
true
)
.add_prototype("stats")
.add_parameter("stats", ":py:class:`bob.learn.misc.GMMStats`", "Statistics as input");
static PyObject* PyBobLearnMiscIVectorMachine_compute_Id_TtSigmaInvT__(PyBobLearnMiscIVectorMachineObject* self, PyObject* args, PyObject* kwargs) {
BOB_TRY
char** kwlist = __compute_Id_TtSigmaInvT__.kwlist(0);
PyBobLearnMiscGMMStatsObject* stats = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!", kwlist, &PyBobLearnMiscGMMStats_Type, &stats))
return 0;  // propagate the TypeError raised by PyArg_ParseTupleAndKeywords
blitz::Array<double,2> output(self->cxx->getDimRt(), self->cxx->getDimRt());
self->cxx->computeIdTtSigmaInvT(*stats->cxx, output);
return PyBlitzArrayCxx_AsConstNumpy(output);
BOB_CATCH_MEMBER("cannot __compute_Id_TtSigmaInvT__", 0)
}
/*** __compute_TtSigmaInvFnorm__ ***/
static auto __compute_TtSigmaInvFnorm__ = bob::extension::FunctionDoc(
"__compute_TtSigmaInvFnorm__",
"",
"",
true
)
.add_prototype("stats")
.add_parameter("stats", ":py:class:`bob.learn.misc.GMMStats`", "Statistics as input");
static PyObject* PyBobLearnMiscIVectorMachine_compute_TtSigmaInvFnorm__(PyBobLearnMiscIVectorMachineObject* self, PyObject* args, PyObject* kwargs) {
BOB_TRY
char** kwlist = __compute_TtSigmaInvFnorm__.kwlist(0);
PyBobLearnMiscGMMStatsObject* stats = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!", kwlist, &PyBobLearnMiscGMMStats_Type, &stats))
return 0;  // propagate the TypeError raised by PyArg_ParseTupleAndKeywords
blitz::Array<double,1> output(self->cxx->getDimRt());
self->cxx->computeTtSigmaInvFnorm(*stats->cxx, output);
return PyBlitzArrayCxx_AsConstNumpy(output);
BOB_CATCH_MEMBER("cannot __compute_TtSigmaInvFnorm__", 0)
}
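Together, these two private helpers expose the pieces of the standard i-vector point estimate, w = (I + T^t Sigma^-1 N T)^-1 T^t Sigma^-1 Fnorm. A hedged sketch of combining them from Python; the method names come from this diff, while `machine` and `stats` are assumed to be an IVectorMachine and a GMMStats object:

import numpy

A = machine.__compute_Id_TtSigmaInvT__(stats)   # (rt, rt): I + T^t Sigma^-1 N T
b = machine.__compute_TtSigmaInvFnorm__(stats)  # (rt,):    T^t Sigma^-1 Fnorm
w = numpy.linalg.solve(A, b)                    # the i-vector for these statistics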
static PyMethodDef PyBobLearnMiscIVectorMachine_methods[] = {
{
......@@ -509,6 +612,18 @@ static PyMethodDef PyBobLearnMiscIVectorMachine_methods[] = {
METH_VARARGS|METH_KEYWORDS,
resize.doc()
},
{
__compute_Id_TtSigmaInvT__.name(),
(PyCFunction)PyBobLearnMiscIVectorMachine_compute_Id_TtSigmaInvT__,
METH_VARARGS|METH_KEYWORDS,
__compute_Id_TtSigmaInvT__.doc()
},
{
__compute_TtSigmaInvFnorm__.name(),
(PyCFunction)PyBobLearnMiscIVectorMachine_compute_TtSigmaInvFnorm__,
METH_VARARGS|METH_KEYWORDS,
__compute_TtSigmaInvFnorm__.doc()
},
/*
{
......
/**
* @author Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
* @date Tue 03 Feb 10:29:00 2015
*
* @brief Python API for bob::learn::em