diff --git a/bob/learn/misc/cpp/GMMMachine.cpp b/bob/learn/misc/cpp/GMMMachine.cpp
index a8636ce1f7dadc0b10659de8ed23d0a25c190a42..6261d42d6ffe7eae63dc262e3a210ca972678720 100644
--- a/bob/learn/misc/cpp/GMMMachine.cpp
+++ b/bob/learn/misc/cpp/GMMMachine.cpp
@@ -94,23 +94,9 @@ void bob::learn::misc::GMMMachine::copy(const GMMMachine& other) {
 bob::learn::misc::GMMMachine::~GMMMachine() { }
 
 
-void bob::learn::misc::GMMMachine::resize(const size_t n_gaussians, const size_t n_inputs) {
-  m_n_gaussians = n_gaussians;
-  m_n_inputs = n_inputs;
-
-  // Initialise weights
-  m_weights.resize(m_n_gaussians);
-  m_weights = 1.0 / m_n_gaussians;
-
-  // Initialise Gaussians
-  m_gaussians.clear();
-  for(size_t i=0; i<m_n_gaussians; ++i)
-    m_gaussians.push_back(boost::shared_ptr<bob::learn::misc::Gaussian>(new bob::learn::misc::Gaussian(n_inputs)));
-
-  // Initialise cache arrays
-  initCache();
-}
-
+/////////////////////
+// Setters 
+////////////////////
 
 void bob::learn::misc::GMMMachine::setWeights(const blitz::Array<double,1> &weights) {
   bob::core::array::assertSameShape(weights, m_weights);
@@ -131,13 +117,6 @@ void bob::learn::misc::GMMMachine::setMeans(const blitz::Array<double,2> &means)
   m_cache_supervector = false;
 }
 
-void bob::learn::misc::GMMMachine::getMeans(blitz::Array<double,2> &means) const {
-  bob::core::array::assertSameDimensionLength(means.extent(0), m_n_gaussians);
-  bob::core::array::assertSameDimensionLength(means.extent(1), m_n_inputs);
-  for(size_t i=0; i<m_n_gaussians; ++i)
-    means(i,blitz::Range::all()) = m_gaussians[i]->getMean();
-}
-
 void bob::learn::misc::GMMMachine::setMeanSupervector(const blitz::Array<double,1> &mean_supervector) {
   bob::core::array::assertSameDimensionLength(mean_supervector.extent(0), m_n_gaussians*m_n_inputs);
   for(size_t i=0; i<m_n_gaussians; ++i)
@@ -145,11 +124,6 @@ void bob::learn::misc::GMMMachine::setMeanSupervector(const blitz::Array<double,
   m_cache_supervector = false;
 }
 
-void bob::learn::misc::GMMMachine::getMeanSupervector(blitz::Array<double,1> &mean_supervector) const {
-  bob::core::array::assertSameDimensionLength(mean_supervector.extent(0), m_n_gaussians*m_n_inputs);
-  for(size_t i=0; i<m_n_gaussians; ++i)
-    mean_supervector(blitz::Range(i*m_n_inputs, (i+1)*m_n_inputs-1)) = m_gaussians[i]->getMean();
-}
 
 void bob::learn::misc::GMMMachine::setVariances(const blitz::Array<double, 2 >& variances) {
   bob::core::array::assertSameDimensionLength(variances.extent(0), m_n_gaussians);
@@ -161,13 +135,6 @@ void bob::learn::misc::GMMMachine::setVariances(const blitz::Array<double, 2 >&
   m_cache_supervector = false;
 }
 
-void bob::learn::misc::GMMMachine::getVariances(blitz::Array<double, 2 >& variances) const {
-  bob::core::array::assertSameDimensionLength(variances.extent(0), m_n_gaussians);
-  bob::core::array::assertSameDimensionLength(variances.extent(1), m_n_inputs);
-  for(size_t i=0; i<m_n_gaussians; ++i)
-    variances(i,blitz::Range::all()) = m_gaussians[i]->getVariance();
-}
-
 void bob::learn::misc::GMMMachine::setVarianceSupervector(const blitz::Array<double,1> &variance_supervector) {
   bob::core::array::assertSameDimensionLength(variance_supervector.extent(0), m_n_gaussians*m_n_inputs);
   for(size_t i=0; i<m_n_gaussians; ++i) {
@@ -177,13 +144,6 @@ void bob::learn::misc::GMMMachine::setVarianceSupervector(const blitz::Array<dou
   m_cache_supervector = false;
 }
 
-void bob::learn::misc::GMMMachine::getVarianceSupervector(blitz::Array<double,1> &variance_supervector) const {
-  bob::core::array::assertSameDimensionLength(variance_supervector.extent(0), m_n_gaussians*m_n_inputs);
-  for(size_t i=0; i<m_n_gaussians; ++i) {
-    variance_supervector(blitz::Range(i*m_n_inputs, (i+1)*m_n_inputs-1)) = m_gaussians[i]->getVariance();
-  }
-}
-
 void bob::learn::misc::GMMMachine::setVarianceThresholds(const double value) {
   for(size_t i=0; i<m_n_gaussians; ++i)
     m_gaussians[i]->setVarianceThresholds(value);
@@ -205,11 +165,60 @@ void bob::learn::misc::GMMMachine::setVarianceThresholds(const blitz::Array<doub
   m_cache_supervector = false;
 }
 
-void bob::learn::misc::GMMMachine::getVarianceThresholds(blitz::Array<double, 2>& variance_thresholds) const {
-  bob::core::array::assertSameDimensionLength(variance_thresholds.extent(0), m_n_gaussians);
-  bob::core::array::assertSameDimensionLength(variance_thresholds.extent(1), m_n_inputs);
+/////////////////////
+// Getters 
+////////////////////
+
+const blitz::Array<double,2> bob::learn::misc::GMMMachine::getMeans() const {
+
+  blitz::Array<double,2> means(m_n_gaussians,m_n_inputs);  
+  for(size_t i=0; i<m_n_gaussians; ++i)
+    means(i,blitz::Range::all()) = m_gaussians[i]->getMean();
+    
+  return means;
+}
+
+const blitz::Array<double,2> bob::learn::misc::GMMMachine::getVariances() const{
+  
+  blitz::Array<double,2> variances(m_n_gaussians,m_n_inputs);
+  for(size_t i=0; i<m_n_gaussians; ++i)
+    variances(i,blitz::Range::all()) = m_gaussians[i]->getVariance();
+
+  return variances;
+}
+
+
+const blitz::Array<double,2>  bob::learn::misc::GMMMachine::getVarianceThresholds() const {
+  //bob::core::array::assertSameDimensionLength(variance_thresholds.extent(0), m_n_gaussians);
+  //bob::core::array::assertSameDimensionLength(variance_thresholds.extent(1), m_n_inputs);
+  blitz::Array<double, 2> variance_thresholds(m_n_gaussians, m_n_inputs);
   for(size_t i=0; i<m_n_gaussians; ++i)
     variance_thresholds(i,blitz::Range::all()) = m_gaussians[i]->getVarianceThresholds();
+
+  return variance_thresholds;
+}
+
+
+/////////////////////
+// Methods
+////////////////////
+
+
+void bob::learn::misc::GMMMachine::resize(const size_t n_gaussians, const size_t n_inputs) {
+  m_n_gaussians = n_gaussians;
+  m_n_inputs = n_inputs;
+
+  // Initialise weights
+  m_weights.resize(m_n_gaussians);
+  m_weights = 1.0 / m_n_gaussians;
+
+  // Initialise Gaussians
+  m_gaussians.clear();
+  for(size_t i=0; i<m_n_gaussians; ++i)
+    m_gaussians.push_back(boost::shared_ptr<bob::learn::misc::Gaussian>(new bob::learn::misc::Gaussian(n_inputs)));
+
+  // Initialise cache arrays
+  initCache();
 }
 
 double bob::learn::misc::GMMMachine::logLikelihood(const blitz::Array<double, 1> &x,
@@ -252,23 +261,6 @@ double bob::learn::misc::GMMMachine::logLikelihood_(const blitz::Array<double, 1
   return logLikelihood_(x,m_cache_log_weighted_gaussian_likelihoods);
 }
 
-/*
-void bob::learn::misc::GMMMachine::forward(const blitz::Array<double,1>& input, double& output) const {
-  if(static_cast<size_t>(input.extent(0)) != m_n_inputs) {
-    boost::format m("expected input size (%u) does not match the size of input array (%d)");
-    m % m_n_inputs % input.extent(0);
-    throw std::runtime_error(m.str());
-  }
-
-  forward_(input,output);
-}
-
-void bob::learn::misc::GMMMachine::forward_(const blitz::Array<double,1>& input,
-    double& output) const {
-  output = logLikelihood(input);
-}
-*/
-
 void bob::learn::misc::GMMMachine::accStatistics(const blitz::Array<double,2>& input,
     bob::learn::misc::GMMStats& stats) const {
   // iterate over data
@@ -342,18 +334,10 @@ void bob::learn::misc::GMMMachine::accStatisticsInternal(const blitz::Array<doub
   stats.sumPxx += (m_cache_Px(i,j) * x(j));
 }
 
-boost::shared_ptr<const bob::learn::misc::Gaussian> bob::learn::misc::GMMMachine::getGaussian(const size_t i) const {
+boost::shared_ptr<bob::learn::misc::Gaussian> bob::learn::misc::GMMMachine::getGaussian(const size_t i) {
   if (i>=m_n_gaussians) {
     throw std::runtime_error("getGaussian(): index out of bounds");
   }
-  boost::shared_ptr<const bob::learn::misc::Gaussian> res = m_gaussians[i];
-  return res;
-}
-
-boost::shared_ptr<bob::learn::misc::Gaussian> bob::learn::misc::GMMMachine::updateGaussian(const size_t i) {
-  if (i>=m_n_gaussians) {
-    throw std::runtime_error("updateGaussian(): index out of bounds");
-  }
   return m_gaussians[i];
 }
 
diff --git a/bob/learn/misc/gmm_machine.cpp b/bob/learn/misc/gmm_machine.cpp
index d5fe205f4d8789413b149b5f34d020519a772516..05e4ad0926341f0e895dcb9bbadef09708ae5d47 100644
--- a/bob/learn/misc/gmm_machine.cpp
+++ b/bob/learn/misc/gmm_machine.cpp
@@ -20,7 +20,7 @@ static auto GMMMachine_doc = bob::extension::ClassDoc(
 ).add_constructor(
   bob::extension::FunctionDoc(
     "__init__",
-    "A container for GMM statistics.",
+    "Creates a GMMMachine",
     "",
     true
   )
@@ -184,6 +184,145 @@ PyObject* PyBobLearnMiscGMMMachine_getShape(PyBobLearnMiscGMMMachineObject* self
   BOB_CATCH_MEMBER("shape could not be read", 0)
 }
 
+/***** MEAN *****/
+
+static auto means = bob::extension::VariableDoc(
+  "means",
+  "array_like <float, 2D>",
+  "The means of the gaussians",
+  ""
+);
+PyObject* PyBobLearnMiscGMMMachine_getMeans(PyBobLearnMiscGMMMachineObject* self, void*){
+  BOB_TRY
+  return PyBlitzArrayCxx_AsConstNumpy(self->cxx->getMeans());
+  BOB_CATCH_MEMBER("means could not be read", 0)
+}
+int PyBobLearnMiscGMMMachine_setMeans(PyBobLearnMiscGMMMachineObject* self, PyObject* value, void*){
+  BOB_TRY
+  PyBlitzArrayObject* o;
+  if (!PyBlitzArray_Converter(value, &o)){
+    PyErr_Format(PyExc_RuntimeError, "%s %s expects a 2D array of floats", Py_TYPE(self)->tp_name, means.name());
+    return -1;
+  }
+  auto o_ = make_safe(o);
+  auto b = PyBlitzArrayCxx_AsBlitz<double,2>(o, "means");
+  if (!b) return -1;
+  self->cxx->setMeans(*b);
+  return 0;
+  BOB_CATCH_MEMBER("means could not be set", -1)
+}
+
+/***** Variance *****/
+static auto variances = bob::extension::VariableDoc(
+  "variances",
+  "array_like <float, 2D>",
+  "Variances of the gaussians",
+  ""
+);
+PyObject* PyBobLearnMiscGMMMachine_getVariances(PyBobLearnMiscGMMMachineObject* self, void*){
+  BOB_TRY
+  return PyBlitzArrayCxx_AsConstNumpy(self->cxx->getVariances());
+  BOB_CATCH_MEMBER("variances could not be read", 0)
+}
+int PyBobLearnMiscGMMMachine_setVariances(PyBobLearnMiscGMMMachineObject* self, PyObject* value, void*){
+  BOB_TRY
+  PyBlitzArrayObject* o;
+  if (!PyBlitzArray_Converter(value, &o)){
+    PyErr_Format(PyExc_RuntimeError, "%s %s expects a 2D array of floats", Py_TYPE(self)->tp_name, variances.name());
+    return -1;
+  }
+  auto o_ = make_safe(o);
+  auto b = PyBlitzArrayCxx_AsBlitz<double,2>(o, "variances");
+  if (!b) return -1;
+  self->cxx->setVariances(*b);
+  return 0;
+  BOB_CATCH_MEMBER("variances could not be set", -1)
+}
+
+/***** Weights *****/
+static auto weights = bob::extension::VariableDoc(
+  "weights",
+  "array_like <float, 1D>",
+  "The weights (also known as \"mixing coefficients\")",
+  ""
+);
+PyObject* PyBobLearnMiscGMMMachine_getWeights(PyBobLearnMiscGMMMachineObject* self, void*){
+  BOB_TRY
+  return PyBlitzArrayCxx_AsConstNumpy(self->cxx->getWeights());
+  BOB_CATCH_MEMBER("weights could not be read", 0)
+}
+int PyBobLearnMiscGMMMachine_setWeights(PyBobLearnMiscGMMMachineObject* self, PyObject* value, void*){
+  BOB_TRY
+  PyBlitzArrayObject* o;
+  if (!PyBlitzArray_Converter(value, &o)){
+    PyErr_Format(PyExc_RuntimeError, "%s %s expects a 1D array of floats", Py_TYPE(self)->tp_name, weights.name());
+    return -1;
+  }
+  auto o_ = make_safe(o);
+  auto b = PyBlitzArrayCxx_AsBlitz<double,1>(o, "weights");
+  if (!b) return -1;
+  self->cxx->setWeights(*b);
+  return 0;
+  BOB_CATCH_MEMBER("weights could not be set", -1)
+}
+
+
+/***** variance_supervector *****/
+static auto variance_supervector = bob::extension::VariableDoc(
+  "variance_supervector",
+  "array_like <float, 1D>",
+  "The variance supervector of the GMMMachine",
+  "Concatenation of the variance vectors of each Gaussian of the GMMMachine"
+);
+PyObject* PyBobLearnMiscGMMMachine_getVarianceSupervector(PyBobLearnMiscGMMMachineObject* self, void*){
+  BOB_TRY
+  return PyBlitzArrayCxx_AsConstNumpy(self->cxx->getVarianceSupervector());
+  BOB_CATCH_MEMBER("variance_supervector could not be read", 0)
+}
+
+
+/***** mean_supervector *****/
+static auto mean_supervector = bob::extension::VariableDoc(
+  "mean_supervector",
+  "array_like <float, 1D>",
+  "The mean supervector of the GMMMachine",
+  "Concatenation of the mean vectors of each Gaussian of the GMMMachine"
+);
+PyObject* PyBobLearnMiscGMMMachine_getMeanSupervector(PyBobLearnMiscGMMMachineObject* self, void*){
+  BOB_TRY
+  return PyBlitzArrayCxx_AsConstNumpy(self->cxx->getMeanSupervector());
+  BOB_CATCH_MEMBER("mean_supervector could not be read", 0)
+}
+
+
+/***** variance_thresholds *****/
+static auto variance_thresholds = bob::extension::VariableDoc(
+  "variance_thresholds",
+  "array_like <double, 2D>",
+  "Set the variance flooring thresholds in each dimension to the same vector for all Gaussian components if the argument is a 1D numpy array, and equal for all Gaussian components and dimensions if the parameter is a scalar.",
+  ""
+);
+PyObject* PyBobLearnMiscGMMMachine_getVarianceThresholds(PyBobLearnMiscGMMMachineObject* self, void*){
+  BOB_TRY
+  return PyBlitzArrayCxx_AsConstNumpy(self->cxx->getVarianceThresholds());
+  BOB_CATCH_MEMBER("variance_thresholds could not be read", 0)
+}
+int PyBobLearnMiscGMMMachine_setVarianceThresholds(PyBobLearnMiscGMMMachineObject* self, PyObject* value, void*){
+  BOB_TRY
+  PyBlitzArrayObject* o;
+  if (!PyBlitzArray_Converter(value, &o)){
+    PyErr_Format(PyExc_RuntimeError, "%s %s expects a 2D array of floats", Py_TYPE(self)->tp_name, variance_thresholds.name());
+    return -1;
+  }
+  auto o_ = make_safe(o);
+  auto b = PyBlitzArrayCxx_AsBlitz<double,2>(o, "variance_thresholds");
+  if (!b) return -1;
+  self->cxx->setVarianceThresholds(*b);
+  return 0;
+  BOB_CATCH_MEMBER("variance_thresholds could not be set", -1)  
+}
+
+
 
 
 static PyGetSetDef PyBobLearnMiscGMMMachine_getseters[] = { 
@@ -194,7 +333,50 @@ static PyGetSetDef PyBobLearnMiscGMMMachine_getseters[] = {
    shape.doc(),
    0
   },
+  {
+   means.name(),
+   (getter)PyBobLearnMiscGMMMachine_getMeans,
+   (setter)PyBobLearnMiscGMMMachine_setMeans,
+   means.doc(),
+   0
+  },
+  {
+   variances.name(),
+   (getter)PyBobLearnMiscGMMMachine_getVariances,
+   (setter)PyBobLearnMiscGMMMachine_setVariances,
+   variances.doc(),
+   0
+  },
+  {
+   weights.name(),
+   (getter)PyBobLearnMiscGMMMachine_getWeights,
+   (setter)PyBobLearnMiscGMMMachine_setWeights,
+   weights.doc(),
+   0
+  },
+  {
+   variance_thresholds.name(),
+   (getter)PyBobLearnMiscGMMMachine_getVarianceThresholds,
+   (setter)PyBobLearnMiscGMMMachine_setVarianceThresholds,
+   variance_thresholds.doc(),
+   0
+  },
+  {
+   variance_supervector.name(),
+   (getter)PyBobLearnMiscGMMMachine_getVarianceSupervector,
+   0,
+   variance_supervector.doc(),
+   0
+  },
 
+  {
+   mean_supervector.name(),
+   (getter)PyBobLearnMiscGMMMachine_getMeanSupervector,
+   0,
+   mean_supervector.doc(),
+   0
+  },
+  
   {0}  // Sentinel
 };
 
@@ -328,6 +510,191 @@ static PyObject* PyBobLearnMiscGMMMachine_resize(PyBobLearnMiscGMMMachineObject*
 }
 
 
+/*** log_likelihood ***/
+static auto log_likelihood = bob::extension::FunctionDoc(
+  "log_likelihood",
+  "Output the log likelihood of the sample, x, i.e. log(p(x|GMM)). Inputs are checked.",
+  ".. note:: The :py:meth:`__call__` function is an alias for this.", 
+  true
+)
+.add_prototype("input","output")
+.add_parameter("input", "array_like <float, 1D>", "Input vector")
+.add_return("output","float","The log likelihood");
+static PyObject* PyBobLearnMiscGMMMachine_loglikelihood(PyBobLearnMiscGMMMachineObject* self, PyObject* args, PyObject* kwargs) {
+  BOB_TRY
+  
+  char** kwlist = log_likelihood.kwlist(0);
+
+  PyBlitzArrayObject* input = 0;
+
+  if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&", kwlist, &PyBlitzArray_Converter, &input)) Py_RETURN_NONE;
+  //protects acquired resources through this scope
+  auto input_ = make_safe(input);
+
+  double value = self->cxx->logLikelihood(*PyBlitzArrayCxx_AsBlitz<double,1>(input));
+  return Py_BuildValue("d", value);
+
+  BOB_CATCH_MEMBER("cannot compute the likelihood", 0)
+}
+
+
+/*** log_likelihood_ ***/
+static auto log_likelihood_ = bob::extension::FunctionDoc(
+  "log_likelihood_",
+  "Output the log likelihood of the sample, x, i.e. log(p(x|GMM)). Inputs are NOT checked.",
+  "", 
+  true
+)
+.add_prototype("input","output")
+.add_parameter("input", "array_like <float, 1D>", "Input vector")
+.add_return("output","float","The log likelihood");
+static PyObject* PyBobLearnMiscGMMMachine_loglikelihood_(PyBobLearnMiscGMMMachineObject* self, PyObject* args, PyObject* kwargs) {
+  BOB_TRY
+  
+  char** kwlist = log_likelihood_.kwlist(0);
+
+  PyBlitzArrayObject* input = 0;
+
+  if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&", kwlist, &PyBlitzArray_Converter, &input)) Py_RETURN_NONE;
+  //protects acquired resources through this scope
+  auto input_ = make_safe(input);
+
+  double value = self->cxx->logLikelihood_(*PyBlitzArrayCxx_AsBlitz<double,1>(input));
+  return Py_BuildValue("d", value);
+
+  BOB_CATCH_MEMBER("cannot compute the likelihood", 0)
+}
+
+
+/*** acc_statistics ***/
+static auto acc_statistics = bob::extension::FunctionDoc(
+  "acc_statistics",
+  "Accumulate the GMM statistics for this sample(s). Inputs are checked.",
+  "", 
+  true
+)
+.add_prototype("input,stats")
+.add_parameter("input", "array_like <float, 2D>", "Input vector")
+.add_parameter("stats", ":py:class:`bob.learn.misc.GMMStats`", "Statistics of the GMM");
+static PyObject* PyBobLearnMiscGMMMachine_accStatistics(PyBobLearnMiscGMMMachineObject* self, PyObject* args, PyObject* kwargs) {
+  BOB_TRY
+
+  char** kwlist = acc_statistics.kwlist(0);
+
+  PyBlitzArrayObject* input           = 0;
+  PyBobLearnMiscGMMStatsObject* stats = 0;
+
+  if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&O!", kwlist, &PyBlitzArray_Converter,&input, 
+                                                                 &PyBobLearnMiscGMMStats_Type, &stats))
+    Py_RETURN_NONE;
+
+  //protects acquired resources through this scope
+  auto input_ = make_safe(input);
+  self->cxx->accStatistics(*PyBlitzArrayCxx_AsBlitz<double,2>(input), *stats->cxx);
+
+  BOB_CATCH_MEMBER("cannot accumulate the statistics", 0)
+  Py_RETURN_NONE;
+}
+
+
+/*** acc_statistics_ ***/
+static auto acc_statistics_ = bob::extension::FunctionDoc(
+  "acc_statistics_",
+  "Accumulate the GMM statistics for this sample(s). Inputs are NOT checked.",
+  "", 
+  true
+)
+.add_prototype("input,stats")
+.add_parameter("input", "array_like <float, 2D>", "Input vector")
+.add_parameter("stats", ":py:class:`bob.learn.misc.GMMStats`", "Statistics of the GMM");
+static PyObject* PyBobLearnMiscGMMMachine_accStatistics_(PyBobLearnMiscGMMMachineObject* self, PyObject* args, PyObject* kwargs) {
+  BOB_TRY
+  
+  char** kwlist = acc_statistics_.kwlist(0);
+
+  PyBlitzArrayObject* input = 0;
+  PyBobLearnMiscGMMStatsObject* stats = 0;
+
+
+
+ if(!PyArg_ParseTupleAndKeywords(args, kwargs, "O&O!", kwlist, &PyBlitzArray_Converter,&input, 
+                                                                 &PyBobLearnMiscGMMStats_Type, &stats))
+    Py_RETURN_NONE;
+
+  //protects acquired resources through this scope
+  auto input_ = make_safe(input);
+  self->cxx->accStatistics_(*PyBlitzArrayCxx_AsBlitz<double,2>(input), *stats->cxx);
+
+  BOB_CATCH_MEMBER("cannot accumulate the statistics", 0)
+  Py_RETURN_NONE;
+}
+
+
+
+/*** set_variance_thresholds ***/
+static auto set_variance_thresholds = bob::extension::FunctionDoc(
+  "set_variance_thresholds",
+  "Set the variance flooring thresholds in each dimension to the same vector for all Gaussian components if the argument is a 1D numpy array, and equal for all Gaussian components and dimensions if the parameter is a scalar.",
+  "",
+  true
+)
+.add_prototype("input")
+.add_parameter("input", "array_like <float, 1D>", "Input vector");
+static PyObject* PyBobLearnMiscGMMMachine_setVarianceThresholds_method(PyBobLearnMiscGMMMachineObject* self, PyObject* args, PyObject* kwargs) {
+  BOB_TRY
+
+  char** kwlist = set_variance_thresholds.kwlist(0);
+
+  PyBlitzArrayObject* input = 0;
+
+ if(!PyArg_ParseTupleAndKeywords(args, kwargs, "O&", kwlist, &PyBlitzArray_Converter,&input))
+    Py_RETURN_NONE;
+
+  //protects acquired resources through this scope
+  auto input_ = make_safe(input);
+  self->cxx->setVarianceThresholds(*PyBlitzArrayCxx_AsBlitz<double,1>(input));
+
+  BOB_CATCH_MEMBER("cannot set the variance threshold", 0)
+  Py_RETURN_NONE;
+}
+
+
+
+
+/*** get_gaussian ***/
+static auto get_gaussian = bob::extension::FunctionDoc(
+  "get_gaussian",
+  "Get the specified Gaussian component.",
+  ".. note:: An exception is thrown if i is out of range.", 
+  true
+)
+.add_prototype("i","gaussian")
+.add_parameter("i", "int", "Index of the gaussian")
+.add_return("gaussian",":py:class:`bob.learn.misc.Gaussian`","Gaussian object");
+static PyObject* PyBobLearnMiscGMMMachine_get_gaussian(PyBobLearnMiscGMMMachineObject* self, PyObject* args, PyObject* kwargs) {
+  BOB_TRY
+  
+  char** kwlist = get_gaussian.kwlist(0);
+
+  int i = 0;
+
+  if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i", kwlist, &i)) Py_RETURN_NONE;
+ 
+  boost::shared_ptr<bob::learn::misc::Gaussian> gaussian = self->cxx->getGaussian(i);
+
+  //Allocate the corresponding Python object
+  PyBobLearnMiscGaussianObject* retval =
+    (PyBobLearnMiscGaussianObject*)PyBobLearnMiscGaussian_Type.tp_alloc(&PyBobLearnMiscGaussian_Type, 0);
+
+  retval->cxx = gaussian;
+   
+  //return reinterpret_cast<PyObject*>(retval);
+  return Py_BuildValue("O",retval);
+
+  BOB_CATCH_MEMBER("cannot get the gaussian", 0)
+}
+
+
 
 static PyMethodDef PyBobLearnMiscGMMMachine_methods[] = {
   {
@@ -354,6 +721,45 @@ static PyMethodDef PyBobLearnMiscGMMMachine_methods[] = {
     METH_VARARGS|METH_KEYWORDS,
     resize.doc()
   },
+  {
+    log_likelihood.name(),
+    (PyCFunction)PyBobLearnMiscGMMMachine_loglikelihood,
+    METH_VARARGS|METH_KEYWORDS,
+    log_likelihood.doc()
+  },
+  {
+    log_likelihood_.name(),
+    (PyCFunction)PyBobLearnMiscGMMMachine_loglikelihood_,
+    METH_VARARGS|METH_KEYWORDS,
+    log_likelihood_.doc()
+  },
+  {
+    acc_statistics.name(),
+    (PyCFunction)PyBobLearnMiscGMMMachine_accStatistics,
+    METH_VARARGS|METH_KEYWORDS,
+    acc_statistics.doc()
+  },
+  {
+    acc_statistics_.name(),
+    (PyCFunction)PyBobLearnMiscGMMMachine_accStatistics_,
+    METH_VARARGS|METH_KEYWORDS,
+    acc_statistics_.doc()
+  },
+ 
+  {
+    get_gaussian.name(),
+    (PyCFunction)PyBobLearnMiscGMMMachine_get_gaussian,
+    METH_VARARGS|METH_KEYWORDS,
+    get_gaussian.doc()
+  },
+
+  {
+    set_variance_thresholds.name(),
+    (PyCFunction)PyBobLearnMiscGMMMachine_setVarianceThresholds_method,
+    METH_VARARGS|METH_KEYWORDS,
+    set_variance_thresholds.doc()
+  },
+  
   {0} /* Sentinel */
 };
 
@@ -383,7 +789,7 @@ bool init_BobLearnMiscGMMMachine(PyObject* module)
   PyBobLearnMiscGMMMachine_Type.tp_richcompare = reinterpret_cast<richcmpfunc>(PyBobLearnMiscGMMMachine_RichCompare);
   PyBobLearnMiscGMMMachine_Type.tp_methods = PyBobLearnMiscGMMMachine_methods;
   PyBobLearnMiscGMMMachine_Type.tp_getset = PyBobLearnMiscGMMMachine_getseters;
-  PyBobLearnMiscGMMMachine_Type.tp_call = 0;
+  PyBobLearnMiscGMMMachine_Type.tp_call = reinterpret_cast<ternaryfunc>(PyBobLearnMiscGMMMachine_loglikelihood);
 
 
   // check that everything is fine
diff --git a/bob/learn/misc/include/bob.learn.misc/GMMMachine.h b/bob/learn/misc/include/bob.learn.misc/GMMMachine.h
index 331c1a49738d520e22959860ddbffe2bbf6b3b27..e03152701da1bfaee58849bda69856b66f6c617f 100644
--- a/bob/learn/misc/include/bob.learn.misc/GMMMachine.h
+++ b/bob/learn/misc/include/bob.learn.misc/GMMMachine.h
@@ -77,11 +77,6 @@ class GMMMachine
      */
     virtual ~GMMMachine();
 
-    /**
-     * Get number of inputs
-     */
-    inline size_t getNInputs() const
-    { return m_n_inputs; }
 
     /**
      * Reset the input dimensionality, and the number of Gaussian components.
@@ -91,23 +86,21 @@ class GMMMachine
      */
     void resize(const size_t n_gaussians, const size_t n_inputs);
 
-    /**
-     * Set the weights
-     */
-    void setWeights(const blitz::Array<double,1> &weights);
+
+    /////////////////////////
+    // Getters
+    ////////////////////////
 
     /**
-     * Get the weights ("mixing coefficients") of the Gaussian components
+     * Get number of inputs
      */
-    inline const blitz::Array<double,1>& getWeights() const
-    { return m_weights; }
+    size_t getNInputs() const
+    { return m_n_inputs; }
 
     /**
-     * Get the weights in order to be updated
-     * ("mixing coefficients") of the Gaussian components
-     * @warning Only trainers should use this function for efficiency reason
+     * Get the weights ("mixing coefficients") of the Gaussian components
      */
-    inline blitz::Array<double,1>& updateWeights()
+    const blitz::Array<double,1>& getWeights() const
     { return m_weights; }
 
     /**
@@ -116,53 +109,71 @@ class GMMMachine
     inline const blitz::Array<double,1>& getLogWeights() const
     { return m_cache_log_weights; }
 
-    /**
-     * Update the log of the weights in cache
-     * @warning Should be used by trainer only when using updateWeights()
-     */
-    void recomputeLogWeights() const;
 
-    /**
-     * Set the means
-     */
-    void setMeans(const blitz::Array<double,2> &means);
-    /**
-     * Set the means from a supervector
-     */
-    void setMeanSupervector(const blitz::Array<double,1> &mean_supervector);
     /**
      * Get the means
-     */
-    void getMeans(blitz::Array<double,2> &means) const;
+     */    
+    const blitz::Array<double,2> getMeans() const;
+    
     /**
      * Get the mean supervector
      */
     void getMeanSupervector(blitz::Array<double,1> &mean_supervector) const;
+    
      /**
      * Returns a const reference to the supervector (Put in cache)
      */
     const blitz::Array<double,1>& getMeanSupervector() const;
+        
+    /**
+     * Get the variances
+     */
+    const blitz::Array<double,2> getVariances() const;
+    
+    /**
+     * Get the variance supervector
+     */
+    //void getVarianceSupervector(blitz::Array<double,1> &variance_supervector) const;
+    
+    /**
+     * Returns a const reference to the supervector (Put in cache)
+     */
+    const blitz::Array<double,1>& getVarianceSupervector() const;
+    
 
     /**
-     * Set the variances
+     * Get the variance flooring thresholds for each Gaussian in each dimension
      */
-    void setVariances(const blitz::Array<double,2> &variances);
+    const blitz::Array<double,2> getVarianceThresholds() const;
+
+
+
+    ///////////////////////
+    // Setters
+    ///////////////////////
+
     /**
-     * Set the variances from a supervector
+     * Set the weights
      */
-    void setVarianceSupervector(const blitz::Array<double,1> &variance_supervector);
+    void setWeights(const blitz::Array<double,1> &weights);
+
     /**
-     * Get the variances
+     * Set the means
      */
-    void getVariances(blitz::Array<double,2> &variances) const;
+    void setMeans(const blitz::Array<double,2> &means);
     /**
-     * Get the variance supervector
+     * Set the means from a supervector
      */
-    void getVarianceSupervector(blitz::Array<double,1> &variance_supervector) const;
+    void setMeanSupervector(const blitz::Array<double,1> &mean_supervector);
+
     /**
-     * Returns a const reference to the supervector (Put in cache)
+     * Set the variances
      */
-    const blitz::Array<double,1>& getVarianceSupervector() const;
+    void setVariances(const blitz::Array<double,2> &variances);
+    /**
+     * Set the variances from a supervector
+     */
+    void setVarianceSupervector(const blitz::Array<double,1> &variance_supervector);
 
     /**
      * Set the variance flooring thresholds in each dimension
@@ -177,10 +188,28 @@ class GMMMachine
      * Set the variance flooring thresholds for each Gaussian in each dimension
      */
     void setVarianceThresholds(const blitz::Array<double,2> &variance_thresholds);
+
+
+    ////////////////
+    // Methods
+    /////////////////
+
     /**
-     * Get the variance flooring thresholds for each Gaussian in each dimension
+     * Get the weights in order to be updated
+     * ("mixing coefficients") of the Gaussian components
+     * @warning Only trainers should use this function for efficiency reason
      */
-    void getVarianceThresholds(blitz::Array<double,2> &variance_thresholds) const;
+    inline blitz::Array<double,1>& updateWeights()
+    { return m_weights; }
+
+
+    /**
+     * Update the log of the weights in cache
+     * @warning Should be used by trainer only when using updateWeights()
+     */
+    void recomputeLogWeights() const;
+
+
 
     /**
      * Output the log likelihood of the sample, x, i.e. log(p(x|GMMMachine))
@@ -214,20 +243,6 @@ class GMMMachine
      */
     double logLikelihood_(const blitz::Array<double, 1> &x) const;
 
-    /**
-     * Output the log likelihood of the sample, x
-     * (overrides Machine::forward)
-     * Dimension of the input is checked
-     */
-    //void forward(const blitz::Array<double,1>& input, double& output) const;
-
-    /**
-     * Output the log likelihood of the sample, x
-     * (overrides Machine::forward_)
-     * @warning Dimension of the input is not checked
-     */
-    //void forward_(const blitz::Array<double,1>& input, double& output) const;
-
     /**
      * Accumulates the GMM statistics over a set of samples.
      * @see bool accStatistics(const blitz::Array<double,1> &x, GMMStats stats)
@@ -260,13 +275,6 @@ class GMMMachine
      */
     void accStatistics_(const blitz::Array<double,1> &x, GMMStats &stats) const;
 
-    /**
-     * Get a pointer to a particular Gaussian component
-     * @param[in] i The index of the Gaussian component
-     * @return A smart pointer to the i'th Gaussian component
-     *         if it exists, otherwise throws an exception
-     */
-    boost::shared_ptr<const bob::learn::misc::Gaussian> getGaussian(const size_t i) const;
 
     /**
      * Get a pointer to a particular Gaussian component
@@ -274,7 +282,7 @@ class GMMMachine
      * @return A smart pointer to the i'th Gaussian component
      *         if it exists, otherwise throws an exception
      */
-    boost::shared_ptr<bob::learn::misc::Gaussian> updateGaussian(const size_t i);
+    boost::shared_ptr<bob::learn::misc::Gaussian> getGaussian(const size_t i);
 
 
     /**