diff --git a/bob/learn/em/gaussian.cpp b/bob/learn/em/gaussian.cpp
index 8021a718ff735773748763bea64554738d02015d..35c6e94ba391c993a224bc4aba09ed5437b28895 100644
--- a/bob/learn/em/gaussian.cpp
+++ b/bob/learn/em/gaussian.cpp
@@ -375,7 +375,7 @@ static PyObject* PyBobLearnEMGaussian_resize(PyBobLearnEMGaussianObject* self, P
 static auto log_likelihood = bob::extension::FunctionDoc(
   "log_likelihood",
   "Output the log likelihood of the sample, x. The input size is checked.",
-  ".. note:: The :py:meth:`__call__` function is an alias for this.", 
+  ".. note:: The ``__call__`` function is an alias for this.",
   true
 )
 .add_prototype("input","output")
diff --git a/bob/learn/em/gmm_machine.cpp b/bob/learn/em/gmm_machine.cpp
index f9cec3cc09b0eed4329c249695110677b61f6dbe..061f39f7958f6b668d4640b476caadc14c4ef2eb 100644
--- a/bob/learn/em/gmm_machine.cpp
+++ b/bob/learn/em/gmm_machine.cpp
@@ -643,7 +643,7 @@ static PyObject* PyBobLearnEMGMMMachine_resize(PyBobLearnEMGMMMachineObject* sel
 static auto log_likelihood = bob::extension::FunctionDoc(
   "log_likelihood",
   "Output the log likelihood of the sample, x, i.e. :math:`log(p(x|GMM))`. Inputs are checked.",
-  ".. note:: The :py:meth:`__call__` function is an alias for this. \n "
+  ".. note:: The ``__call__`` function is an alias for this. \n "
   "If `input` is 2D the average along the samples will be computed (:math:`\\frac{log(p(x|GMM))}{N}`) ",
   true
 )
diff --git a/bob/learn/em/isv_base.cpp b/bob/learn/em/isv_base.cpp
index 0a615035e2f26982efc0f7ec28b84b02f196c899..a3f09559a52c1e27a9a8f962db7c76c274a10e98 100644
--- a/bob/learn/em/isv_base.cpp
+++ b/bob/learn/em/isv_base.cpp
@@ -182,11 +182,10 @@ PyObject* PyBobLearnEMISVBase_getShape(PyBobLearnEMISVBaseObject* self, void*) {
 static auto supervector_length = bob::extension::VariableDoc(
   "supervector_length",
   "int",
-
-  "Returns the supervector length."
-  "NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
-  
-  "@warning An exception is thrown if no Universal Background Model has been set yet."
+  "Returns the supervector length.",
+  "NGaussians x NInputs: Number of Gaussian components by the feature dimensionality. "
+  "@warning An exception is thrown if no Universal Background Model has been set yet."
+  ""
 );
 PyObject* PyBobLearnEMISVBase_getSupervectorLength(PyBobLearnEMISVBaseObject* self, void*) {
   BOB_TRY
diff --git a/bob/learn/em/isv_machine.cpp b/bob/learn/em/isv_machine.cpp
index 9d5748b4c6991503dfa95a95dcf2f77f282b5d6b..375bf48feb5f49edf1ec16c5128488ce88db0438 100644
--- a/bob/learn/em/isv_machine.cpp
+++ b/bob/learn/em/isv_machine.cpp
@@ -172,10 +172,10 @@ static auto supervector_length = bob::extension::VariableDoc(
   "supervector_length",
   "int",
 
-  "Returns the supervector length."
-  "NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
-  
+  "Returns the supervector length.",
+  "NGaussians x NInputs: Number of Gaussian components by the feature dimensionality. "
   "@warning An exception is thrown if no Universal Background Model has been set yet."
+  ""
 );
 PyObject* PyBobLearnEMISVMachine_getSupervectorLength(PyBobLearnEMISVMachineObject* self, void*) {
   BOB_TRY
diff --git a/bob/learn/em/ivector_machine.cpp b/bob/learn/em/ivector_machine.cpp
index c29076c51f3242b38b3157def92bae229e546b52..da4de2d89526d8d099431d219568e596d09ee328 100644
--- a/bob/learn/em/ivector_machine.cpp
+++ b/bob/learn/em/ivector_machine.cpp
@@ -187,10 +187,10 @@ static auto supervector_length = bob::extension::VariableDoc(
   "supervector_length",
   "int",
 
-  "Returns the supervector length."
-  "NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
-
+  "Returns the supervector length.",
+  "NGaussians x NInputs: Number of Gaussian components by the feature dimensionality. "
   "@warning An exception is thrown if no Universal Background Model has been set yet."
+  ""
 );
 PyObject* PyBobLearnEMIVectorMachine_getSupervectorLength(PyBobLearnEMIVectorMachineObject* self, void*) {
   BOB_TRY
@@ -472,7 +472,7 @@ static PyObject* PyBobLearnEMIVectorMachine_IsSimilarTo(PyBobLearnEMIVectorMachi
 static auto project = bob::extension::FunctionDoc(
   "project",
   "Projects the given GMM statistics into the i-vector subspace",
-  ".. note:: The :py:meth:`__call__` function is an alias for this function",
+  ".. note:: The ``__call__`` function is an alias for this function",
   true
 )
 .add_prototype("stats")
diff --git a/bob/learn/em/jfa_base.cpp b/bob/learn/em/jfa_base.cpp
index e15c103c9ee9fd9355296505b23cfef47b65e4f4..8a92d65405cf438c36bf95454df8fea52dba9058 100644
--- a/bob/learn/em/jfa_base.cpp
+++ b/bob/learn/em/jfa_base.cpp
@@ -190,10 +190,10 @@ static auto supervector_length = bob::extension::VariableDoc(
   "supervector_length",
   "int",
 
-  "Returns the supervector length."
-  "NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
-  
+  "Returns the supervector length.",
+  "NGaussians x NInputs: Number of Gaussian components by the feature dimensionality. "
   "@warning An exception is thrown if no Universal Background Model has been set yet."
+  ""
 );
 PyObject* PyBobLearnEMJFABase_getSupervectorLength(PyBobLearnEMJFABaseObject* self, void*) {
   BOB_TRY
diff --git a/bob/learn/em/jfa_machine.cpp b/bob/learn/em/jfa_machine.cpp
index a90dd02b1c65778568436d7a6c8968501b9dcee7..fce093625fcd17aacd68b240e15137dcad332cf4 100644
--- a/bob/learn/em/jfa_machine.cpp
+++ b/bob/learn/em/jfa_machine.cpp
@@ -172,10 +172,10 @@ static auto supervector_length = bob::extension::VariableDoc(
   "supervector_length",
   "int",
 
-  "Returns the supervector length."
-  "NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
-
+  "Returns the supervector length.",
+  "NGaussians x NInputs: Number of Gaussian components by the feature dimensionality. "
   "@warning An exception is thrown if no Universal Background Model has been set yet."
+  ""
 );
 PyObject* PyBobLearnEMJFAMachine_getSupervectorLength(PyBobLearnEMJFAMachineObject* self, void*) {
   BOB_TRY
@@ -621,7 +621,7 @@ static PyObject* PyBobLearnEMJFAMachine_ForwardUx(PyBobLearnEMJFAMachineObject*
 static auto log_likelihood = bob::extension::FunctionDoc(
   "log_likelihood",
   "Computes the log-likelihood of the given samples",
-  ".. note:: the :py:meth:`__call__` function is an alias for this function.",
+  ".. note:: the ``__call__`` function is an alias for this function.",
   true
 )
 .add_prototype("stats")
diff --git a/bob/learn/em/plda_machine.cpp b/bob/learn/em/plda_machine.cpp
index acd55004b26d4b96b22a9da6a8ae8f38fa825c64..83c9b4223815ee45d06ce60b92d94abaf3369ffc 100644
--- a/bob/learn/em/plda_machine.cpp
+++ b/bob/learn/em/plda_machine.cpp
@@ -301,7 +301,7 @@ int PyBobLearnEMPLDAMachine_setWeightedSum(PyBobLearnEMPLDAMachineObject* self,
 static auto log_likelihood = bob::extension::VariableDoc(
   "log_likelihood",
   "float",
-  "",
+  "Get the current log likelihood",
   ""
 );
 static PyObject* PyBobLearnEMPLDAMachine_getLogLikelihood(PyBobLearnEMPLDAMachineObject* self, PyObject* args, PyObject* kwargs) {
diff --git a/doc/conf.py b/doc/conf.py
index ae503321b55e9743d36db318dc935a285ace2512..f314ce9b26482c7e0cb4931a83f87095ac4ef8ed 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -79,7 +79,7 @@ source_suffix = '.rst'
 master_doc = 'index'
 
 # General information about the project.
-project = u'bob.learn.em'
+project = u'bob.learn.em'
 import time
 copyright = u'%s, Idiap Research Institute' % time.strftime('%Y')
 
@@ -131,7 +131,7 @@ pygments_style = 'sphinx'
 
 # Some variables which are useful for generated material
 project_variable = project.replace('.', '_')
-short_description = u'Bindings for EM machines and trainers of Bob'
+short_description = u'Bindings for EM machines and trainers of Bob'
 owner = [u'Idiap Research Institute']
 
 
@@ -239,7 +239,8 @@ autodoc_default_flags = [
 from bob.extension.utils import link_documentation, load_requirements
 sphinx_requirements = "extra-intersphinx.txt"
 if os.path.exists(sphinx_requirements):
-    intersphinx_mapping = link_documentation(additional_packages=load_requirements(sphinx_requirements))
+    intersphinx_mapping = link_documentation(
+        additional_packages=['python', 'numpy'] + load_requirements(sphinx_requirements))
 else:
     intersphinx_mapping = link_documentation()
 
diff --git a/doc/extra-intersphinx.txt b/doc/extra-intersphinx.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e0c2a1af70a9f804401fb9c4654cb2338090983c
--- /dev/null
+++ b/doc/extra-intersphinx.txt
@@ -0,0 +1,2 @@
+# The bob.core>2.0.5 pin in requirements.txt prevents bob.core's intersphinx mapping from being picked up automatically
+bob.core
diff --git a/doc/guide.rst b/doc/guide.rst
index 8cf1d57e9154576e0f49eb68419af7642a2a1ce3..95fc5160cada4701feb9f635a53d335569bfa031 100644
--- a/doc/guide.rst
+++ b/doc/guide.rst
@@ -212,7 +212,7 @@ Once the :py:class:`bob.learn.em.JFAMachine` has been configured for a
 specific class, the log-likelihood (score) that an input sample belongs to the
 enrolled class, can be estimated, by first computing the GMM sufficient
 statistics of this input sample, and then calling the
-:py:meth:`bob.learn.em.JFAMachine.forward` on the sufficient statistics.
+:py:meth:`bob.learn.em.JFAMachine.log_likelihood` on the sufficient statistics.
 
 .. doctest::
   :options: +NORMALIZE_WHITESPACE
@@ -266,7 +266,7 @@ Once the :py:class:`bob.learn.em.ISVMachine` has been configured for a
 specific class, the log-likelihood (score) that an input sample belongs to the
 enrolled class, can be estimated, by first computing the GMM sufficient
 statistics of this input sample, and then calling the
-:py:meth:`bob.learn.em.ISVMachine.forward` on the sufficient statistics.
+``__call__`` function on the sufficient statistics.
 
 .. doctest::
   :options: +NORMALIZE_WHITESPACE
@@ -565,7 +565,7 @@ Next, we initialize a trainer, which is an instance of
    >>> jfa_trainer = bob.learn.em.JFATrainer()
 
 The training process is started by calling the
-:py:meth:`bob.learn.em.JFATrainer.train`.
+:py:func:`bob.learn.em.train`.
 
 .. doctest::
    :options: +NORMALIZE_WHITESPACE
@@ -626,7 +626,7 @@ Next, we initialize a trainer, which is an instance of
    >>> isv_trainer = bob.learn.em.ISVTrainer(relevance_factor=4.) # 4 is the relevance factor
 
 The training process is started by calling the
-:py:meth:`bob.learn.em.ISVTrainer.train`.
+:py:func:`bob.learn.em.train`.
 
 .. doctest::
    :options: +NORMALIZE_WHITESPACE
@@ -675,7 +675,7 @@ Next, we initialize a trainer, which is an instance of
    >>> TRAINING_STATS_flatten = [gs11, gs12, gs21, gs22]
 
 The training process is started by calling the
-:py:meth:`bob.learn.em.IVectorTrainer.train`.
+:py:func:`bob.learn.em.train`.
 
 .. doctest::
    :options: +NORMALIZE_WHITESPACE
@@ -717,7 +717,7 @@ dimensionality 3.
 
 Learning a PLDA model can be performed by instantiating the class
 :py:class:`bob.learn.em.PLDATrainer`, and calling the
-:py:meth:`bob.learn.em.PLDATrainer.train()` method.
+:py:func:`bob.learn.em.train` method.
 
 .. doctest::
 
@@ -769,8 +769,8 @@ separately for each model.
 In a verification scenario, there are two possible hypotheses: 1.
 :math:`x_{test}` and :math:`x_{enroll}` share the same class.  2.
 :math:`x_{test}` and :math:`x_{enroll}` are from different classes.  Using the
-methods :py:meth:`bob.learn.em.PLDAMachine.forward` or
-:py:meth:`bob.learn.em.PLDAMachine.__call__` function, the corresponding
+methods :py:meth:`bob.learn.em.PLDAMachine.log_likelihood_ratio` or
+its alias ``__call__`` function, the corresponding
 log-likelihood ratio will be computed, which is defined in more formal way by:
 :math:`s = \ln(P(x_{test},x_{enroll})) - \ln(P(x_{test})P(x_{enroll}))`