Commit ad2bfd0e authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira
Browse files

[sphinx] Fixed warnings

parent d14221fa
Pipeline #4885 passed with stages
in 16 minutes and 23 seconds
......@@ -375,7 +375,7 @@ static PyObject* PyBobLearnEMGaussian_resize(PyBobLearnEMGaussianObject* self, P
static auto log_likelihood = bob::extension::FunctionDoc(
"log_likelihood",
"Output the log likelihood of the sample, x. The input size is checked.",
".. note:: The :py:meth:`__call__` function is an alias for this.",
".. note:: The ``__call__`` function is an alias for this.",
true
)
.add_prototype("input","output")
......
......@@ -643,7 +643,7 @@ static PyObject* PyBobLearnEMGMMMachine_resize(PyBobLearnEMGMMMachineObject* sel
static auto log_likelihood = bob::extension::FunctionDoc(
"log_likelihood",
"Output the log likelihood of the sample, x, i.e. :math:`log(p(x|GMM))`. Inputs are checked.",
".. note:: The :py:meth:`__call__` function is an alias for this. \n "
".. note:: The ``__call__`` function is an alias for this. \n "
"If `input` is 2D the average along the samples will be computed (:math:`\\frac{log(p(x|GMM))}{N}`) ",
true
)
......
......@@ -182,11 +182,10 @@ PyObject* PyBobLearnEMISVBase_getShape(PyBobLearnEMISVBaseObject* self, void*) {
static auto supervector_length = bob::extension::VariableDoc(
"supervector_length",
"int",
"Returns the supervector length."
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
"@warning An exception is thrown if no Universal Background Model has been set yet."
"Returns the supervector length.",
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality"
"WARNING An exception is thrown if no Universal Background Model has been set yet."
""
);
PyObject* PyBobLearnEMISVBase_getSupervectorLength(PyBobLearnEMISVBaseObject* self, void*) {
BOB_TRY
......
......@@ -172,10 +172,10 @@ static auto supervector_length = bob::extension::VariableDoc(
"supervector_length",
"int",
"Returns the supervector length."
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
"Returns the supervector length.",
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality"
"@warning An exception is thrown if no Universal Background Model has been set yet."
""
);
PyObject* PyBobLearnEMISVMachine_getSupervectorLength(PyBobLearnEMISVMachineObject* self, void*) {
BOB_TRY
......
......@@ -187,10 +187,10 @@ static auto supervector_length = bob::extension::VariableDoc(
"supervector_length",
"int",
"Returns the supervector length."
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
"Returns the supervector length.",
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality"
"@warning An exception is thrown if no Universal Background Model has been set yet."
""
);
PyObject* PyBobLearnEMIVectorMachine_getSupervectorLength(PyBobLearnEMIVectorMachineObject* self, void*) {
BOB_TRY
......@@ -472,7 +472,7 @@ static PyObject* PyBobLearnEMIVectorMachine_IsSimilarTo(PyBobLearnEMIVectorMachi
static auto project = bob::extension::FunctionDoc(
"project",
"Projects the given GMM statistics into the i-vector subspace",
".. note:: The :py:meth:`__call__` function is an alias for this function",
".. note:: The ``__call__`` function is an alias for this function",
true
)
.add_prototype("stats")
......
......@@ -190,10 +190,10 @@ static auto supervector_length = bob::extension::VariableDoc(
"supervector_length",
"int",
"Returns the supervector length."
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
"Returns the supervector length.",
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality"
"@warning An exception is thrown if no Universal Background Model has been set yet."
""
);
PyObject* PyBobLearnEMJFABase_getSupervectorLength(PyBobLearnEMJFABaseObject* self, void*) {
BOB_TRY
......
......@@ -172,10 +172,10 @@ static auto supervector_length = bob::extension::VariableDoc(
"supervector_length",
"int",
"Returns the supervector length."
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
"Returns the supervector length.",
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality"
"@warning An exception is thrown if no Universal Background Model has been set yet."
""
);
PyObject* PyBobLearnEMJFAMachine_getSupervectorLength(PyBobLearnEMJFAMachineObject* self, void*) {
BOB_TRY
......@@ -621,7 +621,7 @@ static PyObject* PyBobLearnEMJFAMachine_ForwardUx(PyBobLearnEMJFAMachineObject*
static auto log_likelihood = bob::extension::FunctionDoc(
"log_likelihood",
"Computes the log-likelihood of the given samples",
".. note:: the :py:meth:`__call__` function is an alias for this function.",
".. note:: the ``__call__`` function is an alias for this function.",
true
)
.add_prototype("stats")
......
......@@ -301,7 +301,7 @@ int PyBobLearnEMPLDAMachine_setWeightedSum(PyBobLearnEMPLDAMachineObject* self,
static auto log_likelihood = bob::extension::VariableDoc(
"log_likelihood",
"float",
"",
"Get the current log likelihood",
""
);
static PyObject* PyBobLearnEMPLDAMachine_getLogLikelihood(PyBobLearnEMPLDAMachineObject* self, PyObject* args, PyObject* kwargs) {
......
......@@ -79,7 +79,7 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
project = u'bob.learn.em'
project = u'bob.core'
import time
copyright = u'%s, Idiap Research Institute' % time.strftime('%Y')
......@@ -131,7 +131,7 @@ pygments_style = 'sphinx'
# Some variables which are useful for generated material
project_variable = project.replace('.', '_')
short_description = u'Bindings for EM machines and trainers of Bob'
short_description = u'Core utilities required on all Bob modules'
owner = [u'Idiap Research Institute']
......@@ -239,7 +239,8 @@ autodoc_default_flags = [
from bob.extension.utils import link_documentation, load_requirements
sphinx_requirements = "extra-intersphinx.txt"
if os.path.exists(sphinx_requirements):
intersphinx_mapping = link_documentation(additional_packages=load_requirements(sphinx_requirements))
intersphinx_mapping = link_documentation(
additional_packages=['python', 'numpy'] + load_requirements(sphinx_requirements))
else:
intersphinx_mapping = link_documentation()
......
# The bob.core>2.0.5 in the requirements.txt is making the bob.core not download
bob.core
......@@ -212,7 +212,7 @@ Once the :py:class:`bob.learn.em.JFAMachine` has been configured for a
specific class, the log-likelihood (score) that an input sample belongs to the
enrolled class, can be estimated, by first computing the GMM sufficient
statistics of this input sample, and then calling the
:py:meth:`bob.learn.em.JFAMachine.forward` on the sufficient statistics.
:py:meth:`bob.learn.em.JFAMachine.log_likelihood` on the sufficient statistics.
.. doctest::
:options: +NORMALIZE_WHITESPACE
......@@ -266,7 +266,7 @@ Once the :py:class:`bob.learn.em.ISVMachine` has been configured for a
specific class, the log-likelihood (score) that an input sample belongs to the
enrolled class, can be estimated, by first computing the GMM sufficient
statistics of this input sample, and then calling the
:py:meth:`bob.learn.em.ISVMachine.forward` on the sufficient statistics.
``__call__`` on the sufficient statistics.
.. doctest::
:options: +NORMALIZE_WHITESPACE
......@@ -565,7 +565,7 @@ Next, we initialize a trainer, which is an instance of
>>> jfa_trainer = bob.learn.em.JFATrainer()
The training process is started by calling the
:py:meth:`bob.learn.em.JFATrainer.train`.
:py:meth:`bob.learn.em.train`.
.. doctest::
:options: +NORMALIZE_WHITESPACE
......@@ -626,7 +626,7 @@ Next, we initialize a trainer, which is an instance of
>>> isv_trainer = bob.learn.em.ISVTrainer(relevance_factor=4.) # 4 is the relevance factor
The training process is started by calling the
:py:meth:`bob.learn.em.ISVTrainer.train`.
:py:meth:`bob.learn.em.train`.
.. doctest::
:options: +NORMALIZE_WHITESPACE
......@@ -675,7 +675,7 @@ Next, we initialize a trainer, which is an instance of
>>> TRAINING_STATS_flatten = [gs11, gs12, gs21, gs22]
The training process is started by calling the
:py:meth:`bob.learn.em.IVectorTrainer.train`.
:py:meth:`bob.learn.em.train`.
.. doctest::
:options: +NORMALIZE_WHITESPACE
......@@ -717,7 +717,7 @@ dimensionality 3.
Learning a PLDA model can be performed by instantiating the class
:py:class:`bob.learn.em.PLDATrainer`, and calling the
:py:meth:`bob.learn.em.PLDATrainer.train()` method.
:py:meth:`bob.learn.em.train` method.
.. doctest::
......@@ -769,8 +769,8 @@ separately for each model.
In a verification scenario, there are two possible hypotheses: 1.
:math:`x_{test}` and :math:`x_{enroll}` share the same class. 2.
:math:`x_{test}` and :math:`x_{enroll}` are from different classes. Using the
methods :py:meth:`bob.learn.em.PLDAMachine.forward` or
:py:meth:`bob.learn.em.PLDAMachine.__call__` function, the corresponding
methods :py:meth:`bob.learn.em.PLDAMachine.log_likelihood_ratio` or
its alias ``__call__`` function, the corresponding
log-likelihood ratio will be computed, which is defined in more formal way by:
:math:`s = \ln(P(x_{test},x_{enroll})) - \ln(P(x_{test})P(x_{enroll}))`
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment