Commit e98e995c authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira
Browse files

Merge branch 'p1' into 'master'

[Automatic] update links and the ci mostly



See merge request !17
parents ba7c6ca0 ad2bfd0e
Pipeline #4886 passed with stages
in 30 minutes and 11 seconds
......@@ -375,7 +375,7 @@ static PyObject* PyBobLearnEMGaussian_resize(PyBobLearnEMGaussianObject* self, P
static auto log_likelihood = bob::extension::FunctionDoc(
"log_likelihood",
"Output the log likelihood of the sample, x. The input size is checked.",
".. note:: The :py:meth:`__call__` function is an alias for this.",
".. note:: The ``__call__`` function is an alias for this.",
true
)
.add_prototype("input","output")
......
......@@ -643,7 +643,7 @@ static PyObject* PyBobLearnEMGMMMachine_resize(PyBobLearnEMGMMMachineObject* sel
static auto log_likelihood = bob::extension::FunctionDoc(
"log_likelihood",
"Output the log likelihood of the sample, x, i.e. :math:`log(p(x|GMM))`. Inputs are checked.",
".. note:: The :py:meth:`__call__` function is an alias for this. \n "
".. note:: The ``__call__`` function is an alias for this. \n "
"If `input` is 2D the average along the samples will be computed (:math:`\\frac{log(p(x|GMM))}{N}`) ",
true
)
......
......@@ -182,11 +182,10 @@ PyObject* PyBobLearnEMISVBase_getShape(PyBobLearnEMISVBaseObject* self, void*) {
static auto supervector_length = bob::extension::VariableDoc(
"supervector_length",
"int",
"Returns the supervector length."
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
"@warning An exception is thrown if no Universal Background Model has been set yet."
"Returns the supervector length.",
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality"
"WARNING An exception is thrown if no Universal Background Model has been set yet."
""
);
PyObject* PyBobLearnEMISVBase_getSupervectorLength(PyBobLearnEMISVBaseObject* self, void*) {
BOB_TRY
......
......@@ -172,10 +172,10 @@ static auto supervector_length = bob::extension::VariableDoc(
"supervector_length",
"int",
"Returns the supervector length."
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
"Returns the supervector length.",
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality"
"@warning An exception is thrown if no Universal Background Model has been set yet."
""
);
PyObject* PyBobLearnEMISVMachine_getSupervectorLength(PyBobLearnEMISVMachineObject* self, void*) {
BOB_TRY
......
......@@ -187,10 +187,10 @@ static auto supervector_length = bob::extension::VariableDoc(
"supervector_length",
"int",
"Returns the supervector length."
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
"Returns the supervector length.",
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality"
"@warning An exception is thrown if no Universal Background Model has been set yet."
""
);
PyObject* PyBobLearnEMIVectorMachine_getSupervectorLength(PyBobLearnEMIVectorMachineObject* self, void*) {
BOB_TRY
......@@ -472,7 +472,7 @@ static PyObject* PyBobLearnEMIVectorMachine_IsSimilarTo(PyBobLearnEMIVectorMachi
static auto project = bob::extension::FunctionDoc(
"project",
"Projects the given GMM statistics into the i-vector subspace",
".. note:: The :py:meth:`__call__` function is an alias for this function",
".. note:: The ``__call__`` function is an alias for this function",
true
)
.add_prototype("stats")
......
......@@ -190,10 +190,10 @@ static auto supervector_length = bob::extension::VariableDoc(
"supervector_length",
"int",
"Returns the supervector length."
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
"Returns the supervector length.",
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality"
"@warning An exception is thrown if no Universal Background Model has been set yet."
""
);
PyObject* PyBobLearnEMJFABase_getSupervectorLength(PyBobLearnEMJFABaseObject* self, void*) {
BOB_TRY
......
......@@ -172,10 +172,10 @@ static auto supervector_length = bob::extension::VariableDoc(
"supervector_length",
"int",
"Returns the supervector length."
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality",
"Returns the supervector length.",
"NGaussians x NInputs: Number of Gaussian components by the feature dimensionality"
"@warning An exception is thrown if no Universal Background Model has been set yet."
""
);
PyObject* PyBobLearnEMJFAMachine_getSupervectorLength(PyBobLearnEMJFAMachineObject* self, void*) {
BOB_TRY
......@@ -621,7 +621,7 @@ static PyObject* PyBobLearnEMJFAMachine_ForwardUx(PyBobLearnEMJFAMachineObject*
static auto log_likelihood = bob::extension::FunctionDoc(
"log_likelihood",
"Computes the log-likelihood of the given samples",
".. note:: the :py:meth:`__call__` function is an alias for this function.",
".. note:: the ``__call__`` function is an alias for this function.",
true
)
.add_prototype("stats")
......
......@@ -301,7 +301,7 @@ int PyBobLearnEMPLDAMachine_setWeightedSum(PyBobLearnEMPLDAMachineObject* self,
static auto log_likelihood = bob::extension::VariableDoc(
"log_likelihood",
"float",
"",
"Get the current log likelihood",
""
);
static PyObject* PyBobLearnEMPLDAMachine_getLogLikelihood(PyBobLearnEMPLDAMachineObject* self, PyObject* args, PyObject* kwargs) {
......
......@@ -30,9 +30,29 @@ extensions = [
import sphinx
if sphinx.__version__ >= "1.4.1":
extensions.append('sphinx.ext.imgmath')
imgmath_image_format = 'svg'
else:
extensions.append('sphinx.ext.pngmath')
# Be picky about warnings
nitpicky = True
# Ignores stuff we can't easily resolve on other project's sphinx manuals
nitpick_ignore = []
# Allows the user to override warnings from a separate file
if os.path.exists('nitpick-exceptions.txt'):
for line in open('nitpick-exceptions.txt'):
if line.strip() == "" or line.startswith("#"):
continue
dtype, target = line.split(None, 1)
target = target.strip()
try: # python 2.x
target = unicode(target)
except NameError:
pass
nitpick_ignore.append((dtype, target))
# Always includes todos
todo_include_todos = True
......@@ -59,7 +79,7 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
project = u'bob.learn.em'
project = u'bob.core'
import time
copyright = u'%s, Idiap Research Institute' % time.strftime('%Y')
......@@ -111,7 +131,7 @@ pygments_style = 'sphinx'
# Some variables which are useful for generated material
project_variable = project.replace('.', '_')
short_description = u'Bindings for EM machines and trainers of Bob'
short_description = u'Core utilities required on all Bob modules'
owner = [u'Idiap Research Institute']
......@@ -216,8 +236,14 @@ autodoc_default_flags = [
]
# For inter-documentation mapping:
from bob.extension.utils import link_documentation
intersphinx_mapping = link_documentation()
from bob.extension.utils import link_documentation, load_requirements
sphinx_requirements = "extra-intersphinx.txt"
if os.path.exists(sphinx_requirements):
intersphinx_mapping = link_documentation(
additional_packages=['python', 'numpy'] + load_requirements(sphinx_requirements))
else:
intersphinx_mapping = link_documentation()
# We want to remove all private (i.e. _. or __.__) members
# that are not in the list of accepted functions
......
# The bob.core>2.0.5 in the requirements.txt is making the bob.core not download
bob.core
......@@ -212,7 +212,7 @@ Once the :py:class:`bob.learn.em.JFAMachine` has been configured for a
specific class, the log-likelihood (score) that an input sample belongs to the
enrolled class, can be estimated, by first computing the GMM sufficient
statistics of this input sample, and then calling the
:py:meth:`bob.learn.em.JFAMachine.forward` on the sufficient statistics.
:py:meth:`bob.learn.em.JFAMachine.log_likelihood` on the sufficient statistics.
.. doctest::
:options: +NORMALIZE_WHITESPACE
......@@ -266,7 +266,7 @@ Once the :py:class:`bob.learn.em.ISVMachine` has been configured for a
specific class, the log-likelihood (score) that an input sample belongs to the
enrolled class, can be estimated, by first computing the GMM sufficient
statistics of this input sample, and then calling the
:py:meth:`bob.learn.em.ISVMachine.forward` on the sufficient statistics.
``__call__`` on the sufficient statistics.
.. doctest::
:options: +NORMALIZE_WHITESPACE
......@@ -565,7 +565,7 @@ Next, we initialize a trainer, which is an instance of
>>> jfa_trainer = bob.learn.em.JFATrainer()
The training process is started by calling the
:py:meth:`bob.learn.em.JFATrainer.train`.
:py:meth:`bob.learn.em.train`.
.. doctest::
:options: +NORMALIZE_WHITESPACE
......@@ -626,7 +626,7 @@ Next, we initialize a trainer, which is an instance of
>>> isv_trainer = bob.learn.em.ISVTrainer(relevance_factor=4.) # 4 is the relevance factor
The training process is started by calling the
:py:meth:`bob.learn.em.ISVTrainer.train`.
:py:meth:`bob.learn.em.train`.
.. doctest::
:options: +NORMALIZE_WHITESPACE
......@@ -675,7 +675,7 @@ Next, we initialize a trainer, which is an instance of
>>> TRAINING_STATS_flatten = [gs11, gs12, gs21, gs22]
The training process is started by calling the
:py:meth:`bob.learn.em.IVectorTrainer.train`.
:py:meth:`bob.learn.em.train`.
.. doctest::
:options: +NORMALIZE_WHITESPACE
......@@ -717,7 +717,7 @@ dimensionality 3.
Learning a PLDA model can be performed by instantiating the class
:py:class:`bob.learn.em.PLDATrainer`, and calling the
:py:meth:`bob.learn.em.PLDATrainer.train()` method.
:py:meth:`bob.learn.em.train` method.
.. doctest::
......@@ -769,8 +769,8 @@ separately for each model.
In a verification scenario, there are two possible hypotheses: 1.
:math:`x_{test}` and :math:`x_{enroll}` share the same class. 2.
:math:`x_{test}` and :math:`x_{enroll}` are from different classes. Using the
methods :py:meth:`bob.learn.em.PLDAMachine.forward` or
:py:meth:`bob.learn.em.PLDAMachine.__call__` function, the corresponding
methods :py:meth:`bob.learn.em.PLDAMachine.log_likelihood_ratio` or
its alias ``__call__`` function, the corresponding
log-likelihood ratio will be computed, which is defined in more formal way by:
:math:`s = \ln(P(x_{test},x_{enroll})) - \ln(P(x_{test})P(x_{enroll}))`
......
......@@ -10,7 +10,7 @@
.. _argparse: http://code.google.com/p/argparse/
.. _blitz++: http://www.oonumerics.org/blitz
.. _bob's idiap guide: http://github.com/idiap/bob/wiki/Using-Bob-at-Idiap
.. _bob's idiap guide: https://gitlab.idiap.ch/bob/bob/wikis/Using-Bob-at-Idiap
.. _bob's website: https://www.idiap.ch/software/bob
.. _boost: http://www.boost.org
.. _buildbot: http://trac.buildbot.net
......@@ -50,13 +50,13 @@
.. _python: http://www.python.org
.. _pypi: http://pypi.python.org
.. _qt4: http://qt.nokia.com/
.. _satellite packages: https://github.com/idiap/bob/wiki/Satellite-Packages
.. _satellite packages: https://gitlab.idiap.ch/bob/bob/wikis/Packages
.. _scipy: http://www.scipy.org
.. _setuptools: http://trac.edgewall.org/wiki/setuptools
.. _sphinx: http://sphinx.pocoo.org
.. _sqlalchemy: http://www.sqlalchemy.org/
.. _sqlite: http://www.sqlite.org/
.. _submit a new bug report: https://github.com/idiap/bob/issues
.. _submit a new bug report: https://groups.google.com/forum/?fromgroups#!forum/bob-devel
.. _torch 3 vision: http://torch3vision.idiap.ch
.. _torch 3: http://www.torch.ch
.. _torch 5: http://torch5.sourceforge.net
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment