Commit 915c110e authored by Manuel Günther's avatar Manuel Günther
Browse files

Started documenting

parent 21783cb5
from . import algorithm
from . import tools
from . import test
def get_config():
"""Returns a string containing the configuration information.
......@@ -2,3 +2,6 @@ from .GMM import GMM, GMMRegular
from .JFA import JFA
from .ISV import ISV
from .IVector import IVector
# Export every public (non-underscore) name defined above so that Sphinx
# autodoc documents them correctly - don't remove it.
__all__ = [_ for _ in dir() if not _.startswith('_')]
......@@ -3,3 +3,6 @@ from .command_line import *
from .gmm import *
from .isv import *
from .ivector import *
# Export every public (non-underscore) name pulled in by the star imports above
# so that Sphinx autodoc documents them correctly - don't remove it.
__all__ = [_ for _ in dir() if not _.startswith('_')]
......@@ -179,33 +179,3 @@ def train_whitener(algorithm, force=False):
algorithm.load_whitener(fs.whitener_file)"Writing projector into file %s", fs.projector_file)
def whitening_ivector(self, indices=None, force=False):
    """Performs whitening projection of previously extracted i-vectors.

    Loads the trained whitening projector and applies it to each projected
    i-vector file, writing the whitened result to the whitening directory.

    Parameters
    ----------
    indices : (int, int) or None
        If given, only the half-open index range ``[indices[0], indices[1])``
        of the file list is processed (used to split the work across parallel
        grid jobs); otherwise all files are processed.
    force : bool
        If True, existing output files are recomputed instead of skipped.

    NOTE(review): this block was reconstructed from a garbled diff; the exact
    logging calls and the final save call were fused onto neighboring lines in
    the original text — confirm against the repository history.
    """
    # Read the trained whitening projector into the IVector tool.
    projector = self.m_tool.load_whitening(self.m_configuration.whitening_file)
    projected_ivec_files = self.projected_list(groups=self.groups(), directory=self.m_configuration.ivec_directory)
    projected_whitened_files = self.projected_list(groups=self.groups(), directory=self.m_configuration.whitening_directory)
    # Select the subset of indices to iterate (for parallel execution).
    if indices is not None:
        index_range = range(indices[0], indices[1])
        facereclib.utils.info("- Projection: splitting of index range %s" % str(indices))
    else:
        index_range = range(len(projected_ivec_files))
    facereclib.utils.info("- Projection: projecting %d gmm stats from directory '%s' to directory '%s'" % (len(index_range), self.m_configuration.ivec_directory, self.m_configuration.whitening_directory))
    # Whiten each i-vector, skipping files that already exist (unless force).
    for i in index_range:
        if not self.m_tool_chain.__check_file__(projected_whitened_files[i], force):
            # load feature
            feature = facereclib.utils.load(str(projected_ivec_files[i]))
            # project feature
            whitened = self.m_tool.project_whitening(feature)
            # write it
            facereclib.utils.ensure_dir(os.path.dirname(projected_whitened_files[i]))
            facereclib.utils.save(whitened, str(projected_whitened_files[i]))
......@@ -86,7 +86,7 @@ release = distribution.version
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
#exclude_patterns = ['**/links.rst']
exclude_patterns = ['links.rst', 'references.rst']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
......@@ -133,12 +133,12 @@ if sphinx.__version__ >= "1.0":
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = ''
html_logo = 'img/logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = ''
html_favicon = 'img/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
......@@ -187,7 +187,7 @@ html_favicon = ''
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'bob_example_project_doc'
htmlhelp_basename = 'bob_bio_gmm_doc'
# -- Options for LaTeX output --------------------------------------------------
......@@ -201,7 +201,7 @@ latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'bob_example_project.tex', u'Bob',
('index', 'bob_bio_gmm.tex', u'Bob',
u'Biometrics Group, Idiap Research Institute', 'manual'),
......@@ -236,7 +236,7 @@ rst_epilog = ''
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'bob.example.project', u'Bob Example Project Documentation', [u'Idiap Research Institute'], 1)
('index', '', u'Gaussian Mixture Models in Documentation', [u'Idiap Research Institute'], 1)
# Default processing flags for sphinx
......@@ -246,8 +246,14 @@ autodoc_default_flags = ['members', 'undoc-members', 'inherited-members', 'show-
# For inter-documentation mapping:
from bob.extension.utils import link_documentation
intersphinx_mapping = link_documentation(['python', 'numpy', '', 'bob.db.verification.utils'])
intersphinx_mapping = link_documentation(['python', 'numpy', ''])
def skip(app, what, name, obj, skip, options):
    """Sphinx ``autodoc-skip-member`` callback.

    Forces ``__call__`` to be documented even though autodoc would normally
    skip dunder members, since we provide special implementations for it.

    Returns False (do not skip) for ``__call__``; otherwise returns the skip
    decision autodoc already made.

    Bug fix: the original test was ``name in ("__call__")`` — a parenthesized
    string, not a tuple — which performed a *substring* check, so any name
    that happened to be a substring of "__call__" (e.g. "_call_") was also
    kept.  An equality test matches only the intended name.
    NOTE(review): the original comment also mentioned ``__str__`` but the code
    never handled it; preserved the actual behavior — confirm intent.
    """
    # Do not skip the __call__ function, as we have a special implementation for it.
    if name == "__call__":
        return False
    return skip
def setup(app):
    """Sphinx extension entry point: register the autodoc member filter.

    Connects the module-level ``skip`` callback to the
    ``autodoc-skip-member`` event so special members such as ``__call__``
    are included in the generated documentation.
    """
    app.connect("autodoc-skip-member", skip)
Implementation Details
.. todo::
Document the details of the GMM-based algorithms.
.. include:: links.rst
Tools implemented in
.. autosummary::
.. automodule::
......@@ -4,17 +4,57 @@
Biometric Recognition with Gaussian Mixture Models
Gaussian Mixture Model based Algorithms
This package is part of the ``bob.bio`` packages, which provide open source tools to run comparable and reproducible biometric recognition experiments.
In this package, algorithms for executing experiments based on Gaussian Mixture Models are provided, including scripts to run the training procedures of the Expectation-Maximization loops in parallel.
Package Documentation
For more detailed information about the structure of the ```` packages, please refer to the documentation of :ref:` <>`.
Particularly, for the installation of this and other ``bob.bio`` packages, please read the :ref:`installation` instructions.
.. automodule::
In the following, we provide more detailed information about the particularities of this package only.
Users Guide
.. automodule::
.. toctree::
:maxdepth: 2
Reference Manual
.. toctree::
:maxdepth: 2
.. include:: references.rst
This documentation is still under development.
Here is a list of things that needs to be done:
.. todolist::
Indices and tables
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
.. include:: links.rst
Executing the Training in Parallel
Sometimes the training of the GMM-based models requires a lot of time.
However, the training procedures can be parallelized, i.e., by running the E-steps of the EM loop in parallel.
For this purpose, we provide a set of scripts ``./bin/``, ``./bin/`` and ``./bin/``.
These scripts integrate perfectly into the ```` packages.
Particularly, they have exactly the same set of options as documented in :ref:``.
In fact, the scripts above only run in parallelized mode, i.e., the ``--grid`` option is required.
During the submission of the jobs, several hundred jobs might be created (depending on the ``number_of_..._training_iterations`` that you specify in the :py:class:`` constructor).
However, after the training has finished, it is possible to use the normal ``./bin/`` script to run similar experiments, e.g., if you want to change the protocol of your experiment.
.. todo:: improve the documentation of the parallelized scripts.
Python API for
.. todo:: Improve documentation of the functions and classes of
Generic functions
Miscellaneous functions
.. autosummary::
Tools to run recognition experiments
Command line generation
.. autosummary::
Parallel GMM
.. autosummary::
Parallel ISV
.. autosummary::
Parallel I-Vector
.. autosummary::
Integration with
.. autosummary::
.. automodule::
.. include:: links.rst
.. todo:: Provide the correct references for the algorithms defined in this package.
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.