Commit 32dd5b0d authored by André Anjos

[doc] Better docs

parent 30c20b7a
Pipeline #2841 passed in 5 minutes and 7 seconds
from .MiuraMatch import MiuraMatch
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
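As a side note, the ``__all__`` line above simply re-exports every public name imported into the package's ``__init__.py``, which is what Sphinx's autodoc needs in order to document them. A minimal, self-contained illustration of the same idiom (the imported modules below are stand-ins, not part of this package):

.. code-block:: python

   # stand-ins for "from .MiuraMatch import MiuraMatch" and friends
   import math
   from os.path import join

   # keep every name in the module namespace that does not start with an underscore
   __all__ = [_ for _ in dir() if not _.startswith('_')]

   print(__all__)   # -> ['join', 'math']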
 #!/usr/bin/env python
 # vim: set fileencoding=utf-8 :
-from ..algorithms.MiuraMatch import MiuraMatch
+from ..algorithms import MiuraMatch
 huangwl_tool = MiuraMatch(ch=18, cw=28)
 huangwl_gpu_tool = MiuraMatch(ch=18, cw=28, gpu=True)
...
 #!/usr/bin/env python
 # vim: set fileencoding=utf-8 :
-from ...extractors.LocalBinaryPatterns import LocalBinaryPatterns
+from ...extractors import LocalBinaryPatterns
 # Parameters
...
 #!/usr/bin/env python
 # vim: set fileencoding=utf-8 :
-from ...extractors.MaximumCurvature import MaximumCurvature
+from ...extractors import MaximumCurvature
 # Parameters
...
 #!/usr/bin/env python
 # vim: set fileencoding=utf-8 :
-from ...extractors.NormalisedCrossCorrelation import NormalisedCrossCorrelation
+from ...extractors import NormalisedCrossCorrelation
 feature_extractor = NormalisedCrossCorrelation()
 #!/usr/bin/env python
 # vim: set fileencoding=utf-8 :
-from ...extractors.RepeatedLineTracking import RepeatedLineTracking
+from ...extractors import RepeatedLineTracking
 # Maximum number of iterations
 NUMBER_ITERATIONS = 3000
...
 #!/usr/bin/env python
 # vim: set fileencoding=utf-8 :
-from ...extractors.WideLineDetector import WideLineDetector
+from ...extractors import WideLineDetector
 # Radius of the circular neighbourhood region
 RADIUS_NEIGHBOURHOOD_REGION = 5
...
 #!/usr/bin/env python
 # vim: set fileencoding=utf-8 :
-from ...preprocessors.FingerCrop import FingerCrop
+from ...preprocessors import FingerCrop
 # Contour localization mask
 CONTOUR_MASK_HEIGHT = 4 # Height of the mask
...
 #!/usr/bin/env python
 # vim: set fileencoding=utf-8 :
-from ...preprocessors.FingerCrop import FingerCrop
+from ...preprocessors import FingerCrop
 # Contour localization mask
 CONTOUR_MASK_HEIGHT = 4 # Height of the mask
...
 #!/usr/bin/env python
 # vim: set fileencoding=utf-8 :
-from ...preprocessors.FingerCrop import FingerCrop
+from ...preprocessors import FingerCrop
 # Contour localization mask
...
 #!/usr/bin/env python
 # vim: set fileencoding=utf-8 :
-from ...preprocessors.FingerCrop import FingerCrop
+from ...preprocessors import FingerCrop
 # Contour localization mask
...
@@ -13,7 +13,7 @@ class LocalBinaryPatterns (Extractor):
   """LBP feature extractor
   Parameters fixed based on L. Mirmohamadsadeghi and A. Drygajlo. Palm vein
-  recognition uisng local texture patterns, IET Biometrics, pp. 1-9, 2013.
+  recognition using local texture patterns, IET Biometrics, pp. 1-9, 2013.
   """
   def __init__(
...
@@ -5,10 +5,10 @@ import numpy
 import bob.io.base
-from bob.bio.base.features.Extractor import Extractor
+from bob.bio.base.extractor import Extractor
-class MaximumCurvature (Extractor):
+class PrincipalCurvature (Extractor):
   """MiuraMax feature extractor
   Based on J.H. Choi, W. Song, T. Kim, S.R. Lee and H.C. Kim, Finger vein
@@ -60,7 +60,7 @@ class MaximumCurvature (Extractor):
     gy[indices] = 0
     # Normalize
-    Gmag( find(Gmag == 0) ) = 1 # Avoid dividing by zero
+    Gmag[find[Gmag==0]] = 1 # Avoid dividing by zero
     gx = gx/Gmag
     gy = gy/Gmag
@@ -72,8 +72,8 @@ class MaximumCurvature (Extractor):
     veins = lambda1*finger_mask
     # Normalise
-    veins = veins - min(veins(:))
-    veins = veins/max(veins(:))
+    veins = veins - min(veins[:])
+    veins = veins/max(veins[:])
     veins = veins*finger_mask
...
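Note that the right-hand side of the ``Gmag`` change above is a literal transliteration of the original MATLAB code and still relies on a ``find`` helper, so it would not run as-is under plain ``numpy``. A minimal sketch of the same normalisation steps using numpy boolean indexing (the function name and signature are made up for illustration; all arguments are numpy arrays):

.. code-block:: python

   def normalise_veins(Gmag, gx, gy, lambda1, finger_mask):
       """Sketch of the normalisation steps shown in the hunk above."""
       Gmag = Gmag.copy()
       Gmag[Gmag == 0] = 1          # avoid dividing by zero
       gx = gx / Gmag
       gy = gy / Gmag

       veins = lambda1 * finger_mask
       veins = veins - veins.min()  # rescale the vein response to [0, 1]
       veins = veins / veins.max()
       veins = veins * finger_mask
       return gx, gy, veins

Boolean-mask assignment (``Gmag[Gmag == 0] = 1``) is the usual numpy replacement for MATLAB's ``x(find(x == 0)) = 1``.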
from .LocalBinaryPatterns import LocalBinaryPatterns
from .NormalisedCrossCorrelation import NormalisedCrossCorrelation
from .PrincipalCurvature import PrincipalCurvature
from .RepeatedLineTracking import RepeatedLineTracking
from .WideLineDetector import WideLineDetector
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
from .FingerCrop import FingerCrop
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
@@ -10,14 +10,14 @@ This section contains a listing of all functionality available on this library
 which can be used for vein experiments.

-Pre-configured Databases
-------------------------
+Databases
+---------

 .. automodule:: bob.bio.vein.configurations.databases

-Preprocessors
--------------
+Pre-processors
+--------------

 .. automodule:: bob.bio.vein.preprocessors
...
.. vim: set fileencoding=utf-8 :
.. date: Thu Sep 20 11:58:57 CEST 2012
.. _bob.bio.vein.baselines:
.. warning::

   This document was copied from ``bob.bio.spear`` and is not updated. We're
   working on it. Please don't use it as of now.
===============================
Executing Baseline Algorithms
===============================
The first thing you might want to do is to execute one of the vein
recognition algorithms that are implemented in ``bob.bio.vein``.
Setting up your Database
------------------------
For example, you can easily download the image samples of the `PUT`_
database.

By default, ``bob.bio.vein`` does not know where the original database files
are located. Hence, before running experiments you have to specify the raw
database directories. How this is done is explained in more detail in
:ref:`bob.bio.base.installation`.
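For illustration only, here is what such an entry could look like, assuming the usual ``~/.bob_bio_databases.txt`` mechanism from ``bob.bio.base``; the placeholder name below is hypothetical and depends on the database configuration you use:

.. code-block:: text

   [YOUR_PUTVEIN_IMAGE_DIRECTORY] = /path/to/put-vein/images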
Running Baseline Experiments
----------------------------
To run the baseline experiments, you can use the ``./bin/verify.py`` script by
just going to the console and typing:
.. code-block:: sh

   $ ./bin/verify.py
This script is explained in more detail in :ref:`bob.bio.base.experiments`.
The ``./bin/verify.py --help`` option shows you which other options are
available. Here is an almost complete list:
* ``--database``: The database and protocol you want to use.
* ``--algorithms``: The recognition algorithms that you want to execute.
* ``--all``: Execute all algorithms that are implemented.
* ``--temp-directory``: The directory where temporary files of the experiments
  are stored.
* ``--result-directory``: The directory where the resulting score files of the
  experiments are stored.
* ``--evaluate``: After running the experiments, the resulting score files will
  be evaluated, and the result is written to the console.
* ``--dry-run``: Instead of executing the algorithm (or the evaluation), only
  print the command that would have been executed.
* ``--verbose``: Increase the verbosity level of the script.
By default, only the commands that are executed are printed, and the rest of
the calculation runs quietly. You can increase the verbosity by adding the
``--verbose`` parameter repeatedly (up to three times).
Usually it is a good idea to have at least verbose level 2 (i.e., calling
``./bin/verify.py --verbose --verbose``, or the short version
``./bin/verify.py -vv``).
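For example, a full call combining several of the options listed above could look like the following sketch (the database short-cut and the directories are placeholders you need to adapt):

.. code-block:: sh

   $ ./bin/verify.py --database <database> \
       --temp-directory /scratch/$USER/temp \
       --result-directory /scratch/$USER/results \
       -vv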
Running in Parallel
~~~~~~~~~~~~~~~~~~~
To run the experiments in parallel, as usual, you can define an SGE grid
configuration, or run with parallel threads on the local machine. For the
``./bin/verify.py`` script, the grid configuration is adapted to each of the
algorithms. Hence, to run in the SGE grid, you can simply add the ``--grid``
command line option, without parameters. Similarly, to run the experiments in
parallel on the local machine, simply add a ``--parallel <N>`` option, where
``<N>`` specifies the number of parallel jobs you want to execute.
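As a sketch, the two variants would then be invoked as follows (both assume the sequential command line from above already works for you):

.. code-block:: sh

   # submit the jobs to the SGE grid
   $ ./bin/verify.py -vv --grid

   # or run 4 parallel jobs on the local machine
   $ ./bin/verify.py -vv --parallel 4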
The Algorithms
--------------
The package provides a set of state-of-the-art recognition algorithms. Here
is the list of short-cuts:
* ``gmm``: *Gaussian Mixture Models* (GMM) `[Rey00]`.
- algorithm : :py:class:`bob.bio.gmm.algorithm.GMM`
* ``isv``: As an extension of the GMM algorithm, *Inter-Session Variability* (ISV) modeling `[Vogt08]` is used to learn which variations in the samples are introduced by identity changes and which are not.
- algorithm : :py:class:`bob.bio.gmm.algorithm.ISV`
* ``ivector``: Another extension of the GMM algorithm is *Total Variability* (TV) modeling `[Dehak11]` (aka. I-Vector), which tries to learn a subspace in the GMM super-vector space.
- algorithm : :py:class:`bob.bio.gmm.algorithm.IVector`
.. note::

   The ``ivector`` algorithm needs a lot of training data and fails on small
   databases such as the `Voxforge`_ database.
Evaluation Results
------------------
To evaluate the results, one can use the ``./bin/evaluate.py`` command.
Several types of evaluation can be achieved, see :ref:`bob.bio.base.evaluate`
for details. In particular, it can produce ROC curves, DET plots, CMC curves
and compute the EER/HTER or minDCF.
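A minimal sketch of such an evaluation is shown below; the option names are assumptions based on ``bob.bio.base`` and should be checked against ``./bin/evaluate.py --help``:

.. code-block:: sh

   # sketch only: compute the EER on the development scores and write a ROC plot
   $ ./bin/evaluate.py --dev-files <path-to-dev-scores> --criterion EER --roc roc.pdf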
Experiments on different databases
----------------------------------
To make you more familiar with the tool, we provide examples of different
toolchains applied to different databases: Voxforge, BANCA, TIMIT, MOBIO, and
NIST SRE 2012.
`Voxforge`_ is a free database used in free speech recognition engines. We
randomly selected a small part of the English corpus (< 1GB). It is used as a
toy example for our speaker recognition tool, since experiments can easily be
run on a local machine and the results can be obtained in a reasonable amount
of time (< 2h).

Unlike TIMIT and BANCA, this dataset is completely free of charge.

More details about how to download the audio files used in our experiments,
and how the data is split into Training, Development and Evaluation sets, can
be found here::
https://pypi.python.org/pypi/bob.db.putvein
One example command line is::
$ bin/verify.py -d putvein -p energy-2gauss -e mfcc-60 -a gmm-voxforge -s ubm_gmm --groups {dev,eval}
In this example, we used the following configuration:
* Energy-based VAD,
* (19 MFCC features + Energy) + First and second derivatives,
* **UBM-GMM** modelling (with 256 Gaussians); the scoring is done using the linear approximation of the LLR.
The performance of the system on DEV and EVAL is:
* ``DEV: EER = 1.89%``
* ``EVAL: HTER = 1.56%``
If you want to run the same experiment on SGE::
$ bin/verify.py -d voxforge -p energy-2gauss -e mfcc-60 -a gmm-voxforge -s ubm_gmm --groups {dev,eval} -g grid
If you want to run the parallel implementation of the UBM on the SGE::
$ bin/verify_gmm.py -d voxforge -p energy-2gauss -e mfcc-60 -a gmm-voxforge -s ubm_gmm_sge --groups {dev,eval} -g grid
If you want to run the parallel implementation of the UBM on your local machine::
$ bin/verify_gmm.py -d voxforge -p energy-2gauss -e mfcc-60 -a gmm-voxforge -s ubm_gmm_local --groups {dev,eval} -g local
Another example is to use the **ISV** toolchain instead of UBM-GMM::
$ bin/verify.py -d voxforge -p energy-2gauss -e mfcc-60 -a isv-voxforge -s isv --groups {dev,eval} -g grid
* ``DEV: EER = 1.41%``
* ``EVAL: HTER = 1.52%``
One can also try the **JFA** toolchain::
$ bin/verify.py -d voxforge -p energy-2gauss -e mfcc-60 -a jfa-voxforge -s jfa --groups {dev,eval} -g grid
* ``DEV: EER = 4.04%``
* ``EVAL: HTER = 5.11%``
or the **IVector** toolchain, where **Whitening, L-Norm, LDA and WCCN** are used, as in this example where the scores are computed using the **cosine distance**::
$ bin/verify.py -d voxforge -p energy-2gauss -e mfcc-60 -a ivec-cosine-voxforge -s ivec-cosine --groups {dev,eval} -g grid
* ``DEV: EER = 7.33%``
* ``EVAL: HTER = 13.80%``
The score computation can also be done using **PLDA**::
$ bin/verify.py -d voxforge -p energy-2gauss -e mfcc-60 -a ivec-plda-voxforge -s ivec-plda --groups {dev,eval} -g grid
* ``DEV: EER = 11.33%``
* ``EVAL: HTER = 13.15%``
Note that in the previous examples, our goal is not to optimize the parameters on the DEV set but to provide examples of use.
BANCA dataset
~~~~~~~~~~~~~

`BANCA`_ is a simple bimodal database with relatively clean data. The results are already very good with a simple baseline UBM-GMM system. An example of use is::
$ bin/verify.py -vv -d banca-audio -p energy-2gauss -e mfcc-60 -a gmm-banca -s banca_G --groups {dev,eval}
The configuration in this example is similar to the previous one, the only difference being the use of the regular LLR instead of its linear approximation.
Here is the performance of this system:
* ``DEV: EER = 0.91%``
* ``EVAL: EER = 0.75%``
TIMIT dataset
~~~~~~~~~~~~~
`TIMIT`_ is one of the oldest databases (year 1993) used to evaluate speaker recognition systems. In the following example, the processing is done on the development set, and LFCC features are used::
$ bin/verify.py -vv -d timit -p energy-2gauss -e lfcc-60 -a gmm-timit -s timit
Here is the performance of the system on the Development set:
* ``DEV: EER = 2.68%``
MOBIO dataset
~~~~~~~~~~~~~
This is a more challenging database. The noise and the short duration of the segments make the task of speaker recognition relatively difficult. The following experiment on the male group (Mobile-0) uses the 4Hz modulation-energy-based VAD and the ISV modelling technique (with dimU=50)::
$ bin/verify_isv.py -vv -d mobio-audio-male -p mod-4hz -e mfcc-60 -a isv-mobio -s isv --groups {dev,eval} -g demanding
Here is the performance of this system:
* ``DEV: EER = 13.81%``
* ``EVAL: HTER = 10.90%``
To generate the results presented in the ICASSP 2014 paper, please check the script included in the `icassp` folder of the toolbox.
Note that the MOBIO dataset has several protocols, which are all implemented in `bob.db.mobio`_. For simplicity, this toolbox provides only the mobile-0 protocol, in file-list format.
NIST SRE 2012
~~~~~~~~~~~~~
We first invite you to read the paper describing our system submitted to the NIST SRE 2012 Evaluation. The protocols on the development set are the result of joint work by the I4U group. To reproduce the results, please check this dedicated package::
https://pypi.python.org/pypi/spear.nist_sre12
.. note::

   For any additional information, please use our mailing list::

     https://groups.google.com/forum/#!forum/bob-devel
.. include:: links.rst
@@ -15,29 +15,31 @@ import pkg_resources
 # -- General configuration -----------------------------------------------------

 # If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+needs_sphinx = '1.3'

 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
 extensions = [
   'sphinx.ext.todo',
   'sphinx.ext.coverage',
   'sphinx.ext.pngmath',
   'sphinx.ext.ifconfig',
   'sphinx.ext.autodoc',
   'sphinx.ext.autosummary',
   'sphinx.ext.doctest',
   'sphinx.ext.intersphinx',
+  'sphinx.ext.graphviz',
+  'sphinx.ext.napoleon',
+  'sphinx.ext.viewcode',
   ]

-# The viewcode extension appeared only on Sphinx >= 1.0.0
-import sphinx
-if sphinx.__version__ >= "1.0":
-  extensions.append('sphinx.ext.viewcode')
-
 # Always includes todos
 todo_include_todos = True

+# Generates auto-summary automatically
+autosummary_generate = True
+
 # If we are on OSX, the 'dvipng' path maybe different
 dvipng_osx = '/opt/local/libexec/texlive/binaries/dvipng'
 if os.path.exists(dvipng_osx): pngmath_dvipng = dvipng_osx
@@ -55,7 +57,7 @@ source_suffix = '.rst'
 master_doc = 'index'

 # General information about the project.
-project = u'The Fingervein Recognition Library (FingerveinRecLib)'
+project = u'Vein Biometrics Recognition Library (bob.bio.vein)'
 import time
 copyright = u'%s, Idiap Research Institute' % time.strftime('%Y')
@@ -113,7 +115,8 @@ rst_epilog = ''
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = 'default'
+import sphinx_rtd_theme
+html_theme = 'sphinx_rtd_theme'

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
@@ -121,7 +124,7 @@ html_theme = 'default'
 #html_theme_options = {}

 # Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

 # The name for this set of Sphinx documents. If None, it defaults to
 # "<project> v<release> documentation".
@@ -200,9 +203,13 @@ latex_font_size = '10pt'
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'FingerveinRecLib.tex', u'FingerveinRecLib Documentation',
-   u'Biometrics Group, Idiap Research Institute', 'manual'),
-]
+  (
+    'index',
+    'bobbiovein.tex',
+    u'Vein Biometrics Recognition Library (bob.bio.vein)',
+    u'Biometrics Group, Idiap Research Institute', 'manual',
+  ),
+]

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
@@ -230,7 +237,7 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-  ('index', 'bobbiovein', u'FingerveinRecLib Documentation', [u'Idiap Research Institute'], 1)
+  ('index', 'bobbiovein', u'Vein Biometrics Recognition Library', [u'Idiap Research Institute'], 1)
 ]

 # Default processing flags for sphinx
@@ -241,10 +248,9 @@ autodoc_default_flags = ['members', 'undoc-members', 'inherited-members', 'show-
 # For inter-documentation mapping:
 from bob.extension.utils import link_documentation
 intersphinx_mapping = link_documentation([
-  'python', 'numpy', 'scipy', 'gridtk',
-  'bob.extension', 'bob.math', 'bob.io.base', 'bob.ip.base', 'bob.ip.gabor', 'bob.learn.linear', 'bob.learn.misc', 'facereclib',
-  'bob.db.verification.utils', 'bob.db.verification.filelist',
-  'bob.db.vera', 'bob.db.utfvp',
+  'python',
+  'numpy',
+  'scipy',
 ])
...
@@ -21,7 +21,7 @@ Users Guide
 .. toctree::
    :maxdepth: 2

-   experiments
+   baselines
    references
    api
...
 .. vim: set fileencoding=utf-8 :
 .. date: Thu Jan 15 15:58:57 CEST 2015

-==========
+============
 References
-==========
+============

 .. [KUU02] *M. Kono, H. Ueki and S. Umemura*, **Near-infrared finger vein patterns for personal identification**, Applied Optics, Vol. 41, Issue 35, pp. 7429-7436 (2002).
...
@@ -11,7 +11,7 @@ setup(
     name='bob.bio.vein',
     version=open("version.txt").read().rstrip(),
-    description='Vein recognition based on Bob and the bob.bio framework',
+    description='Vein biometrics recognition based on Bob and the bob.bio framework',
     url='https://gitlab.idiap.ch/biometric/bob.bio.vein',
     license='GPLv3',
...