From 2603ceb7bfd7265abac694c38ed996b445aeb8f4 Mon Sep 17 00:00:00 2001
From: Manuel Guenther <manuel.guenther@idiap.ch>
Date: Mon, 29 Jun 2015 15:48:39 +0200
Subject: [PATCH] Improved documentation of classes.

---
 bob/bio/face/algorithm/GaborJet.py    | 148 ++++++++++++++++++++++++--
 bob/bio/face/algorithm/Histogram.py   |  83 ++++++++++++++-
 bob/bio/face/extractor/DCTBlocks.py   |  49 ++++++++-
 bob/bio/face/extractor/Eigenface.py   |   4 +-
 bob/bio/face/extractor/GridGraph.py   |  92 +++++++++++++++-
 bob/bio/face/extractor/LGBPHS.py      |  65 ++++++++++-
 bob/bio/face/preprocessor/INormLBP.py |  32 ++++--
 bob/bio/face/test/test_databases.py   |   4 +
 doc/conf.py                           |  10 +-
 9 files changed, 453 insertions(+), 34 deletions(-)

diff --git a/bob/bio/face/algorithm/GaborJet.py b/bob/bio/face/algorithm/GaborJet.py
index 36cc2cc0..05148163 100644
--- a/bob/bio/face/algorithm/GaborJet.py
+++ b/bob/bio/face/algorithm/GaborJet.py
@@ -11,7 +11,34 @@ import math
 from bob.bio.base.algorithm import Algorithm
 
 class GaborJet (Algorithm):
-  """Algorithm chain for computing Gabor jets, Gabor graphs, and Gabor graph comparisons"""
+  """Computes a comparison of lists of Gabor jets using a similarity function of :py:class:`bob.ip.gabor.Similarity`.
+
+  The model enrollment simply stores all extracted Gabor jets for all enrollment features.
+  By default (i.e., ``multiple_feature_scoring = 'max_jet'``), the scoring uses an advanced local strategy.
+  For each node, the similarity between the given probe jet and all model jets is computed, and only the *highest* value is kept.
+  These values are finally averaged over all node positions.
+  Other strategies can be obtained using a different ``multiple_feature_scoring``.
+
+  **Parameters:**
+
+  gabor_jet_similarity_type : str
+    The type of Gabor jet similarity to compute.
+    Please refer to the documentation of :py:class:`bob.ip.gabor.Similarity` for a list of possible values.
+
+  multiple_feature_scoring : str
+    How to fuse the local similarities into a single similarity value.
+    Possible values are:
+
+    * ``'average_model'`` : During enrollment, an average model is computed using functionality of :ref:`bob.ip.gabor <bob.ip.gabor>`.
+    * ``'average'`` : For each node, the average similarity is computed. Finally, the average of those similarities is returned.
+    * ``'min_jet', 'max_jet', 'med_jet'`` : For each node, the minimum, maximum or median similarity is computed. Finally, the average of those similarities is returned.
+    * ``'min_graph', 'max_graph', 'med_graph'`` : For each node, the average similarity is computed. Finally, the minimum, maximum or median of those similarities is returned.
+
+  gabor_directions, gabor_scales, gabor_sigma, gabor_maximum_frequency, gabor_frequency_step, gabor_power_of_k, gabor_dc_free
+    These parameters are required by the disparity-based Gabor jet similarity functions, see :py:class:`bob.ip.gabor.Similarity`.
+    The default values are identical to the ones in the :py:class:`bob.bio.face.extractor.GridGraph`.
+    Please assure that this class and the :py:class:`bob.bio.face.extractor.GridGraph` class get the same configuration, otherwise unexpected things might happen.
+  """
 
   def __init__(
       self,
@@ -90,7 +117,27 @@ class GaborJet (Algorithm):
     assert all(isinstance(f, bob.ip.gabor.Jet) for f in feature)
 
   def enroll(self, enroll_features):
-    """Enrolls the model by computing an average graph for each model"""
+    """enroll(enroll_features) -> model
+
+    Enrolls the model using one of several strategies.
+    Commonly, the bunch graph strategy [WFK97]_ is applied, by storing several Gabor jets for each node.
+
+    When ``multiple_feature_scoring = 'average_model'``, for each node the average :py:class:`bob.ip.gabor.Jet` is computed.
+    Otherwise, all enrollment jets are stored, grouped by node.
+
+    **Parameters:**
+
+    enroll_features : [[:py:class:`bob.ip.gabor.Jet`]]
+      The list of enrollment features.
+      Each sub-list contains a full graph.
+
+    **Returns:**
+
+    model : [[:py:class:`bob.ip.gabor.Jet`]]
+      The enrolled model.
+      Each sub-list contains a list of jets, which correspond to the same node.
+      When ``multiple_feature_scoring = 'average_model'`` each sub-list contains a single :py:class:`bob.ip.gabor.Jet`.
+    """
     [self._check_feature(feature) for feature in enroll_features]
     assert len(enroll_features)
     assert all(len(feature) == len(enroll_features[0]) for feature in enroll_features)
@@ -106,7 +153,16 @@ class GaborJet (Algorithm):
 
 
   def write_model(self, model, model_file):
-    """Saves the enrolled model of Gabor jets to file."""
+    """Writes the model enrolled by the :py:meth:`enroll` function to the given file.
+
+    **Parameters:**
+
+    model : [[:py:class:`bob.ip.gabor.Jet`]]
+      The enrolled model.
+
+    model_file : str or :py:class:`bob.io.base.HDF5File`
+      The name of the file or the file opened for writing.
+    """
     f = bob.io.base.HDF5File(model_file, 'w')
     # several model graphs
     f.set("NumberOfNodes", len(model))
@@ -118,7 +174,22 @@ class GaborJet (Algorithm):
       f.cd("..")
     f.close()
 
+
   def read_model(self, model_file):
+    """read_model(model_file) -> model
+
+    Reads the model written by the :py:meth:`write_model` function from the given file.
+
+    **Parameters:**
+
+    model_file : str or :py:class:`bob.io.base.HDF5File`
+      The name of the file or the file opened for reading.
+
+    **Returns:**
+
+    model : [[:py:class:`bob.ip.gabor.Jet`]]
+      The list of Gabor jets read from file.
+    """
     f = bob.io.base.HDF5File(model_file)
     count = f.get("NumberOfNodes")
     model = []
@@ -131,11 +202,41 @@ class GaborJet (Algorithm):
 
 
   def read_probe(self, probe_file):
+    """read_probe(probe_file) -> probe
+
+    Reads the probe file, e.g., as written by the :py:meth:`bob.bio.face.extractor.GridGraph.write_feature` function from the given file.
+
+    **Parameters:**
+
+    probe_file : str or :py:class:`bob.io.base.HDF5File`
+      The name of the file or the file opened for reading.
+
+    **Returns:**
+
+    probe : [:py:class:`bob.ip.gabor.Jet`]
+      The list of Gabor jets read from file.
+    """
     return bob.ip.gabor.load_jets(bob.io.base.HDF5File(probe_file))
 
 
   def score(self, model, probe):
-    """Computes the score of the probe and the model"""
+    """score(model, probe) -> score
+
+    Computes the score of the probe and the model using the desired Gabor jet similarity function and the desired score fusion strategy.
+
+    **Parameters:**
+
+    model : [[:py:class:`bob.ip.gabor.Jet`]]
+      The model enrolled by the :py:meth:`enroll` function.
+
+    probe : [:py:class:`bob.ip.gabor.Jet`]
+      The probe read by the :py:meth:`read_probe` function.
+
+    **Returns:**
+
+    score : float
+      The fused similarity score.
+    """
     self._check_feature(probe)
     [self._check_feature(m) for m in model]
     assert len(model) == len(probe)
@@ -148,7 +249,42 @@ class GaborJet (Algorithm):
 
 
   def score_for_multiple_probes(self, model, probes):
-    """This function computes the score between the given model graph(s) and several given probe graphs."""
+    """score_for_multiple_probes(model, probes) -> score
+
+    This function computes the score between the given model graph(s) and several given probe graphs.
+    The same local scoring strategy as for several model jets is applied, but this time the local scoring strategy is applied between all graphs from the model and probes.
+
+    **Parameters:**
+
+    model : [[:py:class:`bob.ip.gabor.Jet`]]
+      The model enrolled by the :py:meth:`enroll` function.
+      The sub-lists are grouped by node.
+
+    probes : [[:py:class:`bob.ip.gabor.Jet`]]
+      A list of probe graphs.
+      The sub-lists are grouped by graph.
+
+    **Returns:**
+
+    score : float
+      The fused similarity score.
+    """
     [self._check_feature(probe) for probe in probes]
+    [self._check_feature(m) for m in model]
+    assert all(len(model) == len(probe) for probe in probes)
+
+    jet_scoring = numpy.average if self.jet_scoring is None else self.jet_scoring
     graph_scoring = numpy.average if self.graph_scoring is None else self.graph_scoring
-    return graph_scoring([self.score(model, probe) for probe in probes])
+    local_scores = [jet_scoring([self.similarity_function(m, probe[n]) for m in model[n] for probe in probes]) for n in range(len(model))]
+    return graph_scoring(local_scores)
+
+
+  # overwrite functions to avoid them being documented.
+  def train_projector(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def load_projector(*args, **kwargs) : pass
+  def project(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def write_feature(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def read_feature(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def train_enroller(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def load_enroller(*args, **kwargs) : pass
+  def score_for_multiple_models(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
diff --git a/bob/bio/face/algorithm/Histogram.py b/bob/bio/face/algorithm/Histogram.py
index 01399c55..76604918 100644
--- a/bob/bio/face/algorithm/Histogram.py
+++ b/bob/bio/face/algorithm/Histogram.py
@@ -9,7 +9,24 @@ import numpy
 from bob.bio.base.algorithm import Algorithm
 
 class Histogram (Algorithm):
-  """Tool chain for computing local Gabor binary pattern histogram sequences"""
+  """Computes the distance between histogram sequences.
+
+  Both sparse and non-sparse representations of histograms are supported.
+  For enrollment, to date only the averaging of histograms is implemented.
+
+  **Parameters:**
+
+  distance_function : function
+    The function to be used to compare two histograms.
+    This function should accept sparse histograms.
+
+  is_distance_function : bool
+    Is the given ``distance_function`` a distance function (lower values are better) or a similarity function (higher values are better)?
+
+  multiple_probe_scoring : str or ``None``
+    The way, scores are fused when multiple probes are available.
+    See :py:func:`bob.bio.base.score_fusion_strategy` for possible values.
+  """
 
   def __init__(
       self,
@@ -17,7 +34,6 @@ class Histogram (Algorithm):
       is_distance_function = True,
       multiple_probe_scoring = 'average'
   ):
-    """Initializes the local Gabor binary pattern histogram sequence tool"""
 
     # call base class constructor
     Algorithm.__init__(
@@ -50,7 +66,19 @@ class Histogram (Algorithm):
 
 
   def enroll(self, enroll_features):
-    """Enrolling model by taking the average of all features"""
+    """enroll(enroll_features) -> model
+
+    Enrolls a model by taking the average of all histograms.
+
+    enroll_features : [1D or 2D :py:class:`numpy.ndarray`]
+      The histograms that should be averaged.
+      Histograms can be specified sparse (2D) or non-sparse (1D)
+
+    **Returns:**
+
+    model : 1D or 2D :py:class:`numpy.ndarray`
+      The averaged histogram, sparse  (2D) or non-sparse (1D).
+    """
     assert len(enroll_features)
     sparse = self._is_sparse(enroll_features[0])
     [self._check_feature(feature, sparse) for feature in enroll_features]
@@ -87,8 +115,44 @@ class Histogram (Algorithm):
     return model
 
 
+  def read_probe(self, probe_file):
+    """read_probe(probe_file) -> probe
+
+    Reads the probe feature from the given file.
+
+    **Parameters:**
+
+    probe_file : str or :py:class:`bob.io.base.HDF5File`
+      The file (open for reading) or the name of an existing file to read from.
+
+    **Returns:**
+
+    probe : 1D or 2D :py:class:`numpy.ndarray`
+      The probe histogram read from the given file, sparse (2D) or non-sparse (1D).
+    """
+    return bob.bio.base.load(probe_file)
+
+
   def score(self, model, probe):
-    """Computes the score using the specified histogram measure; returns a similarity value (bigger -> better)"""
+    """score(model, probe) -> score
+
+    Computes the score of the probe and the model using the desired histogram distance function.
+    The resulting score is the negative distance, if ``is_distance_function = True``.
+    Both sparse and non-sparse models and probes are accepted, but their sparseness must agree.
+
+    **Parameters:**
+
+    model : 1D or 2D :py:class:`numpy.ndarray`
+      The model enrolled by the :py:meth:`enroll` function.
+
+    probe : 1D or 2D :py:class:`numpy.ndarray`
+      The probe read by the :py:meth:`read_probe` function.
+
+    **Returns:**
+
+    score : float
+      The resulting similarity score.
+    """
     sparse = self._is_sparse(probe)
     self._check_feature(model, sparse)
     self._check_feature(probe, sparse)
@@ -98,3 +162,14 @@ class Histogram (Algorithm):
       return self.factor * self.distance_function(model[0,:], model[1,:], probe[0,:], probe[1,:])
     else:
       return self.factor * self.distance_function(model, probe)
+
+
+  # overwrite functions to avoid them being documented.
+  def train_projector(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def load_projector(*args, **kwargs) : pass
+  def project(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def write_feature(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def read_feature(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def train_enroller(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def load_enroller(*args, **kwargs) : pass
+  def score_for_multiple_models(*args, **kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
diff --git a/bob/bio/face/extractor/DCTBlocks.py b/bob/bio/face/extractor/DCTBlocks.py
index c57ed7d4..ea9bf6c8 100644
--- a/bob/bio/face/extractor/DCTBlocks.py
+++ b/bob/bio/face/extractor/DCTBlocks.py
@@ -11,7 +11,34 @@ from bob.bio.base.extractor import Extractor
 
 class DCTBlocks (Extractor):
 
-  """Extracts DCT blocks"""
+  """Extracts *Discrete Cosine Transform* (DCT) features from (overlapping) image blocks.
+  These features are based on the :py:class:`bob.ip.base.DCTFeatures` class.
+  The default parametrization is the one that performed best on the BANCA database in [WMM+11]_.
+
+  Usually, these features are used in combination with the algorithms defined in :ref:`bob.bio.gmm <bob.bio.gmm>`.
+  However, you can try to use them with other algorithms.
+
+  **Parameters:**
+
+  block_size : int or (int, int)
+    The size of the blocks that will be extracted.
+    This parameter might be either a single integral value, or a pair ``(block_height, block_width)`` of integral values.
+
+  block_overlap : int or (int, int)
+    The overlap of the blocks in vertical and horizontal direction.
+    This parameter might be either a single integral value, or a pair ``(block_overlap_y, block_overlap_x)`` of integral values.
+    It needs to be smaller than the ``block_size``.
+
+  number_of_dct_coefficients : int
+    The number of DCT coefficients to use.
+    The actual number will be one less since the first DCT coefficient (which should be 0, if normalization is used) will be removed.
+
+  normalize_blocks : bool
+    Normalize the values of the blocks to zero mean and unit standard deviation before extracting DCT coefficients.
+
+  normalize_dcts : bool
+    Normalize the values of the DCT components to zero mean and unit standard deviation. Default is ``True``.
+  """
   def __init__(
       self,
       block_size = 12,    # 1 or two parameters for block size
@@ -48,10 +75,28 @@ class DCTBlocks (Extractor):
     self.dct_features = bob.ip.base.DCTFeatures(number_of_dct_coefficients, block_size, block_overlap, normalize_blocks, normalize_dcts)
 
   def __call__(self, image):
-    """Computes and returns DCT blocks for the given input image"""
+    """__call__(image) -> feature
+
+    Computes and returns DCT blocks for the given input image.
+
+    **Parameters:**
+
+    image : 2D :py:class:`numpy.ndarray` (floats)
+      The image to extract the features from.
+
+    **Returns:**
+
+    feature : 2D :py:class:`numpy.ndarray` (floats)
+      The extracted DCT features for all blocks inside the image.
+      The first index is the block index, while the second index is the DCT coefficient.
+    """
     assert isinstance(image, numpy.ndarray)
     assert image.ndim == 2
     assert image.dtype == numpy.float64
 
     # Computes DCT features
     return self.dct_features(image)
+
+  # re-define the train function to get it non-documented
+  def train(*args,**kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def load(*args,**kwargs) : pass
diff --git a/bob/bio/face/extractor/Eigenface.py b/bob/bio/face/extractor/Eigenface.py
index 70530db9..c0dc456c 100644
--- a/bob/bio/face/extractor/Eigenface.py
+++ b/bob/bio/face/extractor/Eigenface.py
@@ -99,12 +99,12 @@ class Eigenface (Extractor):
 
     **Parameters:**
 
-    image : 2D :py:class:`numpy.ndarray`
+    image : 2D :py:class:`numpy.ndarray` (floats)
       The image to extract the eigenface feature from.
 
     **Returns:**
 
-    feature : 1D :py:class:`numpy.ndarray`
+    feature : 1D :py:class:`numpy.ndarray` (floats)
       The extracted eigenface feature.
     """
     self._check_data(image)
diff --git a/bob/bio/face/extractor/GridGraph.py b/bob/bio/face/extractor/GridGraph.py
index c8a2a9ac..c4a505f3 100644
--- a/bob/bio/face/extractor/GridGraph.py
+++ b/bob/bio/face/extractor/GridGraph.py
@@ -10,7 +10,42 @@ import math
 from bob.bio.base.extractor import Extractor
 
 class GridGraph (Extractor):
-  """Extracts grid graphs from the images"""
+  """Extracts Gabor jets in a grid structure [GHW12]_ using functionalities from :ref:`bob.ip.gabor <bob.ip.gabor>`.
+
+  The grid can be either aligned to the eye locations (in which case the grid might be rotated), or a fixed grid graph can be extracted.
+
+  In the first case, the eye locations in the aligned image need to be provided.
+  Additionally, the number of node between, along, above and below the eyes need to be specified.
+
+  In the second case, a regular grid graph is created, by specifying the distance between two nodes.
+  Additionally, the coordinate of the first node can be provided, which otherwise is calculated to evenly fill the whole image with nodes.
+
+  **Parameters:**
+
+  gabor_directions, gabor_scales, gabor_sigma, gabor_maximum_frequency, gabor_frequency_step, gabor_power_of_k, gabor_dc_free
+    The parameters of the Gabor wavelet family, with its default values set as given in [WFK97]_.
+    Please refer to :py:class:`bob.ip.gabor.Transform` for the documentation of these values.
+
+  normalize_gabor_jets : bool
+    Perform Gabor jet normalization during extraction?
+
+  eyes : dict or ``None``
+    If specified, the grid setup will be aligned to the eye positions {'reye' : (re_y, re_x), 'leye' : (le_y, le_x)}.
+    Otherwise a regular grid graph will be extracted.
+
+  nodes_between_eyes, nodes_along_eyes, nodes_above_eyes, nodes_below_eyes : int
+    Only used when ``eyes`` is not ``None``.
+    The number of nodes to be placed between, along, above or below the eyes.
+    The final number of nodes will be: :math:`(above + below + 1) \\times (between + 2*along + 2)`.
+
+  node_distance : (int, int)
+    Only used when ``eyes`` is ``None``.
+    The distance between two nodes in the regular grid graph.
+
+  first_node : (int, int) or ``None``
+    Only used when ``eyes`` is ``None``.
+    If ``None``, it is calculated automatically to equally cover the whole image.
+  """
 
   def __init__(
       self,
@@ -94,7 +129,11 @@ class GridGraph (Extractor):
     self.trafo_image = None
 
   def _extractor(self, image):
-    """Creates an extractor based on the given image."""
+    """Creates an extractor based on the given image.
+    If an aligned graph was specified in the constructor, it is simply returned.
+    Otherwise the resolution of the given image is used to create a graph extractor.
+    If the ``first_node`` was not specified, it is calculated automatically.
+    """
 
     if self.trafo_image is None or self.trafo_image.shape[1:3] != image.shape:
       # create trafo image
@@ -103,9 +142,11 @@ class GridGraph (Extractor):
     if self._aligned_graph is not None:
       return self._aligned_graph
 
+    # check if a new extractor needs to be created
     if self._last_image_resolution != image.shape:
       self._last_image_resolution = image.shape
       if self.first_node is None:
+        # automatically compute the first node
         first_node = [0,0]
         for i in (0,1):
           offset = int((image.shape[i] - int(image.shape[i]/self.node_distance[i])*self.node_distance[i]) / 2)
@@ -114,6 +155,7 @@ class GridGraph (Extractor):
           first_node[i] = offset
       else:
         first_node = self.first_node
+      # .. and the last node
       last_node = tuple([int(image.shape[i] - max(first_node[i],1)) for i in (0,1)])
 
       # take the specified nodes
@@ -127,6 +169,21 @@ class GridGraph (Extractor):
 
 
   def __call__(self, image):
+    """__call__(image) -> feature
+
+    Returns a list of Gabor jets extracted from the given image.
+
+    **Parameters:**
+
+    image : 2D :py:class:`numpy.ndarray` (floats)
+      The image to extract the features from.
+
+    **Returns:**
+
+    feature : [:py:class:`bob.ip.gabor.Jet`]
+      The list of Gabor jets extracted from the image.
+      The 2D location of the jet's nodes is not returned.
+    """
     assert image.ndim == 2
     assert isinstance(image, numpy.ndarray)
     assert image.dtype == numpy.float64
@@ -135,7 +192,6 @@ class GridGraph (Extractor):
 
     # perform Gabor wavelet transform
     self.gwt.transform(image, self.trafo_image)
-
     # extract face graph
     jets = extractor.extract(self.trafo_image)
 
@@ -146,9 +202,39 @@ class GridGraph (Extractor):
     # return the extracted face graph
     return jets
 
+
   def write_feature(self, feature, feature_file):
+    """Writes the feature extracted by the :py:meth:`__call__` function to the given file.
+
+    **Parameters:**
+
+    feature : [:py:class:`bob.ip.gabor.Jet`]
+      The list of Gabor jets extracted from the image.
+
+    feature_file : str or :py:class:`bob.io.base.HDF5File`
+      The name of the file or the file opened for writing.
+    """
     feature_file = feature_file if isinstance(feature_file, bob.io.base.HDF5File) else bob.io.base.HDF5File(feature_file, 'w')
     bob.ip.gabor.save_jets(feature, feature_file)
 
+
   def read_feature(self, feature_file):
+    """read_feature(feature_file) -> feature
+
+    Reads the feature written by the :py:meth:`write_feature` function from the given file.
+
+    **Parameters:**
+
+    feature_file : str or :py:class:`bob.io.base.HDF5File`
+      The name of the file or the file opened for reading.
+
+    **Returns:**
+
+    feature : [:py:class:`bob.ip.gabor.Jet`]
+      The list of Gabor jets read from file.
+    """
     return bob.ip.gabor.load_jets(bob.io.base.HDF5File(feature_file))
+
+  # re-define the train function to get it non-documented
+  def train(*args,**kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def load(*args,**kwargs) : pass
diff --git a/bob/bio/face/extractor/LGBPHS.py b/bob/bio/face/extractor/LGBPHS.py
index 35d60bda..bc4ec3c3 100644
--- a/bob/bio/face/extractor/LGBPHS.py
+++ b/bob/bio/face/extractor/LGBPHS.py
@@ -11,7 +11,47 @@ import math
 from bob.bio.base.extractor import Extractor
 
 class LGBPHS (Extractor):
-  """Extractor for local Gabor binary pattern histogram sequences"""
+  """Extracts *Local Gabor Binary Pattern Histogram Sequences* (LGBPHS) [ZSG+05]_ from the images, using functionality from :ref:`bob.ip.base <bob.ip.base>` and :ref:`bob.ip.gabor <bob.ip.gabor>`.
+
+  The block size and the overlap of the blocks can be varied, as well as the parameters of the Gabor wavelet (:py:class:`bob.ip.gabor.Transform`) and the LBP extractor (:py:class:`bob.ip.base.LBP`).
+
+  **Parameters:**
+
+  block_size : int or (int, int)
+    The size of the blocks that will be extracted.
+    This parameter might be either a single integral value, or a pair ``(block_height, block_width)`` of integral values.
+
+  block_overlap : int or (int, int)
+    The overlap of the blocks in vertical and horizontal direction.
+    This parameter might be either a single integral value, or a pair ``(block_overlap_y, block_overlap_x)`` of integral values.
+    It needs to be smaller than the ``block_size``.
+
+  gabor_directions, gabor_scales, gabor_sigma, gabor_maximum_frequency, gabor_frequency_step, gabor_power_of_k, gabor_dc_free
+    The parameters of the Gabor wavelet family, with its default values set as given in [WFK97]_.
+    Please refer to :py:class:`bob.ip.gabor.Transform` for the documentation of these values.
+
+  use_gabor_phases : bool
+    Extract also the Gabor phases (inline) and not only the absolute values.
+    In this case, Extended LGBPHS features [ZSQ+09]_ will be extracted.
+
+  lbp_radius, lbp_neighbor_count, lbp_uniform, lbp_circular, lbp_rotation_invariant, lbp_compare_to_average, lbp_add_average
+    The parameters of the LBP.
+    Please see :py:class:`bob.ip.base.LBP` for the documentation of these values.
+
+    .. note::
+       The default values are as given in [ZSG+05]_ (the values of [ZSQ+09]_ might differ).
+
+  sparse_histogram : bool
+    If specified, the histograms will be handled in a sparse way.
+    This reduces the size of the extracted features, but the computation will take longer.
+
+    .. note::
+       Sparse histograms are only supported, when ``split_histogram = None``.
+
+  split_histogram : one of ``('blocks', 'wavelets', 'both')`` or ``None``
+    Defines, how the histogram sequence is split.
+    This could be interesting, if the histograms should be used in another way as simply concatenating them into a single histogram sequence (the default).
+  """
 
   def __init__(
       self,
@@ -39,8 +79,6 @@ class LGBPHS (Extractor):
       sparse_histogram = False,
       split_histogram = None
   ):
-    """Initializes the local Gabor binary pattern histogram sequence tool chain with the given file selector object"""
-
     # call base class constructor
     Extractor.__init__(
         self,
@@ -137,7 +175,22 @@ class LGBPHS (Extractor):
 
 
   def __call__(self, image):
-    """Extracts the local Gabor binary pattern histogram sequence from the given image"""
+    """__call__(image) -> feature
+
+    Extracts the local Gabor binary pattern histogram sequence from the given image.
+
+    **Parameters:**
+
+    image : 2D :py:class:`numpy.ndarray` (floats)
+      The image to extract the features from.
+
+    **Returns:**
+
+    feature : 2D or 3D :py:class:`numpy.ndarray` (floats)
+      The extracted local Gabor binary pattern histogram sequence.
+      Its shape depends on the ``sparse_histogram`` and ``split_histogram`` parameters.
+    """
+
     assert image.ndim == 2
     assert isinstance(image, numpy.ndarray)
     assert image.dtype == numpy.float64
@@ -193,3 +246,7 @@ class LGBPHS (Extractor):
 
     # return the concatenated list of all histograms
     return self._sparsify(lgbphs_array)
+
+  # re-define the train function to get it non-documented
+  def train(*args,**kwargs) : raise NotImplementedError("This function is not implemented and should not be called.")
+  def load(*args,**kwargs) : pass
diff --git a/bob/bio/face/preprocessor/INormLBP.py b/bob/bio/face/preprocessor/INormLBP.py
index 2e3edfe1..eecfa80a 100644
--- a/bob/bio/face/preprocessor/INormLBP.py
+++ b/bob/bio/face/preprocessor/INormLBP.py
@@ -39,7 +39,7 @@ class INormLBP (Base):
 
     """Parameters of the constructor of this preprocessor:
 
-    face_cropper : str or `bob.bio.face.preprocessor.FaceCrop` or `bob.bio.face.preprocessor.FaceDetect`
+    face_cropper : str or :py:class:`bob.bio.face.preprocessor.FaceCrop` or :py:class:`bob.bio.face.preprocessor.FaceDetect` or ``None``
       The face image cropper that should be applied to the image.
       It might be specified as a registered resource, a configuration file, or an instance of a preprocessor.
 
@@ -88,16 +88,32 @@ class INormLBP (Base):
     self.cropper = load_cropper(face_cropper)
 
 
-  def i_norm(self, image):
-    """Computes the I-Norm-LBP normalization on the given image"""
-    # perform normalization
-    return self.lbp_extractor(image)
+  def __call__(self, image, annotations = None):
+    """__call__(image, annotations = None) -> face
 
+    Aligns the given image according to the given annotations.
 
-  def __call__(self, image, annotations = None):
-    """Crops the face using the specified face cropper and extracts the LBP features from the given image."""
+    First, the desired color channel is extracted from the given image.
+    Afterward, the face is eventually cropped using the ``face_cropper`` specified in the constructor.
+    Then, the image is photometrically enhanced by extracting LBP features [HRM06]_.
+    Finally, the resulting face is converted to the desired data type.
+
+    **Parameters:**
+
+    image : 2D or 3D :py:class:`numpy.ndarray`
+      The face image to be processed.
+
+    annotations : dict or ``None``
+      The annotations that fit to the given image.
+      Might be ``None``, when the ``face_cropper`` is ``None`` or of type :py:class:`FaceDetect`.
+
+    **Returns:**
+
+    face : 2D :py:class:`numpy.ndarray`
+      The cropped and photometrically enhanced face.
+    """
     image = self.color_channel(image)
     if self.cropper is not None:
       image = self.cropper.crop_face(image, annotations)
-    image = self.i_norm(image)
+    image = self.lbp_extractor(image)
     return self.data_type(image)
diff --git a/bob/bio/face/test/test_databases.py b/bob/bio/face/test/test_databases.py
index 70497ac8..6afcb84e 100644
--- a/bob/bio/face/test/test_databases.py
+++ b/bob/bio/face/test/test_databases.py
@@ -26,6 +26,10 @@ import bob.bio.base
 def _check_database(database, groups = ('dev',), protocol = None, training_depends = False, models_depend = False):
   assert isinstance(database, bob.bio.base.database.DatabaseBob)
 
+  # load the directories
+  if 'HOME' in os.environ:
+    database.replace_directories(os.path.join(os.environ['HOME'], '.bob_bio_databases.txt'))
+
   if protocol: database.protocol = protocol
   assert len(database.all_files()) > 0
   assert len(database.training_files('train_extractor')) > 0
diff --git a/doc/conf.py b/doc/conf.py
index 8e77047f..4915c472 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -58,7 +58,7 @@ source_suffix = '.rst'
 master_doc = 'index'
 
 # General information about the project.
-project = u'Bob Example Project'
+project = u'Run Face Recognition Experiments with bob.bio'
 import time
 copyright = u'%s, Idiap Research Institute' % time.strftime('%Y')
 
@@ -187,7 +187,7 @@ html_favicon = 'img/favicon.ico'
 #html_file_suffix = None
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'bob_example_project_doc'
+htmlhelp_basename = 'bob_bio_face_doc'
 
 
 # -- Options for LaTeX output --------------------------------------------------
@@ -201,7 +201,7 @@ latex_font_size = '10pt'
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'bob_example_project.tex', u'Bob',
+  ('index', 'bob_bio_face.tex', u'Bob',
    u'Biometrics Group, Idiap Research Institute', 'manual'),
 ]
 
@@ -236,13 +236,13 @@ rst_epilog = ''
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    ('index', 'bob.example.project', u'Bob Example Project Documentation', [u'Idiap Research Institute'], 1)
+    ('index', 'bob.bio.face', u'Run Face Recognition Experiments with bob.bio', [u'Idiap Research Institute'], 1)
 ]
 
 # Default processing flags for sphinx
 autoclass_content = 'both'
 autodoc_member_order = 'bysource'
-autodoc_default_flags = ['members', 'undoc-members', 'inherited-members', 'show-inheritance']
+autodoc_default_flags = ['members', 'inherited-members', 'show-inheritance']
 
 # For inter-documentation mapping:
 from bob.extension.utils import link_documentation
-- 
GitLab