From 612b840ed4bca3bd098ba8d1e8949c4b2d65a0ee Mon Sep 17 00:00:00 2001
From: Manuel Guenther <manuel.guenther@idiap.ch>
Date: Tue, 4 Sep 2012 14:40:40 +0200
Subject: [PATCH] Added dependency to sphinx; updated documentation.

---
 .gitignore           |   2 +
 README.rst           |  34 +++++-----
 buildout.cfg         |  21 +++---
 doc/.gitignore       |   2 +
 doc/examples.rst     | 155 ++++++++++++++++++-------------------------
 doc/installation.rst |  75 +++++++++------------
 setup.py             |  17 +----
 7 files changed, 131 insertions(+), 175 deletions(-)
 create mode 100644 doc/.gitignore

diff --git a/.gitignore b/.gitignore
index 29c8744..158b185 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 *.pyc
+*.png
 .installed.cfg
 .mr.developer.cfg
 eggs/
@@ -6,4 +7,5 @@ develop-eggs/
 bob.example.faceverify.egg-info/
 src/
 bin/
+dist/
 Database
diff --git a/README.rst b/README.rst
index c25d27c..c553949 100644
--- a/README.rst
+++ b/README.rst
@@ -1,35 +1,37 @@
 Face verification using Bob
 ===========================
 
-This example demonstrates how to use Bob to build different face verification
-systems. It includes examples with three different complexities:
+.. note::
+  If you are reading this page through our GitHub portal and not through PyPI, note the development tip of the package may not be stable or become unstable in a matter of moments.
+
+  Go to http://pypi.python.org/pypi/bob.example.faceverify to download the latest stable version of this package.
+
+This example demonstrates how to use Bob to build different face verification systems.
+It includes examples with three different complexities:
 
 * A simple eigenface based example
 * An example using Gabor jets and a Gabor-phase based similarity function
 * An example building an UBM/GMM model on top of DCT blocks.
 
-To use this example, you will require Bob and the AT&T database. If you do not
-have a Bob version yet, you can get it from `here <http://www.idiap.ch/software/bob/>`_.
+To use this example, you will require Bob and the AT&T database.
+If you do not have a Bob version yet, you can get it from http://www.idiap.ch/software/bob.
 
-If you already have installed Bob, please make sure that you have at least
-the version 1.0.5, otherwise the example won't work.
+If you already have installed Bob, please make sure that you have the version 1.0.5, otherwise the example won't work.
 
-The AT&T image database is quite small, but sufficient to show how the face
-verification methods work. Still, the results may not be meaningful. One good
-thing about the AT&T database is that it is freely available. You can download
-it from `here <http://www.cl.cam.ac.uk/research/dtg/attarchive/facedatabase.html>`_.
+The AT&T image database is quite small, but sufficient to show how the face verification methods work.
+Still, the results may not be meaningful.
+One good thing about the AT&T database is that it is freely available.
+You can download it from http://www.cl.cam.ac.uk/research/dtg/attarchive/facedatabase.html.
 
 
-Finally, to download this package, please open a shell, go to a directory of
-your choice and call::
+Finally, to download this package, please open a shell, go to a directory of your choice and call::
 
   $ pip install bob.example.faceverify
 
-To generate the Documentation, please further go into the "doc" directory and
-call::
+To generate the Documentation, please further go into the "doc" directory and call::
 
   $ make html
   $ firefox html/index.html
 
-(or use any other browser of your choice). After you did this, please read the
-documentation and try to execute the examples.
+(or use any other browser of your choice).
+After you did this, please read the documentation and try to execute the examples.
diff --git a/buildout.cfg b/buildout.cfg
index 71168d3..d86bf8c 100644
--- a/buildout.cfg
+++ b/buildout.cfg
@@ -1,26 +1,27 @@
 ; vim: set fileencoding=utf-8 :
-; Andre Anjos <andre.anjos@idiap.ch>
-; Mon 16 Apr 08:29:18 2012 CEST
+; Manuel Guenther <Manuel.Guenther@idiap.ch>
+; Tue Sep  4 14:34:37 CEST 2012
 
-; This is the configuration file for buildout. It is a windows-init style file
-; divided into sections. This is probably the most minimalistic example you can
-; have.
-;
-; The first section of this file defines to which other sections buildout will
-; look at for information. We only have 1 other section (or part) to look at,
-; the 'python' section.
+; This is the configuration file for buildout.
 
 [buildout]
 parts = external python
 develop = .
 
-eggs = bob
+; required packages: sphinx (to generate the documentation), bob, and our package.
+eggs = sphinx
+       bob
        bob.example.faceverify
 
+; This defines the installation directory of Bob.
+; The current setup should work fine for use at Idiap.
+; If you are not at Idiap, and bob is not installed in the default location,
+; please modify the egg-directories accordingly.
 [external]
 recipe = xbob.buildout:external
 egg-directories = /idiap/group/torch5spro/releases/bob-1.0.5/install/linux-x86_64-release/lib
 
+
 [python]
 recipe = zc.recipe.egg
 interpreter = python
diff --git a/doc/.gitignore b/doc/.gitignore
new file mode 100644
index 0000000..fd8eeba
--- /dev/null
+++ b/doc/.gitignore
@@ -0,0 +1,2 @@
+doctrees/
+html/
diff --git a/doc/examples.rst b/doc/examples.rst
index 1b99fb0..e44b9a8 100644
--- a/doc/examples.rst
+++ b/doc/examples.rst
@@ -2,21 +2,18 @@
  Running the examples
 =====================
 
-To run the examples, just call the scripts from within the ``bin`` directory, 
-e.g.:
+To run the examples, just call the scripts from within the ``bin`` directory, e.g.:
 
 .. code-block:: sh
 
   $ bin/eigenface.py
-  
-If you installed the image database in another folder than ``Database``, please
-give this directrory as parameter to the script, e.g.:
+
+If you installed the image database in another folder than ``Database``, please give this directory as a parameter to the script, e.g.:
 
 .. code-block:: sh
 
   $ bin/eigenface.py <AT&T_DATABASE_DIR>
 
-
 There are three example scripts:
 
 .. code-block:: sh
@@ -25,34 +22,28 @@ There are three example scripts:
   $ bin/gabor_phase.py
   $ bin/dct_ubm.py
 
-that perform more or less complicated face verification experiments. Each 
-experiment creates an ROC curve that contains the final verification result of
-the test. The generated files will be ``eigenface.png``, ``gabor_phase.png``, 
-and ``dct_ubm.png``.
+that perform more or less complicated face verification experiments.
+Each experiment creates an ROC curve that contains the final verification result of the test.
+The generated files will be ``eigenface.png``, ``gabor_phase.png``, and ``dct_ubm.png``.
 
-Since the complexity of the algorithms increase the expected execution time of 
-them differ a lot. While the eigenface example should be finished in a couple of
-seconds, the Gabor phase example could take some minutes, and the UBM/GMM model
-needs in the order of half an hour to compute.
+Since the complexity of the algorithms increases, their expected execution times differ a lot.
+While the eigenface example should be finished in a couple of seconds, the Gabor phase example could take some minutes, and the UBM/GMM model needs in the order of half an hour to compute.
 
 .. note::
 
-  The example code that is presented here differ slightly from the code in the
-  source files. Here, only the concepts of the functions should be clarified,
-  while the source files contain code that is better arranged and computes
-  faster.
+  The example code that is presented here differs slightly from the code in the source files.
+  Here, only the concepts of the functions should be clarified, while the source files contain code that is better arranged and computes faster.
 
 
 The eigenface example
 ~~~~~~~~~~~~~~~~~~~~~
-The eigenface example follows the work-flow that is presented in the original
-paper *Eigenfaces for Recognition* [TP91]_ by Turk and Pentland. First, it 
-creates an object to query the database:
+The eigenface example follows the work-flow that is presented in the original paper *Eigenfaces for Recognition* [TP91]_ by Turk and Pentland.
+First, it creates an object to query the database:
 
 .. code-block:: python
 
   >>> atnt_db = bob.db.atnt.Database()
-  
+
 For training the projection matrix, the training images need to be read:
 
 .. code-block:: python
@@ -60,9 +51,8 @@ For training the projection matrix, the training images need to be read:
   >>> training_image_files = atnt_db.files(groups = 'train', ...)
   >>> for filename in training_image_files.values():
   ...   training_image = bob.io.load(filename)
-  
-Since the images are already aligned to the eye positions, they can simply be
-linearized (converted into one long vector) and put into an ``bob.io.ArraySet``:
+
+Since the images are already aligned to the eye positions, they can simply be linearized (converted into one long vector) and put into an ``bob.io.ArraySet``:
 
 .. code-block:: python
 
@@ -77,11 +67,9 @@ which is used to train a ``bob.machine.LinearMachine``:
   >>> pca_trainer = bob.trainer.SVDPCATrainer()
   >>> pca_machine, eigen_values = pca_trainer.train(training_set)
 
-For some distance functions, the eigenvalues are needed, but in our example we
-just ignore them.
+For some distance functions, the eigenvalues are needed, but in our example we just ignore them.
 
-After training, the model and probe images are loaded, linearized, and projected
-into the eigenspace using the trained ``pca_machine``:
+After training, the model and probe images are loaded, linearized, and projected into the eigenspace using the trained ``pca_machine``:
 
 .. code-block:: python
 
@@ -95,8 +83,7 @@ into the eigenspace using the trained ``pca_machine``:
   ...   probe_image = bob.io.load(filename)
   ...   probe_feature = pca_machine(probe_image.flatten())
 
-To compute the verification result, each model feature is compared to each probe
-feature by computing the Euclidean distance:
+To compute the verification result, each model feature is compared to each probe feature by computing the Euclidean distance:
 
 .. code-block:: python
 
@@ -104,14 +91,13 @@ feature by computing the Euclidean distance:
   ...  for probe_feature in probe_features:
   ...    score = bob.math.euclidean_distance(model_feature, probe_feature)
 
-The results are divided into a list of positive scores (model and probe are from
-the same identity) and a a list of negative scores (identities of model and
-probe differ). Using these lists, the ROC curve is plotted:
+The results are divided into a list of positive scores (model and probe are from the same identity) and a list of negative scores (identities of model and probe differ).
+Using these lists, the ROC curve is plotted:
 
 .. code-block:: python
 
   >>> bob.measure.plot.roc(negatives, positives)
-  
+
 .. image:: eigenface.png
   :scale: 70 %
 
@@ -126,34 +112,31 @@ The expected result is: FAR 83.6% and FRR 83.6% at distance threshold 2048.9
 
 .. note::
 
-  Computing eigenfaces with such a low amount of training data is usually not an 
-  excellent idea. Hence, the performance in this example is extremely poor.
+  Computing eigenfaces with such a low amount of training data is usually not an excellent idea.
+  Hence, the performance in this example is extremely poor.
 
 
 Gabor jet comparisons
 ~~~~~~~~~~~~~~~~~~~~~
-A better face verification example uses Gabor jet features [WFKM97]_ . In this 
-example we do not define a face graph, but instead we use the Gabor jets at all
-positions in the image. To do that, we define:
+A better face verification example uses Gabor jet features [WFKM97]_ .
+In this example we do not define a face graph, but instead we use the Gabor jets at all positions in the image.
+To do that, we define:
 
 .. code-block:: python
 
   >>> graph_machine = bob.machine.GaborGraphMachine((0,0), (111,91), (1,1))
-  
-that will create Gabor graphs with node positions from (0,0) to (111,91) with
-step size (1,1), i.e., a tight Gabor grid graph covering the whole image.
+
+that will create Gabor graphs with node positions from (0,0) to (111,91) with step size (1,1), i.e., a tight Gabor grid graph covering the whole image.
 
 .. note::
 
-  The resolution of the images in the AT&T database is 92x112. Of course, there
-  are ways to automatically get the size of the images, but for brevity we 
-  hard-coded the resolution of the images.
+  The resolution of the images in the AT&T database is 92x112.
+  Of course, there are ways to automatically get the size of the images, but for brevity we hard-coded the resolution of the images.
 
 .. note::
 
-  The Gabor graph extraction does not require a training stage. Therefore, in 
-  opposition to the eigenface example, the training images are not used in this
-  example.
+  The Gabor graph extraction does not require a training stage.
+  Therefore, in opposition to the eigenface example, the training images are not used in this example.
 
 Now, the Gabor graph features can be extracted from the model and probe images:
 
@@ -171,10 +154,9 @@ Now, the Gabor graph features can be extracted from the model and probe images:
   ...   # ... some steps to create the Gabor jet image ...
   ...   graph_machine(jet_image, probe_feature)
 
-To compare the Gabor graphs, several methods can be applied. Here, we chose to
-compute the similarity of two graphs as the average of corresponding Gabor jet
-similarities. Again, many choices for the Gabor jet comparison exist, here we
-take the novel Gabor phase based similarity function [GHW12]_:
+To compare the Gabor graphs, several methods can be applied.
+Here, we chose to compute the similarity of two graphs as the average of corresponding Gabor jet similarities.
+Again, many choices for the Gabor jet comparison exist, here we take the novel Gabor phase based similarity function [GHW12]_:
 
 .. code-block:: python
 
@@ -182,23 +164,21 @@ take the novel Gabor phase based similarity function [GHW12]_:
   ...  for probe_feature in probe_features:
   ...    score = graph_machine.similarity(model_feature, probe_feature, bob.machine.DisparityCorrectedPhaseDifference())
 
-The evaluation is identical to the evaluation in the eigenface example. Since
-this method is much better for suited for small image databases, the resulting
-verification rates are much better. The expected ROC curve is:
+The evaluation is identical to the evaluation in the eigenface example.
+Since this method is much better suited for small image databases, the resulting verification rates are much better.
+The expected ROC curve is:
 
 .. image:: gabor_phase.png
   :scale: 70 %
 
-while the expected verification result is: FAR 22% and FRR 22% at distance 
-threshold 0.1799
+while the expected verification result is: FAR 22% and FRR 22% at distance threshold 0.1799
 
 
 The UBM/GMM modeling of DCT Blocks
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The last example shows a quite complicated, but very successful algorithm. The
-first step is the feature extraction of the training image features and the 
-collection of them in a **bob.io.Arrayset**. In this experiment we will use
-*Discrete Cosine Transform* (DCT) block features [MM09]_:
+The last example shows a quite complicated, but very successful algorithm.
+The first step is the feature extraction of the training image features and the collection of them in a **bob.io.Arrayset**.
+In this experiment we will use *Discrete Cosine Transform* (DCT) block features [MM09]_:
 
 .. code-block:: python
 
@@ -211,12 +191,10 @@ collection of them in a **bob.io.Arrayset**. In this experiment we will use
   ...   # ... create DCT extractor ...
   ...   training_dct_blocks = dct_extractor(training_image_blocks)
   ...   training_set.extend(training_dct_blocks)
- 
-With these training features, a *universal background model* (UBM) is computed
-[RQD00]_. It is a *Gaussian Mixture Model* (GMM) that holds information about
-the overall distribution of DCT features in facial images. The UBM model is 
-trained using a bob.trainer.KMeansTrainer to estimate the means of the
-Gaussians:
+
+With these training features, a *universal background model* (UBM) is computed [RQD00]_.
+It is a *Gaussian Mixture Model* (GMM) that holds information about the overall distribution of DCT features in facial images.
+The UBM model is trained using a bob.trainer.KMeansTrainer to estimate the means of the Gaussians:
 
 .. code-block:: python
 
@@ -233,26 +211,25 @@ Afterward, the UBM is initialized with the results of the k-means training:
   >>> [variances, weights] = kmeans_machine.get_variances_and_weights_for_each_cluster(training_set)
   >>> ubm.variances = variances
   >>> ubm.weights = weights
-  
+
 and a bob.trainer.ML_GMMTrainer is used to compute the actual UBM model:
 
 .. code-block:: python
 
   >>> trainer = bob.trainer.ML_GMMTrainer()
   >>> trainer.train(ubm, training_set)
-  
 
-After UBM training, the next step is the model enrollment. Here, a separate GMM
-model is generated by shifting the UBM towards the mean of the model features
-[MM09]_. For this purpose, we need to get the model images sorted by identity:
+
+After UBM training, the next step is the model enrollment.
+Here, a separate GMM model is generated by shifting the UBM towards the mean of the model features [MM09]_.
+For this purpose, we need to get the model images sorted by identity:
 
 .. code-block:: python
 
   >>> model_ids = atnt_db.client_ids(groups = 'test')
-  
-Now, we load the images for each identity, extract the DCT features and enroll a
-model for each identity. For that purpose, a **bob.trainer.MAP_GMMTrainer** is
-used:
+
+Now, we load the images for each identity, extract the DCT features and enroll a model for each identity.
+For that purpose, a **bob.trainer.MAP_GMMTrainer** is used:
 
 .. code-block:: python
 
@@ -269,8 +246,9 @@ used:
   ...   gmm_trainer.train(model_gmm, model_feature_set)
 
 
-Also the probe image need some processing. First, of course, the DCT features
-are extracted. Afterward, the statistics for each probe file are generated:
+Also the probe images need some processing.
+First, of course, the DCT features are extracted.
+Afterward, the statistics for each probe file are generated:
 
 .. code-block:: python
 
@@ -282,8 +260,7 @@ are extracted. Afterward, the statistics for each probe file are generated:
   ...   gmm_stats.init()
   ...   ubm.acc_statistics(probe_dct_blocks, probe_gmm_stats)
 
-Finally, the scores for the probe files are computed using the function 
-**bob.machine.linear_scoring**:
+Finally, the scores for the probe files are computed using the function **bob.machine.linear_scoring**:
 
 .. code-block:: python
 
@@ -291,8 +268,8 @@ Finally, the scores for the probe files are computed using the function
   ...  for probe_gmm_stats in probes:
   ...    score = bob.machine.linear_scoring([model_gmm], ubm, [probe_gmm_stats])[0,0]
 
-Again, the evaluation of the scores is identical to the previous examples. The
-expected ROC curve is:
+Again, the evaluation of the scores is identical to the previous examples.
+The expected ROC curve is:
 
 .. image:: dct_ubm.png
   :scale: 70 %
@@ -301,15 +278,13 @@ The expected result is: FAR 5% and FRR 5% at distance threshold 7640.9
 
 .. note::
 
-  The resulting ROC curve is not directly comparable to the ones from the other
-  experiments. This is due to the fact that here the model files are merged into
-  **one** GMM model **per identity**, whereas before each model file (**five per 
-  identity**) generated its own scores. Nonetheless, the verification results of 
-  the UBM/GMM model are impressive.
+  The resulting ROC curve is not directly comparable to the ones from the other experiments.
+  This is due to the fact that here the model files are merged into **one** GMM model **per identity**, whereas before each model file (**five per identity**) generated its own scores.
+  Nonetheless, the verification results of the UBM/GMM model are impressive.
 
 
-.. [TP91]   Matthew Turk and Alex Pentland. Eigenfaces for recognition. Journal of Cognitive Neuroscience, 3(1):71-86, 1991. 
+.. [TP91]   Matthew Turk and Alex Pentland. Eigenfaces for recognition. Journal of Cognitive Neuroscience, 3(1):71-86, 1991.
 .. [WFKM97] \L. Wiskott, J.-M. Fellous, N. Krüger and C.v.d. Malsburg. Face recognition by elastic bunch graph matching. IEEE Transactions on Pattern Analysis and Machine Intelligence, 19:775-779, 1997.
 .. [GHW12]  Manuel Günther, Dennis Haufe, Rolf P. Würtz. Face recognition with disparity corrected Gabor phase differences. in preparation
-.. [MM09]   Chris McCool and Sébastien Marcel. Parts-based face verification using local frequency bands. In proceedings of IEEE/IAPR international conference on biometrics. 2009. 
+.. [MM09]   Chris McCool and Sébastien Marcel. Parts-based face verification using local frequency bands. In proceedings of IEEE/IAPR international conference on biometrics. 2009.
 .. [RQD00]  D.A. Reynolds, T.F. Quatieri, and R.B. Dunn. Speaker verification using adapted gaussian mixture models. Digital Signal Processing, 10(1-3):19–41, 2000.
diff --git a/doc/installation.rst b/doc/installation.rst
index c3f1a25..8f9e6ad 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -4,80 +4,65 @@
 
 .. note::
 
-  To follow these instructions locally you will need a local copy of this
-  package. Start by cloning this project with something like::
+  To follow these instructions locally you will need a local copy of this package.
+  Start by cloning this project with something like:
 
-.. code-block:: sh
-
-  $ git clone https://github.com/bioidiap/bob.example.faceverify.git
-  $ cd bob.example.faceverify
+  .. code-block:: sh
 
+    $ pip install bob.example.faceverify
 
-Installation of the toolkit uses the `buildout <http://www.buildout.org/>`_
-build environment. You don't need to understand its inner workings to use this
-package. Here is a recipe to get you started (shell commands are marked with a
-``$`` signal):
+Installation of the toolkit uses the `buildout <http://www.buildout.org/>`_ build environment.
+You don't need to understand its inner workings to use this package.
+Here is a recipe to get you started (shell commands are marked with a ``$`` signal):
 
 .. code-block:: sh
-  
+
   $ python bootstrap.py
   $ ./bin/buildout
 
-These 2 commands should download and install all non-installed dependencies and
-get you a fully operational test and development environment.
+These 2 commands should download and install all non-installed dependencies and get you a fully operational test and development environment.
 
 .. note::
 
-  The python shell used in the first line of the previous command set
-  determines the python interpreter that will be used for all scripts developed
-  inside this package. Because this package makes use of `Bob
-  <http://idiap.github.com/bob>`_, you must make sure that the ``bootstrap.py``
-  script is called with the **same** interpreter used to build Bob, or
-  unexpected problems might occur.
+  The python shell used in the first line of the previous command set determines the python interpreter that will be used for all scripts developed inside this package.
+  Because this package makes use of `Bob <http://www.idiap.ch/software/bob>`_, you must make sure that the ``bootstrap.py`` script is called with the **same** interpreter used to build Bob, or unexpected problems might occur.
+
+  If Bob is installed by the administrator of your system, it is safe to consider it uses the default python interpreter.
+  In this case, the above 2 command lines should work as expected.
 
-  If Bob is installed by the administrator of your system, it is safe to
-  consider it uses the default python interpreter. In this case, the above 3
-  command lines should work as expected.
-  
 
 Use this example with Bob not installed globally
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-If your Bob version is not installed globally, this package provides a 
-configuration file *localbob.cfg* that has to be modified. Please search for the
-``bob.install.directory`` region and set the value according to your local Bob
-install directory. If you are at Idiap, you can simply choose one the existing
-directories.
+If your Bob version is not installed globally, you have to edit the *buildout.cfg* file.
+Please search for the ``egg-directories`` region and set the value according to your local Bob install directory.
+If you are at Idiap, you can simply choose the pre-set directories.
 
 
 Use Bob at Idiap
 ~~~~~~~~~~~~~~~~
-To get the example running nicely at Idiap, as noted above, ``bootstrap.py`` has
-to be executed with the correct python version. For Idiap, this is (currently):
+To get the example running nicely at Idiap, as noted above, ``bootstrap.py`` has to be executed with the correct python version.
+For Idiap, this is (currently):
 
 .. code-block:: sh
-  
-  $ /idiap/group/torch5spro/nightlies/externals/v2/linux-x86_64/bin/python2.6 bootstrap.py
-  $ ./bin/buildout -c localbob.cfg
+
+  $ /idiap/group/torch5spro/nightlies/externals/v3/ubuntu-10.04-x86_64/bin/python bootstrap.py
+  $ ./bin/buildout
 
 
 Downloading the test database
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The images that are required to run the test are not included in this package,
-but they are freely downloadable from the internet:
-
-http://www.cl.cam.ac.uk/research/dtg/attarchive/facedatabase.html
+The images that are required to run the test are not included in this package, but they are freely downloadable from http://www.cl.cam.ac.uk/research/dtg/attarchive/facedatabase.html
 
-Unpack the database in a directory that fits you. The easiest solution is to
-create a subdirectory ``Database`` in this package. If you decide to put the
-data somewhere else, please remember the image directory.
+Unpack the database in a directory that fits you.
+The easiest solution is to create a subdirectory ``Database`` in this package.
+If you decide to put the data somewhere else, please remember the image directory.
 
 .. note ::
 
-  If you are at Idiap, the AT&T database is located at 
-  /idiap/group/biometric/databases/orl. To ease up the usage of the examples, 
-  you can generate a link to the database:
-  
+  If you are at Idiap, the AT&T database is located at ``/idiap/group/biometric/databases/orl``.
+  To ease up the usage of the examples, you can generate a link to the database:
+
   .. code-block:: sh
-  
+
     $ ln -s /idiap/group/biometric/databases/orl Database
 
diff --git a/setup.py b/setup.py
index a282ce5..ed51d15 100644
--- a/setup.py
+++ b/setup.py
@@ -28,15 +28,13 @@ setup(
     # This is the basic information about your project. Modify all this
     # information before releasing code publicly.
     name='bob.example.faceverify',
-    version='0.1.1',
+    version='0.1.2',
     description='Example for using Bob to create face verification systems',
     url='http://pypi.python.org/pypi/bob.example.faceverify',
     license='GPLv3',
     author='Manuel Guenther',
     author_email='manuel.guenther@idiap.ch',
 
-    # If you have a better, long description of your package, place it on the
-    # 'doc' directory and then hook it here
     long_description=open('README.rst').read(),
 
     # This line is required for any distutils based packaging.
@@ -48,23 +46,14 @@ setup(
     # scripts of this package. Don't worry - You won't need administrative
     # privileges when using buildout.
     install_requires=[
-        "bob",      # base signal proc./machine learning library
+        "sphinx",                     # to generate the documentation
+        "bob >= 1.0.0, < 1.1.0",      # base signal proc./machine learning library
     ],
 
     # This entry defines which scripts you will have inside the 'bin' directory
     # once you install the package (or run 'bin/buildout'). The order of each
     # entry under 'console_scripts' is like this:
     #   script-name-at-bin-directory = module.at.your.library:function
-    #
-    # The module.at.your.library is the python file within your library, using
-    # the python syntax for directories (i.e., a '.' instead of '/' or '\').
-    # This syntax also omits the '.py' extension of the filename. So, a file
-    # installed under 'example/foo.py' that contains a function which
-    # implements the 'main()' function of particular script you want to have
-    # should be referred as 'example.foo:main'.
-    #
-    # In this simple example we will create a single program that will print
-    # the version of bob.
     entry_points={
       'console_scripts': [
         'eigenface.py = faceverify.eigenface:main',
-- 
GitLab