diff --git a/MANIFEST.in b/MANIFEST.in
index 02d873ed25791a695c1ea951d08b701c5b3853f5..f72ac250b47ac1b629372ba594975426a4c7b4a1 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,5 +1,6 @@
 include README.rst
 include bootstrap.py
 include buildout.cfg
-recursive-include doc *.rst
+recursive-include doc *.rst *.png conf.py
+include testdata/*
 
diff --git a/README.rst b/README.rst
index 27c21691087ff742a2821a848f83d6f6b31e3326..dd7e046e4435335363355d287cf01d31d457f830 100644
--- a/README.rst
+++ b/README.rst
@@ -59,7 +59,7 @@ Afterwards, please call::
   $ python bootstrap.py
   $ ./bin/buildout
 
-to generate the scripts that, amongst others, will run the face verification algorithms. For more details, please refer to the documentation, which you might create and open yourself by::
+to generate the scripts that, amongst others, will run the face verification algorithms. Please verify your installation by running the test cases. For more details, please refer to the documentation, which you might create and open yourself by::
 
   $ ./bin/sphinx-build doc sphinx
   $ firefox sphinx/index.html
diff --git a/buildout.cfg b/buildout.cfg
index a15009d69403fe9afc81532b3983333fe1f2e06c..f000e20be57568bef572259ea61f7828a523fd72 100644
--- a/buildout.cfg
+++ b/buildout.cfg
@@ -12,4 +12,3 @@ newest = false
 
 [scripts]
 recipe = xbob.buildout:scripts
-dependent-scripts = true
diff --git a/doc/installation.rst b/doc/installation.rst
index 69536108c75338f4f5f2358dd389b822f402a761..3d83c33ca2a7929fb39a028d2362c0c084c04652 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -58,3 +58,15 @@ If you decide to put the data somewhere else, please remember the image director
 
     $ ln -s /idiap/group/biometric/databases/orl Database
 
+
+Verify your installation
+~~~~~~~~~~~~~~~~~~~~~~~~
+To verify your installation, you might want to run the unit tests that are provided with this package.
+For this, the AT&T database is required to be either in the ``Database`` subdirectory of this package (see above), or that the ``ATNT_DATABASE_DIRECTORY`` environment variable points to your database directory.
+At Idiap, you might want to use:
+
+.. code-block:: sh
+
+  $ export ATNT_DATABASE_DIRECTORY=/idiap/group/biometric/databases/orl
+  $ bin/nosetests -v
+
diff --git a/faceverify/dct_ubm.py b/faceverify/dct_ubm.py
index e1e7dc45503fc7f0271df0c9e8b62be1d9799c9b..36389844fde97eb2f50df7fc78e86d44404d3395 100644
--- a/faceverify/dct_ubm.py
+++ b/faceverify/dct_ubm.py
@@ -7,7 +7,7 @@ from matplotlib import pyplot
 # This is the base directory where by default the AT&T images are found. You can
 # overwrite this  directory on the command line
 global ATNT_IMAGE_DIRECTORY
-ATNT_IMAGE_DIRECTORY = "Database"
+ATNT_IMAGE_DIRECTORY = os.environ['ATNT_DATABASE_DIRECTORY'] if 'ATNT_DATABASE_DIRECTORY' in os.environ else "Database"
 
 # The default file name extension of the AT&T images
 ATNT_IMAGE_EXTENSION = ".pgm"
@@ -85,15 +85,15 @@ def train(training_features):
   return ubm
 
 
-def enrol(model_features, ubm, gmm_trainer):
+def enroll(model_features, ubm, gmm_trainer):
   """Enrolls the GMM model for the given model features (which should stem from the same identity)"""
-  # create array set used for enroling
-  enrol_set = numpy.vstack(model_features.values())
+  # create array set used for enrolling
+  enroll_set = numpy.vstack(model_features.values())
   # create a GMM from the UBM
   gmm = bob.machine.GMMMachine(ubm)
 
   # train the GMM
-  gmm_trainer.train(gmm, enrol_set)
+  gmm_trainer.train(gmm, enroll_set)
 
   # return the resulting gmm
   return gmm
@@ -159,7 +159,7 @@ def main():
     for key, image in model_images.iteritems():
       models_for_current_id[key] = extract_feature(image)
     # enroll model for the current identity from these features
-    model = enrol(models_for_current_id, ubm, gmm_trainer)
+    model = enroll(models_for_current_id, ubm, gmm_trainer)
     models[model_id] = model
 
   #####################################################################
diff --git a/faceverify/eigenface.py b/faceverify/eigenface.py
index 0983606dbe1392e8f30ef6718d95f023e1ae5469..ae210cf893b11c2f0443fdaa30977918a7838da0 100644
--- a/faceverify/eigenface.py
+++ b/faceverify/eigenface.py
@@ -7,7 +7,8 @@ from matplotlib import pyplot
 # This is the base directory where by default the AT&T images are found. You can
 # overwrite this  directory on the command line
 global ATNT_IMAGE_DIRECTORY
-ATNT_IMAGE_DIRECTORY = "Database"
+ATNT_IMAGE_DIRECTORY = os.environ['ATNT_DATABASE_DIRECTORY'] if 'ATNT_DATABASE_DIRECTORY' in os.environ else "Database"
+
 
 # The default file name extension of the AT&T images
 ATNT_IMAGE_EXTENSION = ".pgm"
diff --git a/faceverify/gabor_phase.py b/faceverify/gabor_phase.py
index 6372b83abe343369c417c9fa55c34a30ffb8c782..eddbdc527e17539db340e16fb8b33fbd11a1f4dc 100644
--- a/faceverify/gabor_phase.py
+++ b/faceverify/gabor_phase.py
@@ -7,7 +7,7 @@ from matplotlib import pyplot
 # This is the base directory where by default the AT&T images are found. You can
 # overwrite this  directory on the command line
 global ATNT_IMAGE_DIRECTORY
-ATNT_IMAGE_DIRECTORY = "Database"
+ATNT_IMAGE_DIRECTORY = os.environ['ATNT_DATABASE_DIRECTORY'] if 'ATNT_DATABASE_DIRECTORY' in os.environ else "Database"
 
 # The default file name extension of the AT&T images
 ATNT_IMAGE_EXTENSION = ".pgm"
diff --git a/faceverify/tests.py b/faceverify/tests.py
new file mode 100644
index 0000000000000000000000000000000000000000..0da51efd9306e347c6535297ba50d015dab3fe27
--- /dev/null
+++ b/faceverify/tests.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# vim: set fileencoding=utf-8 :
+# Author: Manuel Günther <manuel.guenther@idiap.ch>
+# Date:   Tue Apr 16 15:56:33 CEST 2013
+#
+# Copyright (C) 2011-2012 Idiap Research Institute, Martigny, Switzerland
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, version 3 of the License.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""A few checks on the faceverify examples.
+"""
+
+import os, sys
+import unittest
+
+import bob
+import numpy
+
+import xbob.db.atnt
+
+import pkg_resources
+import faceverify
+
+
+regenerate_references = False
+
+
+class FaceVerifyExampleTest(unittest.TestCase):
+  """Performs various tests of the face verification examples using the AT&T database."""
+
+  def resource(self, f):
+    return pkg_resources.resource_filename('faceverify', '../testdata/%s'%f)
+
+  def test01_eigenface(self):
+    # test the eigenface algorithm
+    from faceverify.eigenface import load_images, train, extract_feature
+
+    # open database
+    atnt_db = xbob.db.atnt.Database()
+    # test if all training images are loaded
+    images = load_images(atnt_db, group = 'world')
+    self.assertEqual(len(images), 200)
+
+    # test that the training works (for speed reasons, we limit the number of training files)
+    pca = train(images)
+    if regenerate_references:
+      pca.save(bob.io.HDF5File(self.resource('pca_projector.hdf5'), 'w'))
+
+    # load PCA reference and check that it is still similar
+    pca_ref = bob.machine.LinearMachine(bob.io.HDF5File(self.resource('pca_projector.hdf5')))
+#TODO: enable for bob version 1.2.0
+#    self.assertTrue(pca_ref.is_similar_to(pca))
+
+    # check that the projection is the same
+    model = extract_feature(images[1], pca)
+    probe = extract_feature(images[2], pca)
+
+    if regenerate_references:
+      bob.io.save(model, self.resource('pca_model.hdf5'))
+      bob.io.save(probe, self.resource('pca_probe.hdf5'))
+
+    # load model and probe reference
+    model_ref = bob.io.load(self.resource('pca_model.hdf5'))
+    probe_ref = bob.io.load(self.resource('pca_probe.hdf5'))
+    self.assertTrue(numpy.allclose(model_ref, model))
+    self.assertTrue(numpy.allclose(probe_ref, probe))
+
+    # compute score
+    score = bob.math.euclidean_distance(model, probe)
+    self.assertAlmostEqual(score, 3498.308154114)
+
+
+  def test02_gabor_phase(self):
+    # test the gabor phase algorithm
+    from faceverify.gabor_phase import load_images, extract_feature
+
+    # open database
+    atnt_db = xbob.db.atnt.Database()
+    # test if all training images are loaded
+    images = load_images(atnt_db, group = 'world')
+    self.assertEqual(len(images), 200)
+
+    # extract features; for test purposes we will use smaller features with inter-node-distance 8
+    graph = bob.machine.GaborGraphMachine((0,0), (111,91), (8,8))
+
+    # check that the projection is the same
+    model = extract_feature(images[1], graph)
+    probe = extract_feature(images[2], graph)
+
+    if regenerate_references:
+      bob.io.save(model, self.resource('gabor_model.hdf5'))
+      bob.io.save(probe, self.resource('gabor_probe.hdf5'))
+
+    # load model and probe reference
+    model_ref = bob.io.load(self.resource('gabor_model.hdf5'))
+    probe_ref = bob.io.load(self.resource('gabor_probe.hdf5'))
+    self.assertTrue(numpy.allclose(model_ref, model))
+    self.assertTrue(numpy.allclose(probe_ref, probe))
+
+    # compute score
+    similarity_function = bob.machine.GaborJetSimilarity(bob.machine.gabor_jet_similarity_type.PHASE_DIFF)
+    score = graph.similarity(model, probe, similarity_function)
+    self.assertAlmostEqual(score, 0.110043015)
+
+  def test03_dct_ubm(self):
+    # test the UBM/GMM algorithm
+    from faceverify.dct_ubm import load_images, extract_feature, train, enroll, stats, NUMBER_OF_GAUSSIANS
+
+    # open database
+    atnt_db = xbob.db.atnt.Database()
+    # test if all training images are loaded
+    images = load_images(atnt_db, group = 'world')
+    keys = sorted(images.keys())
+    self.assertEqual(len(images), 200)
+
+    extract_feature(images[1])
+
+    # extract features for several images
+    features = {i : extract_feature(images[i]) for i in keys[:13]}
+
+    if regenerate_references:
+      bob.io.save(features[1], self.resource('dct_feature.hdf5'))
+
+    feature_ref = bob.io.load(self.resource('dct_feature.hdf5'))
+    self.assertTrue(numpy.allclose(feature_ref, features[1]))
+
+    # train the UBM with several features, and a limited number of Gaussians
+    NUMBER_OF_GAUSSIANS = 2
+    ubm = train({i : features[i] for i in keys[:10]})
+    if regenerate_references:
+      ubm.save(bob.io.HDF5File(self.resource('dct_ubm.hdf5'), 'w'))
+
+    # load UBM reference and check that it is still similar
+    ubm_ref = bob.machine.GMMMachine(bob.io.HDF5File(self.resource('dct_ubm.hdf5')))
+#TODO: enable for bob version 1.2.0
+#    self.assertTrue(ubm_ref.is_similar_to(ubm))
+
+
+    # enroll a model with two features
+    enroller = bob.trainer.MAP_GMMTrainer()
+    enroller.max_iterations = 1
+    enroller.set_prior_gmm(ubm)
+    model = enroll({i : features[i] for i in keys[10:12]}, ubm, enroller)
+    if regenerate_references:
+      model.save(bob.io.HDF5File(self.resource('dct_model.hdf5'), 'w'))
+
+    model_ref = bob.machine.GMMMachine(bob.io.HDF5File(self.resource('dct_model.hdf5')))
+#TODO: enable for bob version 1.2.0
+#    self.assertTrue(model_ref.is_similar_to(model))
+
+    # compute probe statistics
+    probe = stats(features[keys[12]], ubm)
+    if regenerate_references:
+      probe.save(bob.io.HDF5File(self.resource('dct_probe.hdf5'), 'w'))
+
+    probe_ref = bob.machine.GMMStats(bob.io.HDF5File(self.resource('dct_probe.hdf5')))
+#TODO: enable for bob version 1.2.0
+#    self.assertTrue(probe_ref.is_similar_to(probe))
+
+    # compute score
+    score = bob.machine.linear_scoring([model], ubm, [probe])[0,0]
+    self.assertAlmostEqual(score, 43049.56532399742)
+
diff --git a/setup.py b/setup.py
index 47472fc65960b1d473b1aaaa88fb6008b5560e95..8e453986f7f734383d508da6a260fa4acf016fe1 100644
--- a/setup.py
+++ b/setup.py
@@ -28,7 +28,7 @@ setup(
     # This is the basic information about your project. Modify all this
     # information before releasing code publicly.
     name='bob.example.faceverify',
-    version='0.2.4',
+    version='0.3.0',
     description='Example for using Bob to create face verification systems',
     url='http://pypi.python.org/pypi/bob.example.faceverify',
     license='GPLv3',
@@ -50,7 +50,7 @@ setup(
 
     install_requires=[
         "setuptools",
-        "bob >= 1.1.0",               # base signal proc./machine learning library
+        "bob >= 1.1.0, <= 1.1.3",      # base signal proc./machine learning library
         "xbob.db.atnt",               # the AT&T (ORL) database of images
     ],
 
diff --git a/testdata/dct_feature.hdf5 b/testdata/dct_feature.hdf5
new file mode 100644
index 0000000000000000000000000000000000000000..2e43ff74cc80ca16b7165a8df3e1f2e9149fedd8
Binary files /dev/null and b/testdata/dct_feature.hdf5 differ
diff --git a/testdata/dct_model.hdf5 b/testdata/dct_model.hdf5
new file mode 100644
index 0000000000000000000000000000000000000000..7b4cd5983b974f546d9aca8156eb3a874f542dc7
Binary files /dev/null and b/testdata/dct_model.hdf5 differ
diff --git a/testdata/dct_probe.hdf5 b/testdata/dct_probe.hdf5
new file mode 100644
index 0000000000000000000000000000000000000000..73daa574c7c3fe832a38ddce9c7d87ba60a57d96
Binary files /dev/null and b/testdata/dct_probe.hdf5 differ
diff --git a/testdata/dct_ubm.hdf5 b/testdata/dct_ubm.hdf5
new file mode 100644
index 0000000000000000000000000000000000000000..be81f2a039682f05de5471644a8409be0279a8ce
Binary files /dev/null and b/testdata/dct_ubm.hdf5 differ
diff --git a/testdata/gabor_model.hdf5 b/testdata/gabor_model.hdf5
new file mode 100644
index 0000000000000000000000000000000000000000..93b6f27722a985775b420214efbe922ab17324c1
Binary files /dev/null and b/testdata/gabor_model.hdf5 differ
diff --git a/testdata/gabor_probe.hdf5 b/testdata/gabor_probe.hdf5
new file mode 100644
index 0000000000000000000000000000000000000000..7aab9ed8c879e91ac61a6897033d24ce60bade0d
Binary files /dev/null and b/testdata/gabor_probe.hdf5 differ
diff --git a/testdata/pca_model.hdf5 b/testdata/pca_model.hdf5
new file mode 100644
index 0000000000000000000000000000000000000000..245a5b6507b79f5508b60e3f6b0c92253198a7f8
Binary files /dev/null and b/testdata/pca_model.hdf5 differ
diff --git a/testdata/pca_probe.hdf5 b/testdata/pca_probe.hdf5
new file mode 100644
index 0000000000000000000000000000000000000000..6509e8bd55c5c8c9ac9402a1074dacdcba5b200f
Binary files /dev/null and b/testdata/pca_probe.hdf5 differ
diff --git a/testdata/pca_projector.hdf5 b/testdata/pca_projector.hdf5
new file mode 100644
index 0000000000000000000000000000000000000000..e4dfd6dd78cff450f05c0f09a80600c52cb25adc
Binary files /dev/null and b/testdata/pca_projector.hdf5 differ