diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..48f1a837d26e9ac9aa35020f48ef1be37a2a7195
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,254 @@
+# This build file is organized in two parts: 1) a generic set of instructions you
+# probably **don't** need to change and 2) a part you may have to tune to your
+# project. It makes heavy use of YAML templates (anchors and merge keys), so
+# that you only need to change a minimal part of it and code duplication is
+# avoided, while still providing a nice pipeline display for your package.
+
+
+# 1) Generic instructions (only change if you know what you're doing)
+# -------------------------------------------------------------------
+
+# Definition of our build pipeline
+stages:
+  - build
+  - test
+  - docs
+  - wheels
+
+
+# Global variables
+variables:
+  CONDA_PREFIX: env
+
+
+# Template for the build stage
+# Needs to run on all supported architectures, platforms and python versions
+.build_template: &build_job
+  stage: build
+  before_script:
+    - git clean -ffdx
+    - curl --silent https://gitlab.idiap.ch/bob/bob/snippets/7/raw | tr -d '\r' > bootstrap-conda.sh
+    - chmod 755 ./bootstrap-conda.sh
+    - ./bootstrap-conda.sh ${CONDA_FOLDER} ${PYTHON_VER} ${CONDA_PREFIX}
+  variables: &build_variables
+    BOB_DOCUMENTATION_SERVER: "http://www.idiap.ch/software/bob/docs/latest/bob/%s/master/"
+  script:
+    - ./bin/buildout
+    - ./bin/sphinx-build doc sphinx
+    - ./bin/python setup.py bdist_wheel --python-tag ${WHEEL_TAG}
+  after_script:
+    - rm -rf ${CONDA_PREFIX}
+  artifacts:
+    expire_in: 1 day
+    paths:
+      - bootstrap-conda.sh
+      - dist/
+      - sphinx/
+
+
+# Template for building on a Linux machine
+.build_linux_template: &linux_build_job
+  <<: *build_job
+  variables: &linux_build_variables
+    <<: *build_variables
+    CONDA_FOLDER: "/local/conda"
+    CFLAGS: "-D_GLIBCXX_USE_CXX11_ABI=0 -coverage"
+    CXXFLAGS: "-D_GLIBCXX_USE_CXX11_ABI=0 -coverage"
+
+
+# Template for building on a Mac OSX machine
+.build_mac_template: &macosx_build_job
+  <<: *build_job
+  variables: &macosx_build_variables
+    <<: *build_variables
+    CONDA_FOLDER: "/opt/conda"
+    MACOSX_DEPLOYMENT_TARGET: "10.9"
+    CFLAGS: "-pthread -coverage"
+    CXXFLAGS: "-pthread -coverage"
+    LDFLAGS: "-lpthread"
+
+
+# Template for the test stage - re-install from uploaded wheels
+# Needs to run on all supported architectures, platforms and python versions
+.test_template: &test_job
+  stage: test
+  before_script:
+    - ./bootstrap-conda.sh ${CONDA_FOLDER} ${PYTHON_VER} ${CONDA_PREFIX}
+    - source ${CONDA_FOLDER}/bin/activate ${CONDA_PREFIX}
+    - pip install --use-wheel --no-index --pre dist/*.whl
+  script:
+    - cd ${CONDA_PREFIX}
+    - python -c "from ${CI_PROJECT_NAME} import get_config; print(get_config())"
+    - if [ -x ./bin/bob_dbmanage.py ]; then ./bin/bob_dbmanage.py all download --force; fi
+    - coverage run --source=${CI_PROJECT_NAME} ./bin/nosetests -sv ${CI_PROJECT_NAME}
+    - coverage report
+    - sphinx-build -b doctest ../doc ../sphinx
+  after_script:
+    - rm -rf ${CONDA_PREFIX}
+
+
+# Template for the wheel uploading stage
+# For a pure-Python package, this needs to run against only one combination of
+# Python 2.x and 3.x; otherwise, it needs to run for both Python versions on
+# all supported architectures (Linux and Mac OS X 64-bit)
+.wheels_template: &wheels_job
+  stage: wheels
+  only:
+    - master
+    - tags
+  before_script:
+    - curl --silent https://gitlab.idiap.ch/bob/bob/snippets/8/raw | tr -d '\r' > upload-wheel.sh
+    - chmod 755 upload-wheel.sh
+  script:
+    - ./upload-wheel.sh
+
+
+# Template for (latest) documentation upload stage
+# Only one real job needs to do this
+.docs_template: &docs_job
+  stage: docs
+  only:
+    - master
+  before_script:
+    - curl --silent https://gitlab.idiap.ch/bob/bob/snippets/9/raw | tr -d '\r' > upload-sphinx.sh
+    - chmod 755 upload-sphinx.sh
+  script:
+    - ./upload-sphinx.sh
+
+
+# 2) Package specific instructions (you may tune this if needed)
+# --------------------------------------------------------------
+
+# Linux + Python 2.7: Builds, tests, uploads wheel
+build_linux_27:
+  <<: *linux_build_job
+  variables: &linux_27_build_variables
+    <<: *linux_build_variables
+    PYTHON_VER: "2.7"
+    WHEEL_TAG: "py27"
+  tags:
+    - conda-linux
+
+test_linux_27:
+  <<: *test_job
+  variables: *linux_27_build_variables
+  dependencies:
+    - build_linux_27
+  tags:
+    - conda-linux
+
+wheels_linux_27:
+  <<: *wheels_job
+  dependencies:
+    - build_linux_27
+  tags:
+    - conda-linux
+
+
+# Linux + Python 3.4: Builds and tests
+build_linux_34:
+  <<: *linux_build_job
+  variables: &linux_34_build_variables
+    <<: *linux_build_variables
+    PYTHON_VER: "3.4"
+    WHEEL_TAG: "py3"
+  tags:
+    - conda-linux
+
+test_linux_34:
+  <<: *test_job
+  variables: *linux_34_build_variables
+  dependencies:
+    - build_linux_34
+  tags:
+    - conda-linux
+
+
+# Linux + Python 3.5: Builds, tests, uploads wheel
+build_linux_35:
+  <<: *linux_build_job
+  variables: &linux_35_build_variables
+    <<: *linux_build_variables
+    PYTHON_VER: "3.5"
+    WHEEL_TAG: "py3"
+  tags:
+    - conda-linux
+
+test_linux_35:
+  <<: *test_job
+  variables: *linux_35_build_variables
+  dependencies:
+    - build_linux_35
+  tags:
+    - conda-linux
+
+wheels_linux_35:
+  <<: *wheels_job
+  dependencies:
+    - build_linux_35
+  tags:
+    - conda-linux
+
+docs_linux_35:
+  <<: *docs_job
+  dependencies:
+    - build_linux_35
+  tags:
+    - conda-linux
+
+
+# Mac OSX + Python 2.7: Builds and tests
+build_macosx_27:
+  <<: *macosx_build_job
+  variables: &macosx_27_build_variables
+    <<: *macosx_build_variables
+    PYTHON_VER: "2.7"
+    WHEEL_TAG: "py27"
+  tags:
+    - conda-macosx
+
+test_macosx_27:
+  <<: *test_job
+  variables: *macosx_27_build_variables
+  dependencies:
+    - build_macosx_27
+  tags:
+    - conda-macosx
+
+
+# Mac OSX + Python 3.4: Builds and tests
+build_macosx_34:
+  <<: *macosx_build_job
+  variables: &macosx_34_build_variables
+    <<: *macosx_build_variables
+    PYTHON_VER: "3.4"
+    WHEEL_TAG: "py3"
+  tags:
+    - conda-macosx
+
+test_macosx_34:
+  <<: *test_job
+  variables: *macosx_34_build_variables
+  dependencies:
+    - build_macosx_34
+  tags:
+    - conda-macosx
+
+
+# Mac OSX + Python 3.5: Builds and tests
+build_macosx_35:
+  <<: *macosx_build_job
+  variables: &macosx_35_build_variables
+    <<: *macosx_build_variables
+    PYTHON_VER: "3.5"
+    WHEEL_TAG: "py3"
+  tags:
+    - conda-macosx
+
+test_macosx_35:
+  <<: *test_job
+  variables: *macosx_35_build_variables
+  dependencies:
+    - build_macosx_35
+  tags:
+    - conda-macosx
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
index a42a0bd07f4d833d4fb3d4dfa93166eab4fcb046..67e0aede86c9bb13d7bf1f3ae572c87c7bbf222d 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,5 @@
-include README.rst bootstrap-buildout.py buildout.cfg COPYING version.txt requirements.txt
+include README.rst bootstrap-buildout.py buildout.cfg develop.cfg version.txt requirements.txt
 recursive-include doc *.py *.rst
-recursive-include bob/bio/base/test/data *-dev*
+recursive-include bob *.txt *.hdf5
+recursive-include bob *.sql3
+recursive-include bob/pad/base/test/data scores-* *.sql3
diff --git a/bob/pad/base/__init__.py b/bob/pad/base/__init__.py
index e48cb433bc32cf30bcc7db97ee74186171b8ab32..722c959bb7670ba1fe57e8e79270378841143816 100644
--- a/bob/pad/base/__init__.py
+++ b/bob/pad/base/__init__.py
@@ -1,6 +1,6 @@
+from . import database
 from . import algorithm
 from . import tools
-#from . import grid # only one file, not complete directory
 
 from . import script
 from . import test
diff --git a/bob/pad/base/database/DatabaseBobSpoof.py b/bob/pad/base/database/DatabaseBobSpoof.py
deleted file mode 100644
index 1df540601d7e83dfa61a6bd7c730a201a3f87a60..0000000000000000000000000000000000000000
--- a/bob/pad/base/database/DatabaseBobSpoof.py
+++ /dev/null
@@ -1,217 +0,0 @@
-#!/usr/bin/env python
-# vim: set fileencoding=utf-8 :
-# @author: Pavel Korshunov <pavel.korshunov@idiap.ch>
-# @date: Wed 19 Aug 13:43:21 2015
-#
-# Copyright (C) 2011-2012 Idiap Research Institute, Martigny, Switzerland
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, version 3 of the License.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-from bob.bio.base.database.Database import Database
-import os
-
-import antispoofing.utils.db
-
-
-class DatabaseBobSpoof(Database):
-    """This class can be used whenever you have a database that follows the Bob
-    antispoofing database interface, which is defined in :py:class:`antispoofing.utils.db.Database`
-
-    **Parameters:**
-
-    database : derivative of :py:class:`antispoofing.utils.db.Database`
-      The database instance that provides the actual interface, see :ref:`antispoofing_databases` for a list.
-
-    all_files_options : dict
-      Dictionary of options passed to the :py:meth:`antispoofing.utils.db.Database.objects` database query when retrieving all data.
-
-    check_original_files_for_existence : bool
-      Enables to test for the original data files when querying the database.
-
-    kwargs : ``key=value`` pairs
-      The arguments of the :py:class:`Database` base class constructor.
-
-      .. note:: Usually, the ``name``, ``protocol`` keyword parameters of the base class constructor need to be specified.
-    """
-
-    def __init__(
-            self,
-            database,  # The bob database that is used
-            all_files_options={},  # additional options for the database query that can be used to extract all files
-            original_directory=None,  # the directory where the data files are located
-            check_original_files_for_existence=False,
-            **kwargs  # The default parameters of the base class
-    ):
-
-        Database.__init__(
-            self,
-            **kwargs
-        )
-
-        assert isinstance(database, antispoofing.utils.db.Database), \
-            "Only databases derived from antispoofing.utils.db.Database are supported by this interface. " \
-            "Please implement your own bob.bio.base.database.Database interface for anti-spoofing experiments."
-
-        self.database = database
-        if original_directory is None:
-            self.original_directory = database.original_directory
-        else:
-            self.original_directory = original_directory
-
-        self.all_files_options = all_files_options
-        self.check_existence = check_original_files_for_existence
-
-        self._kwargs = kwargs
-
-    def set_protocol(self, protocol):
-        """
-        Sets the protocol for the database. The protocol can be specified via command line to spoof.py
-        script with option -P
-        :param protocol: name of the protocol
-        :return: None
-        """
-        self.protocol = protocol
-        self.database.set_kwargs({'protocol': protocol})
-
-    def __str__(self):
-        """__str__() -> info
-
-        This function returns all parameters of this class (and its derived class).
-
-        **Returns:**
-
-        info : str
-          A string containing the full information of all parameters of this (and the derived) class.
-        """
-        params = ", ".join(["%s=%s" % (key, value) for key, value in self._kwargs.items()])
-        params += ", original_directory=%s" % (self.original_directory)
-        if self.all_files_options: params += ", all_files_options=%s" % self.all_files_options
-
-        return "%s(%s)" % (str(self.__class__), params)
-
-
-    def replace_directories(self, replacements=None):
-        """This helper function replaces the ``original_directory`` of the database with
-        the directory read from the given replacement file.
-
-        This function is provided for convenience, so that the database
-        configuration files do not need to be modified.
-        Instead, this function uses the given dictionary of replacements to change the original directory.
-
-        The given ``replacements`` can be of type ``dict``, including all replacements,
-        or a file name (as a ``str``), in which case the file is read.
-        The structure of the file should be:
-
-        .. code-block:: text
-
-           # Comments starting with # and empty lines are ignored
-
-           original/path/to/data = /path/to/your/data
-
-        **Parameters:**
-
-        replacements : dict or str
-          A dictionary with replacements, or a name of a file to read the dictionary from.
-          If the file name does not exist, no directories are replaced.
-        """
-        if replacements is None:
-            return
-        if isinstance(replacements, str):
-            if not os.path.exists(replacements):
-                return
-            # Open the database replacement file and reads its content
-            with open(replacements) as f:
-                replacements = {}
-                for line in f:
-                    if line.strip() and not line.startswith("#"):
-                        splits = line.split("=")
-                        assert len(splits) == 2
-                        replacements[splits[0].strip()] = splits[1].strip()
-
-        assert isinstance(replacements, dict)
-
-        if self.original_directory in replacements:
-            self.original_directory = replacements[self.original_directory]
-            self.database.original_directory = self.original_directory
-
-
-    def all_files(self, groups=('train', 'dev', 'eval')):
-        """all_files(groups=('train', 'dev', 'eval')) -> files
-
-        Returns all files of the database, respecting the current protocol.
-
-        **Parameters:**
-
-        groups : some of ``('train', 'dev', 'eval')`` or ``None``
-          The groups to get the data for.
-          If ``None``, data for all groups is returned.
-
-        **Returns:**
-
-        files : [:py:class:`antispoofing.utils.db.File`]
-          The sorted and unique list of all files of the database.
-        """
-        realset = []
-        attackset = []
-        if 'train' in groups:
-            real, attack = self.database.get_train_data()
-            realset += real
-            attackset += attack
-        if 'dev' in groups:
-            real, attack = self.database.get_devel_data()
-            realset += real
-            attackset += attack
-        if 'eval' in groups:
-            real, attack = self.database.get_test_data()
-            realset += real
-            attackset += attack
-        return [realset, attackset]
-
-    def training_files(self, step=None, arrange_by_client=False):
-        """training_files(step = None, arrange_by_client = False) -> files
-
-        Returns all training File objects
-        This function needs to be implemented in derived class implementations.
-
-        **Parameters:**
-            The parameters are not applicable in this version of anti-spoofing experiments
-
-        **Returns:**
-
-        files : [:py:class:`File`] or [[:py:class:`File`]]
-          The (arranged) list of files used for the training.
-        """
-        return self.database.get_train_data()
-
-    def original_file_names(self, files):
-        """original_file_names(files) -> paths
-
-        Returns the full paths of the real and attack data of the given File objects.
-
-        **Parameters:**
-
-        files : [[:py:class:`antispoofing.utils.db.File`], [:py:class:`antispoofing.utils.db.File`]]
-          The list of lists ([real, attack]]) of file object to retrieve the original data file names for.
-
-        **Returns:**
-
-        paths : [str]
-          The paths extracted for the concatenated real+attack files, in the preserved order.
-        """
-        realfiles = files[0]
-        attackfiles = files[1]
-        realpaths = [file.make_path(directory=self.original_directory, extension=self.original_extension) for file in
-                     realfiles]
-        attackpaths = [file.make_path(directory=self.original_directory, extension=self.original_extension) for file in
-                       attackfiles]
-        return realpaths + attackpaths
diff --git a/bob/pad/base/database/__init__.py b/bob/pad/base/database/__init__.py
index dba54d5a1e47e1ce772d0146c930989a6a024376..f437a24a04cf3c1e5dbc656a09335194f38be002 100644
--- a/bob/pad/base/database/__init__.py
+++ b/bob/pad/base/database/__init__.py
@@ -1,7 +1,5 @@
-#from .utils import File, FileSet
-
-# from bob.bio.base.database.Database import Database
-from .DatabaseBobSpoof import DatabaseBobSpoof
+from .database import PadDatabase
+from .file import PadFile
 
 # gets sphinx autodoc done right - don't remove it
 __all__ = [_ for _ in dir() if not _.startswith('_')]
diff --git a/bob/pad/base/database/database.py b/bob/pad/base/database/database.py
new file mode 100644
index 0000000000000000000000000000000000000000..f316f3b5369e17eb174b3b9ae56cb312bea04e56
--- /dev/null
+++ b/bob/pad/base/database/database.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python
+# vim: set fileencoding=utf-8 :
+# @author: Manuel Guenther <Manuel.Guenther@idiap.ch>
+# @author: Pavel Korshunov <pavel.korshunov@idiap.ch>
+# @date:   Tue May 17 12:09:22 CET 2016
+#
+
+import abc
+import bob.bio.base.database
+
+
+class PadDatabase(bob.bio.base.database.BioDatabase):
+    def __init__(
+            self,
+            name,
+            all_files_options={},  # additional options for the database query that can be used to extract all files
+            check_original_files_for_existence=False,
+            original_directory=None,
+            original_extension=None,
+            protocol='Default',
+            **kwargs  # The rest of the default parameters of the base class
+    ):
+        """This class represents the basic API for database access.
+        Please use this class as a base class for your database access classes.
+        Do not forget to call the constructor of this base class in your derived class.
+
+        **Parameters:**
+
+        name : str
+          A unique name for the database.
+
+        all_files_options : dict
+          Dictionary of options passed to the second-level database query when retrieving all data.
+
+        check_original_files_for_existence : bool
+          If enabled, the existence of the original data files is checked when querying the database.
+
+        original_directory : str
+          The directory where the original data of the database are stored.
+
+        original_extension : str
+          The file name extension of the original data.
+
+        protocol : str or ``None``
+          The name of the protocol that defines the default experimental setup for this database.
+
+        kwargs : ``key=value`` pairs
+          The arguments of the :py:class:`bob.bio.base.database.BioDatabase` base class constructor.
+
+        """
+        super(PadDatabase, self).__init__(name=name, all_files_options=all_files_options, check_original_files_for_existence=check_original_files_for_existence, original_directory=original_directory, original_extension=original_extension, protocol=protocol, **kwargs)
+
+    def original_file_names(self, files):
+        """original_file_names(files) -> paths
+
+        Returns the full paths of the real and attack data of the given PadFile objects.
+
+        **Parameters:**
+
+        files : [[:py:class:`bob.pad.base.database.PadFile`], [:py:class:`bob.pad.base.database.PadFile`]]
+          The list of lists ([real, attack]) of file objects for which to retrieve the original data file names.
+
+        **Returns:**
+
+        paths : [str]
+          The paths extracted for the concatenated real+attack files, in the preserved order.
+        """
+        assert self.original_directory is not None
+        assert self.original_extension is not None
+        realfiles = files[0]
+        attackfiles = files[1]
+        realpaths = [file.make_path(directory=self.original_directory, extension=self.original_extension) for file in
+                     realfiles]
+        attackpaths = [file.make_path(directory=self.original_directory, extension=self.original_extension) for file in
+                       attackfiles]
+        return realpaths + attackpaths
+
+    def model_ids_with_protocol(self, groups=None, protocol=None, **kwargs):
+        """model_ids_with_protocol(groups = None, protocol = None, **kwargs) -> ids
+
+            Client-based PAD is not implemented.
+        """
+        return []
+
+    def annotations(self, file):
+        """
+        Annotations are not supported by the PAD interface.
+
+        """
+        return None
+
+    @abc.abstractmethod
+    def objects(self, groups=None, protocol=None, purposes=None, model_ids=None, **kwargs):
+        """This function returns lists of File objects, which fulfill the given restrictions.
+
+        Keyword parameters:
+
+        groups : str or [str]
+          The groups of which the clients should be returned.
+          Usually, groups are one or more elements of ('train', 'dev', 'eval')
+
+        protocol
+          The protocol for which the clients should be retrieved.
+          The protocol is dependent on your database.
+          If you do not have protocols defined, just ignore this field.
+
+        purposes : str or [str]
+          The purposes for which File objects should be retrieved.
+          Usually it is either 'real' or 'attack'.
+
+        model_ids : [various type]
+          This parameter is not supported in PAD databases yet.
+        """
+        raise NotImplementedError("This function must be implemented in your derived class.")
+
+    #################################################################
+    ######### Methods to provide common functionality ###############
+    #################################################################
+
+    def all_files(self, groups=('train', 'dev', 'eval')):
+        """all_files(groups=('train', 'dev', 'eval')) -> files
+
+        Returns all files of the database, respecting the current protocol.
+        The files can be limited using the ``all_files_options`` in the constructor.
+
+        **Parameters:**
+
+        groups : some of ``('train', 'dev', 'eval')`` or ``None``
+          The groups to get the data for.
+
+        **Returns:**
+
+        files : [[:py:class:`PadFile`], [:py:class:`PadFile`]]
+          Two sorted and unique lists of files: the real samples first, the attack samples second ([real, attack]).
+        """
+        realset = self.sort(self.objects(protocol=self.protocol, groups=groups, purposes='real', **self.all_files_options))
+        attackset = self.sort(self.objects(protocol=self.protocol, groups=groups, purposes='attack', **self.all_files_options))
+        return [realset, attackset]
+
+    def training_files(self, step=None, arrange_by_client=False):
+        """training_files(step = None, arrange_by_client = False) -> files
+
+        Returns all training File objects, respecting the current protocol.
+        The real and attack samples are returned as two separate lists ([real, attack]).
+
+        **Parameters:**
+            The parameters are kept for API compatibility and are not used in this version of anti-spoofing experiments.
+
+        **Returns:**
+
+        files : [:py:class:`File`] or [[:py:class:`File`]]
+          The (arranged) list of files used for the training.
+        """
+
+        return self.all_files(groups=('train',))
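The ``objects()`` method above is the only abstract piece a concrete database has to provide; ``all_files``, ``training_files`` and ``original_file_names`` then come for free. A minimal, purely illustrative sketch of such a derived class (all names, paths and protocols below are hypothetical and not part of this change):

    from bob.pad.base.database import PadDatabase, PadFile


    class MyPadDatabase(PadDatabase):
        """Toy file-list database with one real and one attack sample per group."""

        def __init__(self, original_directory='/path/to/the/original/data'):
            super(MyPadDatabase, self).__init__(
                name='my-pad-db',
                original_directory=original_directory,
                original_extension='.avi',
                protocol='Default')

        def objects(self, groups=None, protocol=None, purposes=None, model_ids=None, **kwargs):
            # 'groups' and 'purposes' may arrive as a single string or as a tuple
            groups = ('train', 'dev', 'eval') if groups is None else groups
            if isinstance(groups, str):
                groups = (groups,)
            purposes = ('real', 'attack') if purposes is None else purposes
            if isinstance(purposes, str):
                purposes = (purposes,)
            files = []
            for index, group in enumerate(groups):
                if 'real' in purposes:
                    files.append(PadFile(client_id=1, path='%s_real' % group,
                                         file_id=10 * index + 1))
                if 'attack' in purposes:
                    files.append(PadFile(client_id=1, path='%s_attack' % group,
                                         attack_type='print', file_id=10 * index + 2))
            return files

    # the common functionality of the base class then works out of the box, e.g.:
    # real_files, attack_files = MyPadDatabase().all_files(groups=('train',))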
diff --git a/bob/pad/base/database/file.py b/bob/pad/base/database/file.py
new file mode 100644
index 0000000000000000000000000000000000000000..585f8040448c6e9a4b19ff57ce50e2b03632d601
--- /dev/null
+++ b/bob/pad/base/database/file.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+# vim: set fileencoding=utf-8 :
+# @author: Pavel Korshunov <pavel.korshunov@idiap.ch>
+# @date:   Wed May 18 10:09:22 CET 2016
+#
+
+import bob.bio.base.database
+
+
+class PadFile(bob.bio.base.database.BioFile):
+    """A simple base class that defines basic properties of File object for the use in PAD experiments"""
+
+    def __init__(self, client_id, path, attack_type=None, file_id=None):
+        """**Constructor Documentation**
+
+        Initialize the File object with the minimum required data.
+
+        Parameters:
+
+        attack_type : str or ``None``
+          In case of spoofed data, this parameter should indicate the kind of spoofing attack.
+          The default value ``None`` means that the PadFile is a genuine (real) sample.
+
+        For ``client_id``, ``path`` and ``file_id``, please refer to the :py:class:`bob.bio.base.database.BioFile` constructor documentation.
+        """
+        super(PadFile, self).__init__(client_id, path, file_id)
+
+        if attack_type is not None:
+            assert isinstance(attack_type, str)
+
+        # just copy the information
+        self.attack_type = attack_type
+        """The attack type of the sample, None if it is a genuine sample."""
diff --git a/bob/pad/base/test/dummy/__init__.py b/bob/pad/base/test/dummy/__init__.py
index fc3eb09b2ff61ff843e299a42a11c9c48b20cef8..046239865f2bf13aee36887ffb56a295abc2f6c6 100644
--- a/bob/pad/base/test/dummy/__init__.py
+++ b/bob/pad/base/test/dummy/__init__.py
@@ -1,4 +1,5 @@
 from . import database
+from . import database_sql
 from . import preprocessor
 from . import extractor
 from . import algorithm
diff --git a/bob/pad/base/test/dummy/database.py b/bob/pad/base/test/dummy/database.py
index 7294d245758effbfb4bc26ad4adbe9dbca363363..a6e056ea8e5a2d8bdf511c508eb6b8978f925510 100644
--- a/bob/pad/base/test/dummy/database.py
+++ b/bob/pad/base/test/dummy/database.py
@@ -21,8 +21,8 @@ import os
 import sys
 import six
 
-from bob.pad.db import PadFile
-from bob.pad.db import PadDatabase
+from bob.pad.base.database import PadFile
+from bob.pad.base.database import PadDatabase
 
 import bob.io.base
 from bob.db.base.driver import Interface as BaseInterface
@@ -35,6 +35,11 @@ dummy_train_list = ['train_real', 'train_attack']
 dummy_devel_list = ['dev_real', 'dev_attack']
 dummy_test_list = ['eval_real', 'eval_attack']
 
+dummy_data = {'train_real': 1.0, 'train_attack': 2.0,
+              'dev_real': 3.0, 'dev_attack': 4.0,
+              'eval_real': 5.0, 'eval_attack': 6.0}
+
+
 class TestFile(PadFile):
     def __init__(self, path, id):
         attack_type = None
@@ -42,6 +47,27 @@ class TestFile(PadFile):
             attack_type = "attack"
         PadFile.__init__(self, client_id=1, path=path, file_id=id, attack_type=attack_type)
 
+    def load(self, directory=None, extension='.hdf5'):
+        """Loads the data from the specified location, using the given extension.
+        Override it if you need to load the data differently.
+
+        Keyword Parameters:
+
+        directory
+          [optional] If not empty or None, this directory is prefixed to the final
+          file destination
+
+        extension
+          [optional] The extension of the filename - this will control the type of
+          input and the codec used for reading the data blob.
+
+        **Returns:**
+          The dummy data value registered for this file in ``dummy_data``.
+
+        """
+        # get the path
+        path = self.make_path(directory or '', extension or '')
+        return dummy_data[os.path.basename(path)]
 
 def dumplist(args):
     """Dumps lists of files based on your criteria"""
diff --git a/bob/pad/base/test/dummy/database_sql.py b/bob/pad/base/test/dummy/database_sql.py
new file mode 100644
index 0000000000000000000000000000000000000000..969e73313e347c3b19f39aaa5c17ef36b942c532
--- /dev/null
+++ b/bob/pad/base/test/dummy/database_sql.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+# vim: set fileencoding=utf-8 :
+# @author: Manuel Guenther <Manuel.Guenther@idiap.ch>
+# @author: Pavel Korshunov <pavel.korshunov@idiap.ch>
+# @date:   Tue May 17 12:09:22 CET 2016
+#
+
+import os
+import bob.io.base
+import bob.io.base.test_utils
+import bob.pad.base.database
+import bob.db.base
+
+from sqlalchemy import Column, Integer, String
+from sqlalchemy.ext.declarative import declarative_base
+
+regenerate_database = False
+
+dbfile = bob.io.base.test_utils.datafile("test_db.sql3", "bob.pad.base.test")
+
+Base = declarative_base()
+
+
+class TestFileSql (Base, bob.pad.base.database.PadFile):
+    __tablename__ = "file"
+    id = Column(Integer, primary_key=True)
+    client_id = Column(Integer, unique=True)
+    path = Column(String(100), unique=True)
+
+    def __init__(self):
+        bob.pad.base.database.PadFile.__init__(self, client_id=5, path="test/path")
+
+
+def create_database():
+    if os.path.exists(dbfile):
+        os.remove(dbfile)
+    import bob.db.base.utils
+    engine = bob.db.base.utils.create_engine_try_nolock('sqlite', dbfile, echo=True)
+    Base.metadata.create_all(engine)
+    session = bob.db.base.utils.session('sqlite', dbfile, echo=True)
+    session.add(TestFileSql())
+    session.commit()
+    session.close()
+    del session
+    del engine
+
+
+class TestDatabaseSql (bob.pad.base.database.PadDatabase, bob.db.base.SQLiteDatabase):
+    def __init__(self):
+        bob.pad.base.database.PadDatabase.__init__(self, 'pad_test', original_directory="original/directory", original_extension=".orig")
+        bob.db.base.SQLiteDatabase.__init__(self, dbfile, TestFileSql)
+
+    def groups(self, protocol=None):
+        return ['group']
+
+    def objects(self, groups=None, protocol=None, purposes=None, model_ids=None, **kwargs):
+        return list(self.query(TestFileSql))
+
+database = TestDatabaseSql()
\ No newline at end of file
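The module-level ``database = TestDatabaseSql()`` instance is what the new ``dummysql`` entry point in ``setup.py`` (further below) points to. A hypothetical ``setup.py`` fragment registering your own database as a ``bob.pad.database`` resource in the same way (package and module names are placeholders):

    from setuptools import setup, find_packages

    setup(
        name='bob.pad.mydb',  # hypothetical satellite package
        version='1.0.0',
        packages=find_packages(),
        entry_points={
            'bob.pad.database': [
                # '<resource-name> = <module>:<module-level database instance>'
                'mydb = bob.pad.mydb.config:database',
            ],
        },
    )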
diff --git a/bob/pad/base/test/dummy/preprocessor.py b/bob/pad/base/test/dummy/preprocessor.py
index d6d76f5692aa201c2803a37009648c39e1e5ec6e..e17e72b16c8a8172ae63c0afe9dc1511b75faec1 100644
--- a/bob/pad/base/test/dummy/preprocessor.py
+++ b/bob/pad/base/test/dummy/preprocessor.py
@@ -33,8 +33,5 @@ class DummyPreprocessor(Preprocessor):
         """Does nothing, simply converts the data type of the data, ignoring any annotation."""
         return data
 
-    def read_original_data(self, original_file_name):
-        return dummy_data[os.path.basename(original_file_name)]
-
 
 preprocessor = DummyPreprocessor()
diff --git a/bob/pad/base/test/test.py b/bob/pad/base/test/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5c1702d799c04bb748c909fd28b395a4eb5f6bb
--- /dev/null
+++ b/bob/pad/base/test/test.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+# vim: set fileencoding=utf-8 :
+# @author: Manuel Guenther <Manuel.Guenther@idiap.ch>
+# @author: Pavel Korshunov <pavel.korshunov@idiap.ch>
+# @date:   Tue May 17 12:09:22 CET 2016
+#
+
+import os
+import shutil
+import bob.io.base
+import bob.io.base.test_utils
+import bob.bio.base.database
+import bob.pad.base.database
+import bob.db.base
+
+import tempfile
+from sqlalchemy import Column, Integer, String
+from sqlalchemy.ext.declarative import declarative_base
+
+regenerate_database = False
+
+dbfile = bob.io.base.test_utils.datafile("test_db.sql3", "bob.pad.base.test")
+
+Base = declarative_base()
+
+
+class TestFile (Base, bob.pad.base.database.PadFile):
+    __tablename__ = "file"
+    id = Column(Integer, primary_key=True)
+    client_id = Column(Integer, unique=True)
+    path = Column(String(100), unique=True)
+
+    def __init__(self):
+        bob.pad.base.database.PadFile.__init__(self, client_id=5, path="test/path")
+
+
+def create_database():
+    if os.path.exists(dbfile):
+        os.remove(dbfile)
+    import bob.db.base.utils
+    engine = bob.db.base.utils.create_engine_try_nolock('sqlite', dbfile, echo=True)
+    Base.metadata.create_all(engine)
+    session = bob.db.base.utils.session('sqlite', dbfile, echo=True)
+    session.add(TestFile())
+    session.commit()
+    session.close()
+    del session
+    del engine
+
+
+class TestDatabase (bob.pad.base.database.PadDatabase, bob.db.base.SQLiteDatabase):
+    def __init__(self):
+        bob.pad.base.database.PadDatabase.__init__(self, 'pad_test', original_directory="original/directory", original_extension=".orig")
+        bob.db.base.SQLiteDatabase.__init__(self, dbfile, TestFile)
+
+    def groups(self, protocol=None):
+        return ['group']
+
+    def objects(self, groups=None, protocol=None, purposes=None, model_ids=None, **kwargs):
+        return list(self.query(TestFile))
+
+
+# def test01_database():
+#     # check that the database API works
+#     if regenerate_database:
+#         create_database()
+#
+#     db = TestDatabase()
+#
+#     def check_file(fs, l=1):
+#         assert len(fs) == l
+#         if l == 1:
+#             f = fs[0]
+#         else:
+#             f = fs[0][0]
+#         assert isinstance(f, TestFile)
+#         assert f.id == 1
+#         assert f.client_id == 5
+#         assert f.path == "test/path"
+#
+#     check_file(db.objects())
+#     check_file(db.all_files(), 2)
+#     check_file(db.training_files(), 2)
+#     check_file(db.files([1]))
+#     check_file(db.reverse(["test/path"]))
+#
+#     file = db.objects()[0]
+#     assert db.original_file_name(file) == "original/directory/test/path.orig"
+#     assert db.file_names([file], "another/directory", ".other")[0] == "another/directory/test/path.other"
+#     assert db.paths([1], "another/directory", ".other")[0] == "another/directory/test/path.other"
+#
+#     # try file save
+#     temp_dir = tempfile.mkdtemp(prefix="bob_db_test_")
+#     data = [1., 2., 3.]
+#     file.save(data, temp_dir)
+#     assert os.path.exists(file.make_path(temp_dir, ".hdf5"))
+#     read_data = bob.io.base.load(file.make_path(temp_dir, ".hdf5"))
+#     for i in range(3):
+#         assert data[i] == read_data[i]
+#     shutil.rmtree(temp_dir)
diff --git a/bob/pad/base/test/test_databases.py b/bob/pad/base/test/test_databases.py
index 1b59b55c4f1837803673a0ca1b1b4151c9cc0f6f..d3191c2ca9d7d54de075ac0219ff530891e8564f 100644
--- a/bob/pad/base/test/test_databases.py
+++ b/bob/pad/base/test/test_databases.py
@@ -21,11 +21,58 @@
 import os
 import unittest
 import bob.pad.base
+from bob.pad.base.test.dummy.database_sql import create_database
 
 import pkg_resources
 
+import bob.io.base
+import tempfile
+import shutil
 dummy_dir = pkg_resources.resource_filename('bob.pad.base', 'test/dummy')
 
+regenerate_database = False
+
+class DummyDatabaseSqlTest(unittest.TestCase):
+
+    def test01_database(self):
+        # check that the database API works
+        if regenerate_database:
+            create_database()
+
+        db = bob.pad.base.test.dummy.database_sql.TestDatabaseSql()
+
+        def check_file(fs, l=1):
+            assert len(fs) == l
+            if l == 1:
+                f = fs[0]
+            else:
+                f = fs[0][0]
+            assert isinstance(f, bob.pad.base.test.dummy.database_sql.TestFileSql)
+            assert f.id == 1
+            assert f.client_id == 5
+            assert f.path == "test/path"
+
+        check_file(db.objects())
+        check_file(db.all_files(), 2)
+        check_file(db.training_files(), 2)
+        check_file(db.files([1]))
+        check_file(db.reverse(["test/path"]))
+
+        file = db.objects()[0]
+        assert db.original_file_name(file) == "original/directory/test/path.orig"
+        assert db.file_names([file], "another/directory", ".other")[0] == "another/directory/test/path.other"
+        assert db.paths([1], "another/directory", ".other")[0] == "another/directory/test/path.other"
+
+        # try file save
+        temp_dir = tempfile.mkdtemp(prefix="bob_db_test_")
+        data = [1., 2., 3.]
+        file.save(data, temp_dir)
+        assert os.path.exists(file.make_path(temp_dir, ".hdf5"))
+        read_data = bob.io.base.load(file.make_path(temp_dir, ".hdf5"))
+        for i in range(3):
+            assert data[i] == read_data[i]
+        shutil.rmtree(temp_dir)
+
 
 class DummyDatabaseTest(unittest.TestCase):
     """Performs various tests on the AVspoof attack database."""
diff --git a/bob/pad/base/tools/FileSelector.py b/bob/pad/base/tools/FileSelector.py
index e5cf76b1815de6cad06b4e29cc533066e8f7b720..ce058d2e87574520e4b275187be1ce67e4a2ba89 100644
--- a/bob/pad/base/tools/FileSelector.py
+++ b/bob/pad/base/tools/FileSelector.py
@@ -107,9 +107,18 @@ class FileSelector:
 
     # List of files that will be used for all files
     def original_data_list(self, groups=None):
-        """Returns the tuple of lists of original (real, attack) data that can be used for preprocessing."""
+        """Returns the the joint list of original (real and attack) file names."""
         return self.database.original_file_names(self.database.all_files(groups=groups))
 
+    def original_data_list_files(self, groups=None):
+        """Returns the joint list of original (real and attack) data files that can be used for preprocessing."""
+        files = self.database.all_files(groups=groups)
+        if len(files) != 2:
+            fileset = files
+        else:
+            fileset = files[0]+files[1]
+        return fileset, self.database.original_directory, self.database.original_extension
+
     def preprocessed_data_list(self, groups=None):
         """Returns the tuple of lists (real, attacks) of preprocessed data files."""
         return self.get_paths(self.database.all_files(groups=groups), "preprocessed")
diff --git a/bob/pad/base/tools/command_line.py b/bob/pad/base/tools/command_line.py
index be370f2dbd77070fbe74f1f045131b1ee081c326..7028da9bdc0d9520d95931776c6e800776b378c6 100644
--- a/bob/pad/base/tools/command_line.py
+++ b/bob/pad/base/tools/command_line.py
@@ -26,6 +26,8 @@ import bob.core
 
 logger = bob.core.log.setup("bob.pad.base")
 
+from bob.pad.base.database import PadDatabase
+
 from bob.bio.base import utils
 from . import FileSelector
 from .. import database
@@ -105,7 +107,7 @@ def command_line_parser(description=__doc__, exclude_resources_from=[]):
     # directories differ between idiap and extern
     temp = "/idiap/temp/%s/database-name/sub-directory" % os.environ["USER"] if is_idiap() else "temp"
     results = "/idiap/user/%s/database-name/sub-directory" % os.environ["USER"] if is_idiap() else "results"
-    database_replacement = "%s/.bob_spoof_databases.txt" % os.environ["HOME"]
+    database_replacement = "%s/.bob_bio_databases.txt" % os.environ["HOME"]
 
     dir_group = parser.add_argument_group('\nDirectories that can be changed according to your requirements')
     dir_group.add_argument('-T', '--temp-directory', metavar='DIR',
@@ -264,7 +266,7 @@ def initialize(parsers, command_line_parameters=None, skips=[]):
     projector_sub_dir = extractor_sub_dir
 
     # Database directories, which should be automatically replaced
-    if isinstance(args.database, database.DatabaseBobSpoof):
+    if isinstance(args.database, PadDatabase):
         args.database.replace_directories(args.database_directories_file)
 
     # initialize the file selector
diff --git a/bob/pad/base/tools/preprocessor.py b/bob/pad/base/tools/preprocessor.py
index d3a192788d4e8e5247ecc40251747bfb07ca424f..0db64a3bef411f6a7b119065c2704394a6cb074f 100644
--- a/bob/pad/base/tools/preprocessor.py
+++ b/bob/pad/base/tools/preprocessor.py
@@ -55,7 +55,7 @@ def preprocess(preprocessor, groups=None, indices=None, force=False):
     fs = FileSelector.instance()
 
     # get the file lists
-    data_files = fs.original_data_list(groups=groups)
+    data_files, original_directory, original_extension = fs.original_data_list_files(groups=groups)
     preprocessed_data_files = fs.preprocessed_data_list(groups=groups)
 
     # select a subset of keys to iterate
@@ -71,18 +71,23 @@ def preprocess(preprocessor, groups=None, indices=None, force=False):
     # iterate over the selected files
     for i in index_range:
         preprocessed_data_file = str(preprocessed_data_files[i])
+        file_object = data_files[i]
+        file_name = file_object.make_path(original_directory, original_extension)
 
         # check for existence
         if not utils.check_file(preprocessed_data_file, force, 1000):
-            file_name = data_files[i]
-            data = preprocessor.read_original_data(file_name)
+            logger.info("... Processing original data file '%s'", file_name)
+            data = preprocessor.read_original_data(file_object, original_directory, original_extension)
+            # create the output directory before preprocessing (sometimes required when relative directories, e.g. containing '..', are specified)
+            bob.io.base.create_directories_safe(os.path.dirname(preprocessed_data_file))
 
             # call the preprocessor
-            logger.info("- Preprocessor: processing file: %s", file_name)
             preprocessed_data = preprocessor(data, None)
+            if preprocessed_data is None:
+                logger.error("Preprocessing of file '%s' was not successful", file_name)
+                continue
 
             # write the data
-            bob.io.base.create_directories_safe(os.path.dirname(preprocessed_data_file))
             preprocessor.write_data(preprocessed_data, preprocessed_data_file)
 
 
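The loop above now hands the File object together with the original directory and extension to ``preprocessor.read_original_data``. A minimal sketch of a preprocessor relying on that signature (assuming the ``Preprocessor`` base class from ``bob.bio.base``; the reading logic is illustrative only):

    import bob.io.base
    from bob.bio.base.preprocessor import Preprocessor


    class MyPreprocessor(Preprocessor):
        """Hypothetical preprocessor that loads the raw data itself."""

        def __init__(self):
            super(MyPreprocessor, self).__init__()

        def read_original_data(self, biofile, directory, extension):
            # resolve the full file name from the File object, as the tools above do
            return bob.io.base.load(biofile.make_path(directory, extension))

        def __call__(self, data, annotations=None):
            # no-op "preprocessing", just to make the example complete
            return data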
diff --git a/bob/pad/base/tools/scoring.py b/bob/pad/base/tools/scoring.py
index 40f3171e23b8e84930ac699120f95f5dbb8fbfad..f9c89335d5f542ba7d97197b116c6bd2389421f3 100644
--- a/bob/pad/base/tools/scoring.py
+++ b/bob/pad/base/tools/scoring.py
@@ -112,14 +112,11 @@ def _save_scores(score_file, scores, toscore_objects, write_compressed=False):
     for i, toscore_object in enumerate(toscore_objects):
         id_str = (str(toscore_object.client_id)).zfill(3)
         sample_name = str(toscore_object.make_path())
-        print("i=%d, scores=%s" % (i, str(scores)))
         for score in scores[i]:
             if not toscore_object.attack_type or toscore_object.attack_type=="None":
-                print("- Scoring: %s, id: %s, real" %(sample_name, id_str))
                 _write(f, "%s %s %s %.12f\n" % (id_str, id_str, sample_name, score), write_compressed)
             else:
                 attackname = toscore_object.attack_type
-                print("- Scoring: %s, id: %s, attack: %s" %(sample_name, id_str, attackname))
                 _write(f, "%s %s %s %.12f\n" % (id_str, attackname, sample_name, score), write_compressed)
 
     _close_written(score_file, f, write_compressed)
diff --git a/buildout.cfg b/buildout.cfg
index 7ff85b3bfa0eb37326e401dccad06dfd6bb9d810..0b85449b386e39996ad91d9187f57616f0090702 100644
--- a/buildout.cfg
+++ b/buildout.cfg
@@ -1,31 +1,13 @@
 ; vim: set fileencoding=utf-8 :
-; Pavel Korshunov <Pavel.Korshunov@idiap.ch>
-; Wed 19 Aug 13:43:22 2015
+; Tue 16 Aug 15:00:20 CEST 2016
 
 [buildout]
 parts = scripts
+develop = .
 eggs = bob.pad.base
-       gridtk
-
 extensions = bob.buildout
-             mr.developer
-auto-checkout = *
-develop = src/bob.db.base
-          src/bob.bio.base
-          src/bob.bio.db
-          src/bob.pad.db
-          .
-         
-; options for bob.buildout
-debug = true
-verbose = true
 newest = false
-
-[sources]
-bob.db.base = git branch=refactoring_2016 git@github.com:bioidiap/bob.db.base.git
-bob.bio.base = git https://github.com/bioidiap/bob.bio.base
-bob.bio.db = git git@gitlab.idiap.ch:biometric/bob.bio.db.git
-bob.pad.db = git git@gitlab.idiap.ch:biometric/bob.pad.db.git
+verbose = true
 
 [scripts]
 recipe = bob.buildout:scripts
diff --git a/develop.cfg b/develop.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..0f2a6980a3bc37190fd134365eeb9460dcb8e3b0
--- /dev/null
+++ b/develop.cfg
@@ -0,0 +1,28 @@
+; vim: set fileencoding=utf-8 :
+; Pavel Korshunov <Pavel.Korshunov@idiap.ch>
+; Wed 19 Aug 13:43:22 2015
+
+[buildout]
+parts = scripts
+eggs = bob.pad.base
+       gridtk
+
+extensions = bob.buildout
+             mr.developer
+auto-checkout = *
+develop = src/bob.db.base
+          src/bob.bio.base
+          .
+
+; options for bob.buildout
+debug = true
+verbose = true
+newest = false
+
+[sources]
+bob.db.base = git git@gitlab.idiap.ch:bob/bob.db.base.git
+bob.bio.base = git git@gitlab.idiap.ch:bob/bob.bio.base.git
+
+[scripts]
+recipe = bob.buildout:scripts
+dependent-scripts = true
diff --git a/doc/installation.rst b/doc/installation.rst
index cd612ea9107e741fa39f490e5def0523ec930ced..a08824fe42cd43126bf14be1985e45c569f55042 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -53,7 +53,7 @@ For a list of supported databases including their download URLs, please refer to
 
 After downloading the original data for the databases, you will need to tell ``bob.pad``, where these databases can be found.
 For this purpose, we have decided to implement a special file, where you can set your directories.
-By default, this file is located in ``~/.bob_spoof_databases.txt``, and it contains several lines, each line looking somewhat like:
+Similar to ``bob.bio.base``, this file is by default located in ``~/.bob_bio_databases.txt`` and contains several lines, each looking somewhat like:
 
 .. code-block:: text
 
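With the change above, the replacement mechanism works as in ``bob.bio.base``: ``tools/command_line.py`` calls ``replace_directories`` on every ``PadDatabase`` instance. A sketch of a database configuration file that relies on it (the database class, the placeholder directory and the file contents below are hypothetical):

    import os
    from bob.pad.mydb import MyPadDatabase  # hypothetical database package

    database = MyPadDatabase(original_directory='[YOUR_MYDB_DIRECTORY]')

    # if ~/.bob_bio_databases.txt contains the line
    #   [YOUR_MYDB_DIRECTORY] = /path/to/your/data
    # the placeholder above is substituted at run time:
    database.replace_directories(os.path.expanduser('~/.bob_bio_databases.txt'))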
diff --git a/requirements.txt b/requirements.txt
index 2d0335afb726bd7aa94752ac37930fc9cb4cf930..b1790d77325582c5aa5cdf3f5f0510efa2308755 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,4 @@
 setuptools
 bob.extension
-bob.io.base
 bob.db.base
 bob.bio.base
diff --git a/setup.py b/setup.py
index 9926ea349c3cc78e554a48fa293d303f16ce7c1b..9747ba932a26fdcee271ba94d14f36e1f0a95725 100644
--- a/setup.py
+++ b/setup.py
@@ -113,6 +113,7 @@ setup(
 
         'bob.pad.database': [
             'dummy             = bob.pad.base.test.dummy.database:database',  # for test purposes only
+            'dummysql          = bob.pad.base.test.dummy.database_sql:database',  # for test purposes only
         ],
 
         'bob.pad.preprocessor': [
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..900ea9cbbf58d5b5a8a3a8791705cebde6eb40db
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,3 @@
+bob.io.base
+matplotlib
+gridtk