diff --git a/bob/bio/base/config/examples/lda_atnt_legacy.py b/bob/bio/base/config/examples/lda_atnt_legacy.py
index 8903a324d5d3d9fb38b7112425eec2150c6c9803..2df28fd7ece87a847ca889024212e652aea99e20 100644
--- a/bob/bio/base/config/examples/lda_atnt_legacy.py
+++ b/bob/bio/base/config/examples/lda_atnt_legacy.py
@@ -2,20 +2,17 @@ from bob.bio.face.database import AtntBioDatabase
 from bob.bio.base.algorithm import LDA
 from bob.bio.face.preprocessor import FaceCrop
 from sklearn.pipeline import make_pipeline
-from bob.bio.base.mixins.legacy import (
-    LegacyPreprocessor,
-    LegacyAlgorithmAsTransformer,
-)
 from bob.pipelines.transformers import CheckpointSampleLinearize
-from bob.bio.base.pipelines.vanilla_biometrics.legacy import LegacyDatabaseConnector
+from bob.bio.base.pipelines.vanilla_biometrics.legacy import DatabaseConnector, Preprocessor, AlgorithmAsTransformer
 import functools
-from bob.bio.base.pipelines.vanilla_biometrics.biometric_algorithm import (
+from bob.bio.base.pipelines.vanilla_biometrics.implemented import (
+    Distance,
     CheckpointDistance,
 )
 
 # DATABASE
 
-database = LegacyDatabaseConnector(
+database = DatabaseConnector(
     AtntBioDatabase(original_directory="./atnt", protocol="Default"),
 )
 
@@ -49,20 +46,20 @@ lda = functools.partial(LDA, use_pinv=True, pca_subspace_dimension=0.90)
 
 
 transformer = make_pipeline(
-    LegacyPreprocessor(callable=face_cropper, features_dir="./example/transformer0"),
+    Preprocessor(callable=face_cropper, features_dir="./example/transformer0"),
     CheckpointSampleLinearize(features_dir="./example/transformer1"),
-    LegacyAlgorithmAsTransformer(
-        callable=lda, features_dir="./example/transformer2", model_path="./example/"
+    AlgorithmAsTransformer(
+        callable=lda, features_dir="./example/transformer2", model_path="./example/lda_projector.hdf5"
     ),
 )
 
-
 algorithm = CheckpointDistance(features_dir="./example/")
+# algorithm = Distance()  # alternative that skips score checkpointing
 
 
 # comment out the code below to disable dask
 from bob.pipelines.mixins import estimator_dask_it, mix_me_up
-from bob.bio.base.pipelines.vanilla_biometrics.biometric_algorithm import (
+from bob.bio.base.pipelines.vanilla_biometrics.mixins import (
     BioAlgDaskMixin,
 )
 
diff --git a/bob/bio/base/config/examples/pca_atnt.py b/bob/bio/base/config/examples/pca_atnt.py
index afffe2df476045a6509b2443269751b66fae0df9..e14a8e86b0b6fedd45f66b5f2031406880f4bd8f 100644
--- a/bob/bio/base/config/examples/pca_atnt.py
+++ b/bob/bio/base/config/examples/pca_atnt.py
@@ -5,10 +5,12 @@ from bob.bio.base.pipelines.vanilla_biometrics.implemented import (
     CheckpointDistance,
 )
 from bob.bio.face.database import AtntBioDatabase
+import os
 
 
 database = DatabaseConnector(
-    AtntBioDatabase(original_directory="./atnt"), protocol="Default"
+    AtntBioDatabase(original_directory=os.environ.get("ATNT_DATABASE_DIRECTORY"), protocol="Default")
 )
 transformer = make_pipeline(
     CheckpointSampleLinearize(features_dir="./example/extractor0"),
@@ -18,11 +19,11 @@ transformer = make_pipeline(
 )
 algorithm = CheckpointDistance(features_dir="./example/")
 
-# comment out the code below to disable dask
-from bob.pipelines.mixins import estimator_dask_it, mix_me_up
-from bob.bio.base.pipelines.vanilla_biometrics.mixins import (
-    BioAlgDaskMixin,
-)
+# uncomment the code below to enable dask
+# from bob.pipelines.mixins import estimator_dask_it, mix_me_up
+# from bob.bio.base.pipelines.vanilla_biometrics.mixins import (
+#     BioAlgDaskMixin,
+# )
 
-transformer = estimator_dask_it(transformer)
-algorithm = mix_me_up(BioAlgDaskMixin, algorithm)
+# transformer = estimator_dask_it(transformer)
+# algorithm = mix_me_up(BioAlgDaskMixin, algorithm)
diff --git a/bob/bio/base/config/examples/pca_mobio-male.py b/bob/bio/base/config/examples/pca_mobio-male.py
index 61393f38dd0ac2c93b8e579c097a76ad44028f57..a581b591900941ff240accd6cc5fd10826247b8f 100644
--- a/bob/bio/base/config/examples/pca_mobio-male.py
+++ b/bob/bio/base/config/examples/pca_mobio-male.py
@@ -1,9 +1,9 @@
-from bob.bio.base.pipelines.vanilla_biometrics.biometric_algorithm import (
+from bob.bio.base.pipelines.vanilla_biometrics.implemented import (
     CheckpointDistance,
 )
 from bob.bio.base.pipelines.vanilla_biometrics.legacy import (
-    LegacyDatabaseConnector,
-    LegacyPreprocessor,
+    DatabaseConnector,
+    Preprocessor,
 )
 from bob.bio.face.database.mobio import MobioBioDatabase
 from bob.bio.face.preprocessor import FaceCrop
@@ -13,7 +13,7 @@ from sklearn.pipeline import make_pipeline
 import functools
 
 
-database = LegacyDatabaseConnector(
+database = DatabaseConnector(
     MobioBioDatabase(
         original_directory=rc["bob.db.mobio.directory"],
         annotation_directory=rc["bob.db.mobio.annotation_directory"],
@@ -36,17 +36,17 @@ preprocessor = functools.partial(
 )
 
 transformer = make_pipeline(
-    LegacyPreprocessor(preprocessor),
-    CheckpointSampleLinearize(features_dir="./example/extractor0"),
+    Preprocessor(preprocessor, features_dir="./example/extractor0"),
+    CheckpointSampleLinearize(features_dir="./example/extractor1"),
     CheckpointSamplePCA(
-        features_dir="./example/extractor1", model_path="./example/pca.pkl"
+        features_dir="./example/extractor2", model_path="./example/pca.pkl"
     ),
 )
 algorithm = CheckpointDistance(features_dir="./example/")
 
 # comment out the code below to disable dask
 from bob.pipelines.mixins import estimator_dask_it, mix_me_up
-from bob.bio.base.pipelines.vanilla_biometrics.biometric_algorithm import (
+from bob.bio.base.pipelines.vanilla_biometrics.mixins import (
     BioAlgDaskMixin,
 )
 
diff --git a/bob/bio/base/pipelines/vanilla_biometrics/abstract_classes.py b/bob/bio/base/pipelines/vanilla_biometrics/abstract_classes.py
index 74e2f17550546765aea1cf15d8b17f6ea2a3317f..f598e7cd030a8fd5158aa07a715de775bdd289b7 100644
--- a/bob/bio/base/pipelines/vanilla_biometrics/abstract_classes.py
+++ b/bob/bio/base/pipelines/vanilla_biometrics/abstract_classes.py
@@ -2,6 +2,7 @@ from abc import ABCMeta, abstractmethod
 from bob.pipelines.sample import Sample, SampleSet, DelayedSample
 import functools
 
+
 class BioAlgorithm(metaclass=ABCMeta):
     """Describes a base biometric comparator for the Vanilla Biometrics Pipeline :ref:`_bob.bio.base.struct_bio_rec_sys`_.
 
@@ -102,7 +103,10 @@ class BioAlgorithm(metaclass=ABCMeta):
             for ref in [
                 r for r in biometric_references if r.key in sampleset.references
             ]:
-                subprobe_scores.append(Sample(self.score(ref.data, s), parent=ref))
+                score = self.score(ref.data, s)
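+                # store the formatted four-column score line as the sample data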
+                data = make_score_line(ref.subject, sampleset.subject, sampleset.path, score)
+                subprobe_scores.append(Sample(data, parent=ref))
 
             # Creating one sampleset per probe
             subprobe = SampleSet(subprobe_scores, parent=sampleset)
@@ -192,6 +195,18 @@ class Database(metaclass=ABCMeta):
         pass
 
 
+def make_score_line(
+    biometric_reference_subject, probe_subject, probe_path, score,
+):
+    """Formats one score as a line in the four-column score file format."""
+    return "{0} {1} {2} {3}\n".format(
+        biometric_reference_subject,
+        probe_subject,
+        probe_path,
+        score,
+    )
+
+
 def save_scores_four_columns(path, probe):
     """
     Write scores in the four columns format
@@ -199,7 +214,7 @@ def save_scores_four_columns(path, probe):
 
     with open(path, "w") as f:
         for biometric_reference in probe.samples:
-            line = "{0} {1} {2} {3}\n".format(
+            line = make_score_line(
                 biometric_reference.subject,
                 probe.subject,
                 probe.path,
@@ -207,4 +222,9 @@
             )
             f.write(line)
 
-    return  DelayedSample(functools.partial(open, path), parent=probe)
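+    # lazily re-read the written score file as plain text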
+    def load():
+        with open(path) as f:
+            return f.read()
+
+    return DelayedSample(load, parent=probe)
diff --git a/bob/bio/base/pipelines/vanilla_biometrics/legacy.py b/bob/bio/base/pipelines/vanilla_biometrics/legacy.py
index fcf80a026c99be79e61b79b216ac76207cca0f84..fb093a723a80fecfe2c116f3282ddbbb0eab5984 100644
--- a/bob/bio/base/pipelines/vanilla_biometrics/legacy.py
+++ b/bob/bio/base/pipelines/vanilla_biometrics/legacy.py
@@ -13,7 +13,7 @@ from bob.io.base import HDF5File
 from bob.pipelines.mixins import SampleMixin, CheckpointMixin
 from bob.pipelines.sample import DelayedSample, SampleSet, Sample
 from bob.pipelines.utils import is_picklable
-from sklearn.base import TransformerMixin
+from sklearn.base import TransformerMixin, BaseEstimator
 import logging
 
 logger = logging.getLogger("bob.bio.base")
@@ -24,6 +24,8 @@ def _biofile_to_delayed_sample(biofile, database):
         load=functools.partial(
             biofile.load, database.original_directory, database.original_extension,
         ),
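+        # the legacy client_id becomes "subject", which ends up in the score lines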
+        subject=str(biofile.client_id),
         key=biofile.path,
         path=biofile.path,
         annotations=database.annotations(biofile),
@@ -50,7 +51,7 @@ class DatabaseConnector(Database):
 
     """
 
-    def __init__(self, database, **kwargs):        
+    def __init__(self, database, **kwargs):
         self.database = database
 
     def background_model_samples(self):
@@ -175,7 +176,7 @@ class _NonPickableWrapper:
         return super().__getstate__()
 
 
-class _Preprocessor(_NonPickableWrapper, TransformerMixin):
+class _Preprocessor(_NonPickableWrapper, TransformerMixin, BaseEstimator):
     def transform(self, X, annotations):
         return [self.instance(data, annot) for data, annot in zip(X, annotations)]
 
@@ -212,7 +213,7 @@ def _split_X_by_y(X, y):
     return training_data
 
 
-class _Extractor(_NonPickableWrapper, TransformerMixin):
+class _Extractor(_NonPickableWrapper, TransformerMixin, BaseEstimator):
     def transform(self, X, metadata=None):
         if self.requires_metadata:
             return [self.instance(data, metadata=m) for data, m in zip(X, metadata)]
@@ -267,7 +268,7 @@ class Extractor(CheckpointMixin, SampleMixin, _Extractor):
         return self
 
 
-class _AlgorithmTransformer(_NonPickableWrapper, TransformerMixin):
+class _AlgorithmTransformer(_NonPickableWrapper, TransformerMixin, BaseEstimator):
     def transform(self, X):
         return [self.instance.project(feature) for feature in X]
 
@@ -279,7 +280,7 @@ class _AlgorithmTransformer(_NonPickableWrapper, TransformerMixin):
         if self.instance.split_training_features_by_client:
             training_data = _split_X_by_y(X, y)
 
-        self.instance.train_projector(self, training_data, self.model_path)
+        self.instance.train_projector(training_data, self.model_path)
         return self
 
     def _more_tags(self):
diff --git a/bob/bio/base/script/vanilla_biometrics.py b/bob/bio/base/script/vanilla_biometrics.py
index abe07934b0b91eea228110e9e8d3ccdfe3c73cda..2b3e25674a06dcb4f9a8bf507fbe948eb6cb4256 100644
--- a/bob/bio/base/script/vanilla_biometrics.py
+++ b/bob/bio/base/script/vanilla_biometrics.py
@@ -189,12 +189,8 @@ def vanilla_biometrics(
             result = itertools.chain(*result)
             for probe in result:
                 for sample in probe.samples:
-                    if isinstance(sample, Sample):
-                        f.write("{0} {1} {2} {3}\n".format(sample.key, probe.key, probe.path, sample.data))
-                    elif isinstance(sample, DelayedSample):
-                        f.writelines(sample.load().readlines())
-                    else:
-                        raise TypeError("The output of the pipeline is not writeble")
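+                    # sample.data is already a formatted score line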
+                    f.write(sample.data)
 
     if dask_client is not None:
         dask_client.shutdown()