diff --git a/bob/bio/face/config/baseline/facenet_sanderberg.py b/bob/bio/face/config/baseline/facenet_sanderberg.py
index f59ff74e4618f8f3b9b1ff4fc1b346917f2b3f91..20b8c9ce80ef4db4a000db064c4ddd5ca562388e 100644
--- a/bob/bio/face/config/baseline/facenet_sanderberg.py
+++ b/bob/bio/face/config/baseline/facenet_sanderberg.py
@@ -1,5 +1,7 @@
 from bob.bio.face.embeddings import FaceNetSanderberg
 from bob.bio.face.config.baseline.helpers import embedding_transformer_160x160
+from bob.bio.base.pipelines.vanilla_biometrics import Distance, VanillaBiometricsPipeline
+
 
 
 if "database" in locals():
@@ -10,4 +12,11 @@ else:
     fixed_positions = None
 
 
-transformer = embedding_transformer_160x160(FaceNetSanderberg(), annotation_type, fixed_positions)
\ No newline at end of file
+transformer = embedding_transformer_160x160(FaceNetSanderberg(), annotation_type, fixed_positions)
+
+algorithm = Distance()
+
+pipeline = VanillaBiometricsPipeline(
+    transformer,
+    algorithm
+)
diff --git a/bob/bio/face/test/test_baselines.py b/bob/bio/face/test/test_baselines.py
new file mode 100644
index 0000000000000000000000000000000000000000..49c561137b12faf6254d28a68e0a9452cd0a9243
--- /dev/null
+++ b/bob/bio/face/test/test_baselines.py
@@ -0,0 +1,83 @@
+import numpy as np
+from bob.pipelines import Sample, SampleSet
+from bob.bio.base import load_resource
+
+
+def get_fake_sample_set():
+    # One SampleSet holding a single random "face" image with eye
+    # annotations; usable both as a reference and as a probe.
+    data = np.random.rand(3, 400, 400)
+    annotations = {"leye": (115, 267), "reye": (115, 132)}
+    return [
+        SampleSet(
+            [Sample(data, key="1", annotations=annotations)],
+            key="1",
+            subject="1",
+            references=["1"],
+        )
+    ]
+
+
+def test_facenet_baseline():
+    # Run one fake probe against one fake reference through the full
+    # facenet_sanderberg pipeline (no training samples are needed).
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("facenet_sanderberg", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
+
+
+def test_inception_resnetv2_msceleb():
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("inception_resnetv2_msceleb", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
+
+
+def test_inception_resnetv2_casiawebface():
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("inception_resnetv2_casiawebface", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
+
+
+def test_inception_resnetv1_msceleb():
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("inception_resnetv1_msceleb", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
+
+
+def test_inception_resnetv1_casiawebface():
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("inception_resnetv1_casiawebface", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
+
+
+def test_arcface_insight_tf():
+    import tensorflow as tf
+
+    tf.compat.v1.reset_default_graph()
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("arcface_insight_tf", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
diff --git a/bob/bio/face/test/test_baseline.py b/bob/bio/face/test/test_transformers.py
similarity index 98%
rename from bob/bio/face/test/test_baseline.py
rename to bob/bio/face/test/test_transformers.py
index f6ea8f58f6a603adf4345739b5c98696b7b1f527..ab73788435171a0991dcc04596601cdcf9f25542 100644
--- a/bob/bio/face/test/test_baseline.py
+++ b/bob/bio/face/test/test_transformers.py
@@ -11,7 +11,7 @@ def get_fake_sample(face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46,
     return Sample(data, key="1", annotations=annotations)
 
 
-def test_facenet_baseline():
+def test_facenet_baseline():
     transformer = load_resource("facenet_sanderberg", "baseline")
 
     fake_sample = get_fake_sample()
diff --git a/develop.cfg b/develop.cfg
index 0100329d87f0e982f75cd2f4fa5a502f0b972f09..5fafd36ce01e6fa8420cf5be0261782f921c45e9 100644
--- a/develop.cfg
+++ b/develop.cfg
@@ -6,14 +6,6 @@ parts = scripts
 
 develop = src/bob.pipelines
           src/bob.bio.base
-          src/bob.bio.face_ongoing
-          src/bob.bio.gmm
-          src/bob.ip.tensorflow_extractor
-          src/bob.db.ijbc
-          src/bob.extension
-          src/bob.ip.gabor
-          src/bob.learn.linear
-          src/bob.learn.em
           .
           
 
@@ -21,15 +13,7 @@ develop = src/bob.pipelines
 eggs = bob.bio.face
        bob.pipelines
        bob.bio.base
-       bob.bio.face_ongoing
-       bob.bio.gmm
-       bob.ip.gabor
-       bob.ip.tensorflow_extractor
-       bob.db.ijbc
-       bob.extension
-       bob.learn.linear
-       bob.learn.em
-
+
 
 extensions = bob.buildout
              mr.developer
@@ -42,14 +26,6 @@ auto-checkout = *
 [sources]
 bob.pipelines = git git@gitlab.idiap.ch:bob/bob.pipelines
 bob.bio.base = git git@gitlab.idiap.ch:bob/bob.bio.base
-bob.bio.face_ongoing = git git@gitlab.idiap.ch:bob/bob.bio.face_ongoing
-bob.ip.tensorflow_extractor =  git git@gitlab.idiap.ch:bob/bob.ip.tensorflow_extractor
-bob.db.ijbc = git git@gitlab.idiap.ch:bob/bob.db.ijbc
-bob.bio.gmm = git git@gitlab.idiap.ch:bob/bob.bio.gmm
-bob.extension = git git@gitlab.idiap.ch:bob/bob.extension
-bob.learn.linear = git git@gitlab.idiap.ch:bob/bob.learn.linear
-bob.learn.em = git git@gitlab.idiap.ch:bob/bob.learn.em
-bob.ip.gabor = git git@gitlab.idiap.ch:bob/bob.ip.gabor
 
 
 [scripts]
diff --git a/setup.py b/setup.py
index d6c10dca9483d6d24be55be04ae886e11f165dc0..2f920af704fbf94ec50463bba58e015ad6fe4132 100644
--- a/setup.py
+++ b/setup.py
@@ -141,8 +141,7 @@ setup(
             'mtcnn                    = bob.bio.face.config.annotator.mtcnn:annotator',
         ],
 
-        #baselines
-        'bob.bio.baseline':[
+        'bob.bio.transformer':[
           'facenet_sanderberg = bob.bio.face.config.baseline.facenet_sanderberg:transformer',
           'inception_resnetv1_casiawebface = bob.bio.face.config.baseline.inception_resnetv1_casiawebface:transformer',
           'inception_resnetv2_casiawebface = bob.bio.face.config.baseline.inception_resnetv2_casiawebface:transformer',
@@ -151,6 +150,16 @@ setup(
          'arcface_insight_tf = bob.bio.face.config.baseline.arcface_insight_tf:transformer',
        ],
 
+        #baselines
+        'bob.bio.baseline':[
+          'facenet_sanderberg = bob.bio.face.config.baseline.facenet_sanderberg:pipeline',
+          'inception_resnetv1_casiawebface = bob.bio.face.config.baseline.inception_resnetv1_casiawebface:pipeline',
+          'inception_resnetv2_casiawebface = bob.bio.face.config.baseline.inception_resnetv2_casiawebface:pipeline',
+          'inception_resnetv1_msceleb = bob.bio.face.config.baseline.inception_resnetv1_msceleb:pipeline',
+          'inception_resnetv2_msceleb = bob.bio.face.config.baseline.inception_resnetv2_msceleb:pipeline',
+          'arcface_insight_tf = bob.bio.face.config.baseline.arcface_insight_tf:pipeline',
+        ],
+
     },
 
     # Classifiers are important if you plan to distribute this package through