diff --git a/bob/bio/face/config/baseline/arcface_insightface.py b/bob/bio/face/config/baseline/arcface_insightface.py
index 06f071c592a841b6b2ca66ad1997dc86cda518be..3aae89491d77909db49a10d3c8ef010d66e37f4a 100644
--- a/bob/bio/face/config/baseline/arcface_insightface.py
+++ b/bob/bio/face/config/baseline/arcface_insightface.py
@@ -15,11 +15,15 @@ if "database" in locals():
 else:
     annotation_type = None
     fixed_positions = None
+    memory_demanding = False
 
 
 def load(annotation_type, fixed_positions=None):
     transformer = embedding_transformer_112x112(
-        ArcFaceInsightFace(memory_demanding=memory_demanding), annotation_type, fixed_positions, color_channel="rgb"
+        ArcFaceInsightFace(memory_demanding=memory_demanding),
+        annotation_type,
+        fixed_positions,
+        color_channel="rgb",
     )
 
     algorithm = Distance()
diff --git a/bob/bio/face/test/test_baselines.py b/bob/bio/face/test/test_baselines.py
index 2de487ade1ec3753ec208d87f9fbb6c8fa98a9e1..bbe3c4c9afeaa468fd789496e05f6ad88c81885a 100644
--- a/bob/bio/face/test/test_baselines.py
+++ b/bob/bio/face/test/test_baselines.py
@@ -59,7 +59,7 @@ def get_fake_samples_for_training():
     ]
 
 
-def run_baseline(baseline, samples_for_training=[]):
+def run_baseline(baseline, samples_for_training=[], target_scores=None):
     biometric_references = get_fake_sample_set(purpose="bioref")
     probes = get_fake_sample_set(purpose="probe")
 
@@ -78,6 +78,11 @@ def run_baseline(baseline, samples_for_training=[]):
         checkpoint_scores = checkpoint_pipeline([], biometric_references, probes)
         assert len(checkpoint_scores) == 1
         assert len(checkpoint_scores[0]) == 1
+
+        if target_scores is not None:
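+            # compare the obtained score against the precomputed reference value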
+            assert np.allclose(target_scores, scores[0][0].data, atol=1e-2, rtol=1e-2)
+
         assert np.isclose(scores[0][0].data, checkpoint_scores[0][0].data)
 
         dirs = os.listdir(d)
@@ -109,41 +114,41 @@ def run_baseline(baseline, samples_for_training=[]):
 @pytest.mark.slow
 @is_library_available("tensorflow")
 def test_facenet_baseline():
-    run_baseline("facenet-sanderberg")
+    run_baseline("facenet-sanderberg", target_scores=[-0.9220775737526933])
 
 
 @pytest.mark.slow
 @is_library_available("tensorflow")
 def test_inception_resnetv2_msceleb():
-    run_baseline("inception-resnetv2-msceleb")
+    run_baseline("inception-resnetv2-msceleb", target_scores=[-0.43447269718504244])
 
 
 @pytest.mark.slow
 @is_library_available("tensorflow")
 def test_inception_resnetv2_casiawebface():
-    run_baseline("inception-resnetv2-casiawebface")
+    run_baseline("inception-resnetv2-casiawebface", target_scores=[-0.634583944368043])
 
 
 @pytest.mark.slow
 @is_library_available("tensorflow")
 def test_inception_resnetv1_msceleb():
-    run_baseline("inception-resnetv1-msceleb")
+    run_baseline("inception-resnetv1-msceleb", target_scores=[-0.44497649298306907])
 
 
 @pytest.mark.slow
 @is_library_available("tensorflow")
 def test_inception_resnetv1_casiawebface():
-    run_baseline("inception-resnetv1-casiawebface")
+    run_baseline("inception-resnetv1-casiawebface", target_scores=[-0.6411599976437636])
 
 
 @pytest.mark.slow
 @is_library_available("mxnet")
 def test_arcface_insightface():
-    run_baseline("arcface-insightface")
+    run_baseline("arcface-insightface", target_scores=[-0.0005965275677296544])
 
 
 def test_gabor_graph():
-    run_baseline("gabor_graph")
+    run_baseline("gabor_graph", target_scores=[0.4385451147418939])
 
 
 # def test_lda():
diff --git a/bob/bio/face/test/test_embeddings.py b/bob/bio/face/test/test_embeddings.py
deleted file mode 100644
index 9432e3e06a283acb5ec7df54362040d8625db4b0..0000000000000000000000000000000000000000
--- a/bob/bio/face/test/test_embeddings.py
+++ /dev/null
@@ -1,188 +0,0 @@
-import bob.bio.face
-import bob.io.base
-import numpy as np
-from bob.pipelines import Sample, wrap
-import pkg_resources
-from bob.bio.base.test.utils import is_library_available
-import pytest
-
-
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_idiap_inceptionv2_msceleb():
-    from bob.bio.face.embeddings.tf2_inception_resnet import (
-        InceptionResnetv2_MsCeleb_CenterLoss_2018,
-    )
-
-    reference = bob.io.base.load(
-        pkg_resources.resource_filename(
-            "bob.bio.face.test", "data/inception_resnet_v2_msceleb_rgb.hdf5"
-        )
-    )
-    np.random.seed(10)
-    transformer = InceptionResnetv2_MsCeleb_CenterLoss_2018()
-    data = (np.random.rand(3, 160, 160) * 255).astype("uint8")
-    output = transformer.transform([data])[0]
-    assert output.size == 128, output.shape
-
-    # Sample Batch
-    sample = Sample(data)
-    transformer_sample = wrap(["sample"], transformer)
-    output = [s.data for s in transformer_sample.transform([sample])][0]
-
-    np.testing.assert_allclose(output, reference.flatten(), rtol=1e-5, atol=1e-4)
-    assert output.size == 128, output.shape
-
-
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_idiap_inceptionv2_msceleb_memory_demanding():
-    from bob.bio.face.embeddings.tf2_inception_resnet import (
-        InceptionResnetv2_MsCeleb_CenterLoss_2018,
-    )
-
-    reference = bob.io.base.load(
-        pkg_resources.resource_filename(
-            "bob.bio.face.test", "data/inception_resnet_v2_msceleb_rgb.hdf5"
-        )
-    )
-    np.random.seed(10)
-
-    transformer = InceptionResnetv2_MsCeleb_CenterLoss_2018(memory_demanding=True)
-    data = (np.random.rand(3, 160, 160) * 255).astype("uint8")
-    output = transformer.transform([data])[0]
-    assert output.size == 128, output.shape
-
-    # Sample Batch
-    sample = Sample(data)
-    transformer_sample = wrap(["sample"], transformer)
-    output = [s.data for s in transformer_sample.transform([sample])][0]
-
-    np.testing.assert_allclose(output[0], reference.flatten(), rtol=1e-5, atol=1e-4)
-    assert output.size == 128, output.shape
-
-
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_idiap_inceptionv2_casia():
-    from bob.bio.face.embeddings.tf2_inception_resnet import (
-        InceptionResnetv2_Casia_CenterLoss_2018,
-    )
-
-    reference = bob.io.base.load(
-        pkg_resources.resource_filename(
-            "bob.bio.face.test", "data/inception_resnet_v2_casia_rgb.hdf5"
-        )
-    )
-    np.random.seed(10)
-    transformer = InceptionResnetv2_Casia_CenterLoss_2018()
-    data = (np.random.rand(3, 160, 160) * 255).astype("uint8")
-    output = transformer.transform([data])[0]
-    assert output.size == 128, output.shape
-
-    # Sample Batch
-    sample = Sample(data)
-    transformer_sample = wrap(["sample"], transformer)
-    output = [s.data for s in transformer_sample.transform([sample])][0]
-
-    np.testing.assert_allclose(output, reference.flatten(), rtol=1e-5, atol=1e-4)
-    assert output.size == 128, output.shape
-
-
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_idiap_inceptionv1_msceleb():
-    from bob.bio.face.embeddings.tf2_inception_resnet import (
-        InceptionResnetv1_MsCeleb_CenterLoss_2018,
-    )
-
-    reference = bob.io.base.load(
-        pkg_resources.resource_filename(
-            "bob.bio.face.test", "data/inception_resnet_v1_msceleb_rgb.hdf5"
-        )
-    )
-    np.random.seed(10)
-    transformer = InceptionResnetv1_MsCeleb_CenterLoss_2018()
-    data = (np.random.rand(3, 160, 160) * 255).astype("uint8")
-    output = transformer.transform([data])[0]
-    assert output.size == 128, output.shape
-
-    # Sample Batch
-    sample = Sample(data)
-    transformer_sample = wrap(["sample"], transformer)
-    output = [s.data for s in transformer_sample.transform([sample])][0]
-
-    np.testing.assert_allclose(output, reference.flatten(), rtol=1e-5, atol=1e-4)
-    assert output.size == 128, output.shape
-
-
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_idiap_inceptionv1_casia():
-    from bob.bio.face.embeddings.tf2_inception_resnet import (
-        InceptionResnetv1_Casia_CenterLoss_2018,
-    )
-
-    reference = bob.io.base.load(
-        pkg_resources.resource_filename(
-            "bob.bio.face.test", "data/inception_resnet_v1_casia_rgb.hdf5"
-        )
-    )
-    np.random.seed(10)
-    transformer = InceptionResnetv1_Casia_CenterLoss_2018()
-    data = (np.random.rand(3, 160, 160) * 255).astype("uint8")
-    output = transformer.transform([data])[0]
-    assert output.size == 128, output.shape
-
-    # Sample Batch
-    sample = Sample(data)
-    transformer_sample = wrap(["sample"], transformer)
-    output = [s.data for s in transformer_sample.transform([sample])][0]
-
-    np.testing.assert_allclose(output, reference.flatten(), rtol=1e-5, atol=1e-4)
-    assert output.size == 128, output.shape
-
-
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_facenet_sanderberg():
-    from bob.bio.face.embeddings.tf2_inception_resnet import (
-        FaceNetSanderberg_20170512_110547,
-    )
-
-    reference = bob.io.base.load(
-        pkg_resources.resource_filename(
-            "bob.bio.face.test", "data/facenet_sandberg_20170512-110547.hdf5"
-        )
-    )
-    np.random.seed(10)
-    transformer = FaceNetSanderberg_20170512_110547()
-    data = (np.random.rand(3, 160, 160) * 255).astype("uint8")
-    output = transformer.transform([data])[0]
-    assert output.size == 128, output.shape
-
-    # Sample Batch
-    sample = Sample(data)
-    transformer_sample = wrap(["sample"], transformer)
-    output = [s.data for s in transformer_sample.transform([sample])][0]
-
-    np.testing.assert_allclose(output, reference.flatten(), rtol=1e-5, atol=1e-4)
-    assert output.size == 128, output.shape
-
-
-@pytest.mark.slow
-@is_library_available("mxnet")
-def test_arcface_insight_face():
-    from bob.bio.face.embeddings.mxnet_models import ArcFaceInsightFace
-
-    transformer = ArcFaceInsightFace()
-    data = np.random.rand(3, 112, 112) * 255
-    data = data.astype("uint8")
-    output = transformer.transform([data])
-    assert output.size == 512, output.shape
-
-    # Sample Batch
-    sample = Sample(data)
-    transformer_sample = wrap(["sample"], transformer)
-    output = [s.data for s in transformer_sample.transform([sample])][0]
-    assert output.size == 512, output.shape
diff --git a/bob/bio/face/test/test_transformers.py b/bob/bio/face/test/test_transformers.py
index 1af1a86d50c9cebdaf3cf46dca9356d1b64e4ce1..805a31485df14337a58f1f6d855fd32e66e00811 100644
--- a/bob/bio/face/test/test_transformers.py
+++ b/bob/bio/face/test/test_transformers.py
@@ -12,89 +12,6 @@ def get_fake_sample(face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46,
     return Sample(data, key="1", annotations=annotations)
 
 
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_facenet_sanderberg():
-    transformer = load_resource("facenet-sanderberg", "transformer")
-
-    fake_sample = get_fake_sample()
-
-    transformed_sample = transformer.transform([fake_sample])[0]
-    transformed_data = transformed_sample.data
-    assert transformed_sample.data.size == 128
-
-
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_inception_resnetv2_msceleb():
-    transformer = load_resource("inception-resnetv2-msceleb", "transformer")
-
-    fake_sample = get_fake_sample()
-
-    transformed_sample = transformer.transform([fake_sample])[0]
-    transformed_data = transformed_sample.data
-    assert transformed_sample.data.size == 128
-
-
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_inception_resnetv2_casiawebface():
-    transformer = load_resource("inception-resnetv2-casiawebface", "transformer")
-
-    fake_sample = get_fake_sample()
-
-    transformed_sample = transformer.transform([fake_sample])[0]
-    transformed_data = transformed_sample.data
-    assert transformed_sample.data.size == 128
-
-
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_inception_resnetv1_msceleb():
-    transformer = load_resource("inception-resnetv1-msceleb", "transformer")
-
-    fake_sample = get_fake_sample()
-
-    transformed_sample = transformer.transform([fake_sample])[0]
-    transformed_data = transformed_sample.data
-    assert transformed_sample.data.size == 128
-
-
-@pytest.mark.slow
-@is_library_available("tensorflow")
-def test_inception_resnetv1_casiawebface():
-    transformer = load_resource("inception-resnetv1-casiawebface", "transformer")
-
-    fake_sample = get_fake_sample()
-
-    transformed_sample = transformer.transform([fake_sample])[0]
-    transformed_data = transformed_sample.data
-    assert transformed_sample.data.size == 128
-
-
-"""
-def test_arcface_insight_tf():
-    import tensorflow as tf
-    tf.compat.v1.reset_default_graph()
-    transformer = load_resource("arcface-insight-tf", "transformer")
-
-    fake_sample = get_fake_sample()
-
-    transformed_sample = transformer.transform([fake_sample])[0]
-    transformed_data = transformed_sample.data
-    assert transformed_sample.data.size == 512
-"""
-
-
-def test_gabor_graph():
-    transformer = load_resource("gabor-graph", "transformer")
-
-    fake_sample = get_fake_sample()
-    transformed_sample = transformer.transform([fake_sample])[0]
-    transformed_data = transformed_sample.data
-    assert len(transformed_sample.data) == 80
-
-
 def test_lgbphs():
     transformer = load_resource("lgbphs", "transformer")