From 144f7748f2f864cf88f17fdd1c99950c94eac110 Mon Sep 17 00:00:00 2001
From: Tiago Freitas Pereira <tiagofrepereira@gmail.com>
Date: Mon, 15 Jun 2020 08:06:31 +0200
Subject: [PATCH] setup.py

Redefining baselines
---
 .../config/baseline/facenet_sanderberg.py     | 11 ++-
 bob/bio/face/test/test_baselines.py           | 95 +++++++++++++++++++
 ...{test_baseline.py => test_transformers.py} |  2 +-
 develop.cfg                                   | 26 +----
 setup.py                                      | 14 ++-
 5 files changed, 119 insertions(+), 29 deletions(-)
 create mode 100644 bob/bio/face/test/test_baselines.py
 rename bob/bio/face/test/{test_baseline.py => test_transformers.py} (98%)

diff --git a/bob/bio/face/config/baseline/facenet_sanderberg.py b/bob/bio/face/config/baseline/facenet_sanderberg.py
index f59ff74e..20b8c9ce 100644
--- a/bob/bio/face/config/baseline/facenet_sanderberg.py
+++ b/bob/bio/face/config/baseline/facenet_sanderberg.py
@@ -1,5 +1,7 @@
 from bob.bio.face.embeddings import FaceNetSanderberg
 from bob.bio.face.config.baseline.helpers import embedding_transformer_160x160
+from bob.bio.base.pipelines.vanilla_biometrics import Distance, VanillaBiometricsPipeline
+
 
 
 if "database" in locals():
@@ -10,4 +12,11 @@ else:
     fixed_positions = None
 
 
-transformer = embedding_transformer_160x160(FaceNetSanderberg(), annotation_type, fixed_positions)
\ No newline at end of file
+transformer = embedding_transformer_160x160(FaceNetSanderberg(), annotation_type, fixed_positions)
+
+algorithm = Distance()
+
+pipeline = VanillaBiometricsPipeline(
+    transformer,
+    algorithm
+)
diff --git a/bob/bio/face/test/test_baselines.py b/bob/bio/face/test/test_baselines.py
new file mode 100644
index 00000000..49c56113
--- /dev/null
+++ b/bob/bio/face/test/test_baselines.py
@@ -0,0 +1,95 @@
+from bob.extension.config import load
+import pkg_resources
+import numpy as np
+from bob.pipelines import Sample, SampleSet
+from bob.bio.base import load_resource
+
+
+def get_fake_sample_set(
+    face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46, 53)}
+):
+
+    data = np.random.rand(3, 400, 400)
+    annotations = {"leye": (115, 267), "reye": (115, 132)}
+    return [
+        SampleSet(
+            [Sample(data, key="1", annotations=annotations)],
+            key="1",
+            subject="1",
+            references=["1"],
+        )
+    ]
+
+
+def test_facenet_baseline():
+
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+    
+    # Regular pipeline
+    pipeline = load_resource("facenet_sanderberg", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
+
+
+    # TODO: also exercise the pipeline with checkpointing enabled
+
+    # fake_sample = get_fake_sample()
+
+    # transformed_sample = transformer.transform([fake_sample])[0]
+    # transformed_data = transformed_sample.data
+    # assert transformed_sample.data.size == 128
+
+
+def test_inception_resnetv2_msceleb():
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("inception_resnetv2_msceleb", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
+
+
+def test_inception_resnetv2_casiawebface():
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("inception_resnetv2_casiawebface", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
+
+
+def test_inception_resnetv1_msceleb():
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("inception_resnetv1_msceleb", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
+
+
+def test_inception_resnetv1_casiawebface():
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("inception_resnetv1_casiawebface", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
+
+
+def test_arcface_insight_tf():
+    import tensorflow as tf
+
+    tf.compat.v1.reset_default_graph()
+    biometric_references = get_fake_sample_set()
+    probes = get_fake_sample_set()
+
+    pipeline = load_resource("arcface_insight_tf", "baseline")
+    scores = pipeline([], biometric_references, probes)
+    assert len(scores) == 1
+    assert len(scores[0]) == 1
diff --git a/bob/bio/face/test/test_baseline.py b/bob/bio/face/test/test_transformers.py
similarity index 98%
rename from bob/bio/face/test/test_baseline.py
rename to bob/bio/face/test/test_transformers.py
index f6ea8f58..ab737884 100644
--- a/bob/bio/face/test/test_baseline.py
+++ b/bob/bio/face/test/test_transformers.py
@@ -11,7 +11,7 @@ def get_fake_sample(face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46,
     return Sample(data, key="1", annotations=annotations)
 
 
-def test_facenet_baseline():
-    transformer = load_resource("facenet_sanderberg", "baseline")
+def test_facenet_baseline():
+    transformer = load_resource("facenet_sanderberg", "transformer")
 
     fake_sample = get_fake_sample()
diff --git a/develop.cfg b/develop.cfg
index 0100329d..5fafd36c 100644
--- a/develop.cfg
+++ b/develop.cfg
@@ -6,14 +6,6 @@ parts = scripts
 
 develop = src/bob.pipelines
           src/bob.bio.base
-          src/bob.bio.face_ongoing
-          src/bob.bio.gmm
-          src/bob.ip.tensorflow_extractor
-          src/bob.db.ijbc
-          src/bob.extension
-          src/bob.ip.gabor
-          src/bob.learn.linear
-          src/bob.learn.em
           .
           
 
@@ -21,15 +13,7 @@ develop = src/bob.pipelines
 eggs = bob.bio.face
        bob.pipelines
        bob.bio.base
-       bob.bio.face_ongoing
-       bob.bio.gmm
-       bob.ip.gabor
-       bob.ip.tensorflow_extractor
-       bob.db.ijbc
-       bob.extension
-       bob.learn.linear
-       bob.learn.em
-
+ 
 
 extensions = bob.buildout
              mr.developer
@@ -42,14 +26,6 @@ auto-checkout = *
 [sources]
 bob.pipelines = git git@gitlab.idiap.ch:bob/bob.pipelines
 bob.bio.base = git git@gitlab.idiap.ch:bob/bob.bio.base
-bob.bio.face_ongoing = git git@gitlab.idiap.ch:bob/bob.bio.face_ongoing
-bob.ip.tensorflow_extractor =  git git@gitlab.idiap.ch:bob/bob.ip.tensorflow_extractor
-bob.db.ijbc = git git@gitlab.idiap.ch:bob/bob.db.ijbc
-bob.bio.gmm = git git@gitlab.idiap.ch:bob/bob.bio.gmm
-bob.extension = git git@gitlab.idiap.ch:bob/bob.extension
-bob.learn.linear = git git@gitlab.idiap.ch:bob/bob.learn.linear
-bob.learn.em = git git@gitlab.idiap.ch:bob/bob.learn.em
-bob.ip.gabor = git git@gitlab.idiap.ch:bob/bob.ip.gabor
 
 
 [scripts]
diff --git a/setup.py b/setup.py
index d6c10dca..2f920af7 100644
--- a/setup.py
+++ b/setup.py
@@ -141,8 +141,7 @@ setup(
             'mtcnn                    = bob.bio.face.config.annotator.mtcnn:annotator',
         ],
 
-        #baselines
-        'bob.bio.baseline':[
+        'bob.bio.transformer':[
           'facenet_sanderberg = bob.bio.face.config.baseline.facenet_sanderberg:transformer',
           'inception_resnetv1_casiawebface = bob.bio.face.config.baseline.inception_resnetv1_casiawebface:transformer',
           'inception_resnetv2_casiawebface = bob.bio.face.config.baseline.inception_resnetv2_casiawebface:transformer',
@@ -151,6 +150,17 @@ setup(
           'arcface_insight_tf = bob.bio.face.config.baseline.arcface_insight_tf:transformer',
         ],
 
+        #baselines
+        'bob.bio.baseline':[
+          'facenet_sanderberg = bob.bio.face.config.baseline.facenet_sanderberg:pipeline',
+          'inception_resnetv1_casiawebface = bob.bio.face.config.baseline.inception_resnetv1_casiawebface:pipeline',
+          'inception_resnetv2_casiawebface = bob.bio.face.config.baseline.inception_resnetv2_casiawebface:pipeline',
+          'inception_resnetv1_msceleb = bob.bio.face.config.baseline.inception_resnetv1_msceleb:pipeline',
+          'inception_resnetv2_msceleb = bob.bio.face.config.baseline.inception_resnetv2_msceleb:pipeline',
+          'arcface_insight_tf = bob.bio.face.config.baseline.arcface_insight_tf:pipeline',
+        ],
+
+
     },
 
     # Classifiers are important if you plan to distribute this package through
-- 
GitLab