From 73eabc81ed1bce90ad075969a104d0784ff143e6 Mon Sep 17 00:00:00 2001
From: Yannick DAYER <yannick.dayer@idiap.ch>
Date: Wed, 28 Oct 2020 09:27:12 +0100
Subject: [PATCH] Fix transformer naming in tests

The transformer and pipeline resources are named with dashes instead of
underscores; update the tests to load them by their dashed names, drop
trailing whitespace, and add bob/bio/face/embeddings/data to .gitignore.

---
 .gitignore                             |  1 +
 bob/bio/face/test/test_baselines.py    | 18 +++++++++---------
 bob/bio/face/test/test_transformers.py | 16 ++++++++--------
 3 files changed, 18 insertions(+), 17 deletions(-)

diff --git a/.gitignore b/.gitignore
index 21e7c55c..2d178a91 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,3 +13,4 @@ sphinx
 dist
 record.txt
 build/
+bob/bio/face/embeddings/data
diff --git a/bob/bio/face/test/test_baselines.py b/bob/bio/face/test/test_baselines.py
index c645d01c..6b46f5ab 100644
--- a/bob/bio/face/test/test_baselines.py
+++ b/bob/bio/face/test/test_baselines.py
@@ -57,12 +57,12 @@ def get_fake_samples_for_training():
     ]
 
 
-def run_baseline(baseline, samples_for_training=[]):    
+def run_baseline(baseline, samples_for_training=[]):
     biometric_references = get_fake_sample_set(purpose="bioref")
     probes = get_fake_sample_set(purpose="probe")
 
     # Regular pipeline
-    pipeline = load_resource(baseline, "pipeline")    
+    pipeline = load_resource(baseline, "pipeline")
     scores = pipeline(samples_for_training, biometric_references, probes)
     assert len(scores) == 1
     assert len(scores[0]) == 1
@@ -72,7 +72,7 @@ def run_baseline(baseline, samples_for_training=[]):
 
         checkpoint_pipeline = checkpoint_vanilla_biometrics(
             copy.deepcopy(pipeline), base_dir=d
-        )        
+        )
         checkpoint_scores = checkpoint_pipeline([], biometric_references, probes)
         assert len(checkpoint_scores) == 1
         assert len(checkpoint_scores[0]) == 1
@@ -104,23 +104,23 @@ def run_baseline(baseline, samples_for_training=[]):
 
 
 def test_facenet_baseline():
-    run_baseline("facenet_sanderberg")
+    run_baseline("facenet-sanderberg")
 
 
 def test_inception_resnetv2_msceleb():
-    run_baseline("inception_resnetv2_msceleb")
+    run_baseline("inception-resnetv2-msceleb")
 
 
 def test_inception_resnetv2_casiawebface():
-    run_baseline("inception_resnetv2_casiawebface")
+    run_baseline("inception-resnetv2-casiawebface")
 
 
 def test_inception_resnetv1_msceleb():
-    run_baseline("inception_resnetv1_msceleb")
+    run_baseline("inception-resnetv1-msceleb")
 
 
 def test_inception_resnetv1_casiawebface():
-    run_baseline("inception_resnetv1_casiawebface")
+    run_baseline("inception-resnetv1-casiawebface")
 
 
 def test_arcface_insight_tf():
@@ -128,7 +128,7 @@ def test_arcface_insight_tf():
 
     tf.compat.v1.reset_default_graph()
 
-    run_baseline("arcface_insight_tf")
+    run_baseline("arcface-insight-tf")
 
 
 def test_gabor_graph():
diff --git a/bob/bio/face/test/test_transformers.py b/bob/bio/face/test/test_transformers.py
index 4f9e63e8..615ddd0a 100644
--- a/bob/bio/face/test/test_transformers.py
+++ b/bob/bio/face/test/test_transformers.py
@@ -11,8 +11,8 @@ def get_fake_sample(face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46,
     return Sample(data, key="1", annotations=annotations)
 
 
-def test_facenet():    
-    transformer = load_resource("facenet_sanderberg", "transformer")
+def test_facenet():
+    transformer = load_resource("facenet-sanderberg", "transformer")
 
     fake_sample = get_fake_sample()
 
@@ -22,7 +22,7 @@ def test_facenet():
 
 
 def test_inception_resnetv2_msceleb():
-    transformer = load_resource("inception_resnetv2_msceleb", "transformer")
+    transformer = load_resource("inception-resnetv2-msceleb", "transformer")
 
     fake_sample = get_fake_sample()
 
@@ -32,7 +32,7 @@ def test_inception_resnetv2_msceleb():
 
 
 def test_inception_resnetv2_casiawebface():
-    transformer = load_resource("inception_resnetv2_casiawebface", "transformer")
+    transformer = load_resource("inception-resnetv2-casiawebface", "transformer")
 
     fake_sample = get_fake_sample()
 
@@ -42,7 +42,7 @@ def test_inception_resnetv2_casiawebface():
 
 
 def test_inception_resnetv1_msceleb():
-    transformer = load_resource("inception_resnetv1_msceleb", "transformer")
+    transformer = load_resource("inception-resnetv1-msceleb", "transformer")
 
     fake_sample = get_fake_sample()
 
@@ -52,7 +52,7 @@ def test_inception_resnetv1_msceleb():
 
 
 def test_inception_resnetv1_casiawebface():
-    transformer = load_resource("inception_resnetv1_casiawebface", "transformer")
+    transformer = load_resource("inception-resnetv1-casiawebface", "transformer")
 
     fake_sample = get_fake_sample()
 
@@ -64,7 +64,7 @@ def test_inception_resnetv1_casiawebface():
 def test_arcface_insight_tf():
     import tensorflow as tf
     tf.compat.v1.reset_default_graph()
-    transformer = load_resource("arcface_insight_tf", "transformer")
+    transformer = load_resource("arcface-insight-tf", "transformer")
 
     fake_sample = get_fake_sample()
 
@@ -74,7 +74,7 @@ def test_arcface_insight_tf():
 
 
 def test_gabor_graph():
-    transformer = load_resource("gabor_graph", "transformer")
+    transformer = load_resource("gabor-graph", "transformer")
 
     fake_sample = get_fake_sample()
     transformed_sample = transformer.transform([fake_sample])[0]
-- 
GitLab