Skip to content
Snippets Groups Projects
Commit 4dc57ca0 authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira
Browse files

Removing duplicated tests

parent f73aa7a5
No related branches found
No related tags found
1 merge request!109Remove duplicated tests
Pipeline #49366 passed
...@@ -15,11 +15,15 @@ if "database" in locals(): ...@@ -15,11 +15,15 @@ if "database" in locals():
else: else:
annotation_type = None annotation_type = None
fixed_positions = None fixed_positions = None
memory_demanding = False
def load(annotation_type, fixed_positions=None): def load(annotation_type, fixed_positions=None):
transformer = embedding_transformer_112x112( transformer = embedding_transformer_112x112(
ArcFaceInsightFace(memory_demanding=memory_demanding), annotation_type, fixed_positions, color_channel="rgb" ArcFaceInsightFace(memory_demanding=memory_demanding),
annotation_type,
fixed_positions,
color_channel="rgb",
) )
algorithm = Distance() algorithm = Distance()
......
...@@ -59,7 +59,7 @@ def get_fake_samples_for_training(): ...@@ -59,7 +59,7 @@ def get_fake_samples_for_training():
] ]
def run_baseline(baseline, samples_for_training=[]): def run_baseline(baseline, samples_for_training=[], target_scores=None):
biometric_references = get_fake_sample_set(purpose="bioref") biometric_references = get_fake_sample_set(purpose="bioref")
probes = get_fake_sample_set(purpose="probe") probes = get_fake_sample_set(purpose="probe")
...@@ -78,6 +78,10 @@ def run_baseline(baseline, samples_for_training=[]): ...@@ -78,6 +78,10 @@ def run_baseline(baseline, samples_for_training=[]):
checkpoint_scores = checkpoint_pipeline([], biometric_references, probes) checkpoint_scores = checkpoint_pipeline([], biometric_references, probes)
assert len(checkpoint_scores) == 1 assert len(checkpoint_scores) == 1
assert len(checkpoint_scores[0]) == 1 assert len(checkpoint_scores[0]) == 1
if target_scores is not None:
np.allclose(target_scores, scores[0][0].data, atol=10e-3, rtol=10e-3)
assert np.isclose(scores[0][0].data, checkpoint_scores[0][0].data) assert np.isclose(scores[0][0].data, checkpoint_scores[0][0].data)
dirs = os.listdir(d) dirs = os.listdir(d)
...@@ -109,41 +113,41 @@ def run_baseline(baseline, samples_for_training=[]): ...@@ -109,41 +113,41 @@ def run_baseline(baseline, samples_for_training=[]):
@pytest.mark.slow @pytest.mark.slow
@is_library_available("tensorflow") @is_library_available("tensorflow")
def test_facenet_baseline(): def test_facenet_baseline():
run_baseline("facenet-sanderberg") run_baseline("facenet-sanderberg", target_scores=[-0.9220775737526933])
@pytest.mark.slow @pytest.mark.slow
@is_library_available("tensorflow") @is_library_available("tensorflow")
def test_inception_resnetv2_msceleb(): def test_inception_resnetv2_msceleb():
run_baseline("inception-resnetv2-msceleb") run_baseline("inception-resnetv2-msceleb", target_scores=[-0.43447269718504244])
@pytest.mark.slow @pytest.mark.slow
@is_library_available("tensorflow") @is_library_available("tensorflow")
def test_inception_resnetv2_casiawebface(): def test_inception_resnetv2_casiawebface():
run_baseline("inception-resnetv2-casiawebface") run_baseline("inception-resnetv2-casiawebface", target_scores=[-0.634583944368043])
@pytest.mark.slow @pytest.mark.slow
@is_library_available("tensorflow") @is_library_available("tensorflow")
def test_inception_resnetv1_msceleb(): def test_inception_resnetv1_msceleb():
run_baseline("inception-resnetv1-msceleb") run_baseline("inception-resnetv1-msceleb", target_scores=[-0.44497649298306907])
@pytest.mark.slow @pytest.mark.slow
@is_library_available("tensorflow") @is_library_available("tensorflow")
def test_inception_resnetv1_casiawebface(): def test_inception_resnetv1_casiawebface():
run_baseline("inception-resnetv1-casiawebface") run_baseline("inception-resnetv1-casiawebface", target_scores=[-0.6411599976437636])
@pytest.mark.slow @pytest.mark.slow
@is_library_available("mxnet") @is_library_available("mxnet")
def test_arcface_insightface(): def test_arcface_insightface():
run_baseline("arcface-insightface") run_baseline("arcface-insightface", target_scores=[-0.0005965275677296544])
def test_gabor_graph(): def test_gabor_graph():
run_baseline("gabor_graph") run_baseline("gabor_graph", target_scores=[0.4385451147418939])
# def test_lda(): # def test_lda():
......
import bob.bio.face
import bob.io.base
import numpy as np
from bob.pipelines import Sample, wrap
import pkg_resources
from bob.bio.base.test.utils import is_library_available
import pytest
@pytest.mark.slow
@is_library_available("tensorflow")
def test_idiap_inceptionv2_msceleb():
    """InceptionResnetV2 (MSCeleb, center-loss) embedding regression test.

    Feeds a seeded random RGB image through both the raw-array API and the
    sample-wrapped API, checking the embedding size (128) and that the
    wrapped path reproduces the stored reference embedding.
    """
    from bob.bio.face.embeddings.tf2_inception_resnet import (
        InceptionResnetv2_MsCeleb_CenterLoss_2018,
    )

    reference = bob.io.base.load(
        pkg_resources.resource_filename(
            "bob.bio.face.test", "data/inception_resnet_v2_msceleb_rgb.hdf5"
        )
    )

    # Seed before drawing the fake image so the embedding is reproducible.
    np.random.seed(10)
    embedder = InceptionResnetv2_MsCeleb_CenterLoss_2018()
    image = (np.random.rand(3, 160, 160) * 255).astype("uint8")

    # Raw-array interface.
    embedding = embedder.transform([image])[0]
    assert embedding.size == 128, embedding.shape

    # Sample-wrapped interface must agree with the reference embedding.
    wrapped = wrap(["sample"], embedder)
    embedding = [s.data for s in wrapped.transform([Sample(image)])][0]
    np.testing.assert_allclose(embedding, reference.flatten(), rtol=1e-5, atol=1e-4)
    assert embedding.size == 128, embedding.shape
@pytest.mark.slow
@is_library_available("tensorflow")
def test_idiap_inceptionv2_msceleb_memory_demanding():
    """InceptionResnetV2 MSCeleb embedding test with memory_demanding=True.

    Same fixture as the non-memory-demanding variant: a seeded random RGB
    image is embedded via the raw-array API and the sample-wrapped API, and
    the result is compared against a stored reference embedding.
    """
    from bob.bio.face.embeddings.tf2_inception_resnet import (
        InceptionResnetv2_MsCeleb_CenterLoss_2018,
    )

    # Stored reference embedding for the seeded random image generated below.
    reference = bob.io.base.load(
        pkg_resources.resource_filename(
            "bob.bio.face.test", "data/inception_resnet_v2_msceleb_rgb.hdf5"
        )
    )
    # Seed before drawing the fake image so the test is deterministic.
    np.random.seed(10)

    transformer = InceptionResnetv2_MsCeleb_CenterLoss_2018(memory_demanding=True)
    data = (np.random.rand(3, 160, 160) * 255).astype("uint8")
    output = transformer.transform([data])[0]
    assert output.size == 128, output.shape

    # Sample Batch
    sample = Sample(data)
    transformer_sample = wrap(["sample"], transformer)
    output = [s.data for s in transformer_sample.transform([sample])][0]
    # NOTE(review): unlike the sibling tests this compares output[0], not
    # output — presumably memory_demanding mode yields a (1, 128) array per
    # sample; confirm against the transformer implementation.
    np.testing.assert_allclose(output[0], reference.flatten(), rtol=1e-5, atol=1e-4)
    assert output.size == 128, output.shape
@pytest.mark.slow
@is_library_available("tensorflow")
def test_idiap_inceptionv2_casia():
    """InceptionResnetV2 (CASIA-WebFace, center-loss) embedding regression test.

    Embeds a seeded random RGB image through the raw-array and the
    sample-wrapped interfaces; checks the 128-d output size and that the
    wrapped path matches the stored reference embedding.
    """
    from bob.bio.face.embeddings.tf2_inception_resnet import (
        InceptionResnetv2_Casia_CenterLoss_2018,
    )

    reference = bob.io.base.load(
        pkg_resources.resource_filename(
            "bob.bio.face.test", "data/inception_resnet_v2_casia_rgb.hdf5"
        )
    )

    # Deterministic fake image: seed first, then draw.
    np.random.seed(10)
    embedder = InceptionResnetv2_Casia_CenterLoss_2018()
    image = (np.random.rand(3, 160, 160) * 255).astype("uint8")

    # Raw-array interface.
    embedding = embedder.transform([image])[0]
    assert embedding.size == 128, embedding.shape

    # Sample-wrapped interface must reproduce the reference.
    wrapped = wrap(["sample"], embedder)
    embedding = [s.data for s in wrapped.transform([Sample(image)])][0]
    np.testing.assert_allclose(embedding, reference.flatten(), rtol=1e-5, atol=1e-4)
    assert embedding.size == 128, embedding.shape
@pytest.mark.slow
@is_library_available("tensorflow")
def test_idiap_inceptionv1_msceleb():
    """InceptionResnetV1 (MSCeleb, center-loss) embedding regression test.

    Embeds a seeded random RGB image through the raw-array and the
    sample-wrapped interfaces; checks the 128-d output size and that the
    wrapped path matches the stored reference embedding.
    """
    from bob.bio.face.embeddings.tf2_inception_resnet import (
        InceptionResnetv1_MsCeleb_CenterLoss_2018,
    )

    reference = bob.io.base.load(
        pkg_resources.resource_filename(
            "bob.bio.face.test", "data/inception_resnet_v1_msceleb_rgb.hdf5"
        )
    )

    # Deterministic fake image: seed first, then draw.
    np.random.seed(10)
    embedder = InceptionResnetv1_MsCeleb_CenterLoss_2018()
    image = (np.random.rand(3, 160, 160) * 255).astype("uint8")

    # Raw-array interface.
    embedding = embedder.transform([image])[0]
    assert embedding.size == 128, embedding.shape

    # Sample-wrapped interface must reproduce the reference.
    wrapped = wrap(["sample"], embedder)
    embedding = [s.data for s in wrapped.transform([Sample(image)])][0]
    np.testing.assert_allclose(embedding, reference.flatten(), rtol=1e-5, atol=1e-4)
    assert embedding.size == 128, embedding.shape
@pytest.mark.slow
@is_library_available("tensorflow")
def test_idiap_inceptionv1_casia():
    """InceptionResnetV1 (CASIA-WebFace, center-loss) embedding regression test.

    Embeds a seeded random RGB image through the raw-array and the
    sample-wrapped interfaces; checks the 128-d output size and that the
    wrapped path matches the stored reference embedding.
    """
    from bob.bio.face.embeddings.tf2_inception_resnet import (
        InceptionResnetv1_Casia_CenterLoss_2018,
    )

    reference = bob.io.base.load(
        pkg_resources.resource_filename(
            "bob.bio.face.test", "data/inception_resnet_v1_casia_rgb.hdf5"
        )
    )

    # Deterministic fake image: seed first, then draw.
    np.random.seed(10)
    embedder = InceptionResnetv1_Casia_CenterLoss_2018()
    image = (np.random.rand(3, 160, 160) * 255).astype("uint8")

    # Raw-array interface.
    embedding = embedder.transform([image])[0]
    assert embedding.size == 128, embedding.shape

    # Sample-wrapped interface must reproduce the reference.
    wrapped = wrap(["sample"], embedder)
    embedding = [s.data for s in wrapped.transform([Sample(image)])][0]
    np.testing.assert_allclose(embedding, reference.flatten(), rtol=1e-5, atol=1e-4)
    assert embedding.size == 128, embedding.shape
@pytest.mark.slow
@is_library_available("tensorflow")
def test_facenet_sanderberg():
    """FaceNet (Sanderberg 20170512-110547) embedding regression test.

    Embeds a seeded random RGB image through the raw-array and the
    sample-wrapped interfaces; checks the 128-d output size and that the
    wrapped path matches the stored reference embedding.
    """
    from bob.bio.face.embeddings.tf2_inception_resnet import (
        FaceNetSanderberg_20170512_110547,
    )

    reference = bob.io.base.load(
        pkg_resources.resource_filename(
            "bob.bio.face.test", "data/facenet_sandberg_20170512-110547.hdf5"
        )
    )

    # Deterministic fake image: seed first, then draw.
    np.random.seed(10)
    embedder = FaceNetSanderberg_20170512_110547()
    image = (np.random.rand(3, 160, 160) * 255).astype("uint8")

    # Raw-array interface.
    embedding = embedder.transform([image])[0]
    assert embedding.size == 128, embedding.shape

    # Sample-wrapped interface must reproduce the reference.
    wrapped = wrap(["sample"], embedder)
    embedding = [s.data for s in wrapped.transform([Sample(image)])][0]
    np.testing.assert_allclose(embedding, reference.flatten(), rtol=1e-5, atol=1e-4)
    assert embedding.size == 128, embedding.shape
@pytest.mark.slow
@is_library_available("mxnet")
def test_arcface_insight_face():
    """ArcFace (InsightFace / MXNet) embedding smoke test.

    Checks that both the raw-array and the sample-wrapped interfaces
    produce a 512-dimensional embedding for a random uint8 RGB image.
    No reference comparison — only shape/size is asserted.
    """
    from bob.bio.face.embeddings.mxnet_models import ArcFaceInsightFace

    transformer = ArcFaceInsightFace()

    data = np.random.rand(3, 112, 112) * 255
    data = data.astype("uint8")
    # Fix: transform() is called with a one-element batch, so take element 0
    # before checking .size, consistent with every sibling embedding test
    # (a bare Python list has no .size attribute).
    output = transformer.transform([data])[0]
    assert output.size == 512, output.shape

    # Sample Batch
    sample = Sample(data)
    transformer_sample = wrap(["sample"], transformer)
    output = [s.data for s in transformer_sample.transform([sample])][0]
    assert output.size == 512, output.shape
...@@ -12,89 +12,6 @@ def get_fake_sample(face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46, ...@@ -12,89 +12,6 @@ def get_fake_sample(face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46,
return Sample(data, key="1", annotations=annotations) return Sample(data, key="1", annotations=annotations)
@pytest.mark.slow
@is_library_available("tensorflow")
def test_facenet_sanderberg():
transformer = load_resource("facenet-sanderberg", "transformer")
fake_sample = get_fake_sample()
transformed_sample = transformer.transform([fake_sample])[0]
transformed_data = transformed_sample.data
assert transformed_sample.data.size == 128
@pytest.mark.slow
@is_library_available("tensorflow")
def test_inception_resnetv2_msceleb():
transformer = load_resource("inception-resnetv2-msceleb", "transformer")
fake_sample = get_fake_sample()
transformed_sample = transformer.transform([fake_sample])[0]
transformed_data = transformed_sample.data
assert transformed_sample.data.size == 128
@pytest.mark.slow
@is_library_available("tensorflow")
def test_inception_resnetv2_casiawebface():
transformer = load_resource("inception-resnetv2-casiawebface", "transformer")
fake_sample = get_fake_sample()
transformed_sample = transformer.transform([fake_sample])[0]
transformed_data = transformed_sample.data
assert transformed_sample.data.size == 128
@pytest.mark.slow
@is_library_available("tensorflow")
def test_inception_resnetv1_msceleb():
transformer = load_resource("inception-resnetv1-msceleb", "transformer")
fake_sample = get_fake_sample()
transformed_sample = transformer.transform([fake_sample])[0]
transformed_data = transformed_sample.data
assert transformed_sample.data.size == 128
@pytest.mark.slow
@is_library_available("tensorflow")
def test_inception_resnetv1_casiawebface():
transformer = load_resource("inception-resnetv1-casiawebface", "transformer")
fake_sample = get_fake_sample()
transformed_sample = transformer.transform([fake_sample])[0]
transformed_data = transformed_sample.data
assert transformed_sample.data.size == 128
"""
def test_arcface_insight_tf():
import tensorflow as tf
tf.compat.v1.reset_default_graph()
transformer = load_resource("arcface-insight-tf", "transformer")
fake_sample = get_fake_sample()
transformed_sample = transformer.transform([fake_sample])[0]
transformed_data = transformed_sample.data
assert transformed_sample.data.size == 512
"""
def test_gabor_graph():
transformer = load_resource("gabor-graph", "transformer")
fake_sample = get_fake_sample()
transformed_sample = transformer.transform([fake_sample])[0]
transformed_data = transformed_sample.data
assert len(transformed_sample.data) == 80
def test_lgbphs(): def test_lgbphs():
transformer = load_resource("lgbphs", "transformer") transformer = load_resource("lgbphs", "transformer")
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment