Skip to content
Snippets Groups Projects
Commit 144f7748 authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira
Browse files

setup.py

Redefining baselines
parent 7579be17
No related branches found
No related tags found
2 merge requests!66Adding some baselines as transformers,!64Dask pipelines
Pipeline #40471 failed
from bob.bio.face.embeddings import FaceNetSanderberg from bob.bio.face.embeddings import FaceNetSanderberg
from bob.bio.face.config.baseline.helpers import embedding_transformer_160x160 from bob.bio.face.config.baseline.helpers import embedding_transformer_160x160
from bob.bio.base.pipelines.vanilla_biometrics import Distance, VanillaBiometricsPipeline
if "database" in locals(): if "database" in locals():
...@@ -10,4 +12,11 @@ else: ...@@ -10,4 +12,11 @@ else:
fixed_positions = None fixed_positions = None
transformer = embedding_transformer_160x160(FaceNetSanderberg(), annotation_type, fixed_positions) transformer = embedding_transformer_160x160(FaceNetSanderberg(), annotation_type, fixed_positions)
\ No newline at end of file
algorithm = Distance()
pipeline = VanillaBiometricsPipeline(
transformer,
algorithm
)
from bob.extension.config import load
import pkg_resources
import numpy as np
from bob.pipelines import Sample, SampleSet
from bob.bio.base import load_resource
def get_fake_sample_set(
    face_size=(160, 160), eyes=None
):
    """Build a one-element list with a fake SampleSet for baseline tests.

    Parameters
    ----------
    face_size : tuple
        Currently unused by the body; kept for interface compatibility.
        TODO(review): either use it to size the fake image or remove it.
    eyes : dict or None
        Currently unused by the body; kept for interface compatibility.
        Defaults to the 160x160 eye positions. (The original mutable dict
        default was replaced with the None sentinel to avoid the shared
        mutable-default pitfall; the effective value is unchanged.)

    Returns
    -------
    list
        A single SampleSet containing one Sample with a random 3x400x400
        array and fixed eye annotations, keyed/subject'd/referenced as "1".
    """
    if eyes is None:
        eyes = {"leye": (46, 107), "reye": (46, 53)}
    # Random "image": pipelines under test only care about shape, not content.
    data = np.random.rand(3, 400, 400)
    annotations = {"leye": (115, 267), "reye": (115, 132)}
    return [
        SampleSet(
            [Sample(data, key="1", annotations=annotations)],
            key="1",
            subject="1",
            references=["1"],
        )
    ]
def test_facenet_baseline():
    """End-to-end run of the facenet_sanderberg baseline pipeline.

    Scores one fake probe SampleSet against one fake biometric reference
    and checks the score matrix layout (1 probe x 1 reference).
    """
    references = get_fake_sample_set()
    probes = get_fake_sample_set()

    # Load the registered baseline pipeline and run it with no background data.
    pipeline = load_resource("facenet_sanderberg", "baseline")
    scores = pipeline([], references, probes)

    # One probe set, each scored against exactly one reference.
    assert len(scores) == 1
    assert len(scores[0]) == 1
def test_inception_resnetv2_msceleb():
    """Check the inception_resnetv2_msceleb transformer yields a 128-d embedding."""
    transformer = load_resource("inception_resnetv2_msceleb", "baseline")
    # NOTE(review): get_fake_sample is defined elsewhere in this module.
    fake_sample = get_fake_sample()
    transformed_sample = transformer.transform([fake_sample])[0]
    # Removed the unused `transformed_data` local; assert directly on the sample.
    assert transformed_sample.data.size == 128
def test_inception_resnetv2_casiawebface():
    """Check the inception_resnetv2_casiawebface transformer yields a 128-d embedding."""
    transformer = load_resource("inception_resnetv2_casiawebface", "baseline")
    # NOTE(review): get_fake_sample is defined elsewhere in this module.
    fake_sample = get_fake_sample()
    transformed_sample = transformer.transform([fake_sample])[0]
    # Removed the unused `transformed_data` local; assert directly on the sample.
    assert transformed_sample.data.size == 128
def test_inception_resnetv1_msceleb():
    """Check the inception_resnetv1_msceleb transformer yields a 128-d embedding."""
    transformer = load_resource("inception_resnetv1_msceleb", "baseline")
    # NOTE(review): get_fake_sample is defined elsewhere in this module.
    fake_sample = get_fake_sample()
    transformed_sample = transformer.transform([fake_sample])[0]
    # Removed the unused `transformed_data` local; assert directly on the sample.
    assert transformed_sample.data.size == 128
def test_inception_resnetv1_casiawebface():
    """Check the inception_resnetv1_casiawebface transformer yields a 128-d embedding."""
    transformer = load_resource("inception_resnetv1_casiawebface", "baseline")
    # NOTE(review): get_fake_sample is defined elsewhere in this module.
    fake_sample = get_fake_sample()
    transformed_sample = transformer.transform([fake_sample])[0]
    # Removed the unused `transformed_data` local; assert directly on the sample.
    assert transformed_sample.data.size == 128
def test_arcface_insight_tf():
    """Check the arcface_insight_tf transformer yields a 512-d embedding."""
    # Local import: tensorflow is heavy and only needed by this test.
    import tensorflow as tf

    # Reset the TF1 default graph so earlier tests' graphs don't interfere.
    tf.compat.v1.reset_default_graph()

    transformer = load_resource("arcface_insight_tf", "baseline")
    # NOTE(review): get_fake_sample is defined elsewhere in this module.
    fake_sample = get_fake_sample()
    transformed_sample = transformer.transform([fake_sample])[0]
    # Removed the unused `transformed_data` local; assert directly on the sample.
    assert transformed_sample.data.size == 512
...@@ -11,7 +11,7 @@ def get_fake_sample(face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46, ...@@ -11,7 +11,7 @@ def get_fake_sample(face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46,
return Sample(data, key="1", annotations=annotations) return Sample(data, key="1", annotations=annotations)
def test_facenet_baseline(): def test_facenet_baseline():
transformer = load_resource("facenet_sanderberg", "baseline") transformer = load_resource("facenet_sanderberg", "baseline")
fake_sample = get_fake_sample() fake_sample = get_fake_sample()
......
...@@ -6,14 +6,6 @@ parts = scripts ...@@ -6,14 +6,6 @@ parts = scripts
develop = src/bob.pipelines develop = src/bob.pipelines
src/bob.bio.base src/bob.bio.base
src/bob.bio.face_ongoing
src/bob.bio.gmm
src/bob.ip.tensorflow_extractor
src/bob.db.ijbc
src/bob.extension
src/bob.ip.gabor
src/bob.learn.linear
src/bob.learn.em
. .
...@@ -21,15 +13,7 @@ develop = src/bob.pipelines ...@@ -21,15 +13,7 @@ develop = src/bob.pipelines
eggs = bob.bio.face eggs = bob.bio.face
bob.pipelines bob.pipelines
bob.bio.base bob.bio.base
bob.bio.face_ongoing
bob.bio.gmm
bob.ip.gabor
bob.ip.tensorflow_extractor
bob.db.ijbc
bob.extension
bob.learn.linear
bob.learn.em
extensions = bob.buildout extensions = bob.buildout
mr.developer mr.developer
...@@ -42,14 +26,6 @@ auto-checkout = * ...@@ -42,14 +26,6 @@ auto-checkout = *
[sources] [sources]
bob.pipelines = git git@gitlab.idiap.ch:bob/bob.pipelines bob.pipelines = git git@gitlab.idiap.ch:bob/bob.pipelines
bob.bio.base = git git@gitlab.idiap.ch:bob/bob.bio.base bob.bio.base = git git@gitlab.idiap.ch:bob/bob.bio.base
bob.bio.face_ongoing = git git@gitlab.idiap.ch:bob/bob.bio.face_ongoing
bob.ip.tensorflow_extractor = git git@gitlab.idiap.ch:bob/bob.ip.tensorflow_extractor
bob.db.ijbc = git git@gitlab.idiap.ch:bob/bob.db.ijbc
bob.bio.gmm = git git@gitlab.idiap.ch:bob/bob.bio.gmm
bob.extension = git git@gitlab.idiap.ch:bob/bob.extension
bob.learn.linear = git git@gitlab.idiap.ch:bob/bob.learn.linear
bob.learn.em = git git@gitlab.idiap.ch:bob/bob.learn.em
bob.ip.gabor = git git@gitlab.idiap.ch:bob/bob.ip.gabor
[scripts] [scripts]
......
...@@ -141,8 +141,7 @@ setup( ...@@ -141,8 +141,7 @@ setup(
'mtcnn = bob.bio.face.config.annotator.mtcnn:annotator', 'mtcnn = bob.bio.face.config.annotator.mtcnn:annotator',
], ],
#baselines 'bob.bio.transformer':[
'bob.bio.baseline':[
'facenet_sanderberg = bob.bio.face.config.baseline.facenet_sanderberg:transformer', 'facenet_sanderberg = bob.bio.face.config.baseline.facenet_sanderberg:transformer',
'inception_resnetv1_casiawebface = bob.bio.face.config.baseline.inception_resnetv1_casiawebface:transformer', 'inception_resnetv1_casiawebface = bob.bio.face.config.baseline.inception_resnetv1_casiawebface:transformer',
'inception_resnetv2_casiawebface = bob.bio.face.config.baseline.inception_resnetv2_casiawebface:transformer', 'inception_resnetv2_casiawebface = bob.bio.face.config.baseline.inception_resnetv2_casiawebface:transformer',
...@@ -151,6 +150,17 @@ setup( ...@@ -151,6 +150,17 @@ setup(
'arcface_insight_tf = bob.bio.face.config.baseline.arcface_insight_tf:transformer', 'arcface_insight_tf = bob.bio.face.config.baseline.arcface_insight_tf:transformer',
], ],
#baselines
'bob.bio.baseline':[
'facenet_sanderberg = bob.bio.face.config.baseline.facenet_sanderberg:pipeline',
'inception_resnetv1_casiawebface = bob.bio.face.config.baseline.inception_resnetv1_casiawebface:pipeline',
'inception_resnetv2_casiawebface = bob.bio.face.config.baseline.inception_resnetv2_casiawebface:pipeline',
'inception_resnetv1_msceleb = bob.bio.face.config.baseline.inception_resnetv1_msceleb:pipeline',
'inception_resnetv2_msceleb = bob.bio.face.config.baseline.inception_resnetv2_msceleb:pipeline',
'arcface_insight_tf = bob.bio.face.config.baseline.arcface_insight_tf:pipeline',
],
}, },
# Classifiers are important if you plan to distribute this package through # Classifiers are important if you plan to distribute this package through
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment