Commit d93b03ff authored by Tiago de Freitas Pereira

Reorganized baselines

parent 2f04b3e6
Pipeline #40427 failed with stage in 13 minutes and 28 seconds
import bob.bio.face
from sklearn.pipeline import make_pipeline
from bob.bio.base.wrappers import wrap_sample_preprocessor
from bob.pipelines import wrap
from bob.bio.face.embeddings import FaceNetSanderberg
from bob.bio.face.helpers import face_crop_solver

# This is the size of the image that this model expects
CROPPED_IMAGE_HEIGHT = 160
CROPPED_IMAGE_WIDTH = 160
cropped_image_size = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH)
color_channel = "rgb"

# If a database is already defined in the chained configuration, take the
# annotation type and fixed positions from it; otherwise default to
# eye-centered annotations.
if "database" in locals():
    annotation_type = database.annotation_type
    fixed_positions = database.fixed_positions
else:
    annotation_type = "eyes-center"
    fixed_positions = None

#### SOLVING THE FACE CROPPER TO BE USED
if annotation_type == "bounding-box":
    transform_extra_arguments = (("annotations", "annotations"),)
    TOP_LEFT_POS = (0, 0)
    BOTTOM_RIGHT_POS = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH)

    # Crops the face using the bounding-box annotations, without eye positions
    face_cropper = face_crop_solver(
        cropped_image_size,
        color_channel=color_channel,
        cropped_positions={"topleft": TOP_LEFT_POS, "bottomright": BOTTOM_RIGHT_POS},
        fixed_positions=fixed_positions,
    )

elif annotation_type == "eyes-center":
    transform_extra_arguments = (("annotations", "annotations"),)
    # eye positions for frontal images
    RIGHT_EYE_POS = (46, 53)
    LEFT_EYE_POS = (46, 107)

    # Crops the face using the eye-center annotations
    face_cropper = face_crop_solver(
        cropped_image_size,
        color_channel=color_channel,
        cropped_positions={"leye": LEFT_EYE_POS, "reye": RIGHT_EYE_POS},
        fixed_positions=fixed_positions,
    )

else:
    transform_extra_arguments = None
    # DEFAULT TO FACE SIMPLE RESIZE
    face_cropper = face_crop_solver(cropped_image_size)

embedding = FaceNetSanderberg()

transformer = make_pipeline(
    wrap(
        ["sample"],
        face_cropper,
        transform_extra_arguments=transform_extra_arguments,
    ),
    wrap(["sample"], embedding),
)
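For illustration only (not part of the committed file): the `transformer` assembled above can be exercised on a synthetic sample, in the same way the updated unit test further below does. This sketch assumes a random 3x400x400 RGB image with eye annotations, mirroring `get_fake_sample` from that test.

import numpy as np
from bob.pipelines import Sample

# Fake channels-first RGB image with eye annotations, as in the unit test
fake_sample = Sample(
    np.random.rand(3, 400, 400),
    key="1",
    annotations={"leye": (115, 267), "reye": (115, 132)},
)

# The pipeline crops the face to 160x160 and extracts the FaceNet embedding;
# the updated test expects a descriptor with 128 elements per sample.
transformed = transformer.transform([fake_sample])[0]
assert transformed.data.size == 128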
import bob.bio.face
from sklearn.pipeline import make_pipeline
from bob.bio.base.wrappers import wrap_sample_preprocessor
from bob.pipelines import wrap
from bob.bio.face.transformers import FaceNetSanderberg

# This is the size of the image that this model expects
CROPPED_IMAGE_HEIGHT = 160
CROPPED_IMAGE_WIDTH = 160

# eye positions for frontal images
RIGHT_EYE_POS = (46, 53)
LEFT_EYE_POS = (46, 107)

legacy_face_cropper = bob.bio.face.preprocessor.FaceCrop(
    cropped_image_size=(CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH),
    cropped_positions={"leye": LEFT_EYE_POS, "reye": RIGHT_EYE_POS},
    color_channel="rgb",
)

embedding = FaceNetSanderberg()

transformer = make_pipeline(
    wrap_sample_preprocessor(
        legacy_face_cropper,
        transform_extra_arguments=(("annotations", "annotations"),),
    ),
    wrap(["sample"], embedding),
)
@@ -6,24 +6,23 @@ from bob.pipelines import Sample, SampleSet
 def get_fake_sample(face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46, 53)}):
-    data = np.random.rand(3,400,400)
-    annotations={"leye": (115, 267), "reye": (115, 132)}
+    data = np.random.rand(3, 400, 400)
+    annotations = {"leye": (115, 267), "reye": (115, 132)}
     return Sample(data, key="1", annotations=annotations)

-def test_facenet_pipeline():
-    config_name = pkg_resources.resource_filename('bob.bio.face', 'config/transformers/eyes_crop/facenet.py')
+def test_facenet_baseline():
+    config_name = pkg_resources.resource_filename(
+        "bob.bio.face", "config/baseline/facenet.py"
+    )
     transformer = load([config_name]).transformer
-    #import ipdb; ipdb.set_trace()
     fake_sample = get_fake_sample()
-    #transformed_sample = transformer.transform([fake_sample])[0].data
-    #import ipdb; ipdb.set_trace()
+    # transformed_sample = transformer.transform([fake_sample])[0].data
     transformed_sample = transformer.transform([fake_sample])[0]
-    assert transformed_sample.data.size == 160
-    pass
\ No newline at end of file
+    transformed_data = transformed_sample.data
+    assert transformed_sample.data.size == 128
+    pass
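A possible way to run just the renamed test from a checkout (this command is an assumption, not part of the commit; it requires pytest and the bob.bio.face dependencies installed in the environment):

pytest -k test_facenet_baseline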