Commit d3cb2507 authored by Laurent COLBOIS

Refactor legacy baselines

parent a3abe09d
1 merge request: !119 Refactor baseline config helpers
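In short, this commit replaces the single-purpose crop_80x64 helper with two composable helpers in bob.bio.face.config.baseline.helpers: legacy_default_cropping, which computes the default landmark positions for a given crop size and annotation type, and make_cropper, which builds the face cropper together with its transform_extra_arguments. Condensed from the hunks below, the call pattern in each legacy baseline changes as follows (the 80x64 geometry and the "gray" color channel are the legacy defaults these baselines keep using):

# Old call (removed in this commit):
face_cropper, transform_extra_arguments = crop_80x64(
    annotation_type, fixed_positions, color_channel="gray"
)

# New equivalent (added in this commit):
CROPPED_IMAGE_HEIGHT = 80
CROPPED_IMAGE_WIDTH = CROPPED_IMAGE_HEIGHT * 4 // 5  # = 64
cropped_image_size = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH)
cropped_positions = legacy_default_cropping(cropped_image_size, annotation_type)
face_cropper, transform_extra_arguments = make_cropper(
    cropped_image_size=cropped_image_size,
    annotation_type=annotation_type,
    cropped_positions=cropped_positions,
    fixed_positions=fixed_positions,
    color_channel="gray",
)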
@@ -3,7 +3,11 @@ from bob.bio.base.pipelines.vanilla_biometrics import (
     VanillaBiometricsPipeline,
     BioAlgorithmLegacy,
 )
-from bob.bio.face.config.baseline.helpers import crop_80x64, lookup_config_from_database
+from bob.bio.face.config.baseline.helpers import (
+    lookup_config_from_database,
+    legacy_default_cropping,
+    make_cropper,
+)
 import math
 import numpy as np
 import bob.bio.face
@@ -20,14 +24,6 @@ logger = logging.getLogger(__name__)
 annotation_type, fixed_positions, memory_demanding = lookup_config_from_database()
 
-def get_cropper(annotation_type, fixed_positions=None):
-    # Cropping
-    face_cropper, transform_extra_arguments = crop_80x64(
-        annotation_type, fixed_positions, color_channel="gray"
-    )
-    return face_cropper, transform_extra_arguments
-
 def get_pipeline(face_cropper, transform_extra_arguments):
     preprocessor = bob.bio.face.preprocessor.INormLBP(
         face_cropper=face_cropper, dtype=np.float64
@@ -75,9 +71,22 @@ def get_pipeline(face_cropper, transform_extra_arguments):
 def load(annotation_type, fixed_positions=None):
     ####### SOLVING THE FACE CROPPER TO BE USED ##########
-    face_cropper, transform_extra_arguments = get_cropper(
-        annotation_type, fixed_positions
+    # Define cropped positions
+    CROPPED_IMAGE_HEIGHT = 80
+    CROPPED_IMAGE_WIDTH = CROPPED_IMAGE_HEIGHT * 4 // 5
+    cropped_image_size = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH)
+    cropped_positions = legacy_default_cropping(cropped_image_size, annotation_type)
+
+    # Cropping
+    face_cropper, transform_extra_arguments = make_cropper(
+        cropped_image_size=cropped_image_size,
+        annotation_type=annotation_type,
+        cropped_positions=cropped_positions,
+        fixed_positions=fixed_positions,
+        color_channel="gray",
     )
     return get_pipeline(face_cropper, transform_extra_arguments)
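These baseline modules are configuration entry points: lookup_config_from_database() pulls annotation_type, fixed_positions, and memory_demanding from the database selected at run time, and load() assembles the pipeline from them. The module-level wiring is outside these hunks, but judging from the pattern above it presumably ends with something like the following (hypothetical, not part of this diff):

# Hypothetical module footer, not shown in this commit: expose the
# assembled pipeline object for the vanilla-biometrics runner.
pipeline = load(annotation_type, fixed_positions)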
@@ -173,6 +173,33 @@ def legacy_default_cropping(cropped_image_size, annotation_type):
     return cropped_positions
 
+
+def make_cropper(
+    cropped_image_size,
+    annotation_type,
+    cropped_positions,
+    fixed_positions=None,
+    color_channel="rgb",
+    annotator=None,
+):
+    face_cropper = face_crop_solver(
+        cropped_image_size,
+        color_channel=color_channel,
+        cropped_positions=cropped_positions,
+        fixed_positions=fixed_positions,
+        dtype="float64",
+        annotator=annotator,
+    )
+
+    transform_extra_arguments = (
+        None
+        if (cropped_positions is None or fixed_positions is not None)
+        else (("annotations", "annotations"),)
+    )
+
+    return face_cropper, transform_extra_arguments
+
+
 def embedding_transformer(
     cropped_image_size,
     embedding,
@@ -304,58 +331,3 @@ def embedding_transformer_224x224(
         fixed_positions,
         color_channel=color_channel,
     )
-
-
-def crop_80x64(annotation_type, fixed_positions=None, color_channel="gray"):
-    """
-    Crops a face to :math:`80 \times 64`
-
-    Parameters
-    ----------
-
-    annotation_type: str
-        Type of annotations. Possible values are: `bounding-box`, `eyes-center` and None
-
-    fixed_positions: tuple
-        A tuple containing the annotations. This is used in case your input is already registered
-        with fixed positions (eyes or bounding box)
-
-    color_channel: str
-
-    Returns
-    -------
-
-    face_cropper:
-        A face cropper to be used
-
-    transform_extra_arguments:
-        The parameters to the transformer
-
-    """
-    color_channel = color_channel
-    dtype = np.float64
-
-    # Cropping
-    CROPPED_IMAGE_HEIGHT = 80
-    CROPPED_IMAGE_WIDTH = CROPPED_IMAGE_HEIGHT * 4 // 5
-    cropped_image_size = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH)
-    cropped_positions = legacy_default_cropping(cropped_image_size, annotation_type)
-
-    face_cropper = face_crop_solver(
-        cropped_image_size,
-        color_channel=color_channel,
-        cropped_positions=cropped_positions,
-        fixed_positions=fixed_positions,
-        dtype=dtype,
-    )
-
-    transform_extra_arguments = (
-        None
-        if (cropped_positions is None or fixed_positions is not None)
-        else (("annotations", "annotations"),)
-    )
-
-    return face_cropper, transform_extra_arguments
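The body of legacy_default_cropping sits outside these hunks; only its closing `return cropped_positions` is visible above. Judging from the annotation types named in the removed crop_80x64 docstring (`bounding-box`, `eyes-center`, or None), it plausibly maps the crop size to the landmark dictionary the cropper expects. A minimal sketch under that assumption (the exact pixel fractions below are illustrative, not taken from this diff):

def legacy_default_cropping_sketch(cropped_image_size, annotation_type):
    # Assumed behaviour: return default landmark positions, as
    # (row, column) pairs, for the legacy crop geometry.
    height, width = cropped_image_size
    if annotation_type == "bounding-box":
        return {"topleft": (0, 0), "bottomright": (height, width)}
    if annotation_type == "eyes-center":
        # Illustrative eye placement on an upper row of the crop;
        # the real fractions live in legacy_default_cropping itself.
        return {
            "reye": (height // 5, width // 5 - 1),
            "leye": (height // 5, width // 5 * 4),
        }
    return None  # no annotations available: nothing to align to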
@@ -3,7 +3,11 @@ from bob.bio.base.pipelines.vanilla_biometrics import (
     VanillaBiometricsPipeline,
     BioAlgorithmLegacy,
 )
-from bob.bio.face.config.baseline.helpers import crop_80x64, lookup_config_from_database
+from bob.bio.face.config.baseline.helpers import (
+    lookup_config_from_database,
+    legacy_default_cropping,
+    make_cropper,
+)
 import numpy as np
 import bob.bio.face
 from sklearn.pipeline import make_pipeline
@@ -24,9 +28,19 @@ annotation_type, fixed_positions, memory_demanding = lookup_config_from_database()
 ####### SOLVING THE FACE CROPPER TO BE USED ##########
 def load(annotation_type, fixed_positions=None):
+    # Define cropped positions
+    CROPPED_IMAGE_HEIGHT = 80
+    CROPPED_IMAGE_WIDTH = CROPPED_IMAGE_HEIGHT * 4 // 5
+    cropped_image_size = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH)
+    cropped_positions = legacy_default_cropping(cropped_image_size, annotation_type)
+
     # Cropping
-    face_cropper, transform_extra_arguments = crop_80x64(
-        annotation_type, fixed_positions, color_channel="gray"
+    face_cropper, transform_extra_arguments = make_cropper(
+        cropped_image_size=cropped_image_size,
+        annotation_type=annotation_type,
+        cropped_positions=cropped_positions,
+        fixed_positions=fixed_positions,
+        color_channel="gray",
     )
 
     preprocessor = bob.bio.face.preprocessor.TanTriggs(
@@ -57,7 +71,9 @@ def load(annotation_type, fixed_positions=None):
     ### BIOMETRIC ALGORITHM
     algorithm = BioAlgorithmLegacy(
-        lda, base_dir=tempdir, projector_file=os.path.join(tempdir, "Projector.hdf5"),
+        lda,
+        base_dir=tempdir,
+        projector_file=os.path.join(tempdir, "Projector.hdf5"),
     )
 
     return VanillaBiometricsPipeline(transformer, algorithm)
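A detail worth noting in the new make_cropper (helpers.py hunk above) is how transform_extra_arguments is derived: per-sample annotations are forwarded to the cropper only when cropped positions exist and no fixed positions override them; otherwise the cropper needs no extra input. A small sketch of that predicate, mirroring the expression shown in the diff:

# Mirrors the conditional in make_cropper: when this returns True,
# transform_extra_arguments becomes (("annotations", "annotations"),),
# i.e. each sample's annotations are handed to the cropper's transform().
def needs_annotations(cropped_positions, fixed_positions):
    return not (cropped_positions is None or fixed_positions is not None)

assert needs_annotations({"reye": (16, 12)}, None)                    # align per sample
assert not needs_annotations({"reye": (16, 12)}, {"reye": (16, 12)})  # fixed positions win
assert not needs_annotations(None, None)                              # nothing to align to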
@@ -3,7 +3,11 @@ from bob.bio.base.pipelines.vanilla_biometrics import (
     VanillaBiometricsPipeline,
     BioAlgorithmLegacy,
 )
-from bob.bio.face.config.baseline.helpers import crop_80x64, lookup_config_from_database
+from bob.bio.face.config.baseline.helpers import (
+    lookup_config_from_database,
+    legacy_default_cropping,
+    make_cropper,
+)
 import math
 import numpy as np
 import bob.bio.face
@@ -16,14 +20,6 @@ import bob.math
 annotation_type, fixed_positions, memory_demanding = lookup_config_from_database()
 
-def get_cropper(annotation_type, fixed_positions=None):
-    # Cropping
-    face_cropper, transform_extra_arguments = crop_80x64(
-        annotation_type, fixed_positions, color_channel="gray"
-    )
-    return face_cropper, transform_extra_arguments
-
 def get_pipeline(face_cropper, transform_extra_arguments):
     preprocessor = bob.bio.face.preprocessor.TanTriggs(
         face_cropper=face_cropper, dtype=np.float64
@@ -65,10 +61,22 @@ def get_pipeline(face_cropper, transform_extra_arguments):
 def load(annotation_type, fixed_positions=None):
+    # Define cropped positions
+    CROPPED_IMAGE_HEIGHT = 80
+    CROPPED_IMAGE_WIDTH = CROPPED_IMAGE_HEIGHT * 4 // 5
+    cropped_image_size = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH)
+    cropped_positions = legacy_default_cropping(cropped_image_size, annotation_type)
+
     ####### SOLVING THE FACE CROPPER TO BE USED ##########
-    face_cropper, transform_extra_arguments = get_cropper(
-        annotation_type, fixed_positions
+    # Cropping
+    face_cropper, transform_extra_arguments = make_cropper(
+        cropped_image_size=cropped_image_size,
+        annotation_type=annotation_type,
+        cropped_positions=cropped_positions,
+        fixed_positions=fixed_positions,
+        color_channel="gray",
     )
     return get_pipeline(face_cropper, transform_extra_arguments)