Commit d85be042 authored by Amir MOHAMMADI

merge the two helpers and rename to utils

parent 75261a6e
Merge request !119: Refactor baseline config helpers
Pipeline #51144 passed
Showing 97 additions and 96 deletions
@@ -4,6 +4,7 @@ from . import algorithm
 from . import script
 from . import database
 from . import annotator
+from . import utils
 from . import test
......
 from bob.bio.face.embeddings.mxnet_models import ArcFaceInsightFace
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 from bob.bio.face.config.baseline.templates import arcface_baseline
 annotation_type, fixed_positions, memory_demanding = lookup_config_from_database(
......
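For readers skimming the diff: after this rename, the baseline configs all follow the same pattern. Below is a minimal sketch of that pattern; the `locals().get("database")` lookup and the exact `arcface_baseline` signature are assumptions for illustration, not part of this commit.

    from bob.bio.face.embeddings.mxnet_models import ArcFaceInsightFace
    from bob.bio.face.utils import lookup_config_from_database
    from bob.bio.face.config.baseline.templates import arcface_baseline

    # A `database` object may have been chain-loaded by bob's config
    # mechanism before this file; fall back to defaults otherwise.
    annotation_type, fixed_positions, memory_demanding = lookup_config_from_database(
        locals().get("database")
    )

    # Compose the pipeline from the template and the looked-up settings.
    pipeline = arcface_baseline(
        embedding=ArcFaceInsightFace(memory_demanding=memory_demanding),
        annotation_type=annotation_type,
        fixed_positions=fixed_positions,
    )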
@@ -5,7 +5,7 @@ from bob.bio.base.pipelines.vanilla_biometrics import (
     VanillaBiometricsPipeline,
 )
 from bob.pipelines.transformers import SampleLinearize
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 annotation_type, fixed_positions, memory_demanding = lookup_config_from_database()
......
 from bob.bio.face.embeddings.tf2_inception_resnet import (
     FaceNetSanderberg_20170512_110547,
 )
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 from bob.bio.face.config.baseline.templates import facenet_baseline
 annotation_type, fixed_positions, memory_demanding = lookup_config_from_database(
......
@@ -3,7 +3,7 @@ from bob.bio.base.pipelines.vanilla_biometrics import (
     VanillaBiometricsPipeline,
     BioAlgorithmLegacy,
 )
-from bob.bio.face.config.baseline.helpers import (
+from bob.bio.face.utils import (
     lookup_config_from_database,
     legacy_default_cropping,
     make_cropper,
......
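The legacy baselines combine the three utilities imported above. A minimal sketch of that flow, assuming `make_cropper` returns the cropper together with `transform_extra_arguments` (as its docstring later in this diff suggests) and using a hypothetical crop size:

    from bob.bio.face.utils import (
        lookup_config_from_database,
        legacy_default_cropping,
        make_cropper,
    )

    annotation_type, fixed_positions, memory_demanding = lookup_config_from_database()

    CROPPED_IMAGE_SIZE = (80, 64)  # hypothetical 5:4 legacy crop size

    # Default eye/face positions proportional to the target size.
    cropped_positions = legacy_default_cropping(CROPPED_IMAGE_SIZE, annotation_type)

    cropper, transform_extra_arguments = make_cropper(
        cropped_image_size=CROPPED_IMAGE_SIZE,
        cropped_positions=cropped_positions,
        fixed_positions=fixed_positions,
    )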
 from bob.bio.face.embeddings.tf2_inception_resnet import (
     InceptionResnetv1_Casia_CenterLoss_2018,
 )
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 from bob.bio.face.config.baseline.templates import facenet_baseline
 annotation_type, fixed_positions, memory_demanding = lookup_config_from_database(
......
 from bob.bio.face.embeddings.tf2_inception_resnet import (
     InceptionResnetv1_MsCeleb_CenterLoss_2018,
 )
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 from bob.bio.face.config.baseline.templates import facenet_baseline
......
 from bob.bio.face.embeddings.tf2_inception_resnet import (
     InceptionResnetv2_Casia_CenterLoss_2018,
 )
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 from bob.bio.face.config.baseline.templates import facenet_baseline
......
 from bob.bio.face.embeddings.tf2_inception_resnet import (
     InceptionResnetv2_MsCeleb_CenterLoss_2018,
 )
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 from bob.bio.face.config.baseline.templates import facenet_baseline
 annotation_type, fixed_positions, memory_demanding = lookup_config_from_database(
......
@@ -3,7 +3,7 @@ from bob.bio.base.pipelines.vanilla_biometrics import (
     VanillaBiometricsPipeline,
     BioAlgorithmLegacy,
 )
-from bob.bio.face.config.baseline.helpers import (
+from bob.bio.face.utils import (
     lookup_config_from_database,
     legacy_default_cropping,
     make_cropper,
......
@@ -3,7 +3,7 @@ from bob.bio.base.pipelines.vanilla_biometrics import (
     VanillaBiometricsPipeline,
     BioAlgorithmLegacy,
 )
-from bob.bio.face.config.baseline.helpers import (
+from bob.bio.face.utils import (
     lookup_config_from_database,
     legacy_default_cropping,
     make_cropper,
......
 from bob.bio.face.embeddings.mobilenet_v2 import MobileNetv2_MsCeleb_ArcFace_2021
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 from bob.bio.face.config.baseline.templates import arcface_baseline
......
 from bob.bio.face.embeddings.resnet50 import Resnet50_MsCeleb_ArcFace_2021
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 from bob.bio.face.config.baseline.templates import arcface_baseline
......
 from bob.bio.face.embeddings.resnet50 import Resnet50_VGG2_ArcFace_2021
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 from bob.bio.face.config.baseline.templates import arcface_baseline
......
-from bob.bio.face.config.baseline.helpers import (
+from bob.bio.face.utils import (
     dnn_default_cropping,
     embedding_transformer,
 )
......
 from bob.extension import rc
 from bob.bio.face.embeddings.tf2_inception_resnet import InceptionResnetv2
-from bob.bio.face.config.baseline.helpers import lookup_config_from_database
+from bob.bio.face.utils import lookup_config_from_database
 from bob.bio.face.config.baseline.templates import facenet_baseline
 annotation_type, fixed_positions, memory_demanding = lookup_config_from_database(
......
-from bob.bio.face.preprocessor import FaceCrop, MultiFaceCrop, Scale
-import bob.bio.face.config.baseline.helpers as helpers
-
-
-def face_crop_solver(
-    cropped_image_size,
-    cropped_positions=None,
-    color_channel="rgb",
-    fixed_positions=None,
-    annotator=None,
-    dtype="uint8",
-):
-    """
-    Decide which face cropper to use.
-    """
-    # If there are no cropped positions, just resize
-    if cropped_positions is None:
-        return Scale(cropped_image_size)
-    else:
-        # Detects the face and crops it without eye detection
-        if isinstance(cropped_positions, list):
-            return MultiFaceCrop(
-                cropped_image_size=cropped_image_size,
-                cropped_positions_list=cropped_positions,
-                fixed_positions_list=fixed_positions,
-                color_channel=color_channel,
-                dtype=dtype,
-                annotator=annotator,
-            )
-        else:
-            return FaceCrop(
-                cropped_image_size=cropped_image_size,
-                cropped_positions=cropped_positions,
-                color_channel=color_channel,
-                fixed_positions=fixed_positions,
-                dtype=dtype,
-                annotator=annotator,
-            )
-
-
-def get_default_cropped_positions(mode, cropped_image_size, annotation_type):
-    """
-    Computes the default cropped positions for the FaceCropper,
-    proportionally to the target image size.
-
-    Parameters
-    ----------
-    mode: str
-        Which default cropping to use. Available modes are: `legacy` (legacy baselines), `facenet`, `arcface`,
-        and `pad`.
-
-    cropped_image_size : tuple
-        A tuple (HEIGHT, WIDTH) describing the target size of the cropped image.
-
-    annotation_type: str
-        Type of annotations. Possible values are: `bounding-box`, `eyes-center`, and None, or a combination of those as a list.
-
-    Returns
-    -------
-    cropped_positions:
-        The dictionary of cropped positions that will be fed to the FaceCropper, or a list of such dictionaries if
-        ``annotation_type`` is a list.
-    """
-    if mode == "legacy":
-        return helpers.legacy_default_cropping(cropped_image_size, annotation_type)
-    elif mode in ["dnn", "facenet", "arcface"]:
-        return helpers.dnn_default_cropping(cropped_image_size, annotation_type)
-    elif mode == "pad":
-        return helpers.pad_default_cropping(cropped_image_size, annotation_type)
-    else:
-        raise ValueError("Unknown default cropping mode `{}`".format(mode))
-from sklearn.pipeline import make_pipeline
-from bob.pipelines import wrap
-from bob.bio.face import helpers
 import numpy as np
 import logging
+from .preprocessor import FaceCrop
+from .preprocessor import MultiFaceCrop
+from .preprocessor import Scale
+from bob.pipelines import wrap
+from sklearn.pipeline import make_pipeline
 logger = logging.getLogger(__name__)
@@ -237,7 +239,7 @@ def make_cropper(
         transform_extra_arguments for wrapping the cropper with a SampleWrapper.
     """
-    face_cropper = helpers.face_crop_solver(
+    face_cropper = face_crop_solver(
         cropped_image_size=cropped_image_size,
         cropped_positions=cropped_positions,
         fixed_positions=fixed_positions,
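The `transform_extra_arguments` mentioned in this docstring are meant for sample wrapping. A sketch of how the returned pair is typically consumed, assuming `make_cropper` returns `(face_cropper, transform_extra_arguments)` and that `cropped_positions`/`fixed_positions` are defined elsewhere:

    from sklearn.pipeline import make_pipeline
    from bob.pipelines import wrap

    face_cropper, transform_extra_arguments = make_cropper(
        cropped_image_size=(112, 112),
        cropped_positions=cropped_positions,
        fixed_positions=fixed_positions,
    )

    # Wrap the cropper so it operates on Samples, forwarding annotations etc.
    transformer = make_pipeline(
        wrap(["sample"], face_cropper, transform_extra_arguments=transform_extra_arguments)
    )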
@@ -289,3 +291,74 @@ def embedding_transformer(
     )

     return transformer
+
+
+def face_crop_solver(
+    cropped_image_size,
+    cropped_positions=None,
+    color_channel="rgb",
+    fixed_positions=None,
+    annotator=None,
+    dtype="uint8",
+):
+    """
+    Decide which face cropper to use.
+    """
+    # If there are no cropped positions, just resize
+    if cropped_positions is None:
+        return Scale(cropped_image_size)
+    else:
+        # Detects the face and crops it without eye detection
+        if isinstance(cropped_positions, list):
+            return MultiFaceCrop(
+                cropped_image_size=cropped_image_size,
+                cropped_positions_list=cropped_positions,
+                fixed_positions_list=fixed_positions,
+                color_channel=color_channel,
+                dtype=dtype,
+                annotator=annotator,
+            )
+        else:
+            return FaceCrop(
+                cropped_image_size=cropped_image_size,
+                cropped_positions=cropped_positions,
+                color_channel=color_channel,
+                fixed_positions=fixed_positions,
+                dtype=dtype,
+                annotator=annotator,
+            )
+
+
+def get_default_cropped_positions(mode, cropped_image_size, annotation_type):
+    """
+    Computes the default cropped positions for the FaceCropper,
+    proportionally to the target image size.
+
+    Parameters
+    ----------
+    mode: str
+        Which default cropping to use. Available modes are: `legacy` (legacy baselines), `facenet`, `arcface`,
+        and `pad`.
+
+    cropped_image_size : tuple
+        A tuple (HEIGHT, WIDTH) describing the target size of the cropped image.
+
+    annotation_type: str
+        Type of annotations. Possible values are: `bounding-box`, `eyes-center`, and None, or a combination of those as a list.
+
+    Returns
+    -------
+    cropped_positions:
+        The dictionary of cropped positions that will be fed to the FaceCropper, or a list of such dictionaries if
+        ``annotation_type`` is a list.
+    """
+    if mode == "legacy":
+        return legacy_default_cropping(cropped_image_size, annotation_type)
+    elif mode in ["dnn", "facenet", "arcface"]:
+        return dnn_default_cropping(cropped_image_size, annotation_type)
+    elif mode == "pad":
+        return pad_default_cropping(cropped_image_size, annotation_type)
+    else:
+        raise ValueError("Unknown default cropping mode `{}`".format(mode))
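A short usage sketch of the two functions in their new `bob.bio.face.utils` home (the sizes and modes here are illustrative):

    from bob.bio.face.utils import face_crop_solver, get_default_cropped_positions

    # Default positions for a FaceNet-style 160x160 crop with eye annotations.
    cropped_positions = get_default_cropped_positions(
        mode="facenet", cropped_image_size=(160, 160), annotation_type="eyes-center"
    )

    # A single dict of positions yields a FaceCrop...
    cropper = face_crop_solver((160, 160), cropped_positions=cropped_positions)

    # ...and no positions at all falls back to a plain resize (Scale).
    resizer = face_crop_solver((160, 160))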
@@ -22,7 +22,7 @@ Some face embedding extractors work well on loosely cropped faces, while others
 We provide a few reasonable defaults that are used in our implemented baselines. They are accessible through a function as follows:

 ::

-   from bob.bio.face.helpers import get_default_cropped_positions
+   from bob.bio.face.utils import get_default_cropped_positions
    mode = 'legacy'
    cropped_image_size=(160, 160)
    annotation_type='eyes-center'
@@ -31,7 +31,7 @@ We provide a few reasonable defaults that are used in our implemented baselines.
 There are currently three available modes:

-* :code:`legacy` Tight crop, used in non neural-net baselines such as :code:`gabor-graph`, :code:`lgbphs` or :code:`lda`.
+* :code:`legacy` Tight crop, used in non neural-net baselines such as :code:`gabor-graph`, :code:`lgbphs` or :code:`lda`.
+  It is typically used with a 5:4 aspect ratio for the :code:`cropped_image_size`.
 * :code:`dnn` Loose crop, used for neural-net baselines such as the ArcFace or FaceNet models.
 * :code:`pad` Tight crop used in some PAD baselines
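To compare the three modes side by side, one can print the default positions each mode produces (a sketch; the exact keys of the returned dictionaries, e.g. ``reye``/``leye``, depend on the annotation type):

::

   from bob.bio.face.utils import get_default_cropped_positions

   size = (160, 160)
   for mode in ("legacy", "dnn", "pad"):
       print(mode, get_default_cropped_positions(mode, size, "eyes-center"))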
@@ -40,4 +40,3 @@ We present hereafter a visual example of those crops for the `eyes-center` annotation type.
 .. plot:: plot/default_crops.py
-   :include-source: True
\ No newline at end of file
+   :include-source: True
 import bob.io.image
-from bob.bio.face.helpers import get_default_cropped_positions
+from bob.bio.face.utils import get_default_cropped_positions
 from bob.bio.face.preprocessor import FaceCrop
 import matplotlib.pyplot as plt
......