Commit 5e49ee7b authored by Laurent COLBOIS

Move get_default_cropped_positions to bob.bio.face.helpers

parent 6dd7d184
1 merge request: !119 Refactor baseline config helpers
Pipeline #51043 failed
@@ -225,41 +225,6 @@ def pad_default_cropping(cropped_image_size, annotation_type):
    return cropped_positions

def get_default_cropped_positions(mode, cropped_image_size, annotation_type):
    """
    Computes the default cropped positions for the FaceCropper,
    proportionally to the target image size.

    Parameters
    ----------
    mode: str
        Which default cropping to use. Available modes are: `legacy` (legacy baselines), `facenet`, `arcface`,
        and `pad`.
    cropped_image_size : tuple
        A tuple (HEIGHT, WIDTH) describing the target size of the cropped image.
    annotation_type: str
        Type of annotations. Possible values are: `bounding-box`, `eyes-center` and None, or a combination of those as a list.

    Returns
    -------
    cropped_positions:
        The dictionary of cropped positions that will be fed to the FaceCropper, or a list of such dictionaries if
        ``annotation_type`` is a list.
    """
    if mode == "legacy":
        return legacy_default_cropping(cropped_image_size, annotation_type)
    elif mode in ["dnn", "facenet", "arcface"]:
        return dnn_default_cropping(cropped_image_size, annotation_type)
    elif mode == "pad":
        return pad_default_cropping(cropped_image_size, annotation_type)
    else:
        raise ValueError("Unknown default cropping mode `{}`".format(mode))
def make_cropper(
    cropped_image_size,
    cropped_positions,
......
from bob.bio.face.preprocessor import FaceCrop, MultiFaceCrop, Scale
from bob.bio.face.config.baseline.helpers import (
    legacy_default_cropping,
    dnn_default_cropping,
    pad_default_cropping,
)
def face_crop_solver(
@@ -35,3 +40,38 @@ def face_crop_solver(
        dtype=dtype,
        annotator=annotator,
    )

def get_default_cropped_positions(mode, cropped_image_size, annotation_type):
    """
    Computes the default cropped positions for the FaceCropper,
    proportionally to the target image size.

    Parameters
    ----------
    mode: str
        Which default cropping to use. Available modes are: `legacy` (legacy baselines), `facenet`, `arcface`,
        and `pad`.
    cropped_image_size : tuple
        A tuple (HEIGHT, WIDTH) describing the target size of the cropped image.
    annotation_type: str
        Type of annotations. Possible values are: `bounding-box`, `eyes-center` and None, or a combination of those as a list.

    Returns
    -------
    cropped_positions:
        The dictionary of cropped positions that will be fed to the FaceCropper, or a list of such dictionaries if
        ``annotation_type`` is a list.
    """
    if mode == "legacy":
        return legacy_default_cropping(cropped_image_size, annotation_type)
    elif mode in ["dnn", "facenet", "arcface"]:
        return dnn_default_cropping(cropped_image_size, annotation_type)
    elif mode == "pad":
        return pad_default_cropping(cropped_image_size, annotation_type)
    else:
        raise ValueError("Unknown default cropping mode `{}`".format(mode))
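
For context, a minimal usage sketch of the relocated helper, based only on the signature and docstring above; the chosen image size and modes are illustrative, not prescribed by this commit:

    from bob.bio.face.helpers import get_default_cropped_positions

    # Target size of the cropped face as (HEIGHT, WIDTH)
    cropped_image_size = (112, 112)

    # A single annotation type returns one dictionary of cropped positions
    positions = get_default_cropped_positions("arcface", cropped_image_size, "eyes-center")

    # A list of annotation types returns a list of such dictionaries
    positions_list = get_default_cropped_positions(
        "legacy", cropped_image_size, ["bounding-box", "eyes-center"]
    )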
@@ -19,10 +19,10 @@ How to choose the cropped positions ?
The ideal cropped positions depend on the specific application in which you are using the face cropper.
Some face embedding extractors work well on loosely cropped faces, while others require the face to be tightly cropped.
We provide a few reasonable defaults that are used in our implemented baselines. They are accessible through an utilitary function as follows :
We provide a few reasonable defaults that are used in our implemented baselines. They are accessible through a function as follows:
::
    from bob.bio.face.config.baseline.helpers import get_default_cropped_positions
    from bob.bio.face.helpers import get_default_cropped_positions

    mode = 'legacy'
    cropped_image_size = (160, 160)
    annotation_type = 'eyes-center'
......
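
The documented example is truncated above. A hedged sketch of how the returned defaults are typically wired into a cropper, assuming (not confirmed by this diff) that FaceCrop accepts the same cropped_image_size and cropped_positions arguments that appear in make_cropper's signature earlier in this commit:

    from bob.bio.face.helpers import get_default_cropped_positions
    from bob.bio.face.preprocessor import FaceCrop  # imported in this commit's diff

    mode = 'legacy'
    cropped_image_size = (160, 160)
    annotation_type = 'eyes-center'

    # Dictionary of default cropped positions for this size and annotation type
    cropped_positions = get_default_cropped_positions(mode, cropped_image_size, annotation_type)

    # Assumed wiring: hand the defaults to the FaceCrop preprocessor
    cropper = FaceCrop(
        cropped_image_size=cropped_image_size,
        cropped_positions=cropped_positions,
    )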