Commit afc485f4 authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira
Browse files

Allowing facecrop helper to set the datatype

parent 08e467a7
Pipeline #42179 failed with stage
in 5 minutes and 52 seconds
...@@ -3,6 +3,7 @@ from sklearn.pipeline import make_pipeline ...@@ -3,6 +3,7 @@ from sklearn.pipeline import make_pipeline
from bob.bio.base.wrappers import wrap_sample_preprocessor from bob.bio.base.wrappers import wrap_sample_preprocessor
from bob.pipelines import wrap from bob.pipelines import wrap
from bob.bio.face.helpers import face_crop_solver from bob.bio.face.helpers import face_crop_solver
import numpy as np
def embedding_transformer_160x160(embedding, annotation_type, fixed_positions): def embedding_transformer_160x160(embedding, annotation_type, fixed_positions):
...@@ -156,7 +157,6 @@ def crop_80x64(annotation_type, fixed_positions=None, color_channel="gray"): ...@@ -156,7 +157,6 @@ def crop_80x64(annotation_type, fixed_positions=None, color_channel="gray"):
""" """
# Cropping # Cropping
CROPPED_IMAGE_HEIGHT = 80 CROPPED_IMAGE_HEIGHT = 80
CROPPED_IMAGE_WIDTH = CROPPED_IMAGE_HEIGHT * 4 // 5 CROPPED_IMAGE_WIDTH = CROPPED_IMAGE_HEIGHT * 4 // 5
...@@ -167,7 +167,7 @@ def crop_80x64(annotation_type, fixed_positions=None, color_channel="gray"): ...@@ -167,7 +167,7 @@ def crop_80x64(annotation_type, fixed_positions=None, color_channel="gray"):
cropped_image_size = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH) cropped_image_size = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH)
color_channel = color_channel color_channel = color_channel
dtype = np.float64
if annotation_type == "bounding-box": if annotation_type == "bounding-box":
transform_extra_arguments = (("annotations", "annotations"),) transform_extra_arguments = (("annotations", "annotations"),)
...@@ -180,6 +180,7 @@ def crop_80x64(annotation_type, fixed_positions=None, color_channel="gray"): ...@@ -180,6 +180,7 @@ def crop_80x64(annotation_type, fixed_positions=None, color_channel="gray"):
color_channel=color_channel, color_channel=color_channel,
cropped_positions={"topleft": TOP_LEFT_POS, "bottomright": BOTTOM_RIGHT_POS}, cropped_positions={"topleft": TOP_LEFT_POS, "bottomright": BOTTOM_RIGHT_POS},
fixed_positions=fixed_positions, fixed_positions=fixed_positions,
dtype=dtype
) )
elif annotation_type == "eyes-center": elif annotation_type == "eyes-center":
...@@ -192,6 +193,7 @@ def crop_80x64(annotation_type, fixed_positions=None, color_channel="gray"): ...@@ -192,6 +193,7 @@ def crop_80x64(annotation_type, fixed_positions=None, color_channel="gray"):
color_channel=color_channel, color_channel=color_channel,
cropped_positions={"leye": LEFT_EYE_POS, "reye": RIGHT_EYE_POS}, cropped_positions={"leye": LEFT_EYE_POS, "reye": RIGHT_EYE_POS},
fixed_positions=fixed_positions, fixed_positions=fixed_positions,
dtype=dtype
) )
else: else:
......
...@@ -11,6 +11,7 @@ def face_crop_solver( ...@@ -11,6 +11,7 @@ def face_crop_solver(
cropped_positions=None, cropped_positions=None,
fixed_positions=None, fixed_positions=None,
use_face_detector=False, use_face_detector=False,
dtype=np.uint8
): ):
""" """
Decide which face cropper to use. Decide which face cropper to use.
...@@ -32,5 +33,5 @@ def face_crop_solver( ...@@ -32,5 +33,5 @@ def face_crop_solver(
cropped_positions=cropped_positions, cropped_positions=cropped_positions,
color_channel=color_channel, color_channel=color_channel,
fixed_positions=fixed_positions, fixed_positions=fixed_positions,
dtype=np.uint8 dtype=dtype
) )
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment