Commit b31ac645 authored by Olegs NIKISINS

Added RGB face cropping functionality in image/video crop classes ImageFaceCrop and VideoFaceCrop

parent 837c2d84
1 merge request: !2 LBP+SVM, IQM+SVM experiments and documentation
@@ -54,4 +54,17 @@ video_face_crop_preproc_64_64_face_50_local_cropper = VideoFaceCrop(cropped_imag
check_face_size_flag = check_face_size_flag,
min_face_size = min_face_size,
use_local_cropper_flag = use_local_cropper_flag,
color_channel = color_channel)
\ No newline at end of file
color_channel = color_channel)
rgb_output_flag = True # Return RGB cropped face using local cropper
video_face_crop_preproc_64_64_face_50_local_cropper_rgb = VideoFaceCrop(cropped_image_size = cropped_image_size,
cropped_positions = cropped_positions,
fixed_positions = fixed_positions,
mask_sigma = mask_sigma,
mask_neighbors = mask_neighbors,
mask_seed = None,
check_face_size_flag = check_face_size_flag,
min_face_size = min_face_size,
use_local_cropper_flag = use_local_cropper_flag,
rgb_output_flag = rgb_output_flag)
\ No newline at end of file
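For context, here is a minimal sketch of how the new RGB variant above could be constructed directly in Python. The import path and the concrete parameter values are assumptions (the real values are defined earlier in this config file and are not visible in this diff); only use_local_cropper_flag and rgb_output_flag matter for the new behaviour.

# Minimal sketch, assuming VideoFaceCrop is importable from bob.pad.face.preprocessor
# and that the values below approximate the ones used in this config file.
from bob.pad.face.preprocessor import VideoFaceCrop

video_face_crop_rgb_sketch = VideoFaceCrop(
    cropped_image_size=(64, 64),    # assumed from the "64_64" naming
    cropped_positions=None,         # hypothetical; unused by the local cropper
    fixed_positions=None,           # hypothetical
    mask_sigma=None,                # hypothetical
    mask_neighbors=5,               # hypothetical
    mask_seed=None,
    check_face_size_flag=True,      # hypothetical
    min_face_size=50,               # assumed from the "face_50" naming
    use_local_cropper_flag=True,    # rgb_output_flag only applies to the local cropper
    rgb_output_flag=True)           # return RGB crops instead of gray-scale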
@@ -24,8 +24,9 @@ class ImageFaceCrop(Preprocessor):
"""
This class crops the face in the input image given annotations defining
the face bounding box. The size of the face is also normalized to the
pre-defined dimensions. If input image is RGB it is first converted to the
gray-scale format.
pre-defined dimensions. For RGB inputs the cropped face can be returned either
in color or in gray-scale; this is controlled by ``rgb_output_flag``.
The algorithm is identical to the one used in the following paper:
"On the Effectiveness of Local Binary Patterns in Face Anti-spoofing"
@@ -33,31 +34,39 @@ class ImageFaceCrop(Preprocessor):
``face_size`` : :py:class:`int`
The size of the face after normalization.
``rgb_output_flag`` : :py:class:`bool`
Return an RGB cropped face if ``True``; otherwise a gray-scale image is
returned. Default: ``False``.
"""
#==========================================================================
def __init__(self, face_size):
def __init__(self,
face_size,
rgb_output_flag = False):
Preprocessor.__init__(self,
face_size = face_size)
face_size = face_size,
rgb_output_flag = rgb_output_flag)
self.face_size = face_size
self.rgb_output_flag = rgb_output_flag
#==========================================================================
def normalize_image_size(self, image, annotations, face_size):
def normalize_image_size_in_grayscale(self, image, annotations, face_size):
"""
This function crops the face in the input image given annotations defining
the face bounding box. The size of the face is also normalized to the
pre-defined dimensions. If input image is RGB it is first converted to the
gray-scale format.
This function crops the face in the input gray-scale image given annotations
defining the face bounding box. The size of the face is also normalized to the
pre-defined dimensions.
The algorithm is identical to the one used in the following paper:
"On the Effectiveness of Local Binary Patterns in Face Anti-spoofing"
**Parameters:**
``image`` : 2D or 3D :py:class:`numpy.ndarray`
Input image (RGB or gray-scale).
``image`` : 2D :py:class:`numpy.ndarray`
Gray-scale input image.
``annotations`` : :py:class:`dict`
A dictionary containing annotations of the face bounding box.
@@ -69,15 +78,11 @@ class ImageFaceCrop(Preprocessor):
**Returns:**
``normbbx`` : 2D :py:class:`numpy.ndarray`
An image of the cropped face of the size (face_size, face_size).
An image of the cropped face of the size (self.face_size, self.face_size).
"""
if len(image.shape) == 3:
image = bob.ip.color.rgb_to_gray(image)
cutframe = image[annotations['topleft'][0]:annotations['bottomright'][0],
annotations['topleft'][1]:annotations['bottomright'][1]]
annotations['topleft'][1]:annotations['bottomright'][1]]
tempbbx = np.ndarray((face_size, face_size), 'float64')
normbbx = np.ndarray((face_size, face_size), 'uint8')
@@ -89,6 +94,65 @@ class ImageFaceCrop(Preprocessor):
return normbbx
#==========================================================================
def normalize_image_size(self, image, annotations, face_size, rgb_output_flag):
"""
This function crops the face in the input image given annotations defining
the face bounding box. The size of the face is also normalized to the
pre-defined dimensions. For RGB inputs the cropped face can be returned either
in color or in gray-scale; this is controlled by ``rgb_output_flag``.
The algorithm is identical to the one used in the following paper:
"On the Effectiveness of Local Binary Patterns in Face Anti-spoofing"
**Parameters:**
``image`` : 2D or 3D :py:class:`numpy.ndarray`
Input image (RGB or gray-scale).
``annotations`` : :py:class:`dict`
A dictionary containing annotations of the face bounding box.
Dictionary must be as follows ``{'topleft': (row, col), 'bottomright': (row, col)}``
``face_size`` : :py:class:`int`
The size of the face after normalization.
``rgb_output_flag`` : :py:class:`bool`
Return an RGB cropped face if ``True``; otherwise a gray-scale image is
returned.
**Returns:**
``face`` : 2D or 3D :py:class:`numpy.ndarray`
An image of the cropped face of size (face_size, face_size); a 3D array
for RGB output or a 2D array for gray-scale.
"""
if len(image.shape) == 3:
if not(rgb_output_flag):
image = bob.ip.color.rgb_to_gray(image)
if len(image.shape) == 2:
image = [image] # make gray-scale image an iterable
result = []
for image_channel in image: # for all color channels in the input image
cropped_face = self.normalize_image_size_in_grayscale(image_channel, annotations, face_size)
result.append(cropped_face)
face = np.stack(result, axis=0)
face = np.squeeze(face) # remove the singleton first dimension for gray-scale images
return face
#==========================================================================
def __call__(self, image, annotations):
"""
@@ -105,11 +169,12 @@ class ImageFaceCrop(Preprocessor):
**Returns:**
``norm_face_image`` : 2D :py:class:`numpy.ndarray`
An image of the cropped face of the size (self.face_size, self.face_size).
``norm_face_image`` : 2D or 3D :py:class:`numpy.ndarray`
An image of the cropped face of size (self.face_size, self.face_size); a 3D
array for RGB output or a 2D array for gray-scale.
"""
norm_face_image = self.normalize_image_size(image, annotations, self.face_size)
norm_face_image = self.normalize_image_size(image, annotations, self.face_size, self.rgb_output_flag)
return norm_face_image
......
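To make the new ImageFaceCrop behaviour concrete, a small usage sketch on synthetic data; the import path and the channels-first (3, height, width) RGB layout are assumptions following the usual Bob image convention.

# Minimal usage sketch, assuming ImageFaceCrop is importable from bob.pad.face.preprocessor.
import numpy as np
from bob.pad.face.preprocessor import ImageFaceCrop

rgb_image = np.random.randint(0, 256, size=(3, 480, 640)).astype('uint8')  # synthetic RGB frame
annotations = {'topleft': (100, 200), 'bottomright': (300, 400)}            # hypothetical face bounding box

gray_cropper = ImageFaceCrop(face_size=64)                        # rgb_output_flag defaults to False
rgb_cropper = ImageFaceCrop(face_size=64, rgb_output_flag=True)

gray_face = gray_cropper(rgb_image, annotations)  # 2D array of shape (64, 64)
rgb_face = rgb_cropper(rgb_image, annotations)    # 3D array of shape (3, 64, 64); each channel is cropped independently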
@@ -71,6 +71,11 @@ class VideoFaceCrop(Preprocessor, object):
Otherwise, the FaceCrop preprocessor from bob.bio.face is used.
Default: False.
``rgb_output_flag`` : :py:class:`bool`
Return an RGB cropped face if ``True``; otherwise a gray-scale image is
returned. This flag only takes effect when ``use_local_cropper_flag = True``.
Default: ``False``.
``kwargs``
Remaining keyword parameters passed to the :py:class:`Base` constructor, such as ``color_channel`` or ``dtype``.
"""
@@ -86,6 +91,7 @@ class VideoFaceCrop(Preprocessor, object):
check_face_size_flag = False,
min_face_size = 50,
use_local_cropper_flag = False,
rgb_output_flag = False,
**kwargs):
super(VideoFaceCrop, self).__init__(cropped_image_size = cropped_image_size,
@@ -97,6 +103,7 @@ class VideoFaceCrop(Preprocessor, object):
check_face_size_flag = check_face_size_flag,
min_face_size = min_face_size,
use_local_cropper_flag = use_local_cropper_flag,
rgb_output_flag = rgb_output_flag,
**kwargs)
self.cropped_image_size = cropped_image_size
@@ -108,6 +115,7 @@ class VideoFaceCrop(Preprocessor, object):
self.check_face_size_flag = check_face_size_flag
self.min_face_size = min_face_size
self.use_local_cropper_flag = use_local_cropper_flag
self.rgb_output_flag = rgb_output_flag
# Save also the data stored in the kwargs:
for (k, v) in kwargs.items():
@@ -115,7 +123,8 @@ class VideoFaceCrop(Preprocessor, object):
if self.use_local_cropper_flag:
preprocessor = ImageFaceCrop(face_size = self.cropped_image_size[0])
preprocessor = ImageFaceCrop(face_size = self.cropped_image_size[0],
rgb_output_flag = self.rgb_output_flag)
else:
......
@@ -103,6 +103,7 @@ setup(
'video-face-crop-preproc-64 = bob.pad.face.config.preprocessor.video_face_crop:video_face_crop_preproc_64_64',
'video-face-crop-preproc-64-face-50 = bob.pad.face.config.preprocessor.video_face_crop:video_face_crop_preproc_64_64_face_50',
'video-face-crop-preproc-64-face-50-local-cropper = bob.pad.face.config.preprocessor.video_face_crop:video_face_crop_preproc_64_64_face_50_local_cropper',
'video-face-crop-preproc-64-face-50-local-cropper-rgb = bob.pad.face.config.preprocessor.video_face_crop:video_face_crop_preproc_64_64_face_50_local_cropper_rgb',
'empty-preprocessor = bob.pad.face.config.preprocessor.filename:empty_preprocessor', # no preprocessing
],
......
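Once registered, the new preprocessor can be resolved by its entry-point name. A hedged sketch using pkg_resources, assuming these entries live in the bob.pad.preprocessor entry-point group (the group name is not visible in this diff).

# Minimal sketch: load the new preprocessor through its entry point.
# The group name 'bob.pad.preprocessor' is an assumption, not shown in this diff.
import pkg_resources

name = 'video-face-crop-preproc-64-face-50-local-cropper-rgb'
entry_point = next(pkg_resources.iter_entry_points('bob.pad.preprocessor', name=name))
preprocessor = entry_point.load()  # the VideoFaceCrop instance defined in the config module above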