bob / bob.bio.vein / Commits / 37d62f41

Commit 37d62f41, authored Oct 12, 2017 by André Anjos

Merge branch 'fv3d' into 'master'

3DFV and multiple fixes. See merge request !35

Parents: 249ba342, 2bb9408f
Pipeline #13143 passed with stages in 14 minutes and 58 seconds
Changes: 61
Pipelines: 1
MANIFEST.in

include README.rst bootstrap-buildout.py buildout.cfg COPYING
recursive-include doc *.py *.rst
recursive-include bob/bio/vein/tests *.png *.mat *.txt *.npy
recursive-include bob/bio/vein/tests *.png *.mat *.txt *.hdf5
bob/bio/vein/algorithm/Correlate.py  0 → 100644

#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

import numpy
import skimage.feature

from bob.bio.base.algorithm import Algorithm


class Correlate(Algorithm):
  """Correlate probe and model without cropping

  The method is based on "cross-correlation" between a model and a probe image.
  The difference between this and :py:class:`MiuraMatch` is that **no**
  cropping takes place on this implementation. We simply fill the excess
  boundary with zeros and extract the valid correlation region between the
  probe and the model using :py:func:`skimage.feature.match_template`.
  """

  def __init__(self):

    # call base class constructor
    Algorithm.__init__(
        self,
        multiple_model_scoring=None,
        multiple_probe_scoring=None,
        )

  def enroll(self, enroll_features):
    """Enrolls the model by computing an average graph for each model"""

    # return the generated model
    return numpy.array(enroll_features)

  def score(self, model, probe):
    """Computes the score between the probe and the model.

    Parameters:

      model (numpy.ndarray): The model of the user to test the probe against

      probe (numpy.ndarray): The probe to test

    Returns:

      float: Value between 0 and 0.5, larger value means a better match

    """

    I = probe.astype(numpy.float64)

    if len(model.shape) == 2:
      model = numpy.array([model])

    scores = []

    # iterate over all models for a given individual
    for md in model:
      R = md.astype(numpy.float64)
      Nm = skimage.feature.match_template(I, R)

      # figures out where the maximum is on the resulting matrix
      t0, s0 = numpy.unravel_index(Nm.argmax(), Nm.shape)

      # this is our output
      scores.append(Nm[t0, s0])

    return numpy.mean(scores)
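
Not part of the commit, but a minimal usage sketch of how this class is driven; the random arrays below merely stand in for extracted vein images (:py:func:`skimage.feature.match_template` only requires the model images to be no larger than the probe):

import numpy
from bob.bio.vein.algorithm import Correlate

algorithm = Correlate()

# two stand-in feature images for the same subject (normally produced by the
# extraction stage of the bob.bio pipeline)
features = [numpy.random.rand(64, 48), numpy.random.rand(64, 48)]
model = algorithm.enroll(features)      # stacked into a single 3D array

probe = numpy.random.rand(64, 48)
score = algorithm.score(model, probe)   # mean of the per-image correlation peaks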
bob/bio/vein/algorithm/MiuraMatch.py

@@ -47,8 +47,8 @@ class MiuraMatch (Algorithm):
   """

   def __init__(self,
-      ch = 8,    # Maximum search displacement in y-direction
-      cw = 5,    # Maximum search displacement in x-direction
+      ch = 80,   # Maximum search displacement in y-direction
+      cw = 90,   # Maximum search displacement in x-direction
       ):

     # call base class constructor

@@ -94,8 +94,6 @@ class MiuraMatch (Algorithm):
     if len(model.shape) == 2:
       model = numpy.array([model])

-    n_models = model.shape[0]
-
     scores = []

     # iterate over all models for a given individual

@@ -103,7 +101,7 @@ class MiuraMatch (Algorithm):
       # erode model by (ch, cw)
       R = md.astype(numpy.float64)
-      h, w = R.shape
+      h, w = R.shape  # same as I
       crop_R = R[self.ch:h-self.ch, self.cw:w-self.cw]

       # correlates using scipy - fastest option available iff the self.ch and

@@ -127,6 +125,6 @@ class MiuraMatch (Algorithm):
       # normalizes the output by the number of pixels lit on the input
       # matrices, taking into consideration the surface that produced the
       # result (i.e., the eroded model and part of the probe)
-      scores.append(Nmm/(sum(sum(crop_R)) + sum(sum(I[t0:t0+h-2*self.ch, s0:s0+w-2*self.cw]))))
+      scores.append(Nmm/(crop_R.sum() + I[t0:t0+h-2*self.ch, s0:s0+w-2*self.cw].sum()))

     return numpy.mean(scores)
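
The final hunk is a pure notation change: for a 2-D array, applying the built-in sum twice and calling ndarray.sum() produce the same total, so the score itself is unchanged. A quick illustrative check:

import numpy

a = numpy.arange(12, dtype=numpy.float64).reshape(3, 4)
old_style = sum(sum(a))   # built-in sum over rows, then over the resulting 1-D array
new_style = a.sum()       # numpy's single reduction over all elements
assert numpy.isclose(old_style, new_style)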
bob/bio/vein/algorithm/MiuraMatchRotationFast.py  0 → 100644

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 18 10:02:17 2017

@author: onikisins
"""

import numpy as np
import scipy.signal
from scipy import ndimage
from skimage import morphology
from skimage import transform as tf

from bob.bio.base.algorithm import Algorithm

#==============================================================================
class MiuraMatchRotationFast (Algorithm):
    """
    This method is an enhancement of the Miura Matching algorithm introduced in:

    Based on N. Miura, A. Nagasaka, and T. Miyatake. Feature extraction of finger
    vein patterns based on repeated line tracking and its application to personal
    identification. Machine Vision and Applications, Vol. 15, Num. 4, pp.
    194--203, 2004

    The algorithm is designed to compensate both rotation and translation
    in a computationally efficient manner, prior to score computation.
    This is achieved by computing the cross-correlation of enrollment and probe
    samples twice. In the first pass the probe is cross-correlated with an image,
    which is the sum of pre-rotated enroll images.
    This makes the cross-correlation robust to rotation within a certain
    range of angles and, with some additional steps, helps to determine the angle
    between enrollment and probe samples. The angular range is defined by the
    ``angle_limit`` parameter of the algorithm.

    Next, the enrolled image is rotated by the obtained angle, thus compensating
    the angle between the enrollment and probe samples. After that, the ordinary
    Miura matching algorithm is applied.

    The matching of both binary and gray-scale vein patterns is possible.
    Set ``gray_scale_input_flag`` to ``True`` if the input is gray-scale.

    The details of this algorithm are introduced in the following paper:

    Olegs Nikisins, Andre Anjos, Teodors Eglitis, Sebastien Marcel.
    Fast cross-correlation based wrist vein recognition algorithm with
    rotation and translation compensation.

    **Parameters:**

    ``ch`` : :py:class:`int`
        Maximum search displacement in y-direction.
        Default value: 5.

    ``cw`` : :py:class:`int`
        Maximum search displacement in x-direction.
        Default value: 5.

    ``angle_limit`` : :py:class:`float`
        Rotate the probe in the range [-angle_limit, +angle_limit] degrees.
        Default value: 10.

    ``angle_step`` : :py:class:`float`
        Rotate the probe with this step in degrees.
        Default value: 1.

    ``perturbation_matching_flag`` : :py:class:`bool`
        Compute the score using the perturbation_matching method of the class.
        Default: ``False``.

    ``kernel_radius`` : :py:class:`int`
        Radius of the circular kernel used in the morphological dilation of
        the enroll. Only valid when ``perturbation_matching_flag`` is ``True``.
        Default: 3.

    ``score_fusion_method`` : :py:class:`str`
        Score fusion method.
        Default value: 'mean'.
        Possible options: 'mean', 'max', 'median'.

    ``gray_scale_input_flag`` : :py:class:`bool`
        Set this flag to ``True`` if the image is gray-scale. Default: ``False``.
    """

    #==========================================================================
    def __init__(self,
                 ch = 5,
                 cw = 5,
                 angle_limit = 10,
                 angle_step = 1,
                 perturbation_matching_flag = False,
                 kernel_radius = 3,
                 score_fusion_method = 'mean',
                 gray_scale_input_flag = False):

        # call base class constructor
        Algorithm.__init__(self,
                           ch = ch,
                           cw = cw,
                           angle_limit = angle_limit,
                           angle_step = angle_step,
                           perturbation_matching_flag = perturbation_matching_flag,
                           kernel_radius = kernel_radius,
                           score_fusion_method = score_fusion_method,
                           gray_scale_input_flag = gray_scale_input_flag,
                           multiple_model_scoring = None,
                           multiple_probe_scoring = None)

        self.ch = ch
        self.cw = cw
        self.angle_limit = angle_limit
        self.angle_step = angle_step
        self.perturbation_matching_flag = perturbation_matching_flag
        self.kernel_radius = kernel_radius
        self.score_fusion_method = score_fusion_method
        self.gray_scale_input_flag = gray_scale_input_flag

    #==========================================================================
    def enroll(self, enroll_features):
        """Enrolls the model by computing an average graph for each model"""

        # return the generated model
        return enroll_features

    #==========================================================================
    def perturbation_matching(self, enroll, probe, kernel_radius):
        """
        Compute the matching score as a normalized intersection of the enroll
        and probe, allowing perturbation of the enroll in the computation
        of the intersection.

        **Parameters:**

        ``enroll`` : 2D :py:class:`numpy.ndarray`
            Binary image of the veins representing the enroll.

        ``probe`` : 2D :py:class:`numpy.ndarray`
            Binary image of the veins representing the probe.

        ``kernel_radius`` : :py:class:`int`
            Radius of the circular kernel used in the morphological dilation of
            the enroll.

        **Returns:**

        ``score`` : :py:class:`float`
            Matching score, larger value means a better match.
        """

        ellipse_kernel = morphology.disk(radius = kernel_radius)

        enroll_dilated = ndimage.morphology.binary_dilation(enroll, structure = ellipse_kernel).astype(np.float)

        probe_dilated = ndimage.morphology.binary_dilation(probe, structure = ellipse_kernel).astype(np.float)

        normalizer = np.sum(enroll_dilated) + np.sum(probe_dilated)

        score = np.sum(enroll_dilated * probe_dilated) / normalizer

        return score

    #==========================================================================
    def miura_match(self, image_enroll, image_probe, ch, cw,
                    compute_score_flag = True,
                    perturbation_matching_flag = False,
                    kernel_radius = 3):
        """
        Match two binary vein images using the Miura matching algorithm.

        **Parameters:**

        ``image_enroll`` : 2D :py:class:`numpy.ndarray`
            Binary image of the veins representing the model.

        ``image_probe`` : 2D :py:class:`numpy.ndarray`
            Probing binary image of the veins.

        ``ch`` : :py:class:`int`
            Cropping parameter in Y-direction.

        ``cw`` : :py:class:`int`
            Cropping parameter in X-direction.

        ``compute_score_flag`` : :py:class:`bool`
            Compute the score if ``True``. Otherwise only ``crop_image_probe``
            is returned. Default: ``True``.

        ``perturbation_matching_flag`` : :py:class:`bool`
            Compute the score using the perturbation_matching method of the class.
            Only valid if ``compute_score_flag`` is set to ``True``.
            Default: ``False``.

        ``kernel_radius`` : :py:class:`int`
            Radius of the circular kernel used in the morphological dilation of
            the enroll.

        **Returns:**

        ``score`` : :py:class:`float`
            Matching score between 0 and 0.5, larger value means a better match.
            Only returned if ``compute_score_flag`` is set to ``True``.

        ``crop_image_probe`` : 2D :py:class:`numpy.ndarray`
            Cropped binary image of the probe.
        """

        if image_enroll.dtype != np.float64:
            image_enroll = image_enroll.astype(np.float64)

        if image_probe.dtype != np.float64:
            image_probe = image_probe.astype(np.float64)

        h, w = image_enroll.shape

        crop_image_enroll = image_enroll[ch: h - ch, cw: w - cw]

        Nm = scipy.signal.fftconvolve(image_probe, np.rot90(crop_image_enroll, k = 2), 'valid')

        t0, s0 = np.unravel_index(Nm.argmax(), Nm.shape)

        Nmm = Nm[t0, s0]

        crop_image_probe = image_probe[t0: t0 + h - 2 * ch, s0: s0 + w - 2 * cw]

        return_data = crop_image_probe

        if compute_score_flag:

            if perturbation_matching_flag:

                score = self.perturbation_matching(crop_image_enroll, crop_image_probe, kernel_radius)

            else:

                score = Nmm / (np.sum(crop_image_enroll) + np.sum(crop_image_probe))

            return_data = (score, crop_image_probe)

        return return_data

    #==========================================================================
    def sum_of_rotated_images(self, image, angle_limit, angle_step, gray_scale_input_flag):
        """
        Generate the output image, which is the sum of input images rotated
        in the specified range with the defined step.

        **Parameters:**

        ``image`` : 2D :py:class:`numpy.ndarray`
            Input image.

        ``angle_limit`` : :py:class:`float`
            Rotate the image in the range [-angle_limit, +angle_limit] degrees.

        ``angle_step`` : :py:class:`float`
            Rotate the image with this step in degrees.

        ``gray_scale_input_flag`` : :py:class:`bool`
            Set this flag to ``True`` if the image is gray-scale. Default: ``False``.

        **Returns:**

        ``output_image`` : 2D :py:class:`numpy.ndarray`
            Sum of rotated images.

        ``rotated_images`` : 3D :py:class:`numpy.ndarray`
            A stack of rotated images. Array size:
            (N_images, Height, Width)
        """

        offset = np.array(image.shape) / 2

        h, w = image.shape

        image_coords = np.argwhere(image) - offset  # centered coordinates of the vein (non-zero) pixels

        if gray_scale_input_flag:

            image_val = image[image > 0]

        angles = np.arange(-angle_limit, angle_limit + 1, angle_step) / 180. * np.pi  # angles in radians

        rotated_images = np.zeros((angles.shape[0], image.shape[0], image.shape[1]))

        for idx, angle in enumerate(angles):

            rot_matrix = np.array([[np.cos(angle), -np.sin(angle)],
                                   [np.sin(angle),  np.cos(angle)]])  # rotation matrix

            rotated_coords = np.round(np.dot(image_coords, rot_matrix)).astype(np.int) + offset

            rotated_coords[rotated_coords < 0] = 0
            rotated_coords[:, 0][rotated_coords[:, 0] >= h] = h - 1
            rotated_coords[:, 1][rotated_coords[:, 1] >= w] = w - 1

            rotated_coords = rotated_coords.astype(np.int)

            if gray_scale_input_flag:

                rotated_images[idx, rotated_coords[:, 0], rotated_coords[:, 1]] = image_val

            else:

                rotated_images[idx, rotated_coords[:, 0], rotated_coords[:, 1]] = 1

        output_image = np.sum(rotated_images, axis = 0)

        return output_image, rotated_images

    #==========================================================================
    def score(self, model, probe):
        """Computes the score between the probe and the model.

        **Parameters:**

        ``model`` : 2D :py:class:`numpy.ndarray`
            Binary image of the veins representing the model.

        ``probe`` : 2D :py:class:`numpy.ndarray`
            Probing binary image of the veins.

        **Returns:**

        ``score_fused`` : :py:class:`float`
            Matching score between 0 and 0.5, larger value means a better match.
        """

        if probe.dtype != np.float64:
            probe = probe.astype(np.float64)

        scores = []

        angles = np.arange(-self.angle_limit, self.angle_limit + 1, self.angle_step)

        # iterate over all models for a given individual
        for enroll in model:

            if enroll.dtype != np.float64:
                enroll = enroll.astype(np.float64)

            sum_of_rotated_img_enroll, rotated_images_enroll = self.sum_of_rotated_images(
                enroll, self.angle_limit, self.angle_step, self.gray_scale_input_flag)

            h, w = enroll.shape

            crop_rotated_images_enroll = rotated_images_enroll[:, self.ch: h - self.ch, self.cw: w - self.cw]

            crop_probe = self.miura_match(sum_of_rotated_img_enroll, probe, self.ch, self.cw,
                                          compute_score_flag = False)

            scores_internal = []

            for crop_binary_image_enroll in crop_rotated_images_enroll:

                scores_internal.append(np.sum(crop_binary_image_enroll * crop_probe))

            idx_selected = np.argmax(scores_internal)  # the index of the rotated enroll image having the best match

            if self.gray_scale_input_flag:

                angle = angles[idx_selected]

                enroll_rotated = tf.rotate(enroll, angle = -angle, preserve_range = True)

                score = self.miura_match(enroll_rotated, probe, self.ch, self.cw,
                                         compute_score_flag = True,
                                         perturbation_matching_flag = False,
                                         kernel_radius = self.kernel_radius)[0]

            else:

                score = self.miura_match(rotated_images_enroll[idx_selected], probe, self.ch, self.cw,
                                         compute_score_flag = True,
                                         perturbation_matching_flag = self.perturbation_matching_flag,
                                         kernel_radius = self.kernel_radius)[0]

            scores.append(score)

        score_fused = getattr(np, self.score_fusion_method)(scores)

        return score_fused
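
A hedged usage sketch (not part of the commit) of the two-pass matching above, with synthetic binary images standing in for real vein patterns; the shifted probe exercises the translation compensation:

import numpy as np
from bob.bio.vein.algorithm import MiuraMatchRotationFast

algorithm = MiuraMatchRotationFast(ch=5, cw=5, angle_limit=10, angle_step=1)

# synthetic binary "vein" images on an 80x60 canvas
enroll_image = np.zeros((80, 60))
enroll_image[20:60, 30] = 1                      # a single vertical vein
probe_image = np.roll(enroll_image, 3, axis=1)   # same pattern, shifted sideways

model = algorithm.enroll([enroll_image])         # enroll() keeps the list as-is
score = algorithm.score(model, probe_image)      # in [0, 0.5]; larger is better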
bob/bio/vein/algorithm/__init__.py

from .MiuraMatch import MiuraMatch
from .MiuraMatchRotationFast import MiuraMatchRotationFast
from .Correlate import Correlate
from .HammingDistance import HammingDistance

# gets sphinx autodoc done right - don't remove it
def __appropriate__(*args):
  """Says object was actually declared here, and not on the import module.

  Parameters:

    *args: An iterable of objects to modify

  Resolves `Sphinx referencing issues
  <https://github.com/sphinx-doc/sphinx/issues/3048>`
  """

  for obj in args:
    obj.__module__ = __name__

__appropriate__(
    MiuraMatch,
    MiuraMatchRotationFast,
    Correlate,
    HammingDistance,
    )

__all__ = [_ for _ in dir() if not _.startswith('_')]
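
The __appropriate__ helper only rewrites __module__ so that Sphinx autodoc attributes the classes to this package; for illustration, after the import the following should hold:

from bob.bio.vein.algorithm import MiuraMatchRotationFast

# the class now reports this package, not its defining submodule, as its home
assert MiuraMatchRotationFast.__module__ == 'bob.bio.vein.algorithm'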
bob/bio/vein/configurations/fv3d.py  0 → 100644

#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

"""`3D Fingervein`_ is a database for biometric fingervein recognition

The `3D Fingervein`_ Database for finger vein recognition consists of 13614
images from 141 subjects collected in various acquisition campaigns.

You can download the raw data of the `3D Fingervein`_ database by following
the link.
"""

from ..database.fv3d import Database

_fv3d_directory = "[YOUR_FV3D_DIRECTORY]"
"""Value of ``~/.bob_bio_databases.txt`` for this database"""

database = Database(
    original_directory = _fv3d_directory,
    original_extension = '.png',
    )
"""The :py:class:`bob.bio.base.database.BioDatabase` derivative with fv3d
database settings

.. warning::

   This class only provides a programmatic interface to load data in an orderly
   manner, respecting usage protocols. It does **not** contain the raw
   datafiles. You should procure those yourself.

Notice that ``original_directory`` is set to ``[YOUR_FV3D_DIRECTORY]``. You
must make sure to create ``${HOME}/.bob_bio_databases.txt`` setting this value
to the place where you actually installed the `3D Fingervein`_ Database, as
explained in the section :ref:`bob.bio.vein.baselines`.
"""

protocol = 'central'
"""The default protocol to use for tests

You may modify this at runtime by specifying the option ``--protocol`` on the
command-line of ``verify.py`` or using the keyword ``protocol`` on a
configuration file that is loaded **after** this configuration resource.
"""
bob/bio/vein/configurations/gridio4g48.py  0 → 100644

#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

'''Grid configurations for bob.bio.vein'''

import bob.bio.base

grid = bob.bio.base.grid.Grid(
    number_of_preprocessing_jobs = 48,
    number_of_extraction_jobs = 48,
    number_of_projection_jobs = 48,
    number_of_enrollment_jobs = 48,
    number_of_scoring_jobs = 48,
    training_queue = '4G-io-big',
    preprocessing_queue = '4G-io-big',
    extraction_queue = '4G-io-big',
    projection_queue = '4G-io-big',
    enrollment_queue = '4G-io-big',
    scoring_queue = '4G-io-big',
    )
'''Defines an SGE grid configuration for running at Idiap

This grid configuration will use 48 slots for each of the stages defined below.

The queue ``4G-io-big`` corresponds to the following settings:

  * ``queue``: ``q1d`` (in this queue you have a maximum of 48 slots according