Commit d7432c9d authored by André Anjos

[many] More clean-ups and code re-organization

parent 331df584
Showing changes with 172 additions and 310 deletions
.. vim: set fileencoding=utf-8 :
.. Andre Anjos <andre.anjos@idiap.ch>
.. Fri 08 Jul 2016 15:38:56 CEST

.. image:: http://img.shields.io/badge/docs-stable-yellow.png
...
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>

import bob.ip.base
import bob.sp
import numpy
import math
import scipy.signal

from facereclib.tools.Tool import Tool


class MiuraMatch (Tool):
  """Finger vein matching: match ratio based on

  N. Miura, A. Nagasaka, and T. Miyatake. Feature extraction of finger vein
  patterns based on repeated line tracking and its application to personal
  identification. Machine Vision and Applications, Vol. 15, Num. 4,
  pp. 194--203, 2004
  """

  def __init__(
      self,
      # some similarity functions might need a GaborWaveletTransform class, so we have to provide the parameters here as well...
      ch = 8,     # Maximum search displacement in y-direction
      cw = 5,     # Maximum search displacement in x-direction
      gpu = False,
  ):

    # call base class constructor
    Tool.__init__(
        self,
        ch = ch,
        cw = cw,
        multiple_model_scoring = None,
        multiple_probe_scoring = None
    )

    self.ch = ch
    self.cw = cw
    self.gpu = gpu

  def enroll(self, enroll_features):
    """Enrolls the model by computing an average graph for each model"""
    # return the generated model
    return numpy.vstack(enroll_features)

  def score(self, model, probe):
    """Computes the score of the probe and the model

    Return score - Value between 0 and 0.5, larger value is better match
    """
    I = probe.astype(numpy.float64)
    R = model.astype(numpy.float64)
    h, w = R.shape
    crop_R = R[self.ch:h-self.ch, self.cw:w-self.cw]
    rotate_R = numpy.zeros((crop_R.shape[0], crop_R.shape[1]))
    bob.ip.base.rotate(crop_R, rotate_R, 180)

    #FFT for scoring!
    #Nm=bob.sp.ifft(bob.sp.fft(I)*bob.sp.fft(rotate_R))
    if self.gpu == True:
      import xbob.cusp
      Nm = xbob.cusp.conv(I, rotate_R)
    else:
      Nm = scipy.signal.convolve2d(I, rotate_R, 'valid')

    t0, s0 = numpy.unravel_index(Nm.argmax(), Nm.shape)
    Nmm = Nm[t0, s0]
    #Nmm = Nm.max()
    #mi = numpy.argwhere(Nmm == Nm)
    #t0, s0 = mi.flatten()[:2]
    score = Nmm/(sum(sum(crop_R)) + sum(sum(I[t0:t0+h-2*self.ch, s0:s0+w-2*self.cw])))
    return score
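For reference, the score() method above can be reproduced without the bob stack. The following is a minimal numpy/scipy sketch of the same Miura match ratio, using scipy.signal.correlate2d directly instead of convolving with the 180-degree rotated template (the two are equivalent). The function name miura_match_score is purely illustrative and is not part of this package.

import numpy
import scipy.signal


def miura_match_score(model, probe, ch=8, cw=5):
    """Illustrative re-implementation of the Miura match ratio shown above."""
    I = probe.astype(numpy.float64)
    R = model.astype(numpy.float64)
    h, w = R.shape

    # crop the model by the maximum allowed displacement in y (ch) and x (cw)
    crop_R = R[ch:h - ch, cw:w - cw]

    # 'valid' cross-correlation: every placement of crop_R fully inside the probe
    Nm = scipy.signal.correlate2d(I, crop_R, mode='valid')

    # best alignment offset and the correlation value at that offset
    t0, s0 = numpy.unravel_index(Nm.argmax(), Nm.shape)
    Nmm = Nm[t0, s0]

    # normalise by the sum of the template and of the matched probe region
    return Nmm / (crop_R.sum() + I[t0:t0 + h - 2 * ch, s0:s0 + w - 2 * cw].sum())

For two identical binary vein images the normalised peak approaches 0.5, which is why the docstring describes the score range as 0 to 0.5.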
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

import bob.sp
import bob.ip.base
...@@ -9,24 +8,27 @@ import numpy
import math
import scipy.signal

from bob.bio.base.algorithm import Algorithm


class MiuraMatch (Algorithm):
  """Finger vein matching: match ratio

  Based on N. Miura, A. Nagasaka, and T. Miyatake. Feature extraction of finger
  vein patterns based on repeated line tracking and its application to personal
  identification. Machine Vision and Applications, Vol. 15, Num. 4, pp.
  194--203, 2004
  """

  def __init__(self,
      # some similarity functions might need a GaborWaveletTransform class, so we have to provide the parameters here as well...
      ch = 8,     # Maximum search displacement in y-direction
      cw = 5,     # Maximum search displacement in x-direction
      gpu = False,
  ):

    # call base class constructor
    Algorithm.__init__(
        self,
        ch = ch,
...@@ -40,8 +42,10 @@ class MiuraMatch (Tool):
    self.cw = cw
    self.gpu = gpu

  def enroll(self, enroll_features):
    """Enrolls the model by computing an average graph for each model"""
    # return the generated model
    #import ipdb; ipdb.set_trace()
    return numpy.array(enroll_features)

...@@ -52,7 +56,7 @@ class MiuraMatch (Tool):
    size_t = numpy.array(t.shape)
    size_a = numpy.array(a.shape)
    outsize = size_t + size_a - 1

    # Determine 2D cross correlation in Fourier domain
    taux = numpy.zeros(outsize)
    taux[0:size_t[0],0:size_t[1]] = t
...@@ -60,12 +64,12 @@ class MiuraMatch (Tool):
    aaux = numpy.zeros(outsize)
    aaux[0:size_a[0],0:size_a[1]] = a
    Fa = bob.sp.fft(aaux.astype(numpy.complex128))
    convta = numpy.real(bob.sp.ifft(Ft*Fa))

    [w, h] = size_t - size_a + 1
    output = convta[size_a[0]-1:size_a[0]-1+w, size_a[1]-1:size_a[1]-1+h]
    return output

...@@ -75,12 +79,12 @@ class MiuraMatch (Tool):
    """
    #print model.shape
    #print probe.shape
    I = probe.astype(numpy.float64)

    if len(model.shape) == 2:
      model = numpy.array([model])

    n_models = model.shape[0]
    scores = []
...@@ -90,7 +94,7 @@ class MiuraMatch (Tool):
      crop_R = R[self.ch:h-self.ch, self.cw:w-self.cw]
      rotate_R = numpy.zeros((crop_R.shape[0], crop_R.shape[1]))
      bob.ip.base.rotate(crop_R, rotate_R, 180)

      #FFT for scoring!
      #Nm=bob.sp.ifft(bob.sp.fft(I)*bob.sp.fft(rotate_R))
      if self.gpu == True:
        Nm = self.convfft(I, rotate_R)
...@@ -99,13 +103,12 @@ class MiuraMatch (Tool):
      else:
        Nm = self.convfft(I, rotate_R)
        #Nm2 = scipy.signal.convolve2d(I, rotate_R, 'valid')

      t0, s0 = numpy.unravel_index(Nm.argmax(), Nm.shape)
      Nmm = Nm[t0,s0]
      #Nmm = Nm.max()
      #mi = numpy.argwhere(Nmm == Nm)
      #t0, s0 = mi.flatten()[:2]
      scores.append(Nmm/(sum(sum(crop_R)) + sum(sum(I[t0:t0+h-2*self.ch, s0:s0+w-2*self.cw]))))

    return numpy.mean(scores)
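The convfft helper visible in the hunks above computes a 'valid' 2D convolution through the Fourier domain using bob.sp.fft and bob.sp.ifft. As a point of reference, here is a numpy-only sketch of the same computation (numpy.fft standing in for bob.sp; the name conv_fft_valid is illustrative, not the package API):

import numpy


def conv_fft_valid(t, a):
    """Full linear 2D convolution of t and a via the FFT, cropped to the
    'valid' region (assumes t is at least as large as a in both dimensions)."""
    size_t = numpy.array(t.shape)
    size_a = numpy.array(a.shape)
    outsize = size_t + size_a - 1

    # zero-pad both inputs to the full linear-convolution size before the FFT
    Ft = numpy.fft.fft2(t, s=tuple(outsize))
    Fa = numpy.fft.fft2(a, s=tuple(outsize))

    # pointwise product in the Fourier domain == (padded) convolution in space
    full = numpy.real(numpy.fft.ifft2(Ft * Fa))

    # keep only the placements where a lies entirely inside t
    h, w = size_t - size_a + 1
    return full[size_a[0] - 1:size_a[0] - 1 + h, size_a[1] - 1:size_a[1] - 1 + w]

Zero-padding to size_t + size_a - 1 is what turns the circular convolution implied by the FFT into the linear convolution the matcher needs.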
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
"""Tool chain for computing verification scores"""
from MiuraMatch import MiuraMatch
#!/usr/bin/env python
# Pedro Tome <Pedro.Tome@idiap.ch>
"""Configuration files for different steps of the fingervein recognition tool chain"""
import databases
import preprocessing
import features
import tools
import grid
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

from ..algorithms import MiuraMatch

huangwl_tool = MiuraMatch(ch=18, cw=28)
huangwl_gpu_tool = MiuraMatch(ch=18, cw=28, gpu=True)

miuramax_tool = MiuraMatch(ch=80, cw=90)
miuramax_gpu_tool = MiuraMatch(ch=80, cw=90, gpu=True)

miurarlt_tool = MiuraMatch(ch=65, cw=55)
miurarlt_gpu_tool = MiuraMatch(ch=65, cw=55, gpu=True)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <pedro.tome@idiap.ch>
"""Configuration files for image databases"""
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

import bob.db.utfvp
from bob.bio.base.database import DatabaseBob

utfvp_directory = "/idiap/resource/database/UTFVP/data/"

database = DatabaseBob(
  database = bob.db.utfvp.Database(
    original_directory = utfvp_directory,
    original_extension = ".png"
  ),
  name = 'utfvp',
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

import bob.db.vera
from bob.bio.base.database import DatabaseBob

vera_directory = "/idiap/project/vera"

database = DatabaseBob(
  database = bob.db.vera.Database(
    original_directory = vera_directory,
    original_extension = ".png",
  ),
  name = 'vera',
)
\ No newline at end of file
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
"""Configuration files for feature extractors"""
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

from ...extractors import LocalBinaryPatterns

# Parameters
...@@ -18,25 +17,24 @@ LBP_CIRCULAR = True
LBP_ROTATION_INVARIANT = False
LBP_COMPARE_TO_AVERAGE = False
LBP_ADD_AVERAGE = False

# histogram options
SPARSE_HISTOGRAM = False
SPLIT_HISTOGRAM = None

#Define feature extractor
feature_extractor = LocalBinaryPatterns(
  block_size=BLOCK_SIZE,          # one or two parameters for block size
  block_overlap=BLOCK_OVERLAP,    # one or two parameters for block overlap
  lbp_radius=LBP_RADIUS,
  lbp_neighbor_count=LBP_NEIGHBOR_COUNT,
  lbp_uniform=LBP_UNIFORM,
  lbp_circular=LBP_CIRCULAR,
  lbp_rotation_invariant=LBP_ROTATION_INVARIANT,
  lbp_compare_to_average=LBP_COMPARE_TO_AVERAGE,
  lbp_add_average=LBP_ADD_AVERAGE,
  # histogram options
  sparse_histogram=SPARSE_HISTOGRAM,
  split_histogram=SPLIT_HISTOGRAM
)
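The configuration above only sets parameters; for intuition, the sketch below shows the basic 8-neighbour LBP operator that such an extractor builds on (radius 1, no uniform or rotation-invariant mapping, no block histogramming). It is a numpy-only illustration under those assumptions, not the LocalBinaryPatterns extractor itself, and the helper name lbp8_codes is hypothetical.

import numpy


def lbp8_codes(image):
    """Plain 8-neighbour LBP: compare every interior pixel to its 8 neighbours
    and pack the comparison results into an 8-bit code per pixel."""
    img = numpy.asarray(image, dtype=numpy.float64)
    center = img[1:-1, 1:-1]
    # clockwise neighbour offsets, starting at the top-left pixel
    offsets = [(-1, -1), (-1, 0), (-1, 1), (0, 1), (1, 1), (1, 0), (1, -1), (0, -1)]
    codes = numpy.zeros(center.shape, dtype=int)
    for bit, (dy, dx) in enumerate(offsets):
        neighbour = img[1 + dy:img.shape[0] - 1 + dy, 1 + dx:img.shape[1] - 1 + dx]
        codes += (neighbour >= center).astype(int) << bit
    return codes

A block-wise histogram of such codes, optionally restricted to uniform patterns, is roughly what the extractor configured above produces as its feature vector.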
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

from ...extractors import LocalBinaryPatterns

# Parameters
SIGMA_DERIVATES = 5     #Sigma used for determining derivatives
GPU_ACCELERATION = False
...@@ -13,6 +13,6 @@ GPU_ACCELERATION = False
feature_extractor = bob.fingervein.features.MaximumCurvature(
  sigma = SIGMA_DERIVATES,
  gpu = GPU_ACCELERATION
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

from ...extractors import NormalisedCrossCorrelation

feature_extractor = bob.fingervein.features.NormalisedCrossCorrelation()
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

from ...extractors import RepeatedLineTracking

# Maximum number of iterations
NUMBER_ITERATIONS = 3000

# Distance between tracking point and cross section of profile
DISTANCE_R = 1

# Width of profile
PROFILE_WIDTH = 21

feature_extractor = RepeatedLineTracking(
  iterations=NUMBER_ITERATIONS,
  r=DISTANCE_R,
  profile_w=PROFILE_WIDTH
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

import bob.fingervein

# Parameters
RADIUS_NEIGHBOURHOOD_REGION = 5   # Radius of the circular neighbourhood region
NEIGHBOURHOOD_THRESHOLD = 1
SUM_NEIGHBOURHOOD = 41            # Sum of neighbourhood threshold
RESCALE = True

#Define feature extractor
feature_extractor = bob.fingervein.features.WideLineDetector(
  radius = RADIUS_NEIGHBOURHOOD_REGION,
  threshold = NEIGHBOURHOOD_THRESHOLD,
  g = SUM_NEIGHBOURHOOD,
  rescale = RESCALE
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
"""Configuration files for SGE grid executions"""
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

from bob.bio.base.grid import Grid

# define a queue with demanding parameters
grid = Grid(
  training_queue='32G',

  number_of_preprocessings_per_job=200,
  preprocessing_queue='4G',

  number_of_extraction_jobs=200,
  extraction_queue='8G',

  number_of_projection_jobs=200,
  projection_queue='8G',

  number_of_enrollment_jobs=10,
  enrollment_queue='8G',

  number_of_scoring_jobs=10,
  scoring_queue='8G',
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

from bob.bio.base.grid import Grid

grid = Grid(
  training_queue = '32G',

  number_of_preprocessing_jobs = 1000,
  preprocessing_queue = '8G',

  number_of_extraction_jobs = 100,
  extraction_queue = '8G',

  number_of_projection_jobs = 100,
  projection_queue = '8G',

  number_of_enrollment_jobs = 100,
  enrollment_queue = '8G',

  number_of_scoring_jobs = 10,
  scoring_queue = '8G'
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

from bob.bio.base.grid import Grid

grid = Grid(
  training_queue = '8G',

  number_of_preprocessing_jobs = 32,
  preprocessing_queue = '4G-io-big',

  number_of_extraction_jobs = 32,
  extraction_queue = '4G-io-big',

  number_of_projection_jobs = 32,
  projection_queue = {},

  number_of_enrollment_jobs = 32,
  enrollment_queue = {},

  number_of_scoring_jobs = 32,
  scoring_queue = {'queue': 'q_gpu'},
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

from bob.bio.base.grid import Grid

grid = Grid(
  training_queue = '8G',

  number_of_preprocessing_jobs = 32,
  preprocessing_queue = '4G-io-big',

  number_of_extraction_jobs = 32,
  extraction_queue = '4G-io-big',

  number_of_projection_jobs = 32,
  projection_queue = {},

  number_of_enrollment_jobs = 32,
  enrollment_queue = {},

  number_of_scoring_jobs = 32,
  scoring_queue = '4G-io-big',
)