Commit d7432c9d authored by André Anjos's avatar André Anjos

[many] More clean-ups and code re-organization

parent 331df584
.. vim: set fileencoding=utf-8 :
.. Andre Anjos <andre.anjos@idiap.ch>
.. Fri 08 Jul 2016 15:38:56 CEST
.. image:: http://img.shields.io/badge/docs-stable-yellow.png
......
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
import bob.ip.base
import bob.sp
import numpy
import math
import scipy.signal
from facereclib.tools.Tool import Tool
class MiuraMatch (Tool):
  """Finger vein matching: match ratio via normalized cross-correlation.

  Based on N. Miura, A. Nagasaka, and T. Miyatake. Feature extraction of
  finger vein patterns based on repeated line tracking and its application to
  personal identification. Machine Vision and Applications, Vol. 15, Num. 4,
  pp. 194--203, 2004.
  """

  def __init__(
      self,
      # some similarity functions might need a GaborWaveletTransform class, so we have to provide the parameters here as well...
      ch = 8,       # Maximum search displacement in y-direction
      cw = 5,       # Maximum search displacement in x-direction
      gpu = False,  # If True, use the GPU convolution from xbob.cusp
  ):
    # call base class constructor
    Tool.__init__(
        self,
        ch = ch,
        cw = cw,
        multiple_model_scoring = None,
        multiple_probe_scoring = None
    )
    self.ch = ch
    self.cw = cw
    self.gpu = gpu

  def enroll(self, enroll_features):
    """Enrolls the model by stacking all enrollment feature arrays.

    Returns the generated model as a single 2D numpy array.
    """
    return numpy.vstack(enroll_features)

  def score(self, model, probe):
    """Computes the score of the probe against the model.

    Returns a value between 0 and 0.5; a larger value is a better match.
    """
    I = probe.astype(numpy.float64)
    R = model.astype(numpy.float64)
    h, w = R.shape
    # crop the model by the maximum allowed displacement so the 'valid'
    # convolution below slides the template over every admissible offset
    crop_R = R[self.ch:h-self.ch, self.cw:w-self.cw]
    # rotate the template by 180 degrees so that convolution computes
    # cross-correlation
    rotate_R = numpy.zeros((crop_R.shape[0], crop_R.shape[1]))
    bob.ip.base.rotate(crop_R, rotate_R, 180)
    if self.gpu:
      import xbob.cusp
      Nm = xbob.cusp.conv(I, rotate_R)
    else:
      Nm = scipy.signal.convolve2d(I, rotate_R, 'valid')
    # locate the peak of the cross-correlation surface
    t0, s0 = numpy.unravel_index(Nm.argmax(), Nm.shape)
    Nmm = Nm[t0, s0]
    # normalize the peak by the total intensity of the cropped model plus the
    # overlapping probe region at the best displacement
    score = Nmm / (crop_R.sum() + I[t0:t0+h-2*self.ch, s0:s0+w-2*self.cw].sum())
    return score
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
import bob.sp
import bob.ip.base
......@@ -9,24 +8,27 @@ import numpy
import math
import scipy.signal
from facereclib.tools.Tool import Tool
from bob.bio.base.algorithm import Algorithm
class MiuraMatch (Tool):
"""Finger vein matching: match ratio based on
N. Miura, A. Nagasaka, and T. Miyatake. Feature extraction of finger vein patterns based on repeated line tracking and its application
to personal identification. Machine Vision and Applications, Vol. 15, Num. 4, pp. 194--203, 2004
class MiuraMatch (Algorithm):
"""Finger vein matching: match ratio
Based on N. Miura, A. Nagasaka, and T. Miyatake. Feature extraction of finger
vein patterns based on repeated line tracking and its application to personal
identification. Machine Vision and Applications, Vol. 15, Num. 4, pp.
194--203, 2004
"""
def __init__(
self,
def __init__(self,
# some similarity functions might need a GaborWaveletTransform class, so we have to provide the parameters here as well...
ch = 8, # Maximum search displacement in y-direction
cw = 5, # Maximum search displacement in x-direction
gpu = False,
):
):
# call base class constructor
Tool.__init__(
Algorithm.__init__(
self,
ch = ch,
......@@ -40,8 +42,10 @@ class MiuraMatch (Tool):
self.cw = cw
self.gpu = gpu
def enroll(self, enroll_features):
"""Enrolls the model by computing an average graph for each model"""
# return the generated model
#import ipdb; ipdb.set_trace()
return numpy.array(enroll_features)
......@@ -52,7 +56,7 @@ class MiuraMatch (Tool):
size_t = numpy.array(t.shape)
size_a = numpy.array(a.shape)
outsize = size_t + size_a - 1
# Determine 2D cross correlation in Fourier domain
taux = numpy.zeros(outsize)
taux[0:size_t[0],0:size_t[1]] = t
......@@ -60,12 +64,12 @@ class MiuraMatch (Tool):
aaux = numpy.zeros(outsize)
aaux[0:size_a[0],0:size_a[1]] = a
Fa = bob.sp.fft(aaux.astype(numpy.complex128))
convta = numpy.real(bob.sp.ifft(Ft*Fa))
[w, h] = size_t-size_a+1
output = convta[size_a[0]-1:size_a[0]-1+w, size_a[1]-1:size_a[1]-1+h]
return output
......@@ -75,12 +79,12 @@ class MiuraMatch (Tool):
"""
#print model.shape
#print probe.shape
I=probe.astype(numpy.float64)
if len(model.shape) == 2:
if len(model.shape) == 2:
model = numpy.array([model])
n_models = model.shape[0]
scores = []
......@@ -90,7 +94,7 @@ class MiuraMatch (Tool):
crop_R = R[self.ch:h-self.ch, self.cw:w-self.cw]
rotate_R = numpy.zeros((crop_R.shape[0], crop_R.shape[1]))
bob.ip.base.rotate(crop_R, rotate_R, 180)
#FFT for scoring!
#FFT for scoring!
#Nm=bob.sp.ifft(bob.sp.fft(I)*bob.sp.fft(rotate_R))
if self.gpu == True:
Nm = self.convfft(I, rotate_R)
......@@ -99,13 +103,12 @@ class MiuraMatch (Tool):
else:
Nm = self.convfft(I, rotate_R)
#Nm2 = scipy.signal.convolve2d(I, rotate_R, 'valid')
t0, s0 = numpy.unravel_index(Nm.argmax(), Nm.shape)
Nmm = Nm[t0,s0]
#Nmm = Nm.max()
#mi = numpy.argwhere(Nmm == Nm)
#mi = numpy.argwhere(Nmm == Nm)
#t0, s0 = mi.flatten()[:2]
scores.append(Nmm/(sum(sum(crop_R)) + sum(sum(I[t0:t0+h-2*self.ch, s0:s0+w-2*self.cw]))))
return numpy.mean(scores)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
"""Tool chain for computing verification scores"""
from MiuraMatch import MiuraMatch
#!/usr/bin/env python
# Pedro Tome <Pedro.Tome@idiap.ch>
"""Configuration files for different steps of the fingervein recognition tool chain"""
import databases
import preprocessing
import features
import tools
import grid
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
import facereclib
from .. import tools as fingervein_tools
# Miura-match configurations, one per feature type; each *_gpu_tool variant
# enables the GPU convolution path of the matcher.

# huangwl: search window ch=18, cw=28
huangwl_tool = fingervein_tools.MiuraMatch(ch = 18, cw = 28)
huangwl_gpu_tool = fingervein_tools.MiuraMatch(ch = 18, cw = 28, gpu = True)

# miuramax: search window ch=80, cw=90
miuramax_tool = fingervein_tools.MiuraMatch(ch = 80, cw = 90)
miuramax_gpu_tool = fingervein_tools.MiuraMatch(ch = 80, cw = 90, gpu = True)

# miurarlt: search window ch=65, cw=55
miurarlt_tool = fingervein_tools.MiuraMatch(ch = 65, cw = 55)
miurarlt_gpu_tool = fingervein_tools.MiuraMatch(ch = 65, cw = 55, gpu = True)
from ..algorithms import MiuraMatch
# Miura-match configurations, one per feature type; the gpu=True variants
# select the GPU convolution path of the matcher.
huangwl_tool = MiuraMatch(
    ch=18,
    cw=28,
)
huangwl_gpu_tool = MiuraMatch(
    ch=18,
    cw=28,
    gpu=True,
)
miuramax_tool = MiuraMatch(
    ch=80,
    cw=90,
)
miuramax_gpu_tool = MiuraMatch(
    ch=80,
    cw=90,
    gpu=True,
)
miurarlt_tool = MiuraMatch(
    ch=65,
    cw=55,
)
miurarlt_gpu_tool = MiuraMatch(
    ch=65,
    cw=55,
    gpu=True,
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <pedro.tome@idiap.ch>
"""Configuration files for image databases"""
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
import bob.db.utfvp
import facereclib
from bob.bio.base.database import DatabaseBob
utfvp_directory = "/idiap/resource/database/UTFVP/data/"
database = facereclib.databases.DatabaseBob(
database = DatabaseBob(
database = bob.db.utfvp.Database(
original_directory = utfvp_directory,
original_extension = ".png"
),
name = 'utfvp',
)
),
name = 'utfvp',
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
import bob.db.vera
import facereclib
from bob.bio.base.database import DatabaseBob
vera_directory = "/idiap/project/vera"
database = facereclib.databases.DatabaseBob(
database = DatabaseBob(
database = bob.db.vera.Database(
original_directory = vera_directory,
original_extension = ".png",
),
original_extension = ".png",
),
name = 'vera',
)
\ No newline at end of file
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
"""Configuration files for feature extractors"""
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
import bob.fingervein
from ...extractors import LocalBinaryPatterns
# Parameters
......@@ -18,25 +17,24 @@ LBP_CIRCULAR = True
LBP_ROTATION_INVARIANT = False
LBP_COMPARE_TO_AVERAGE = False
LBP_ADD_AVERAGE = False
# histogram options
SPARSE_HISTOGRAM = False
SPLIT_HISTOGRAM = None
#Define feature extractor
feature_extractor = bob.fingervein.features.LocalBinaryPatterns(
block_size = BLOCK_SIZE, # one or two parameters for block size
block_overlap = BLOCK_OVERLAP, # one or two parameters for block overlap
lbp_radius = LBP_RADIUS,
lbp_neighbor_count = LBP_NEIGHBOR_COUNT,
lbp_uniform = LBP_UNIFORM,
lbp_circular = LBP_CIRCULAR,
lbp_rotation_invariant = LBP_ROTATION_INVARIANT,
lbp_compare_to_average = LBP_COMPARE_TO_AVERAGE,
lbp_add_average = LBP_ADD_AVERAGE,
feature_extractor = LocalBinaryPatterns(
block_size=BLOCK_SIZE, # one or two parameters for block size
block_overlap=BLOCK_OVERLAP, # one or two parameters for block overlap
lbp_radius=LBP_RADIUS,
lbp_neighbor_count=LBP_NEIGHBOR_COUNT,
lbp_uniform=LBP_UNIFORM,
lbp_circular=LBP_CIRCULAR,
lbp_rotation_invariant=LBP_ROTATION_INVARIANT,
lbp_compare_to_average=LBP_COMPARE_TO_AVERAGE,
lbp_add_average=LBP_ADD_AVERAGE,
# histogram options
sparse_histogram = SPARSE_HISTOGRAM,
split_histogram = SPLIT_HISTOGRAM
sparse_histogram=SPARSE_HISTOGRAM,
split_histogram=SPLIT_HISTOGRAM
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
import bob.fingervein
from ...extractors import LocalBinaryPatterns
# Parameters
# Parameters
SIGMA_DERIVATES = 5 #Sigma used for determining derivatives
GPU_ACCELERATION = False
......@@ -13,6 +13,6 @@ GPU_ACCELERATION = False
feature_extractor = bob.fingervein.features.MaximumCurvature(
sigma = SIGMA_DERIVATES,
gpu = GPU_ACCELERATION
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
import bob.fingervein
# Parameters
#Define feature extractor
feature_extractor = bob.fingervein.features.NormalisedCrossCorrelation(
)
from ...extractors import NormalisedCrossCorrelation

# Define the feature extractor.  Use the directly-imported class: the old
# `bob.fingervein.features` path belongs to the pre-reorganization layout.
feature_extractor = NormalisedCrossCorrelation()
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
import bob.fingervein
from ...extractors import RepeatedLineTracking
# Parameters
NUMBER_ITERATIONS = 3000 # Maximum number of iterations
DISTANCE_R = 1 # Distance between tracking point and cross section of the profile
PROFILE_WIDTH = 21 # Width of profile
# Maximum number of iterations
NUMBER_ITERATIONS = 3000
# Distance between tracking point and cross section of profile
DISTANCE_R = 1
#Define feature extractor
feature_extractor = bob.fingervein.features.RepeatedLineTracking(
iterations = NUMBER_ITERATIONS,
r = DISTANCE_R,
profile_w = PROFILE_WIDTH
)
# Width of profile
PROFILE_WIDTH = 21
feature_extractor = RepeatedLineTracking(
iterations=NUMBER_ITERATIONS,
r=DISTANCE_R,
profile_w=PROFILE_WIDTH
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
import bob.fingervein
# Parameters
RADIUS_NEIGHBOURHOOD_REGION = 5 # Radius of the circular neighbourhood region
NEIGHBOURHOOD_THRESHOLD = 1
SUM_NEIGHBOURHOOD = 41 #Sum of neigbourhood threshold
NEIGHBOURHOOD_THRESHOLD = 1
SUM_NEIGHBOURHOOD = 41 #Sum of neigbourhood threshold
RESCALE = True
#Define feature extractor
feature_extractor = bob.fingervein.features.WideLineDetector(
radius = RADIUS_NEIGHBOURHOOD_REGION,
radius = RADIUS_NEIGHBOURHOOD_REGION,
threshold = NEIGHBOURHOOD_THRESHOLD,
g = SUM_NEIGHBOURHOOD,
rescale = RESCALE
g = SUM_NEIGHBOURHOOD,
rescale = RESCALE
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Pedro Tome <Pedro.Tome@idiap.ch>
"""Configuration files for SGE grid executions"""
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
import facereclib
# define a queue with demanding parameters for SGE grid execution;
# string values ('32G', '4G', ...) are per-job queue/memory selectors
grid = facereclib.utils.GridParameters(
  training_queue = '32G',
  # preprocessing: files handled per job, and the queue to run on
  number_of_preprocessings_per_job = 200,
  preprocessing_queue = '4G',
  # feature extraction
  number_of_extracted_features_per_job = 200,
  extraction_queue = '8G',
  # feature projection
  number_of_projected_features_per_job = 200,
  projection_queue = '8G',
  # model enrollment
  number_of_enrolled_models_per_job = 10,
  enrollment_queue = '8G',
  # scoring
  number_of_models_per_scoring_job = 10,
  scoring_queue = '8G'
)
# Queue setup with demanding parameters, on the bob.bio.base grid interface.
# Fixed: the original `import bob.bio.base.grid import Grid` is a SyntaxError.
from bob.bio.base.grid import Grid

grid = Grid(
    training_queue='32G',
    # preprocessing (keyword aligned with the other Grid configurations)
    number_of_preprocessing_jobs=200,
    preprocessing_queue='4G',
    # feature extraction
    number_of_extraction_jobs=200,
    extraction_queue='8G',
    # feature projection
    number_of_projection_jobs=200,
    projection_queue='8G',
    # model enrollment
    number_of_enrollment_jobs=10,
    enrollment_queue='8G',
    # scoring
    number_of_scoring_jobs=10,
    scoring_queue='8G',
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
import facereclib
# define a queue specifically for the xbob.db.gbu database
# NOTE(review): the comment above mentions xbob.db.gbu (a face database);
# likely inherited from facereclib -- confirm it applies to this package
grid = facereclib.utils.GridParameters(
  training_queue = '32G',
  # preprocessing: files handled per job, and the queue to run on
  number_of_preprocessings_per_job = 1000,
  preprocessing_queue = '8G',
  # feature extraction
  number_of_extracted_features_per_job = 100,
  extraction_queue = '8G',
  # feature projection
  number_of_projected_features_per_job = 100,
  projection_queue = '8G',
  # model enrollment
  number_of_enrolled_models_per_job = 100,
  enrollment_queue = '8G',
  # scoring
  number_of_models_per_scoring_job = 10,
  scoring_queue = '8G'
)
# Large-batch queue setup on the bob.bio.base grid interface.
# Fixed: the original `import bob.bio.base.grid import Grid` is a SyntaxError.
from bob.bio.base.grid import Grid

grid = Grid(
  training_queue = '32G',
  # preprocessing
  number_of_preprocessing_jobs = 1000,
  preprocessing_queue = '8G',
  # feature extraction
  number_of_extraction_jobs = 100,
  extraction_queue = '8G',
  # feature projection
  number_of_projection_jobs = 100,
  projection_queue = '8G',
  # model enrollment
  number_of_enrollment_jobs = 100,
  enrollment_queue = '8G',
  # scoring
  number_of_scoring_jobs = 10,
  scoring_queue = '8G'
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
import facereclib
# setup of the grid parameters
# define a queue with demanding parameters
# NOTE(review): scoring runs on 'q_gpu' -- presumably GPU nodes; confirm the
# queue name on the target SGE cluster
grid = facereclib.utils.GridParameters(
  training_queue = '8G',
  # preprocessing
  number_of_preprocessing_jobs = 32,
  preprocessing_queue = '4G-io-big',
  # feature extraction
  number_of_extraction_jobs = 32,
  extraction_queue = '4G-io-big',
  # feature projection ({} means: default queue parameters)
  number_of_projection_jobs = 32,
  projection_queue = {},
  # model enrollment
  number_of_enrollment_jobs = 32,
  enrollment_queue = {},
  # scoring
  number_of_scoring_jobs = 32,
  scoring_queue = {'queue': 'q_gpu'},
)
# Queue setup with GPU scoring, on the bob.bio.base grid interface.
# Fixed: the original `import bob.bio.base.grid import Grid` is a SyntaxError.
from bob.bio.base.grid import Grid

grid = Grid(
  training_queue = '8G',
  # preprocessing
  number_of_preprocessing_jobs = 32,
  preprocessing_queue = '4G-io-big',
  # feature extraction
  number_of_extraction_jobs = 32,
  extraction_queue = '4G-io-big',
  # feature projection ({} means: default queue parameters)
  number_of_projection_jobs = 32,
  projection_queue = {},
  # model enrollment
  number_of_enrollment_jobs = 32,
  enrollment_queue = {},
  # scoring on the GPU queue
  number_of_scoring_jobs = 32,
  scoring_queue = {'queue': 'q_gpu'},
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
import facereclib
# setup of the grid parameters
# define a queue with demanding parameters
# (same layout as the GPU variant, but scoring stays on the '4G-io-big' queue)
grid = facereclib.utils.GridParameters(
  training_queue = '8G',
  # preprocessing
  number_of_preprocessing_jobs = 32,
  preprocessing_queue = '4G-io-big',
  # feature extraction
  number_of_extraction_jobs = 32,
  extraction_queue = '4G-io-big',
  # feature projection ({} means: default queue parameters)
  number_of_projection_jobs = 32,
  projection_queue = {},
  # model enrollment
  number_of_enrollment_jobs = 32,
  enrollment_queue = {},
  # scoring
  number_of_scoring_jobs = 32,
  scoring_queue = '4G-io-big',
)
# Queue setup with IO-heavy queues, on the bob.bio.base grid interface.
# Fixed: the original `import bob.bio.base.grid import Grid` is a SyntaxError.
from bob.bio.base.grid import Grid

grid = Grid(
  training_queue = '8G',
  # preprocessing
  number_of_preprocessing_jobs = 32,
  preprocessing_queue = '4G-io-big',
  # feature extraction
  number_of_extraction_jobs = 32,
  extraction_queue = '4G-io-big',
  # feature projection ({} means: default queue parameters)
  number_of_projection_jobs = 32,
  projection_queue = {},
  # model enrollment
  number_of_enrollment_jobs = 32,
  enrollment_queue = {},
  # scoring
  number_of_scoring_jobs = 32,
  scoring_queue = '4G-io-big',
)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
import facereclib
# setup of the grid parameters
# define a queue with demanding parameters
# (large batches per job; scoring on the 'q_gpu' queue)
grid = facereclib.utils.GridParameters(
  training_queue = '8G',
  # preprocessing ({} means: default queue parameters)
  number_of_preprocessings_per_job = 1000,
  preprocessing_queue = {},
  # feature extraction
  number_of_extracted_features_per_job = 1000,
  extraction_queue = {},
  # feature projection
  number_of_projected_features_per_job = 1000,
  projection_queue = {},
  # model enrollment
  number_of_enrolled_models_per_job = 100,
  enrollment_queue = '2G',
  # scoring
  number_of_models_per_scoring_job = 1500,
  scoring_queue = {'queue': 'q_gpu'},
)
# Fixed: the original `import bob.bio.base.grid import Grid` is a SyntaxError.
from bob.bio.base.grid import Grid
grid = Grid(
training_queue = '8G',
number_of_preprocessing_jobs = 1000,
preprocessing_queue = {},
number_of_extraction_jobs = 1000,
extraction_queue = {},
number_of_projection_jobs = 1000,
projection_queue = {},
number_of_enrollment_jobs = 100,