#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

import math

import numpy
import scipy.ndimage

import bob.core
import bob.io.base
import bob.ip.base

from bob.bio.base.extractor import Extractor


class RepeatedLineTracking (Extractor):
  """Repeated Line Tracking feature extractor

  Based on N. Miura, A. Nagasaka, and T. Miyatake. Feature extraction of finger
  vein patterns based on repeated line tracking and its application to personal
  identification. Machine Vision and Applications, Vol. 15, Num. 4, pp.
  194--203, 2004
  """

  def __init__(
      self,
      iterations = 3000, # Maximum number of iterations (seed points)
      r = 1,             # Distance between tracking point and cross section of the profile
      profile_w = 21,    # Width of profile; must be odd so it is centred on the tracking point
      rescale = True,    # Rescale the input image by 0.6 before processing
      ):

    # call base class constructor, registering the parameters so they are
    # written to the extractor configuration
    Extractor.__init__(
        self,
        iterations = iterations,
        r = r,
        profile_w = profile_w,
        rescale = rescale,
        )

    # block parameters
    self.iterations = iterations
    self.r = r
    self.profile_w = profile_w
    self.rescale = rescale


  def repeated_line_tracking(self, finger_image, mask):
    """Computes and returns the repeated line tracking features for the given
    input fingervein image.

    Parameters:
      finger_image (numpy.ndarray): 2D grey-level finger image.
      mask (numpy.ndarray): mask delimiting the finger region (non-zero /
        ``True`` inside the finger).

    Returns:
      numpy.ndarray: binarised vein image, ``float64`` with 1.0 on vein pixels.

    Raises:
      ValueError: if ``profile_w`` is even (the profile could not be centred).
    """

    # Convert image to uint8
    if finger_image.dtype != numpy.uint8:
      finger_image = bob.core.convert(finger_image,numpy.uint8,(0,255),(0,1))

    finger_mask = numpy.zeros(mask.shape)
    finger_mask[mask == True] = 1

    # Rescale image if required
    if self.rescale == True:
      scaling_factor = 0.6
      finger_image = bob.ip.base.scale(finger_image,scaling_factor)
      finger_mask = bob.ip.base.scale(finger_mask,scaling_factor)
      # To eliminate residuals from the scaling of the binary mask
      finger_mask = scipy.ndimage.binary_dilation(finger_mask, structure=numpy.ones((1,1))).astype(int)

    p_lr = 0.5  # Probability of going left or right
    p_ud = 0.25 # Probability of going up or down

    Tr = numpy.zeros(finger_image.shape) # Locus space (vote counts)
    # (dx, dy) offsets of the 8-neighbourhood + centre, column-major order to
    # match the transposed flattening of the candidate window below
    filtermask = numpy.array(([-1,-1],[-1,0],[-1,1],[0,-1],[0,0],[0,1],[1,-1],[1,0],[1,1]))

    # profile_w must be odd so the cross-section profile is centred; an even
    # value would also produce non-integer half-widths below
    if self.profile_w % 2 == 0:
      raise ValueError('profile_w must be odd')

    # Integer distances/half-widths (Python 3: '/' would give floats, which
    # are not valid array indices)
    ro = int(numpy.round(self.r*math.sqrt(2)/2))   # r for oblique directions
    hW = (self.profile_w-1)//2                     # half width for horz. and vert. directions
    hWo = int(numpy.round(hW*math.sqrt(2)/2))      # half width for oblique directions

    # Omit unreachable borders, so profile look-ups never fall outside the image
    finger_mask[0:self.r+hW,:] = 0
    finger_mask[finger_mask.shape[0]-(self.r+hW):,:] = 0
    finger_mask[:,0:self.r+hW] = 0
    finger_mask[:,finger_mask.shape[1]-(self.r+hW):] = 0

    ## Uniformly distributed starting points
    aux = numpy.argwhere( (finger_mask > 0) == True )
    indices = numpy.random.permutation(aux)
    indices = indices[0:self.iterations,:]    # Limit to number of iterations

    ## Iterate through all starting points; the mask may contain fewer
    ## candidate pixels than self.iterations, so bound by what is available
    for it in range(0, indices.shape[0]):
        yc = indices[it,0] # Current tracking point, y
        xc = indices[it,1] # Current tracking point, x

        # Determine the moving-direction attributes
        # Going left or right ?
        if (numpy.random.random_sample() >= 0.5):
            Dlr = -1  # Going left
        else:
            Dlr = 1   # Going right

        # Going up or down ?
        if (numpy.random.random_sample() >= 0.5):
            Dud = -1  # Going up
        else:
            Dud = 1   # Going down

        # Initialize locus-position table Tc (pixels already visited by
        # the current tracking run)
        Tc = numpy.zeros(finger_image.shape, bool)

        Vl = 1
        while (Vl > 0):
            # Determine the moving candidate point set Nc
            Nr = numpy.zeros([3,3], bool)
            Rnd = numpy.random.random_sample()
            if (Rnd < p_lr):
                # Going left or right
                Nr[:,1+Dlr] = True
            elif (Rnd >= p_lr) and (Rnd < (p_lr + p_ud)):
                # Going up or down
                Nr[1+Dud,:] = True
            else:
                # Going any direction (all 8 neighbours except the centre)
                Nr = numpy.ones([3,3], bool)
                Nr[1,1] = False

            # Candidates: unvisited neighbours, allowed by Nr and inside the
            # finger mask; the transposed flattening matches filtermask order
            tmp = numpy.argwhere( (~Tc[yc-1:yc+2,xc-1:xc+2] & Nr & finger_mask[yc-1:yc+2,xc-1:xc+2].astype(bool)).T.reshape(-1) == True )
            Nc = numpy.concatenate((xc + filtermask[tmp,0],yc + filtermask[tmp,1]),axis=1)
            if (Nc.size==0):
                # Dead end: stop this tracking run
                Vl=-1
                continue

            ## Detect dark line direction near current tracking point: for
            ## each candidate, measure the valley depth of the cross-section
            ## profile perpendicular to the moving direction
            Vdepths = numpy.zeros((Nc.shape[0],1)) # Valley depths
            for i in range(0,Nc.shape[0]):
                # Each candidate is exactly one of: horizontal, vertical or
                # oblique neighbour — classify once (a chained if/elif/else,
                # so the oblique formulas never overwrite the horizontal or
                # vertical depths already computed for this candidate)
                if (Nc[i,1] == yc):
                    # Horizontal plane
                    yp = Nc[i,1]
                    if (Nc[i,0] > xc):
                        # Right direction
                        xp = Nc[i,0] + self.r
                    else:
                        # Left direction
                        xp = Nc[i,0] - self.r
                    Vdepths[i] = finger_image[yp + hW, xp] - 2*finger_image[yp,xp] + finger_image[yp - hW, xp]
                elif (Nc[i,0] == xc):
                    # Vertical plane
                    xp = Nc[i,0]
                    if (Nc[i,1] > yc):
                        # Down direction
                        yp = Nc[i,1] + self.r
                    else:
                        # Up direction
                        yp = Nc[i,1] - self.r
                    Vdepths[i] = finger_image[yp, xp + hW] - 2*finger_image[yp,xp] + finger_image[yp, xp - hW]
                elif ( (Nc[i,0] > xc) and (Nc[i,1] < yc) ) or ( (Nc[i,0] < xc) and (Nc[i,1] > yc) ):
                    # Oblique direction, diagonal up /
                    if (Nc[i,0] > xc and Nc[i,1] < yc):
                        # Top right
                        xp = Nc[i,0] + ro
                        yp = Nc[i,1] - ro
                    else:
                        # Bottom left
                        xp = Nc[i,0] - ro
                        yp = Nc[i,1] + ro
                    Vdepths[i] = finger_image[yp - hWo, xp - hWo] - 2*finger_image[yp,xp] + finger_image[yp + hWo, xp + hWo]
                else:
                    # Oblique direction, diagonal down \
                    if (Nc[i,0] < xc and Nc[i,1] < yc):
                        # Top left
                        xp = Nc[i,0] - ro
                        yp = Nc[i,1] - ro
                    else:
                        # Bottom right
                        xp = Nc[i,0] + ro
                        yp = Nc[i,1] + ro
                    Vdepths[i] = finger_image[yp + hWo, xp - hWo] - 2*finger_image[yp,xp] + finger_image[yp - hWo, xp + hWo]
            # End search of candidates
            index = numpy.argmax(Vdepths)  # Determine best candidate
            # Register tracking information
            Tc[yc, xc] = True
            # Increase value of tracking space
            Tr[yc, xc] = Tr[yc, xc] + 1
            # Move tracking point
            xc = Nc[index, 0]
            yc = Nc[index, 1]

    img_veins = Tr

    # Binarise the vein image: pixels visited more often than the median of
    # the visited pixels are kept, then small gaps are closed
    md = numpy.median(img_veins[img_veins>0])
    img_veins_bin = img_veins > md
    img_veins_bin = scipy.ndimage.binary_closing(img_veins_bin, structure=numpy.ones((2,2))).astype(int)

    return img_veins_bin.astype(numpy.float64)


  def skeletonize(self, img):
    """Morphologically skeletonizes (thins) a binary image.

    Iteratively applies hit-or-miss thinning with eight rotated structuring
    element pairs until a full pass leaves the image unchanged.

    Parameters:
      img (numpy.ndarray): binary image to thin (not modified in place).

    Returns:
      numpy.ndarray: boolean skeleton of ``img``.
    """
    # Two hit/miss mask pairs; each is rotated by 0/90/180/270 degrees to
    # cover all 8 thinning directions
    h1 = numpy.array([[0, 0, 0],[0, 1, 0],[1, 1, 1]])
    m1 = numpy.array([[1, 1, 1],[0, 0, 0],[0, 0, 0]])
    h2 = numpy.array([[0, 0, 0],[1, 1, 0],[0, 1, 0]])
    m2 = numpy.array([[0, 1, 1],[0, 0, 1],[0, 0, 0]])
    hit_list = []
    miss_list = []
    for k in range(4):
        hit_list.append(numpy.rot90(h1, k))
        hit_list.append(numpy.rot90(h2, k))
        miss_list.append(numpy.rot90(m1, k))
        miss_list.append(numpy.rot90(m2, k))
    img = img.copy()
    # Thin until stable: remove every pixel matched by any hit/miss pair
    while True:
        last = img
        for hit, miss in zip(hit_list, miss_list):
            hm = scipy.ndimage.binary_hit_or_miss(img, hit, miss)
            img = numpy.logical_and(img, numpy.logical_not(hm))
        if numpy.all(img == last):
            break
    return img


  def __call__(self, image):
    """Reads the input image, extracts the features based on Repeated Line
    Tracking of the fingervein image, and returns the resulting template.

    Parameters:
      image (tuple): ``(finger_image, finger_mask)`` pair as produced by the
        preprocessor.

    Returns:
      numpy.ndarray: binarised vein image (``float64``).
    """
    finger_image = image[0]    #Normalized image with or without histogram equalization
    finger_mask = image[1]

    return self.repeated_line_tracking(finger_image, finger_mask)


  def save_feature(self, feature, feature_file):
    """Writes the extracted feature to the given HDF5 file."""
    f = bob.io.base.HDF5File(feature_file, 'w')
    f.set('feature', feature)
    # Drop the handle so the HDF5 file is flushed and closed immediately
    del f


  def read_feature(self, feature_file):
    """Reads a previously saved feature back from the given HDF5 file."""
    f = bob.io.base.HDF5File(feature_file, 'r')
    return f.read('feature')