#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

import math
import numpy
import scipy.ndimage

import bob.core
import bob.io.base
import bob.ip.base

from bob.bio.base.extractor import Extractor


class RepeatedLineTracking (Extractor):
  """Repeated Line Tracking feature extractor

  Based on N. Miura, A. Nagasaka, and T. Miyatake. Feature extraction of finger
  vein patterns based on repeated line tracking and its application to personal
  identification. Machine Vision and Applications, Vol. 15, Num. 4, pp.
  194--203, 2004
  """

  def __init__(
      self,
      iterations = 3000, # Maximum number of iterations
      r = 1,             # Distance between tracking point and cross section of the profile
      profile_w = 21,    # Width of profile (must be odd)
      rescale = True,
      seed = 0,          # Seed for the algorithm's random walk
      ):

    # call base class constructor
    Extractor.__init__(
        self,
        iterations = iterations,
        r = r,
        profile_w = profile_w,
        rescale = rescale,
        seed = seed,
        )

    # block parameters
    self.iterations = iterations
    self.r = r
    self.profile_w = profile_w
    self.rescale = rescale
    self.seed = seed


  def repeated_line_tracking(self, finger_image, mask):
    """Computes and returns the MiuraMax features for the given input
    fingervein image"""

    # Sets the random seed before starting to process
    numpy.random.seed(self.seed)

    #Convert image to uint8
    if finger_image.dtype != numpy.uint8:
      finger_image = bob.core.convert(finger_image,numpy.uint8,(0,255),(0,1))

    finger_mask = numpy.zeros(mask.shape)
    finger_mask[mask == True] = 1

    # Rescale image if required
    if self.rescale == True:
      scaling_factor = 0.6
      finger_image = bob.ip.base.scale(finger_image,scaling_factor)
      finger_mask = bob.ip.base.scale(finger_mask,scaling_factor)
      # Eliminate residuals from the scaling of the binary mask
      finger_mask = scipy.ndimage.binary_dilation(finger_mask, structure=numpy.ones((1,1))).astype(int)

    p_lr = 0.5  # Probability of going left or right
    p_ud = 0.25 # Probability of going up or down

    Tr = numpy.zeros(finger_image.shape) # Locus space
    filtermask = numpy.array(([-1,-1],[-1,0],[-1,1],[0,-1],[0,0],[0,1],[1,-1],[1,0],[1,1]))
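    # Tracking principle: a random walk is launched from each starting point and
    # Tr counts how many walks visit each pixel; pixels crossed by many walks are
    # retained as vein points when Tr is thresholded at the end. filtermask lists
    # the (x, y) offsets of the 3x3 neighbourhood, in the order produced by the
    # transposed/flattened candidate test inside the tracking loop.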

    # Check that profile_w is odd
    if (self.profile_w % 2 == 0):
        raise RuntimeError('profile_w must be odd')

    ro = numpy.round(self.r*math.sqrt(2)/2)    # r for oblique directions
    hW = (self.profile_w-1)/2                  # half width for horz. and vert. directions
    hWo = numpy.round(hW*math.sqrt(2)/2)       # half width for oblique directions

    # Omit unreachable borders
    border = int(self.r+hW)
    finger_mask[0:border,:] = 0
    finger_mask[finger_mask.shape[0]-border:,:] = 0
    finger_mask[:,0:border] = 0
    finger_mask[:,finger_mask.shape[1]-border:] = 0

    ## Uniformly distributed starting points
    aux = numpy.argwhere( (finger_mask > 0) == True )
    indices = numpy.random.permutation(aux)
    indices = indices[0:self.iterations,:]    # Limit to number of iterations

    ## Iterate through all starting points
    for it in range(0,self.iterations):
        yc = indices[it,0] # Current tracking point, y
        xc = indices[it,1] # Current tracking point, x

        # Determine the moving-direction attributes
        # Going left or right ?
        if (numpy.random.random_sample() >= 0.5):
            Dlr = -1  # Going left
        else:
            Dlr = 1   # Going right

        # Going up or down ?
        if (numpy.random.random_sample() >= 0.5):
            Dud = -1  # Going up
        else:
            Dud = 1   # Going down

        # Initialize locus-position table Tc
        Tc = numpy.zeros(finger_image.shape, bool)

        #Dlr = -1; Dud=-1; NOTE: debug override
        Vl = 1
        while (Vl > 0):
            # Determine the moving candidate point set Nc
            Nr = numpy.zeros([3,3], bool)
            Rnd = numpy.random.random_sample()
            #Rnd = 0.8 NOTE: debug override
            if (Rnd < p_lr):
                # Going left or right
                Nr[:,1+Dlr] = True
            elif (Rnd >= p_lr) and (Rnd < (p_lr + p_ud)):
                # Going up or down
                Nr[1+Dud,:] = True
            else:
                # Going any direction
                Nr = numpy.ones([3,3], bool)
                Nr[1,1] = False
            #tmp = numpy.argwhere( (~Tc[yc-2:yc+1,xc-2:xc+1] & Nr & finger_mask[yc-2:yc+1,xc-2:xc+1].astype(numpy.bool)).T.reshape(-1) == True )
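            # A neighbour is a valid candidate if it has not yet been visited in
            # this walk (Tc), is allowed by the direction mask Nr, and lies inside
            # the finger mask; filtermask maps the flattened neighbourhood indices
            # back to (x, y) moves relative to the current point.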
            tmp = numpy.argwhere( (~Tc[yc-1:yc+2,xc-1:xc+2] & Nr & finger_mask[yc-1:yc+2,xc-1:xc+2].astype(bool)).T.reshape(-1) == True )
            Nc = numpy.concatenate((xc + filtermask[tmp,0],yc + filtermask[tmp,1]),axis=1)
            if (Nc.size==0):
                Vl=-1
                continue

            ## Detect dark line direction near current tracking point
            Vdepths = numpy.zeros((Nc.shape[0],1)) # Valley depths
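            # The "valley depth" of a candidate is a discrete second derivative of
            # the grey-level profile taken across the candidate direction, at
            # distance r from the candidate: a dark vein running along that
            # direction shows up as a deep valley and hence a large depth value.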
            for i in range(0,Nc.shape[0]):
                ## Horizontal or vertical
                if (Nc[i,1] == yc):
                    # Horizontal plane
                    yp = Nc[i,1]
                    if (Nc[i,0] > xc):
                        # Right direction
                        xp = Nc[i,0] + self.r
                    else:
                        # Left direction
                        xp = Nc[i,0] - self.r
                    Vdepths[i] = finger_image[int(yp + hW), int(xp)] - 2*finger_image[int(yp),int(xp)] + finger_image[int(yp - hW), int(xp)]
                elif (Nc[i,0] == xc):
                    # Vertical plane
                    xp = Nc[i,0]
                    if (Nc[i,1] > yc):
                        # Down direction
                        yp = Nc[i,1] + self.r
                    else:
                        # Up direction
                        yp = Nc[i,1] - self.r
                    Vdepths[i] = finger_image[int(yp), int(xp + hW)] - 2*finger_image[int(yp),int(xp)] + finger_image[int(yp), int(xp - hW)]

                ## Oblique directions
                if ( (Nc[i,0] > xc) and (Nc[i,1] < yc) ) or ( (Nc[i,0] < xc) and (Nc[i,1] > yc) ):
                    # Diagonal, up /
                    if (Nc[i,0] > xc and Nc[i,1] < yc):
                        # Top right
                        xp = Nc[i,0] + ro
                        yp = Nc[i,1] - ro
                    else:
                        # Bottom left
                        xp = Nc[i,0] - ro
                        yp = Nc[i,1] + ro
                    Vdepths[i] = finger_image[int(yp - hWo), int(xp - hWo)] - 2*finger_image[int(yp),int(xp)] + finger_image[int(yp + hWo), int(xp + hWo)]
                elif ( (Nc[i,0] < xc) and (Nc[i,1] < yc) ) or ( (Nc[i,0] > xc) and (Nc[i,1] > yc) ):
                    # Diagonal, down \
                    if (Nc[i,0] < xc and Nc[i,1] < yc):
                        # Top left
                        xp = Nc[i,0] - ro
                        yp = Nc[i,1] - ro
                    else:
                        # Bottom right
                        xp = Nc[i,0] + ro
                        yp = Nc[i,1] + ro
                    Vdepths[i] = finger_image[int(yp + hWo), int(xp - hWo)] - 2*finger_image[int(yp),int(xp)] + finger_image[int(yp - hWo), int(xp + hWo)]
            # End search of candidates
            index = numpy.argmax(Vdepths)  #Determine best candidate
            # Register tracking information
            Tc[yc, xc] = True
            # Increase value of tracking space
            Tr[yc, xc] = Tr[yc, xc] + 1
            # Move tracking point
            xc = Nc[index, 0]
            yc = Nc[index, 1]

    img_veins = Tr

    # Binarise the vein image
    md = numpy.median(img_veins[img_veins>0])
    img_veins_bin = img_veins > md
    img_veins_bin = scipy.ndimage.binary_closing(img_veins_bin, structure=numpy.ones((2,2))).astype(int)
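    # The threshold is the median visit count over pixels crossed at least once;
    # the small binary closing fills one-pixel gaps left between tracked points.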

    return img_veins_bin.astype(numpy.float64)


  def skeletonize(self, img):
    import scipy.ndimage.morphology as m
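    # Morphological thinning by hit-or-miss transforms: h1/m1 and h2/m2 are the
    # foreground/background patterns of the standard thinning structuring
    # elements; each pair is applied in four 90-degree rotations until the image
    # no longer changes.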
    h1 = numpy.array([[0, 0, 0],[0, 1, 0],[1, 1, 1]])
    m1 = numpy.array([[1, 1, 1],[0, 0, 0],[0, 0, 0]])
    h2 = numpy.array([[0, 0, 0],[1, 1, 0],[0, 1, 0]])
    m2 = numpy.array([[0, 1, 1],[0, 0, 1],[0, 0, 0]])
    hit_list = []
    miss_list = []
    for k in range(4):
        hit_list.append(numpy.rot90(h1, k))
        hit_list.append(numpy.rot90(h2, k))
        miss_list.append(numpy.rot90(m1, k))
        miss_list.append(numpy.rot90(m2, k))
    img = img.copy()
    while True:
        last = img
        for hit, miss in zip(hit_list, miss_list):
            hm = m.binary_hit_or_miss(img, hit, miss)
            img = numpy.logical_and(img, numpy.logical_not(hm))
        if numpy.all(img == last):
            break
    return img


  def __call__(self, image):
    """Reads the input image, extract the features based on Maximum Curvature
    of the fingervein image, and writes the resulting template"""

    finger_image = image[0]    #Normalized image with or without histogram equalization
    finger_mask = image[1]

    return self.repeated_line_tracking(finger_image, finger_mask)
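

if __name__ == '__main__':
  # Minimal usage sketch, not part of the original extractor: it runs the
  # tracking on a synthetic image/mask pair only to illustrate how the class is
  # called. In practice the input is the preprocessed fingervein image and its
  # finger mask, e.g. as produced by a bob.bio.vein preprocessor.
  numpy.random.seed(0)
  synthetic_image = numpy.random.rand(100, 200)         # grey levels in [0, 1]
  synthetic_mask = numpy.ones((100, 200), dtype=bool)   # whole frame marked as finger
  extractor = RepeatedLineTracking(iterations=100, rescale=False)
  veins = extractor((synthetic_image, synthetic_mask))
  print('Binary vein image: %d x %d, %d vein pixels' % (
      veins.shape[0], veins.shape[1], int(veins.sum())))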