From a21d16c5e5d7be79f6e0881b48b30687180afd8f Mon Sep 17 00:00:00 2001
From: Teodors Eglitis <teodors.eglitis@idiap.ch>
Date: Mon, 24 Oct 2016 14:07:29 +0200
Subject: [PATCH] Add preprocessor utilities for ROI annotation, vein image construction and rotation

---
 MANIFEST.in                                   |   2 +-
 bob/bio/vein/preprocessor/utils/__init__.py   |   6 +
 bob/bio/vein/preprocessor/utils/utils.py      | 360 ++++++++++++++++++
 .../preprocessors/0019_3_1_120509-160517.txt  |  49 +++
 .../preprocessors/ConstructAnnotations.npy    | Bin 0 -> 1486 bytes
 .../preprocessors/ConstructAnnotations.png    | Bin 0 -> 3056 bytes
 .../preprocessors/ConstructAnnotations.txt    |  31 ++
 bob/bio/vein/tests/test.py                    |  57 +++
 doc/api.rst                                   |   5 +
 9 files changed, 509 insertions(+), 1 deletion(-)
 create mode 100644 bob/bio/vein/preprocessor/utils/__init__.py
 create mode 100644 bob/bio/vein/preprocessor/utils/utils.py
 create mode 100644 bob/bio/vein/tests/preprocessors/0019_3_1_120509-160517.txt
 create mode 100644 bob/bio/vein/tests/preprocessors/ConstructAnnotations.npy
 create mode 100644 bob/bio/vein/tests/preprocessors/ConstructAnnotations.png
 create mode 100644 bob/bio/vein/tests/preprocessors/ConstructAnnotations.txt

diff --git a/MANIFEST.in b/MANIFEST.in
index bfc4f45..9d106fb 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,3 @@
 include README.rst bootstrap-buildout.py buildout.cfg COPYING
 recursive-include doc *.py *.rst
-recursive-include bob/bio/vein/tests *.png *.mat
+recursive-include bob/bio/vein/tests *.png *.mat *.txt *.npy
diff --git a/bob/bio/vein/preprocessor/utils/__init__.py b/bob/bio/vein/preprocessor/utils/__init__.py
new file mode 100644
index 0000000..516dc5e
--- /dev/null
+++ b/bob/bio/vein/preprocessor/utils/__init__.py
@@ -0,0 +1,6 @@
+from .utils import ManualRoiCut
+from .utils import ConstructVeinImage
+from .utils import RotateImage
+
+# gets sphinx autodoc done right - don't remove it
+__all__ = [_ for _ in dir() if not _.startswith('_')]
diff --git a/bob/bio/vein/preprocessor/utils/utils.py b/bob/bio/vein/preprocessor/utils/utils.py
new file mode 100644
index 0000000..b323781
--- /dev/null
+++ b/bob/bio/vein/preprocessor/utils/utils.py
@@ -0,0 +1,360 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Fri Aug  5 17:12:41 2016
+"""
+
+# import what is needed:
+import numpy as np
+from PIL import Image, ImageDraw, ImageFilter
+import scipy.ndimage
+from scipy.signal import convolve2d
+import scipy.ndimage.filters as fi
+import os
+import six
+
+
+class ManualRoiCut():
+  """
+  Class for manual ROI extraction -- ``ManualRoiCut``.
+
+  Parameters:
+
+  annotation (File, list): Full path to the annotation file containing the
+  ROI annotation data in ``Bob`` format -- ``(x, y)`` -- **or** a list of
+  annotation points (tuples) in the same ``Bob`` format ``(x, y)``.
+
+  image (File, :py:class:`numpy.ndarray`), optional: The image to be annotated --
+  either the full path to the image file or the image data as a
+  :py:class:`numpy.ndarray`. The image is optional because it is not needed
+  to generate the binary ROI mask.
+
+  sizes (tuple): optional -- a tuple with the image size in ``Bob`` format
+  ``(x, y)``. It is only used to generate the binary mask when no image is
+  given.
+
+  Returns:
+
+  A ``uint8`` :py:class:`numpy.ndarray` 2D array (image) containing the ROI
+  mask. Value ``1`` marks the ROI area, value ``0`` the area outside the ROI.
+  ``uint8`` is chosen so that the annotations can be used in the
+  ``bob.bio.vein`` platform (there seem to be problems when saving / loading
+  ``bool`` objects).
+
+  Examples:
+
+  - generate the ROI mask::
+
+      from bob.bio.vein.preprocessor.utils import ManualRoiCut
+      roi = ManualRoiCut(roi_annotation_points).roi_mask()
+
+  - replace the image's outside-ROI region with value ``pixel_value``::
+
+      from bob.bio.vein.preprocessor.utils import ManualRoiCut
+      image_cut = ManualRoiCut(roi_annotation_points, image).roi_image(pixel_value=0)
+  
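+  - generate a mask without an image, from an annotation-point list and an
+    explicit size only (a minimal sketch -- the four corner points below are
+    made up purely for illustration)::
+
+      from bob.bio.vein.preprocessor.utils import ManualRoiCut
+      points = [(50, 40), (50, 340), (300, 340), (300, 40)]  # Bob format -- (x, y)
+      mask = ManualRoiCut(points, sizes=(480, 480)).roi_mask()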
+  """
+
+  def __init__(self, annotation, image=None, sizes=(480, 480)):
+    if isinstance(annotation, six.string_types):
+        if os.path.exists(annotation):
+            with open(annotation, 'r') as f:
+                retval = np.loadtxt(f, ndmin=2)
+            # convert from Bob format (x, y) to regular (y, x):
+            self.annotation = list([tuple([k[1], k[0]]) for k in retval])
+        else:
+            raise IOError("Annotation file does not exist: {}".format(annotation))
+    else:
+        # convert from Bob format (x, y) to regular (y, x):
+        self.annotation = list([tuple([k[1], k[0]]) for k in annotation])
+    
+    # load the image, if given:
+    if image is not None:
+        if isinstance(image, six.string_types):
+            if os.path.exists(image):
+                self.image = np.array(Image.open(image))
+            else:
+                raise IOError("Image file does not exist: {}".format(image))
+        else:
+            self.image = np.array(image)
+        self.size_y = self.image.shape[0]
+        self.size_x = self.image.shape[1]
+    else:
+        self.image = None
+        self.size_y = sizes[1]
+        self.size_x = sizes[0]
+
+  def roi_mask(self):
+      """Generates the ROI mask.
+
+      Returns: A ``uint8`` :py:class:`numpy.ndarray` 2D array (image)
+      containing the ROI mask. Value ``1`` marks the ROI area, ``0`` the
+      area outside the ROI.
+      """
+      mask = Image.new('L', (self.size_x, self.size_y), 0)
+      ImageDraw.Draw(mask).polygon(self.annotation, outline=1, fill=1)
+      # kept as ``uint8`` rather than ``bool`` -- see the class docstring:
+      mask = np.array(mask, dtype=np.uint8)
+      return mask
+
+  def roi_image(self, pixel_value=0):
+      """Replaces the pixel values outside the ROI with ``pixel_value``
+      (default -- 0).
+
+      pixel_value (integer): the value the outside-ROI region is replaced
+      with; ``0`` by default.
+
+      Returns: The image the class was initialized with, in which the pixel
+      values outside the ROI are replaced with ``pixel_value``.
+      """
+      if self.image is not None:
+          mask = self.roi_mask()
+          self.image[mask == 0] = pixel_value
+          return self.image
+      else:
+          raise IOError("No input image given, can't perform non-ROI region removal")
+
+            
+class ConstructVeinImage():
+  """
+  Constructs a binary image from manual annotations. The class is meant to be
+  used with the ``bob.db.biowave_v1`` database.
+
+  The returned 2D array (see ``return value`` below) corresponds to a person's
+  vein pattern, as marked by a human expert.
+
+  Parameters:
+
+  annotation_dictionary (:py:class:`dict`): Dictionary containing the image and
+  the annotation data. Such a :py:class:`dict` is returned by the high-level
+  implementation of the ``bob.db.biowave_v1`` database. It is supposed to
+  contain the fields:
+
+  - ``image``
+  - ``roi_annotations``
+  - ``vein_annotations``
+
+  although only ``image.shape[0]``, ``image.shape[1]`` and the
+  ``vein_annotations`` entry are actually used.
+
+  center (:py:class:`bool`): Flag; if set to ``True``, the annotations are centered.
+
+  Returns:
+
+  :py:class:`numpy.ndarray` : A 2D array with ``uint8`` values -- value ``1``
+  marks the annotated vein pattern. The output image is constructed from the
+  annotation points: each annotated line's points are connected and drawn as a
+  5 pixel wide line; after all lines are drawn, the image is smoothed using a
+  median filter with a 5x5 pixel window.
+
+  Examples::
+
+      from bob.bio.vein.preprocessor.utils import ConstructVeinImage
+      vein_image = ConstructVeinImage(annotation_dictionary, center=True).return_annotations()
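+
+  A minimal sketch of building such a dictionary by hand (the blank image and
+  the single annotated line below are made up purely for illustration; in
+  practice the dictionary comes from the ``bob.db.biowave_v1`` high-level
+  implementation)::
+
+      import numpy as np
+      from bob.bio.vein.preprocessor.utils import ConstructVeinImage
+
+      annotation_dictionary = {
+          "image": np.zeros((480, 480), dtype=np.uint8),
+          "roi_annotations": [],
+          "vein_annotations": [[(120, 50), (200, 150), (350, 200)]],
+      }
+      vein_image = ConstructVeinImage(annotation_dictionary, center=True).return_annotations()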
+  """
+  def __init__(self, annotation_dictionary, center = False):
+    self.image            = annotation_dictionary["image"]
+    #self.roi_annotations  = annotation_dictionary["roi_annotations"]
+    self.vein_annotations = annotation_dictionary["vein_annotations"]
+    self.center           = center
+
+  def return_annotations(self):
+    """Draws the annotated vein lines and returns the resulting binary image."""
+    im = Image.new('L', (self.image.shape[0], self.image.shape[1]), (0)) 
+    draw = ImageDraw.Draw(im)
+    if self.center == True:
+      xes_all = [point[1] for line in self.vein_annotations for point in line]
+      yes_all = [point[0] for line in self.vein_annotations for point in line]
+      for line in self.vein_annotations:
+        xes = [point[1] - np.round(np.mean(xes_all)) + 239 for point in line]
+        yes = [point[0] - np.round(np.mean(yes_all)) + 239 for point in line]
+        for point in range(len(line) - 1):
+          draw.line((xes[point],yes[point], xes[point+1], yes[point+1]), fill=1, width = 5)
+    else:
+      for line in self.vein_annotations:
+        xes = [point[1] for point in line]
+        yes = [point[0] for point in line]
+        for point in range(len(line) - 1):
+          draw.line((xes[point],yes[point], xes[point+1], yes[point+1]), fill=1, width = 5)
+    im = im.filter(ImageFilter.MedianFilter(5))
+    im = np.array(im, dtype = np.uint8)
+    return im
+
+
+class RotateImage():
+  """
+  RotateImage -- automatically rotates an image.
+
+  So far tested only with annotations (binary images). The algorithm
+  iteratively searches for a rotation angle such that, when the image is
+  filtered with the ``vein filter`` (as published at BIOSIG 2015), the mean
+  angle of the filtered image (computed over the pixels whose magnitude is at
+  least 1/2 of the maximal value of the filtered image) lies within
+  ``+/- 0.5`` [deg].
+
+  Parameters:
+
+  image (:py:class:`numpy.ndarray`) : A 2D array containing the input image.
+  Currently tested only with binary images.
+
+  dark_lines (:py:class:`bool`) : A flag (default value -- ``False``) that
+  determines what kind of lines the algorithm searches for. With the default
+  value ``False`` it searches for lines that are *whiter than the background*
+  (as is the case with annotations). If set to ``True``, it searches for lines
+  that are *darker than the background* (as is the case with vein images).
+
+  Returns:
+
+    :py:class:`numpy.ndarray` : A 2D array with the rotated input image
+
+  Examples::
+
+      from bob.bio.vein.preprocessor.utils import RotateImage
+      image = RotateImage(image, dark_lines=False).rotate()
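+
+  A minimal sketch chaining it with ``ConstructVeinImage`` (assumes an
+  ``annotation_dictionary`` as described for that class)::
+
+      from bob.bio.vein.preprocessor.utils import ConstructVeinImage, RotateImage
+      vein_image = ConstructVeinImage(annotation_dictionary, center=True).return_annotations()
+      aligned = RotateImage(vein_image, dark_lines=False).rotate()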
+  """
+  def __init__(self, image, dark_lines = False):
+    self.image            = image
+    self.dark_lines       = dark_lines
+
+  def __rotate_point__(self, x, y, angle):
+    """
+    [xp, yp] = __rotate_point__(x, y, angle)
+
+    Rotates the point (or list of points) ``(x, y)`` by ``angle`` degrees and
+    returns the rounded integer coordinates.
+    """
+    if isinstance(x, list):
+      if len(x) != len(y):
+        raise ValueError("Lengths of x and y should be equal")
+      xp = [int(np.round(xn * np.cos(np.radians(angle)) - yn * np.sin(np.radians(angle)))) for xn, yn in zip(x, y)]
+      yp = [int(np.round(yn * np.cos(np.radians(angle)) + xn * np.sin(np.radians(angle)))) for xn, yn in zip(x, y)]
+      return xp, yp
+
+    xp = x * np.cos(np.radians(angle)) - y * np.sin(np.radians(angle))
+    yp = y * np.cos(np.radians(angle)) + x * np.sin(np.radians(angle))
+    return int(np.round(xp)), int(np.round(yp))
+  
+  def __guss_mask__(self, guss_size=27, sigma=6):
+      """Returns a 2D Gaussian kernel array."""
+      inp = np.zeros((guss_size, guss_size))
+      inp[guss_size//2, guss_size//2] = 1
+      return fi.gaussian_filter(inp, sigma)
+  
+  def __ramp__(self, a):
+    a = np.array(a)
+    a[a < 0]=0 
+    return a
+  
+  def __vein_filter__(self, image, a = 3, b = 4, sigma = 4, guss_size = 15, only_lines = True, dark_lines = True):
+    """
+    Vein filter -- returns a complex-valued response whose magnitude encodes
+    the line strength and whose argument encodes the line orientation.
+    """
+    if dark_lines:
+      Z = 1
+    else:
+      Z = -1
+
+    if not isinstance(image, np.ndarray):
+      image = np.array(image, dtype = np.float)
+    
+    padsize = 2*a+b
+    gaussian_mask = self.__guss_mask__(guss_size, sigma)
+    
+    
+    f2 = np.lib.pad(image, ((padsize, padsize), (padsize, padsize)), 'edge')
+    f2 = convolve2d(f2, gaussian_mask, mode='same')
+    
+    result = np.zeros(image.shape)
+    
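+    # At each candidate orientation the smoothed image is sampled at ten
+    # offsets: two columns at x = -b and x = +b, each with rows at
+    # y in {-2a, -a, 0, a, 2a}, all rotated by the current angle.  The outer
+    # samples (rows -2a and 2a, weight 3) minus the inner samples (rows -a, 0,
+    # a, weight 2), half-wave rectified by ``__ramp__``, give the line
+    # amplitude; with ``only_lines`` the response is additionally suppressed
+    # wherever any inner sample does not contrast with both outer samples of
+    # its column in the expected direction.  Responses are accumulated as
+    # complex numbers with doubled angles (halved again after the loop) to
+    # handle the 180-degree ambiguity of line orientations.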
+    for angle in np.arange(0,179,11.25 / 2):
+      [ap, bp] = self.__rotate_point__(-b,-2*a, angle)
+      mask_1 = f2[padsize+ap:-padsize+ap,padsize+bp:-padsize+bp]
+      
+      [ap, bp] = self.__rotate_point__(-b,-1*a, angle)
+      mask_2 = f2[padsize+ap:-padsize+ap,padsize+bp:-padsize+bp]
+      
+      [ap, bp] = self.__rotate_point__(-b,   0, angle)
+      mask_3 = f2[padsize+ap:-padsize+ap,padsize+bp:-padsize+bp]
+      
+      [ap, bp] = self.__rotate_point__(-b, 1*a, angle)
+      mask_4 = f2[padsize+ap:-padsize+ap,padsize+bp:-padsize+bp]
+      
+      [ap, bp] = self.__rotate_point__(-b, 2*a, angle)
+      mask_5 = f2[padsize+ap:-padsize+ap,padsize+bp:-padsize+bp]
+      
+      [ap, bp] = self.__rotate_point__(+b,-2*a, angle)
+      mask_6 = f2[padsize+ap:-padsize+ap,padsize+bp:-padsize+bp] 
+      
+      [ap, bp] = self.__rotate_point__(+b,-1*a, angle)
+      mask_7 = f2[padsize+ap:-padsize+ap,padsize+bp:-padsize+bp]
+      
+      [ap, bp] = self.__rotate_point__(+b,   0, angle)
+      mask_8 = f2[padsize+ap:-padsize+ap,padsize+bp:-padsize+bp]
+      
+      [ap, bp] = self.__rotate_point__(+b, 1*a, angle)
+      mask_9 = f2[padsize+ap:-padsize+ap,padsize+bp:-padsize+bp]
+      
+      [ap, bp] = self.__rotate_point__(+b, 2*a, angle)
+      mask_10 = f2[padsize+ap:-padsize+ap,padsize+bp:-padsize+bp]
+      
+      amplitude_rez = self.__ramp__(Z*(mask_1+mask_5+mask_6+mask_10)*3 \
+                       -Z*(mask_2+mask_3+mask_4+mask_7+mask_8+mask_9)*2)
+                       
+      if only_lines == True:
+        col = np.zeros((6,image.shape[0], image.shape[1]))
+        col[0] = np.minimum(self.__ramp__(-Z*mask_2+Z*mask_1),self.__ramp__(-Z*mask_2+Z*mask_5))
+        col[1] = np.minimum(self.__ramp__(-Z*mask_3+Z*mask_1),self.__ramp__(-Z*mask_3+Z*mask_5))
+        col[2] = np.minimum(self.__ramp__(-Z*mask_4+Z*mask_1),self.__ramp__(-Z*mask_4+Z*mask_5))
+        col[3] = np.minimum(self.__ramp__(-Z*mask_7+Z*mask_6),self.__ramp__(-Z*mask_7+Z*mask_10))
+        col[4] = np.minimum(self.__ramp__(-Z*mask_8+Z*mask_6),self.__ramp__(-Z*mask_8+Z*mask_10))
+        col[5] = np.minimum(self.__ramp__(-Z*mask_9+Z*mask_6),self.__ramp__(-Z*mask_9+Z*mask_10))
+        angle_rez = np.min(col, axis = 0)
+        amplitude_rez[angle_rez==0] = 0
+        
+      result = result + amplitude_rez*np.exp(1j*2*(angle - 90)*np.pi/180)
+      
+    result = np.abs(result) * np.exp(1j*np.angle(result)/2)
+    return result
+    
+  def __get_rotatation_angle__(self, image, dark_lines = False):
+    """
+    angle = __get_rotatation_angle__(image, dark_lines = False)
+
+    Returns the rotation angle in deg.
+    """
+    result = self.__vein_filter__(image, a = 4, b = 1, sigma = 2, guss_size = 15, only_lines = True, dark_lines = dark_lines)
+    result_nonzero = result[np.abs(result) > np.abs(result).max() / 2]
+    result_angle = np.angle(result_nonzero, deg=True)
+    angle = result_angle.mean()
+    return angle
+  
+  def __rotate_image__(self, image, angle):
+    """
+    image = rotate_image(image, angle)
+    """
+    image = scipy.ndimage.rotate(image, angle, reshape = False, cval=0)
+    image[image > 255] = 255
+    image[image < 0]   = 0
+    return image
+  
+  def __align_image__(self, image, precision = 0.5, iterations = 25, dark_lines = False):
+    """
+    [image, rotation_angle, angle_error] = align_image(image, precision = 0.5, iterations = 25)
+    """
+    rotation_angle = 0
+    angle_error = self.__get_rotatation_angle__(image, dark_lines)
+    if abs(angle_error) <= precision:
+      return image, rotation_angle, angle_error
+    for k in range(iterations):
+      rotation_angle = rotation_angle + (angle_error * 0.33)
+      image = self.__rotate_image__(image, angle_error * 0.33)
+      angle_error = self.__get_rotatation_angle__(image, dark_lines)
+      if abs(angle_error) <= precision or k == iterations - 1:
+        return image, rotation_angle, angle_error
+
+  def rotate(self):
+    """A call method that executes image rotation
+    """
+    [rotated_image, rotation_angle, angle_error] = self.__align_image__(image = self.image, dark_lines = self.dark_lines)
+    rotated_image = np.array(rotated_image, dtype = self.image.dtype)
+    return rotated_image
diff --git a/bob/bio/vein/tests/preprocessors/0019_3_1_120509-160517.txt b/bob/bio/vein/tests/preprocessors/0019_3_1_120509-160517.txt
new file mode 100644
index 0000000..ad6c946
--- /dev/null
+++ b/bob/bio/vein/tests/preprocessors/0019_3_1_120509-160517.txt
@@ -0,0 +1,49 @@
+26 0
+27 7
+29 24
+31 38
+40 80
+45 104
+50 124
+58 156
+61 171
+65 187
+67 194
+69 205
+71 223
+74 251
+76 274
+79 298
+81 324
+83 341
+83 358
+86 385
+87 408
+89 445
+90 474
+91 518
+91 534
+91 559
+90 581
+90 620
+90 633
+88 648
+89 655
+89 672
+277 672
+279 648
+280 606
+284 552
+284 482
+280 413
+280 358
+281 309
+286 277
+288 238
+288 195
+285 165
+285 138
+285 111
+288 49
+291 20
+294 0
diff --git a/bob/bio/vein/tests/preprocessors/ConstructAnnotations.npy b/bob/bio/vein/tests/preprocessors/ConstructAnnotations.npy
new file mode 100644
index 0000000000000000000000000000000000000000..33c4f26d2a7bb119f5bea4750e7daa67d3759744
GIT binary patch
literal 1486
zcmWIWW@Zs#0D)wGanCC(FKeP$85lsAlYxODv8X8CKrgSLa<X5jZ$Km?gBwG&dP-_>
za*?`~f_ja=x{iW+T7FSUQDR<veo;y)NZ2hgr#MwdLA^L5u^?65N<qV1M^i_kRsk7s
zH83UTmF5;y>LuqFrRwFD=9FY678NB{a>W;=Cg<lBmlTyImv9v_LR4|(r9fm0nY<ZV
zLm89Wg9@27ycxaOTcHY4N-7Idxe8fAnfxuh8N3<Wg9=$SyqQB8YyABDy#D|H{~t_v
z+m|GD#ul<^#1^t^c*}VoZ!6^RRsvC+-l`yq%Uj#~SX&{tw~_bJwn83nGZ4k=Z409K
zyj?&Pzqc2N67cp1X%O@d^FG;DDCC{seW|Tb*gMhta$BK@cbfN&wn9<wED$B;UFv<U
ztx(*%%KK_tp@ermh?4Yf_P*X$DCOPfeY34l+Iyn+t+qlL@7dmW+X`j9mwDfBE0ptI
z=l!6qP~LkBh*I$04WbmiFL>W+D^&8n=6$=ZP}%z?$Y>Ss+aNPlz3+KnXe(6nz7KMe
zy7vR`Lv4i`-jBU^wH0c5Kk?qyR;ZO4Td1w!eFvmV$NN5r()E4@qV&Aqfy~hN{sN*5
zy#IncV(81r2%?RA*%?9pGxil>^gi2GXyPl$=zXTG(9~Cp5fol#zWR*bN7@R_eeD>%
zcefQ<_&PI!Y_;_D0`skWL%@7%-zYF`<C_GgZGAJrw4HB0qxYV+LVMpbM(=%Xg$}+o
zjNS*@3LSmBz~(ynPGa;v(N^f}yO7cQTw9@w?=nV^S6zKqgWcukyPXjfobJB+7(wCU
z;d_qJ`$=1&r|(5Z?`Lg=Ua7H#-WuL*-aFe0eY|_UceEAydQSpTe%{l)x3?Agd(Q&}
zPk{GQ5EbaX4nzfcZ}r~VRv7HP7es}49|2LJ-X}p+nD<rhEp3J2-uFPf2=8YgD$@HM
zNMDrqZxAoqmyHob$N2Jq=~z(UZEY)z^OXhj<9$^aL1rZQYJ=%SUqdjR<ZH(0y}7M0
z+1C<Gr+@-=OIu+oC{VYy6{e-e7N%=>AN8(oE6ngd0irU!FM8Ls6=r$g@UCqu%=W(T
zUDsBa<NeaRp{+32`;&J|TVbB}cki~g!hBx_M(>Wc!UA7DM(@71!a|UvCbkt8fgClZ
zt+3cvozZ({TVV+(M(4B@min49de3VsECV@aVOwFjuPvkZ(ze11Uq?poRc(cpAV;lh
zE3EQ$Vf5b6R#@%p3U+i&YHVSxhIf~@ep_Lk_XKatw!(Vvsopkig$>?wz3tly8@(5M
zJGT`!d9UzxYb$K_UgPcAR@mab$=j!`u+@9JcR*WVoA(~?(6+*M?*rZuZG|1)N4=xl
z3Ol{ec*nODc6nd-PH8Lb_I~J{-d5P-{lYu5t+3boop)ASVP9%&VZVm=4^T>&;Qh_}
zX<Ok$?=Rl3+6pImfAN0bRyf)Fi}%;I!YSS#z5lcoPWAra{kN@fn)e%D7RI*1>E2Jl
z^bGHNzHE$bg)_ZxfazJ@m%#Mw)Y!r~8s5R)U)l=idPjPHYb%`Ro#g$it#H0~2FRcV
z-g&-EjBSMry-UCbE%L7PWo2wDT<qQ8%g)$VxWv01OfOAMElJV~@MdHZVa8nqgNkMl
dZfFFN@B%r&o0SbDz{tSJV9vn6(8vnn0RW(rJCy(c

literal 0
HcmV?d00001

diff --git a/bob/bio/vein/tests/preprocessors/ConstructAnnotations.png b/bob/bio/vein/tests/preprocessors/ConstructAnnotations.png
new file mode 100644
index 0000000000000000000000000000000000000000..3c87424906f75834b90c9158454101dc27bed444
GIT binary patch
literal 3056
zcmeAS@N?(olHy`uVBq!ia0y~yV0-|=91IK$3=#VudNMF@Z})U@45^s&cJA#-i!B6P
zx_kfrk57|N*<2>wc*HkyW~H2rp>3Y|GYxO2h7N;c63IP=WD?K1Go|*jrvFU;!scM?
znqr=Fuv*1D=irlw4{X6MM{KIJ&m|sw5@BE|=F`V|-P5j4FQQ=TBlDjUr<=-8yu04C
z+CWCRmGyEmTut}_3l2W+dusm`ZZ+N9_@FdGYYS_>W57b)8M54J$D2-WOeoT+5S<k4
znRxIE)Y!?%2eU8ughYK{6KT%iPJ=3%A%jrQI<MGMY!6rG?T!r(LOas5bSqfZ_KThA
zWBq^V3Tw21wOCB^67_8fMLIj!JpFSHn#L@uROHNei7>Dgi)mf#77cO!dc)5=n{{T$
zaL2W)9TJOaPm;(zD0)k|=PsX4MYCF|@U}#5yOSa}B)2_~D&8`i*(~Q^ID^fzZ4b0;
z1&<wVdTk;(Z}HRE#~U7K7j&NU^ln+6QE}Npr=pwfu<D*2ZMqfH)edin_@Ku7ZCQV~
zgHFZlze;aeHaFkpj{abF_Fh19&cWMT&LuDSwe^ARH{N7<&gc((zqvQ<;6La8NGkWB
z^_kPBzeUe^VDBURO7G{gt?i{v`yKaRdg^=gm~8cl^gedZy0h;WzMI;rwtvUR-o&Q+
z)hzc4T)OQZRGCywVBHnn*YGpcjdNabs)eEQ_xG>!nadN-a-TQ*c9mHoa?$ht@PH_B
zv15OQoPQ_0kZkIh*Sa`y|EU{}VsG+ZvVGgtcX9CxjuUr-UfkHQ^S8NN(uRaL#)h^F
z_%sBBcy>f9J-W+Wo+}d<+$zwsJz8m({8`3ZUWzQUe@p1jST3x0pmy@brn6@gIN09q
z=VSQpYrr!5_5lH&>&1H;xl_y~nvMtNN9~<%-E4O0<80GUKDW1gonCF8^Z9U_;@k(X
zqrd;y^Cxvr!VTqz&Y3O0<sP&4Gy5HrW#Gu$QU2iFe!ky@&lv?|1b1Y<5@q{t%fNE@
z?PUK2=IdA)4i<#IRhV&~*<r&@VTpt1uOB)m{L9l(z$QaIzSdJct=Un)rej-c%I0R}
zA1|f$)_cZ27fam6&T)`AO?!DYm)5)|_3rr&E-q&}+GpRr-I8wZAoS-^;pqu(dTcHR
zk2IHVEPfKss3ak{=2@AD$iWW5K8HJt`~q0)W4NP_U3j^=*oA?UO?7W7_qwLNT{+sb
znr3fV$jANbfZ(48MSMT6ug_+8FL=1~fXTYvDR+%#CG42gvCVex0Zulh>I!8hFN1qV
zYCNlL7&(vKp3gJaU{Zq3VHO9@Z4X}`6q>VFj)_T4;@3xZO-JEBEJ3^+ejok{9*f=c
zo9_XOy~}T<3nveX=!CkwXAL&!NbFf_!9Fjc@A3wN2b^_@hp#heNTg*rUp&UZdRQd0
zOZ&bc>wWL%nu|WZeIqlWY4dV6PLG4FZ*CrB2;QJvBRQifzvZD;uiO)s)r~qg=6+yp
z;1XN4(41N8MvQCNLB0FEDxx;Fi8B@nI&!aj`|#z#*!i_44O$kz>slE@3mz^zzDQ%M
zT*N^yp?54{1&OAI)Qj$ZUM-TW6LU~{_xFk&)q0(F9~#PC5?ICG-Pr$fZYiU7h5zBZ
zvm)G*H#BjFe?M5&z#aE^NBp#qZw=ZN(Jk5~$~&64n@<_`##bDCrgESuo3TnE!sW0<
z!ok@#3mO(VEf>v^auhowxbUC?_dcn43k%u3nv}o%apK-rp>XwIp-&*YSj=9f*B95G
z;M?%v?Bj$<HFx<pJUG;_Cuk~1%E75q?(O}*GwhJFSdDJ2vbxxQrzTfVjTve&{SEpR
zdrvQw;<(GX<$+>k>a3?61qYQCK6mThIDX)Hqgc$$&AW8t`|p(gG5FtiuVJx|c=o@V
zD1*S(V}1os8TrbO^yu%>e;{lVUiNq96yK-8@n_TjE#Cfk@_IdG^9dWD%1OV_3qE5V
zzw>y0Qx@O+2U6^ZH(y^+ZD;CgvElcF(g(rKE^Y@~FaNBxUH_w?_@_+y<4N0G=0AEJ
zd1d)L1%`OvAhjQN;^u}sbLYQO=oG$Venx4|;S8nJrvA+jr-#(~9dzG)R<|=}`NX&<
zA~s*+txhjr*E^?)UuMw(`SW*N{0_Q5u2}k1MOSV4gT*-}JeTw=RwsxEYupZ0U-{s)
zXHda!Pn(N{ws-s29N{mx#p3)Xs-yRY;_8IU9$5!vWz}CRR8GIr#6Ee^-iQJh19qFW
z5A3AHxqV6kriK;tai<-d9QxrxshE-OrwO4SEGFGOFeUWEiXT3~iz3%>OkH_uui>><
z|MeZ`*4IxtmLGL+`n)&)KkSY<`Te8y)AafBM(R5>U%IaP_oVq<`i$H?KQHHBW?!~Z
zx{_7C!YlUhfAhD!K2@LjHdXB?o2ESd{+8MgQ~%s&c=qyf*^~K;Y~8!8oAw84T(suA
z$6wL*yCHjtZ^7Gh|Krbvzt~ddyqN!9+jo~BmnGsgf&7PGo9}+`d5LU`iIV<|xqF&A
zPrsV_L^LL0{f6b9IzJ|HTsEI@q=|cXo0N!A`0q1K)_bnp^OApXrRn_P_vYIl%oqOn
ztbI%I#qbF)U!7RI#BamR<4rr6&nWyTP}jLwEzDup)e_HkP<^`E-VE&kMS&WDUIumX
zI|T|pbq9~>T{?b{>shh-4;HPHX##sf7iY_b{z(&Gv*3lyNmlV+>_?QGU!2hY;nSk}
zXnyPlKN0>(u^(*tcPK0EPMH7YzT(~oZIx?I`K#`F@TbXqdR%}}q~hKOcb7_T`76XR
z&o1tNK;^DUQ@;Oc+CNX_*V$EHJx}R3KJdH3c88^jKQ&PG08i8H@EyPYo2UE<;&>Nn
z6}8qWn4^zl;jG)7y$|H3NHxz$*vWNPA;Le3pQUNFdHMgi@};K71qDT#w2P;@zVGtU
z+<)Y{>wlfJm64}7h4vUq{@rTu|N8Az1%a3qQ(nyf-uZK>z@2>=TQ12w<z_qBy7Kva
zs{oF5aoawduP^$kD!51A&+LFo(|5gjXTO^4@>UeodAW}BUS}UaOS*=5gpiEj>8~Y%
z_uUWfk}VM6TxUNsU<Whbs%w8^JMIgL&0QF?!1KfW#SixjPhC|vlV!bE$lQf72||C0
zO>7LmI<PMkiuqwZO>EuygK9U9yV`Gf+r9PuVwr?ZEbC8A`R1)25%XNXq$&4@^fo&S
zrD>6TPm6iDr!?I^vBqK-V|?eo<{j~M2d}8EVyrs&3eoxiwQtTeUG1Ns)D*QLT}yce
z>+O6O1MX?z(GNu581o7q)Vbl(f@tLMtxfPx7R_LFZ|R@Z6uV(#LRkdsZtl8+S{6!o
zSluUZn~C}KvrdLIiIz0gZu`%>GU4+vaMP!QbvJikg6Nx>FIfE!3MQUr^*gxk9QT~2
zvrVj%59{2RTX2DE-gNO&4_50v`5vt9ExPV3YZJnsn=ffPD;;!j-MM@ZR`E46!#@1^
zxQcgWLi%^!l@FRsFLRZ3);Vk4$h*IRYu+^R!U$IDgZdGyv&HW;#V$U{Djj$5T?_Y?
zCgnrZnj<#szR$GwLC1!t3Sw_w#Viob;o8!)ygxiOgY|b!P-EBzO>n62t$$#7s97iN
zpqi4-47sv{pG;@PZm>DhWP4K3QFq6JxWzMBtq&d4y}`0G;e6=?ZZn~#t;#wI5z=`F
zQ)P5z6Y~zbuKn^*XnTUQUWC<d*1Ut4_U1Q6Z%`C}Bjl@h!-HemyG~ISZ)d$5!I4jz
zcy}lKEHv4j@bs08;GZV$L$gd-xW%)|4j#(le$({yqQD!w8ylSTBWm@!<2G3RX*zoR
z;-99Y8xx9HShDXosVPKU%XQJ<5Zk}IF@D3ETTJ^C3Uy}4h#uUetjl=t*4zyZ51O)C
zQdw3nQi%A~v#=>4Q-wv_J;G^&k(f_kTf#%<MBamkG&rZpq-t_bGtp6s*e)z4xTacT
zjZ%b|<iS;f(*$EKyzVY#dA+ljL-pY0PfD$Q34vEwPP<09fB5rJtEDd?u!SZ2UaE%5
f4Gn{pu0QPmpP$v6e`EPqP*dL1)z4*}Q$iB}e20qw

literal 0
HcmV?d00001

diff --git a/bob/bio/vein/tests/preprocessors/ConstructAnnotations.txt b/bob/bio/vein/tests/preprocessors/ConstructAnnotations.txt
new file mode 100644
index 0000000..83beedf
--- /dev/null
+++ b/bob/bio/vein/tests/preprocessors/ConstructAnnotations.txt
@@ -0,0 +1,31 @@
+11 91
+8 322
+76 320
+114 307
+140 300
+176 292
+225 292
+269 288
+330 287
+405 288
+436 290
+456 288
+468 276
+473 242
+472 208
+470 184
+466 146
+455 116
+440 93
+424 77
+397 69
+358 64
+298 60
+247 52
+201 38
+160 25
+130 7
+106 7
+81 16
+46 46
+22 71
diff --git a/bob/bio/vein/tests/test.py b/bob/bio/vein/tests/test.py
index 277c21d..33d0ccb 100644
--- a/bob/bio/vein/tests/test.py
+++ b/bob/bio/vein/tests/test.py
@@ -14,6 +14,7 @@ the generated sphinx documentation)
 
 import os
 import numpy
+import numpy as np
 import nose.tools
 
 import pkg_resources
@@ -186,3 +187,59 @@ def test_miura_match():
 
   score_imp = MM.score(template_vein, probe_imp_vein)
   assert numpy.isclose(score_imp, 0.172906739278421)
+  
+def test_manualRoiCut():
+    from bob.bio.vein.preprocessor.utils import ManualRoiCut
+    image_path      = F(('preprocessors', '0019_3_1_120509-160517.png'))
+    annotation_path  = F(('preprocessors', '0019_3_1_120509-160517.txt'))
+
+    c = ManualRoiCut(annotation_path, image_path)
+    mask_1 = c.roi_mask()
+    image_1 = c.roi_image()
+    # create mask using size:
+    c = ManualRoiCut(annotation_path, sizes=(672,380))
+    mask_2 = c.roi_mask()
+    
+    # loading image:
+    image = bob.io.base.load(image_path)
+    c = ManualRoiCut(annotation_path, image)
+    mask_3 = c.roi_mask()
+    image_3 = c.roi_image()
+    # load text file:
+    with open(annotation_path,'r') as f:
+        retval = numpy.loadtxt(f, ndmin=2)
+        
+    # note: these points are in Bob format -- (x, y)
+    annotation = list([tuple([k[0], k[1]]) for k in retval])
+    c = ManualRoiCut(annotation, image)
+    mask_4 = c.roi_mask()
+    image_4 = c.roi_image()
+    
+    assert (mask_1 == mask_2).all()
+    assert (mask_1 == mask_3).all()
+    assert (mask_1 == mask_4).all()
+    assert (image_1 == image_3).all()
+    assert (image_1 == image_4).all()
+    
+def test_ConstructAnnotations():
+  """
+  Test ConstructAnnotations preprocessor
+  """
+  image_filename = "/idiap/home/teglitis/Desktop/REFACTOR_ALL/src/bob.bio.vein/bob/bio/vein/tests/preprocessors/ConstructAnnotations.png"
+  roi_annotations_filename = "/idiap/home/teglitis/Desktop/REFACTOR_ALL/src/bob.bio.vein/bob/bio/vein/tests/preprocessors/ConstructAnnotations.txt"
+  vein_annotations_filename = "/idiap/home/teglitis/Desktop/REFACTOR_ALL/src/bob.bio.vein/bob/bio/vein/tests/preprocessors/ConstructAnnotations.npy"
+  
+  image = bob.io.base.load( image_filename )
+  roi_annotations = np.loadtxt(roi_annotations_filename, dtype='uint16')
+  roi_annotations =  [tuple([point[0], point[1]]) for point in roi_annotations]
+  with open(vein_annotations_filename, 'rb') as fp:
+    vein_annotations = np.load(fp)['arr_0'].tolist()
+  vein_annotations = [[tuple([point[0], point[1]]) for point in line] for line in vein_annotations]
+  
+  annotation_dictionary = {"image" : image, "roi_annotations" : roi_annotations, "vein_annotations" : vein_annotations}
+  from bob.bio.vein.preprocessor import ConstructAnnotations
+  preprocessor = ConstructAnnotations(center = True, rotate = True)
+  output = preprocessor(annotation_dictionary)
+  assert np.array_equal(output, image)
diff --git a/doc/api.rst b/doc/api.rst
index 0e9cab1..69648f7 100644
--- a/doc/api.rst
+++ b/doc/api.rst
@@ -24,6 +24,11 @@ Pre-processors
 .. automodule:: bob.bio.vein.preprocessor
 
 
+Pre-processor utilities
+-----------------------
+
+.. automodule:: bob.bio.vein.preprocessor.utils
+
 Feature Extractors
 ------------------
 
-- 
GitLab