diff --git a/bob/paper/nir_patch_pooling/script/annotate_database.py b/bob/paper/nir_patch_pooling/script/annotate_database.py
index 071084e90b1f2dd1048d086b877e81546ff89f91..8aec28e662930865854ff2f6c2642ee5364dcb9e 100755
--- a/bob/paper/nir_patch_pooling/script/annotate_database.py
+++ b/bob/paper/nir_patch_pooling/script/annotate_database.py
@@ -155,7 +155,7 @@ class AnnotationGenerator:
         # collect the files to be processed
         self.filelist = self.database.objects()
         total = len(self.filelist)
-        logger.info("Files to be annotated: {}".format(total))
+        print("Files to be annotated: {}".format(total))
 
         # calculate split indices if computation is parallel
         if(job_index != -1):
diff --git a/bob/paper/nir_patch_pooling/script/convert_mlfp_database.py b/bob/paper/nir_patch_pooling/script/convert_mlfp_database.py
index 75d495f1f5923b48f87bf45441b839636cbd3a38..da34e1f6b3951441a8179e972655f24e19863be9 100755
--- a/bob/paper/nir_patch_pooling/script/convert_mlfp_database.py
+++ b/bob/paper/nir_patch_pooling/script/convert_mlfp_database.py
@@ -1,59 +1,48 @@
 #
-# script to generate face annotations for NMAD directly over NIR data
+# @ desc: script to convert MLFP (NIR) data into the HDF5 FrameContainer format
 # @ Ketan Kotwal
 #
-#----------------------------------------------------------
-
+#------------------------------------------------------------------------------
 
 # imports
-from bob.pad.face.preprocessor.FaceCropAlign import detect_face_landmarks_in_image
-from bob.ip.color import rgb_to_gray
-from bob.ip.facelandmarks import detect_landmarks
-from bob.bio.video import FrameContainer
-import bob.io.base
 import numpy as np
 import scipy.io as spio
 import os, sys
+from bob.bio.video import FrameContainer
+from bob.io.base import create_directories_safe, HDF5File
 
+frames_per_video = 20
 
-#import logging
-#logger = logging.getLogger(__name__)
-#logger.setLevel(logging.INFO)
-
-#----------------------------------------------------------
+#------------------------------------------------------------------------------
 
 class MLFPConvertor:
 
     def __init__(self, input_directory, output_directory):
 
-        self.input_directory = input_directory.rstrip("/")
-        self.output_directory = output_directory.rstrip("/")
+        self.input_directory = input_directory
+        self.output_directory = output_directory
         if not os.path.exists(self.output_directory):
             os.makedirs(self.output_directory)
 
-        self.file_objects = self._load_db(self.input_directory)
+        self.file_objects = self.load_db(self.input_directory)
 
         print("Input directory: {}".format(self.input_directory))
         print("output directory: {}".format(self.output_directory))
 
-#--------------------------------------
+#------------------------------------------------------------------------------
 
-    def _load_db(self, _):
+    def load_db(self, db_directory):
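+        # collect the paths of all "Infrared.mat" files under db_directory (prefix and ".mat" extension stripped)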
         
         file_list = []
-        for dirpath, dirs, files in os.walk(self.input_directory):
+        for dirpath, dirs, files in os.walk(db_directory):
             for name in files:
                 file_path = os.path.join(dirpath, name)
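+                # NOTE: the fixed [43:] slice appears to strip an absolute directory prefix so that only the relative file path is kept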
                 file_path = file_path[43:]
                 if ("Infrared.mat" in file_path):
                     file_list.append(file_path[:-4])
 
-        print("Found {} feature files to be fused".format(len(file_list)))        	       
-
-        print(file_list[:10])
         return file_list
 
-#----------------------------------------------------------
+#------------------------------------------------------------------------------
  
     def process_file(self, filename):
 
@@ -61,82 +50,54 @@ class MLFPConvertor:
         fc = spio.loadmat(os.path.join(self.input_directory, filename + ".mat"))
         fc = fc["IR"]
 
+        # select first frames_per_video frames and add to framecontainer
         fc_bob = FrameContainer()
-        for i, frame in enumerate(fc[:20]):
-            #print("    Frame {:02d}".format(i))
+        for idx, frame in enumerate(fc[:frames_per_video]):
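+            # scale intensities down to the 8-bit range (frames appear to be stored as 16-bit values) before casting to uint8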
             frame = frame[0]/256.0
-            fc_bob.add(i, frame.astype(np.uint8), None)
+            fc_bob.add(idx, frame.astype(np.uint8), None)
 
+        # save the FrameContainer to an HDF5 file
         out_filepath = os.path.join(self.output_directory, filename + ".hdf5")
-        bob.io.base.create_directories_safe(directory=os.path.split(out_filepath)[0], dryrun=False)
+        create_directories_safe(directory=os.path.split(out_filepath)[0], dryrun=False)
 
-        print(len(fc_bob), type(fc_bob))	
-        f_out = bob.io.base.HDF5File(out_filepath, 'w')
+        f_out = HDF5File(out_filepath, 'w')
         fc_bob.save(f_out)
         del f_out
 
         return
 
-#--------------------------------------
+#------------------------------------------------------------------------------
    
     def run(self):
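+        # convert every collected .mat file to .hdf5, skipping samples that were already converted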
 
         total = len(self.file_objects)
-        print("Found {} files to be converted".format(total))
+        print("Files to be converted to .hdf5: {}".format(total))
         
-        for i, f in enumerate(self.file_objects[:]):
+        for idx, f in enumerate(self.file_objects):
 
-            print("[{:03d}/{:03d}] Sample: {}".format(i+1, total, f))
+            print("[{:03d}/{:03d}] Sample: {}".format(idx+1, total, f))
 
-            json_filepath = os.path.join(self.output_directory, f + ".hdf5")
-            if not os.path.exists(json_filepath):
+            out_filepath = os.path.join(self.output_directory, f + ".hdf5")
+            if not os.path.exists(out_filepath):
                 self.process_file(f)
             else:
                 print("File exist for {}. Skipping".format(f))
 
-#----------------------------------------------------------
-
-    def get_protocol(self):
-
-        real = []
-        attack = []
-
-        for i, path in enumerate(self.file_objects[:]):
-            print(path)
-            if("/R" in path):
-                line = "{} {}".format(path, str(000))
-                real.append(line)
-            elif("/M" in path):
-                line = "{} {} {}".format(path, str(111), "attack")
-                attack.append(line)
-            else:
-                print("dont know: ", path)
-        
-        print("BF: {}. PA: {}".format(len(real), len(attack)))
-
-        with open("for_real.lst", "w") as f:
-            f.write("\n".join(real) + "\n")
-
-        with open("for_attack.lst", "w") as f:
-            f.write("\n".join(attack) + "\n")
-
-
-        return
-
-
-#----------------------------------------------------------
+#------------------------------------------------------------------------------
 
+# example paths:
+#   input_directory  = "/idiap/resource/database/MLFP/NIR_Protocol"
+#   output_directory = "/idiap/temp/kkotwal/nmad_experiments/mlfp_int2/"
 
 def main():
 
-    input_directory = "/idiap/resource/database/MLFP/NIR_Protocol"          
-    output_directory = "/idiap/temp/kkotwal/nmad_experiments/mlfp_int2/"
+    # expect exactly two command-line arguments: input and output directories
+    if len(sys.argv) != 3:
+        print("Usage: {} <input_directory> <output_directory>".format(sys.argv[0]))
+        sys.exit(1)
+
+    input_directory = sys.argv[1]
+    output_directory = sys.argv[2]
 
-    ag = MLFPConvertor(input_directory, output_directory) 
-    #ag.get_protocol()
-    ag.run()
+    m_conv = MLFPConvertor(input_directory, output_directory) 
+    m_conv.run()
     
-#----------------------------------------------------------
+#------------------------------------------------------------------------------
 
 if __name__ == "__main__":