diff --git a/bob/bio/base/script/grid_search.py b/bob/bio/base/script/grid_search.py
index 8294d0f0a42c8ddd0a17ebad786696beb9c488e4..73287b174303e8cf55e584b8af0b2f838f7b3e15 100755
--- a/bob/bio/base/script/grid_search.py
+++ b/bob/bio/base/script/grid_search.py
@@ -266,7 +266,7 @@ def check_requirements(replacements):
   for key in configuration.replace:
     # check that the key is one of the known steps
     if key not in steps:
-      raise ValueError("The step '%s' defined in the configuration file is unknown; choose one of %s" % (key, steps))
+      raise ValueError("The step '%s' defined in the configuration file '%s' is unknown; choose one of %s" % (key, args.configuration_file, steps))
     values.update(extract_values(configuration.replace[key], replacements))
   for requirement in configuration.requirements:
     test = replace(requirement, values)
@@ -444,6 +444,10 @@ def main(command_line_parameters = None):
   create_recursive(replace_dict, step_index = 0, directories = {}, dependency_level = 0)
 
   # finally, write some information about the
-  logger.info("The number of executed tasks is: %d, which are split up into %d jobs that are executed in the grid" %(task_count, job_count))
+  if args.grid is not None:
+    logger.info("The number of executed tasks is: %d, which are split up into %d jobs that are executed in the grid" , task_count, job_count)
+
+  if args.parallel is not None:
+    logger.info("The total amount of finsihed tasks is: %d", task_count)
 
   return score_directories
diff --git a/bob/bio/base/tools/extractor.py b/bob/bio/base/tools/extractor.py
index 384a9991f6e844753e2e4145d378c01cab03dbc3..00d0e93243c3dd5ae54fc93da73bf9011e001509 100644
--- a/bob/bio/base/tools/extractor.py
+++ b/bob/bio/base/tools/extractor.py
@@ -38,6 +38,7 @@ def train_extractor(extractor, preprocessor, force = False):
   if utils.check_file(fs.extractor_file, force, 1000):
     logger.info("- Extraction: extractor '%s' already exists.", fs.extractor_file)
   else:
+    bob.io.base.create_directories_safe(os.path.dirname(fs.extractor_file))
     # read training files
     train_files = fs.training_list('preprocessed', 'train_extractor', arrange_by_client = extractor.split_training_data_by_client)
     train_data = read_preprocessed_data(train_files, preprocessor, extractor.split_training_data_by_client)
@@ -46,7 +47,6 @@ def train_extractor(extractor, preprocessor, force = False):
     else:
       logger.info("- Extraction: training extractor '%s' using %d training files:", fs.extractor_file, len(train_files))
     # train model
-    bob.io.base.create_directories_safe(os.path.dirname(fs.extractor_file))
     extractor.train(train_data, fs.extractor_file)
 
 
diff --git a/bob/bio/base/tools/grid.py b/bob/bio/base/tools/grid.py
index 8a536d12efd1075497eeb8e1bb47f5e37cee5661..05cde15cb96ccc8f16b0131511d6da08502bead3 100644
--- a/bob/bio/base/tools/grid.py
+++ b/bob/bio/base/tools/grid.py
@@ -114,12 +114,6 @@ class GridSubmission:
       return self.fake_job_id
 
 
-  def grid_job_id(self):
-    id = os.getenv('JOB_ID')
-    if id is not None:
-      return int(id)
-    return id
-
   def execute_local(self):
     """Starts the local deamon and waits until it has finished."""
     logger.info("Starting jman deamon to run the jobs on the local machine.")
diff --git a/bob/bio/base/utils/resources.py b/bob/bio/base/utils/resources.py
index c2acef4b487b90240670fb8778aade11eb17465b..3ef80843ee6b6cdcb4c54ce4d61c95d66fc2d3b0 100644
--- a/bob/bio/base/utils/resources.py
+++ b/bob/bio/base/utils/resources.py
@@ -59,7 +59,7 @@ def read_config_file(filename, keyword = None):
   """
 
   if not os.path.exists(filename):
-    raise IOError("The given configuration file '%s' could not be found" % file)
+    raise IOError("The given configuration file '%s' could not be found" % filename)
 
   import string
   import random