Commit 6674ddb0 authored by Manuel Günther

Optimized order of commands that might have failed to read files in some cases
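
The reordering matters because POSIX path resolution walks every component of a path on disk: a relative input path that passes through a not-yet-existing output directory via a ".." component fails with ENOENT, even when the target file itself exists. Creating the output directory before the first read avoids that. A minimal, self-contained sketch of the failure mode and the fix; the paths and file names below are made up for illustration and are not part of the repository:

    import os

    # made-up layout: the input file is addressed *through* the output
    # directory via a ".." component
    os.makedirs("temp/extracted", exist_ok=True)
    open("temp/extracted/sample.dat", "wb").close()

    projected_dir = "temp/projected"   # output directory, does not exist yet
    feature_file = os.path.join(projected_dir, "..", "extracted", "sample.dat")

    # open(feature_file) at this point would raise FileNotFoundError on POSIX
    # systems, because "temp/projected" cannot be traversed while it does not
    # exist. Creating the output directory first (roughly what
    # bob.io.base.create_directories_safe is used for) lets the read succeed:
    os.makedirs(projected_dir, exist_ok=True)
    with open(feature_file, "rb") as f:
        f.read()

Each hunk below applies the same change: bob.io.base.create_directories_safe(...) moves from just before the write up to just after the "does the result already exist" check, so it runs before any input file is read.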

parent e50a7a20
@@ -108,12 +108,13 @@ def project(algorithm, extractor, groups = None, indices = None, force = False):
     if not utils.check_file(projected_file, force, 1000):
       logger.debug("... Projecting features for file '%s'", feature_file)
+      # create output directory before reading the data file (is sometimes required, when relative directories are specified, especially, including a .. somewhere)
+      bob.io.base.create_directories_safe(os.path.dirname(projected_file))
       # load feature
       feature = extractor.read_feature(feature_file)
       # project feature
       projected = algorithm.project(feature)
       # write it
-      bob.io.base.create_directories_safe(os.path.dirname(projected_file))
       algorithm.write_feature(projected, projected_file)
     else:
@@ -230,13 +231,13 @@ def enroll(algorithm, extractor, compute_zt_norm, indices = None, groups = ['dev
     if not utils.check_file(model_file, force, 1000):
       enroll_files = fs.enroll_files(model_id, group, 'projected' if algorithm.use_projected_features_for_enrollment else 'extracted')
       logger.debug("... Enrolling model from %d features to file '%s'", len(enroll_files), model_file)
+      bob.io.base.create_directories_safe(os.path.dirname(model_file))
       # load all files into memory
       enroll_features = [reader.read_feature(enroll_file) for enroll_file in enroll_files]
       model = algorithm.enroll(enroll_features)
       # save the model
-      bob.io.base.create_directories_safe(os.path.dirname(model_file))
       algorithm.write_model(model, model_file)
     else:
@@ -261,13 +262,13 @@ def enroll(algorithm, extractor, compute_zt_norm, indices = None, groups = ['dev
     if not utils.check_file(t_model_file, force, 1000):
       t_enroll_files = fs.t_enroll_files(t_model_id, group, 'projected' if algorithm.use_projected_features_for_enrollment else 'extracted')
       logger.debug("... Enrolling T-model from %d features to file '%s'", len(t_enroll_files), t_model_file)
+      bob.io.base.create_directories_safe(os.path.dirname(t_model_file))
       # load all files into memory
       t_enroll_features = [reader.read_feature(t_enroll_file) for t_enroll_file in t_enroll_files]
       t_model = algorithm.enroll(t_enroll_features)
       # save model
-      bob.io.base.create_directories_safe(os.path.dirname(t_model_file))
       algorithm.write_model(t_model, t_model_file)
     else:
       logger.debug("... Skipping T-model file '%s' since it exists", t_model_file)
@@ -98,12 +98,13 @@ def extract(extractor, preprocessor, groups=None, indices = None, force = False)
     if not utils.check_file(feature_file, force, 1000):
       logger.debug("... Extracting features for data file '%s'", data_file)
+      # create output directory before reading the data file (is sometimes required, when relative directories are specified, especially, including a .. somewhere)
+      bob.io.base.create_directories_safe(os.path.dirname(feature_file))
       # load data
       data = preprocessor.read_data(data_file)
       # extract feature
       feature = extractor(data)
       # write feature
-      bob.io.base.create_directories_safe(os.path.dirname(feature_file))
       extractor.write_feature(feature, feature_file)
     else:
       logger.debug("... Skipping preprocessed data '%s' since feature file '%s' exists", data_file, feature_file)
@@ -57,6 +57,8 @@ def preprocess(preprocessor, groups = None, indices = None, force = False):
     if not utils.check_file(preprocessed_data_file, force, 1000):
       logger.debug("... Processing original data file '%s'", file_name)
       data = preprocessor.read_original_data(file_name)
+      # create output directory before reading the data file (is sometimes required, when relative directories are specified, especially, including a .. somewhere)
+      bob.io.base.create_directories_safe(os.path.dirname(preprocessed_data_file))
       # get the annotations; might be None
       annotations = fs.get_annotations(annotation_list[i])
@@ -67,7 +69,6 @@ def preprocess(preprocessor, groups = None, indices = None, force = False):
       logger.error("Preprocessing of file '%s' was not successful", file_name)
       # write the data
-      bob.io.base.create_directories_safe(os.path.dirname(preprocessed_data_file))
       preprocessor.write_data(preprocessed_data, preprocessed_data_file)
     else:
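
After this commit, all four tools (preprocess, extract, project, enroll) follow the same control flow. A condensed, hypothetical sketch of that flow; process_one and its callables are illustrative names rather than bob.bio.base API, and the real code checks existing results with utils.check_file(path, force, 1000) instead of a bare os.path.exists:

    import os
    import bob.io.base

    def process_one(input_file, output_file, read, compute, write, force=False):
        # skip work if a result already exists and force is not set;
        # the real tools use utils.check_file(output_file, force, 1000) here
        if not force and os.path.exists(output_file):
            return
        # create the output directory *before* reading, so relative input
        # paths that pass through it (e.g. containing "..") can be resolved
        bob.io.base.create_directories_safe(os.path.dirname(output_file))
        result = compute(read(input_file))
        write(result, output_file)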