Commit 0f669bce authored by Pavel KORSHUNOV

Merge branch 'config-support' into 'master'

Support for configuration files

See merge request !13
parents ee0abff6 de6231d8
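This merge replaces the long list of hand-maintained command-line options with the configuration-file machinery from bob.bio.base (see the changes to command_line_parser and initialize below). As a rough illustration only, the sketch that follows shows what such a configuration file could look like, assuming the bob.bio.base convention that variable names in the file correspond to the long option names; the resource names and the spoof.py entry point are placeholders, not something defined by this merge.

# config.py -- hypothetical PAD configuration file (illustration only)
# Variable names are assumed to match the long option names of the PAD script.
sub_directory = 'pad-experiment-1'   # -s / --sub-directory
database = 'my-pad-database'         # -d / --database (placeholder resource name)
preprocessor = 'my-preprocessor'     # -p / --preprocessor
extractor = 'my-extractor'           # -e / --extractor
algorithm = 'my-algorithm'           # -a / --algorithm
groups = ['dev', 'eval']             # --groups
parallel = 4                         # -r / --parallel (added by this merge)

Such a file would then be passed to the PAD script in place of the individual options, e.g. ./bin/spoof.py config.py (script name assumed, not part of this diff).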
@@ -30,7 +30,7 @@ from .. import PadFile
 from .. import PadDatabase
 from bob.bio.base.database import FileListBioDatabase

-class FileListPadDatabase(FileListBioDatabase, PadDatabase):
+class FileListPadDatabase(PadDatabase, FileListBioDatabase):
     """This class provides a user-friendly interface to databases that are given as file lists.

     Keyword parameters:
@@ -160,7 +160,7 @@ class FileListPadDatabase(FileListBioDatabase, PadDatabase):
         return [self.pad_file_class(client_id=f.client_id, path=f.path, attack_type=f.attack_type, file_id=f.id)
                 for f in files]

-    def groups(self, protocol=None):
+    def groups(self, protocol=None, add_world=False, add_subworld=False):
         """This function returns the list of groups for this database.

         protocol : str or ``None``
@@ -272,7 +272,7 @@ class FileListPadDatabase(FileListBioDatabase, PadDatabase):
         return self._make_pad(retval)

     def annotations(self, file):
-        return super(FileListPadDatabase, self).annotations(file)
+        return FileListBioDatabase.annotations(self, file)

     def tobjects(self, groups=None, protocol=None, model_ids=None, **kwargs):
         pass
...
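Two related details in the file above: the base-class order of FileListPadDatabase is reversed so that PadDatabase comes first in the method resolution order, and annotations() now calls FileListBioDatabase.annotations() explicitly instead of going through super(). A minimal, self-contained sketch (not repository code) of why the explicit call is needed once the class order changes:

# Sketch only: with PadDatabase first in the MRO, super() would dispatch to
# the (abstract) PadDatabase.annotations(), so the file-list implementation
# is reused by naming the base class explicitly.
class PadDatabase(object):
    def annotations(self, f):
        raise NotImplementedError("PAD databases must implement annotations()")

class FileListBioDatabase(object):
    def annotations(self, f):
        return {'reye': (0, 0), 'leye': (0, 0)}  # dummy annotation data

class FileListPadDatabase(PadDatabase, FileListBioDatabase):
    def annotations(self, f):
        # super(FileListPadDatabase, self).annotations(f) would raise here;
        # the explicit base-class call bypasses the MRO.
        return FileListBioDatabase.annotations(self, f)

print(FileListPadDatabase().annotations('some/file'))  # -> dummy annotations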
@@ -34,8 +34,7 @@ class TestFile(PadFile):
         attack_type = None
         if "attack" in path:
             attack_type = "attack"
-        PadFile.__init__(self, client_id=1, path=path,
-                         file_id=id, attack_type=attack_type)
+        super(TestFile, self).__init__(client_id=1, path=path, file_id=id, attack_type=attack_type)

     def load(self, directory=None, extension='.hdf5'):
         """Loads the data at the specified location and using the given extension.
@@ -100,9 +99,11 @@ class Interface(BaseInterface):
         dumpparser = subparsers.add_parser('dumplist', help="")
         dumpparser.add_argument('-d', '--directory', dest="directory", default='',
-                                help="if given, this path will be prepended to every entry returned (defaults to '%(default)s')")
+                                help="if given, this path will be prepended to every entry returned "
+                                     "(defaults to '%(default)s')")
         dumpparser.add_argument('-e', '--extension', dest="extension", default='',
-                                help="if given, this extension will be appended to every entry returned (defaults to '%(default)s')")
+                                help="if given, this extension will be appended to every entry returned "
+                                     "(defaults to '%(default)s')")
         dumpparser.add_argument('--self-test', dest="selftest", default=False,
                                 action='store_true', help=SUPPRESS)
@@ -199,4 +200,4 @@ class TestDatabase(PadDatabase):
         return None


-database = TestDatabase(original_directory=data_dir, original_extension='')
+database = TestDatabase()
\ No newline at end of file
@@ -46,8 +46,10 @@ def create_database():
 class TestDatabaseSql (bob.pad.base.database.PadDatabase, bob.db.base.SQLiteBaseDatabase):

     def __init__(self):
-        bob.pad.base.database.PadDatabase.__init__(self, 'pad_test', original_directory="original/directory", original_extension=".orig")
+        bob.pad.base.database.PadDatabase.__init__(self, 'pad_test',
+                                                   original_directory="original/directory", original_extension=".orig")
         bob.db.base.SQLiteBaseDatabase.__init__(self, dbfile, TestFileSql)

     def groups(self, protocol=None):
@@ -59,4 +61,4 @@ class TestDatabaseSql (bob.pad.base.database.PadDatabase, bob.db.base.SQLiteBase
     def annotations(self, file):
         return None

 database = TestDatabaseSql()
\ No newline at end of file
@@ -5,6 +5,10 @@
 # @date: Wed 19 Aug 13:43:21 2015
 #

+"""
+Execute PAD algorithms on a database with presentation attacks.
+"""
+
 import argparse
 import os
@@ -16,12 +20,9 @@ logger = bob.core.log.setup("bob.pad.base")

 from bob.pad.base.database import PadDatabase

-from bob.bio.base import utils
 from . import FileSelector
 from .. import database
+from bob.bio.base import tools

-"""Execute spoofmetric recognition algorithms on a certain spoofmetric database.
-"""

 def is_idiap():
@@ -62,36 +63,8 @@ def command_line_parser(description=__doc__, exclude_resources_from=[]):
     #######################################################################################
     ############## options that are required to be specified #######################
-    config_group = parser.add_argument_group(
-        '\nParameters defining the experiment. Most of these parameters can be a registered resource, a '
-        'configuration file, or even a string that defines a newly created object')
-    config_group.add_argument('-d', '--database', metavar='x', nargs='+', required=True,
-                              help='Database and the protocol; registered databases are: %s' % utils.resource_keys(
-                                  'database', exclude_resources_from, package_prefix='bob.pad.'))
-    config_group.add_argument('-p', '--preprocessor', metavar='x', nargs='+', required=True,
-                              help='Data preprocessing; registered preprocessors are: %s' % utils.resource_keys(
-                                  'preprocessor', exclude_resources_from, package_prefix='bob.pad.'))
-    config_group.add_argument('-e', '--extractor', metavar='x', nargs='+', required=True,
-                              help='Feature extraction; registered feature extractors are: %s' % utils.resource_keys(
-                                  'extractor', exclude_resources_from, package_prefix='bob.pad.'))
-    config_group.add_argument('-a', '--algorithm', metavar='x', nargs='+', required=True,
-                              help='Anti-spoofing registered algorithms are: %s' % utils.resource_keys(
-                                  'algorithm', exclude_resources_from, package_prefix='bob.pad.'))
-    config_group.add_argument('-g', '--grid', metavar='x', nargs='+',
-                              help='Configuration for the grid setup; if not specified, the commands are '
-                                   'executed sequentially on the local machine.')
-    config_group.add_argument('--imports', metavar='LIB', nargs='+', default=['bob.pad.base'],
-                              help='If one of your configuration files is an actual command, please specify the '
-                                   'lists of required libraries (imports) to execute this command')
-    config_group.add_argument('-s', '--sub-directory', metavar='DIR', required=True,
-                              help='The sub-directory where the files of the current experiment should be stored. '
-                                   'Please specify a directory name with a name describing your experiment.')
-    config_group.add_argument('--groups', metavar='GROUP', nargs='+', default=['dev'],
-                              help="The groups (i.e., 'train', 'dev', 'eval') for which the models and scores "
-                                   "should be generated; by default, only the 'dev' group is evaluated")
-    config_group.add_argument('-P', '--protocol', metavar='PROTOCOL',
-                              help='Overwrite the protocol that is stored in the database by the given one '
-                                   '(might not by applicable for all databases).')
+    config_group = tools.command_line_config_group(parser, package_prefix='bob.pad.',
+                                                   exclude_resources_from=exclude_resources_from)

     #######################################################################################
     ############## options to modify default directories or file names ####################
@@ -170,6 +143,12 @@ def command_line_parser(description=__doc__, exclude_resources_from=[]):
     flag_group.add_argument('-A', '--allow-missing-files', action='store_true',
                             help="If given, missing files will not stop the processing; this is helpful if not "
                                  "all files of the database can be processed; missing scores will be NaN.")
+    flag_group.add_argument('-r', '--parallel', type=int,
+                            help='This flag is a shortcut for running the commands on the local machine with '
+                                 'the given amount of parallel threads; equivalent to --grid '
+                                 'bob.bio.base.grid.Grid("local", number_of_parallel_threads=X) '
+                                 '--run-local-scheduler --stop-on-failure.')
     flag_group.add_argument('-t', '--environment', dest='env', nargs='*', default=[],
                             help='Passes specific environment variables to the job.')
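The new -r/--parallel flag is, according to its help text, only a shortcut for a local grid configuration. A short sketch of the equivalent object; the Grid call below is taken from the help string above and is illustrative only:

# What "-r 4" stands for, per the help text of the new option:
from bob.bio.base.grid import Grid

grid = Grid("local", number_of_parallel_threads=4)
# combined with --run-local-scheduler --stop-on-failure on the command line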
@@ -213,61 +192,50 @@ def initialize(parsers, command_line_parameters=None, skips=[]):
     args : namespace
       A namespace of arguments as read from the command line.

-    .. note::
-
-        The database, preprocessor, extractor, algorithm and grid (if specified) are actual
-        instances of the according classes.
+    .. note:: The database, preprocessor, extractor, algorithm and grid (if specified) are actual
+        instances of the according classes.
     """

-    # execute-only
-    if skips is not None:
-        #######################################################################################
-        ################# options for skipping parts of the toolchain #########################
-        skip_group = parsers['main'].add_argument_group(
-            '\nFlags that allow to skip certain parts of the experiments. This does only make sense when the '
-            'generated files are already there (e.g. when reusing parts of other experiments)')
-        for skip in skips:
-            skip_group.add_argument('--skip-%s' % skip, action='store_true', help='Skip the %s step.' % skip)
-        skip_group.add_argument('-o', '--execute-only', nargs='+', choices=skips,
-                                help='If specified, executes only the given parts of the tool chain.')
-
-    args = parsers['main'].parse_args(command_line_parameters)
-
-    # evaluate skips
-    if skips is not None and args.execute_only is not None:
-        for skip in skips:
-            if skip not in args.execute_only:
-                exec ("args.skip_%s = True" % (skip.replace("-", "_")))
-
-    # logging
-    bob.core.log.set_verbosity_level(logger, args.verbose)
-
-    # timer
-    if args.timer is not None and not len(args.timer):
-        args.timer = ('real', 'system', 'user')
-
-    # load configuration resources
-    args.database = utils.load_resource(' '.join(args.database), 'database', imports=args.imports,
-                                        package_prefix='bob.pad.')
-    args.preprocessor = utils.load_resource(' '.join(args.preprocessor), 'preprocessor', imports=args.imports,
-                                            package_prefix='bob.pad.')
-    args.extractor = utils.load_resource(' '.join(args.extractor), 'extractor', imports=args.imports,
-                                         package_prefix='bob.pad.')
-    args.algorithm = utils.load_resource(' '.join(args.algorithm), 'algorithm', imports=args.imports,
-                                         package_prefix='bob.pad.')
-    if args.grid is not None:
-        args.grid = utils.load_resource(' '.join(args.grid), 'grid', imports=args.imports, package_prefix='bob.pad.')
-
-    # set base directories
-    if args.temp_directory is None:
-        args.temp_directory = "/idiap/temp/%s/%s" % (os.environ["USER"], args.database.name) if is_idiap() else "temp"
-    if args.result_directory is None:
-        args.result_directory = "/idiap/user/%s/%s" % (
-            os.environ["USER"], args.database.name) if is_idiap() else "results"
-
-    args.temp_directory = os.path.join(args.temp_directory, args.sub_directory)
-    args.result_directory = os.path.join(args.result_directory, args.sub_directory)
-    args.grid_log_directory = os.path.join(args.temp_directory, args.grid_log_directory)
+    args = tools.command_line_skip_group(parsers, command_line_parameters, skips)
+    args_dictionary = {'required': ['database', 'preprocessor', 'extractor', 'algorithm', 'sub_directory'],
+                       'common': ['protocol', 'grid', 'parallel', 'verbose', 'groups', 'temp_directory',
+                                  'result_directory', 'allow_missing_files', 'dry_run', 'force'],
+                       'optional': ['preprocessed_directory', 'extracted_directory', 'projected_directory',
+                                    'extractor_file', 'projector_file']
+                       }
+    keywords = (
+        "protocol",
+        "groups",
+        "parallel",
+        "preferred_package",
+        "temp_directory",
+        "result_directory",
+        "extractor_file",
+        "projector_file",
+        "gridtk_database_file",
+        "experiment_info_file",
+        "database_directories_file",
+        "preprocessed_directory",
+        "extracted_directory",
+        "projected_directory",
+        "score_directories",
+        "grid_log_directory",
+        "verbose",
+        "dry_run",
+        "force",
+        "write_compressed_score_files",
+        "stop_on_failure",
+        "run_local_scheduler",
+        "external_dependencies",
+        "timer",
+        "nice",
+        "delete_jobs_finished_with_status",
+        "allow_missing_files",
+        "env",
+    )
+    args = tools.parse_config_file(parsers, args, args_dictionary, keywords, skips)
+    args = tools.set_extra_flags(args)

     # protocol command line override
     if args.protocol is not None:
...
@@ -5,7 +5,8 @@
 [buildout]
 parts = scripts
 eggs = bob.pad.base
-       bob.extension
+       bob.db.base
+       bob.bio.base
        gridtk
 extensions = bob.buildout
@@ -13,7 +14,6 @@ extensions = bob.buildout
 auto-checkout = *
 develop = src/bob.db.base
          src/bob.bio.base
-         src/bob.extension
          .

 ; options for bob.buildout
@@ -24,7 +24,6 @@ newest = false
 [sources]
 bob.db.base = git git@gitlab.idiap.ch:bob/bob.db.base.git
 bob.bio.base = git git@gitlab.idiap.ch:bob/bob.bio.base.git
-bob.extension = git git@gitlab.idiap.ch:bob/bob.extension.git

 [scripts]
 recipe = bob.buildout:scripts
...