diff --git a/bob/bio/base/algorithm/Algorithm.py b/bob/bio/base/algorithm/Algorithm.py index 8935d4d6ac63c5fcde6ced57f0e32d8c4791b55d..9cc06732f7b8ac35f1bbaaafda189cc5db1f8e37 100644 --- a/bob/bio/base/algorithm/Algorithm.py +++ b/bob/bio/base/algorithm/Algorithm.py @@ -47,7 +47,7 @@ class Algorithm: See :py:func:`bob.bio.base.score_fusion_strategy` for possible values. kwargs : ``key=value`` pairs - A list of keyword arguments to be written in the :py:meth:`__str__` function. + A list of keyword arguments to be written in the `__str__` function. """ diff --git a/bob/bio/base/algorithm/BIC.py b/bob/bio/base/algorithm/BIC.py index a5eb7e01182befe3328346e6459037b08fb7e5fc..d1add235f713bd786474a1dc974fa80f7fce90f3 100644 --- a/bob/bio/base/algorithm/BIC.py +++ b/bob/bio/base/algorithm/BIC.py @@ -12,275 +12,284 @@ from .Algorithm import Algorithm from .. import utils import logging -logger = logging.getLogger("bob.bio.base") - -class BIC (Algorithm): - """Computes the Intrapersonal/Extrapersonal classifier using a generic feature type and feature comparison function. - - In this generic implementation, any distance or similarity vector that results as a comparison of two feature vectors can be used. - Currently two different versions are implemented: One with [MWP98]_ and one without (a generalization of [GW09]_) subspace projection of the features. - The implementation of the BIC training is taken from :ref:`bob.learn.linear <bob.learn.linear>`. - - **Parameters:** - - comparison_function : function - The function to compare the features in the original feature space. - For a given pair of features, this function is supposed to compute a vector of similarity (or distance) values. - In the easiest case, it just computes the element-wise difference of the feature vectors, but more difficult functions can be applied, and the function might be specialized for the features you put in. 
- - maximum_training_pair_count : int or None - Limit the number of training image pairs to the given value, i.e., to avoid memory issues. - - subspace_dimensions : (int, int) or None - A tuple of sizes of the intrapersonal and extrapersonal subspaces. - If given, subspace projection is performed (cf. [MWP98]_) and the subspace projection matrices are truncated to the given sizes. - If omitted, no subspace projection is performed (cf. [GW09]_). - - uses_dffs : bool - Only valid, when ``subspace_dimensions`` are specified. - Use the *Distance From Feature Space* (DFFS) (cf. [MWP98]_) during scoring. - Use this flag with care! - - read_function : function - A function to read a feature from :py:class:`bob.io.base.HDF5File`. - This function need to be appropriate to read the type of features that you are using. - By default, :py:func:`bob.bio.base.load` is used. - - write_function : function - A function to write a feature to :py:class:`bob.io.base.HDF5File`. - This function is used to write the model and need to be appropriate to write the type of features that you are using. - By default, :py:func:`bob.bio.base.save` is used. - - kwargs : ``key=value`` pairs - A list of keyword arguments directly passed to the :py:class:`Algorithm` base class constructor. 
- """ - - def __init__( - self, - comparison_function, # the function to be used to compare two features; this highly depends on the type of features that are used - maximum_training_pair_count = None, # if set, limit the number of training pairs to the given number in a non-random manner - subspace_dimensions = None, # if set as a pair (intra_dim, extra_dim), PCA subspace truncation for the two classes is performed - uses_dffs = False, # use the distance from feature space; only valid when PCA truncation is enabled; WARNING: uses this flag with care - read_function = utils.load, - write_function = utils.save, - **kwargs # parameters directly sent to the base class - ): - - # call base class function and register that this tool requires training for the enrollment - Algorithm.__init__( - self, - requires_enroller_training = True, - - comparison_function = str(comparison_function), - maximum_training_pair_count = maximum_training_pair_count, - subspace_dimensions = subspace_dimensions, - uses_dffs = uses_dffs, - read_function=str(read_function), - write_function=str(write_function), - - **kwargs - ) - - # set up the BIC tool - self.comparison_function = comparison_function - self.read_function = read_function - self.write_function = write_function - self.maximum_pair_count = maximum_training_pair_count - self.use_dffs = uses_dffs - if subspace_dimensions is not None: - self.M_I = subspace_dimensions[0] - self.M_E = subspace_dimensions[1] - self.bic_machine = bob.learn.linear.BICMachine(self.use_dffs) - else: - self.bic_machine = bob.learn.linear.BICMachine(False) - self.M_I = None - self.M_E = None - - - def _trainset_for(self, pairs): - """Computes the array containing the comparison results for the given set of image pairs.""" - return numpy.vstack([self.comparison_function(f1, f2) for (f1, f2) in pairs]) - - - def train_enroller(self, train_features, enroller_file): - """Trains the BIC by computing intra-personal and extra-personal subspaces. 
- - First, two lists of pairs are computed, which contain intra-personal and extra-personal feature pairs, respectively. - Afterward, the comparison vectors are computed using the ``comparison_function`` specified in the constructor. - Finally, the :py:class:`bob.learn.linear.BICTrainer` is used to train a :py:class:`bob.learn.linear.BICMachine`. - - **Parameters:** - train_features : [[object]] - A list of lists of feature vectors, which are used to train the BIC. - Each sub-list contains the features of one client. +logger = logging.getLogger("bob.bio.base") - enroller_file : str - A writable file, into which the resulting :py:class:`bob.learn.linear.BICMachine` will be written. - """ - # compute intrapersonal and extrapersonal pairs - logger.info(" -> Computing pairs") - intra_pairs, extra_pairs = bob.learn.linear.bic_intra_extra_pairs(train_features) - # limit pairs, if desired - if self.maximum_pair_count is not None: - if len(intra_pairs) > self.maximum_pair_count: - logger.info(" -> Limiting intrapersonal pairs from %d to %d" %(len(intra_pairs), self.maximum_pair_count)) - intra_pairs = utils.selected_elements(intra_pairs, self.maximum_pair_count) - if len(extra_pairs) > self.maximum_pair_count: - logger.info(" -> Limiting extrapersonal pairs from %d to %d" %(len(extra_pairs), self.maximum_pair_count)) - extra_pairs = utils.selected_elements(extra_pairs, self.maximum_pair_count) +class BIC(Algorithm): + """Computes the Intrapersonal/Extrapersonal classifier using a generic feature type and feature comparison function. + In this generic implementation, any distance or similarity vector that results as a comparison of two feature vectors can be used. + Currently two different versions are implemented: One with [MWP98]_ and one without (a generalization of [GW09]_) subspace projection of the features. + The implementation of the BIC training is taken from :ref:`bob.learn.linear <bob.learn.linear>`. 
- # train the BIC Machine with these pairs - logger.info(" -> Computing %d intrapersonal results", len(intra_pairs)) - intra_vectors = self._trainset_for(intra_pairs) - logger.info(" -> Computing %d extrapersonal results", len(extra_pairs)) - extra_vectors = self._trainset_for(extra_pairs) + **Parameters:** - logger.info(" -> Training BIC machine") - trainer = bob.learn.linear.BICTrainer(self.M_I, self.M_E) if self.M_I is not None else bob.learn.linear.BICTrainer() - trainer.train(intra_vectors, extra_vectors, self.bic_machine) + comparison_function : function + The function to compare the features in the original feature space. + For a given pair of features, this function is supposed to compute a vector of similarity (or distance) values. + In the easiest case, it just computes the element-wise difference of the feature vectors, but more difficult functions can be applied, and the function might be specialized for the features you put in. - # save the machine to file - self.bic_machine.save(bob.io.base.HDF5File(enroller_file, 'w')) + maximum_training_pair_count : int or None + Limit the number of training image pairs to the given value, i.e., to avoid memory issues. + subspace_dimensions : (int, int) or None + A tuple of sizes of the intrapersonal and extrapersonal subspaces. + If given, subspace projection is performed (cf. [MWP98]_) and the subspace projection matrices are truncated to the given sizes. + If omitted, no subspace projection is performed (cf. [GW09]_). - def load_enroller(self, enroller_file): - """Reads the :py:class:`bob.learn.linear.BICMachine` from file. + uses_dffs : bool + Only valid, when ``subspace_dimensions`` are specified. + Use the *Distance From Feature Space* (DFFS) (cf. [MWP98]_) during scoring. + Use this flag with care! - The :py:attr:`bob.learn.linear.BICMachine.use_DFFS` will be overwritten by the ``use_dffs`` value specified in this class' constructor. 
+ read_function : function + A function to read a feature from :py:class:`bob.io.base.HDF5File`. + This function need to be appropriate to read the type of features that you are using. + By default, :py:func:`bob.bio.base.load` is used. - **Parameters:** + write_function : function + A function to write a feature to :py:class:`bob.io.base.HDF5File`. + This function is used to write the model and need to be appropriate to write the type of features that you are using. + By default, :py:func:`bob.bio.base.save` is used. - enroller_file : str - An existing file, from which the :py:class:`bob.learn.linear.BICMachine` will be read. + kwargs : ``key=value`` pairs + A list of keyword arguments directly passed to the :py:class:`Algorithm` base class constructor. """ - self.bic_machine.load(bob.io.base.HDF5File(enroller_file, 'r')) - # to set this should not be required, but just in case - # you re-use a trained enroller file that hat different setup of use_DFFS - self.bic_machine.use_DFFS = self.use_dffs + def __init__( + self, + comparison_function, + # the function to be used to compare two features; this highly depends on the type of features that are used + maximum_training_pair_count=None, + # if set, limit the number of training pairs to the given number in a non-random manner + subspace_dimensions=None, + # if set as a pair (intra_dim, extra_dim), PCA subspace truncation for the two classes is performed + uses_dffs=False, + # use the distance from feature space; only valid when PCA truncation is enabled; WARNING: uses this flag with care + read_function=utils.load, + write_function=utils.save, + **kwargs # parameters directly sent to the base class + ): + + # call base class function and register that this tool requires training for the enrollment + Algorithm.__init__( + self, + requires_enroller_training=True, + + comparison_function=str(comparison_function), + maximum_training_pair_count=maximum_training_pair_count, + subspace_dimensions=subspace_dimensions, + 
uses_dffs=uses_dffs, + read_function=str(read_function), + write_function=str(write_function), + + **kwargs + ) + + # set up the BIC tool + self.comparison_function = comparison_function + self.read_function = read_function + self.write_function = write_function + self.maximum_pair_count = maximum_training_pair_count + self.use_dffs = uses_dffs + if subspace_dimensions is not None: + self.M_I = subspace_dimensions[0] + self.M_E = subspace_dimensions[1] + self.bic_machine = bob.learn.linear.BICMachine(self.use_dffs) + else: + self.bic_machine = bob.learn.linear.BICMachine(False) + self.M_I = None + self.M_E = None + + def _trainset_for(self, pairs): + """Computes the array containing the comparison results for the given set of image pairs.""" + return numpy.vstack([self.comparison_function(f1, f2) for (f1, f2) in pairs]) + + def train_enroller(self, train_features, enroller_file): + """Trains the BIC by computing intra-personal and extra-personal subspaces. + + First, two lists of pairs are computed, which contain intra-personal and extra-personal feature pairs, respectively. + Afterward, the comparison vectors are computed using the ``comparison_function`` specified in the constructor. + Finally, the :py:class:`bob.learn.linear.BICTrainer` is used to train a :py:class:`bob.learn.linear.BICMachine`. + + **Parameters:** + + train_features : [[object]] + A list of lists of feature vectors, which are used to train the BIC. + Each sub-list contains the features of one client. + + enroller_file : str + A writable file, into which the resulting :py:class:`bob.learn.linear.BICMachine` will be written. 
+ """ + + # compute intrapersonal and extrapersonal pairs + logger.info(" -> Computing pairs") + intra_pairs, extra_pairs = bob.learn.linear.bic_intra_extra_pairs(train_features) + # limit pairs, if desired + if self.maximum_pair_count is not None: + if len(intra_pairs) > self.maximum_pair_count: + logger.info( + " -> Limiting intrapersonal pairs from %d to %d" % (len(intra_pairs), self.maximum_pair_count)) + intra_pairs = utils.selected_elements(intra_pairs, self.maximum_pair_count) + if len(extra_pairs) > self.maximum_pair_count: + logger.info( + " -> Limiting extrapersonal pairs from %d to %d" % (len(extra_pairs), self.maximum_pair_count)) + extra_pairs = utils.selected_elements(extra_pairs, self.maximum_pair_count) + + # train the BIC Machine with these pairs + logger.info(" -> Computing %d intrapersonal results", len(intra_pairs)) + intra_vectors = self._trainset_for(intra_pairs) + logger.info(" -> Computing %d extrapersonal results", len(extra_pairs)) + extra_vectors = self._trainset_for(extra_pairs) + + logger.info(" -> Training BIC machine") + trainer = bob.learn.linear.BICTrainer(self.M_I, + self.M_E) if self.M_I is not None else bob.learn.linear.BICTrainer() + trainer.train(intra_vectors, extra_vectors, self.bic_machine) + + # save the machine to file + self.bic_machine.save(bob.io.base.HDF5File(enroller_file, 'w')) + + def load_enroller(self, enroller_file): + """Reads the :py:class:`bob.learn.linear.BICMachine` from file. + + The :py:attr:`bob.learn.linear.BICMachine.use_DFFS` will be overwritten by the ``use_dffs`` value specified in this class' constructor. + + **Parameters:** + + enroller_file : str + An existing file, from which the :py:class:`bob.learn.linear.BICMachine` will be read. 
+ """ + self.bic_machine.load(bob.io.base.HDF5File(enroller_file, 'r')) + # to set this should not be required, but just in case + # you re-use a trained enroller file that hat different setup of use_DFFS + self.bic_machine.use_DFFS = self.use_dffs + + def enroll(self, enroll_features): + """enroll(enroll_features) -> model - def enroll(self, enroll_features): - """enroll(enroll_features) -> model + Enrolls the model by storing all given input features. + The features must be writable with the ``write_function`` defined in the constructor. - Enrolls the model by storing all given input features. - The features must be writable with the ``write_function`` defined in the constructor. + **Parameters:** + + enroll_features : [object] + The list of projected features to enroll the model from. + + **Returns:** + + model : [object] + The enrolled model (which is identical to the input features). + """ + return enroll_features - **Parameters:** + def write_model(self, model, model_file): + """Writes all features of the model into one HDF5 file. - enroll_features : [object] - The list of projected features to enroll the model from. + To write the features, the ``write_function`` specified in the constructor is employed. - **Returns:** + **Parameters:** - model : [object] - The enrolled model (which is identical to the input features). - """ - return enroll_features + model : [object] + The model to write, which is a list of features. + model_file : str or :py:class:`bob.io.base.HDF5File` + The file (open for writing) or a file name to write into. + """ + hdf5 = model_file if isinstance(model_file, bob.io.base.HDF5File) else bob.io.base.HDF5File(model_file, 'w') + for i, f in enumerate(model): + hdf5.create_group("Feature%d" % i) + hdf5.cd("Feature%d" % i) + self.write_function(f, hdf5) + hdf5.cd("..") - def write_model(self, model, model_file): - """Writes all features of the model into one HDF5 file. 
+ def read_model(self, model_file): + """read_model(model_file) -> model - To write the features, the ``write_function`` specified in the constructor is employed. + Reads all features of the model from the given HDF5 file. - **Parameters:** + To read the features, the ``read_function`` specified in the constructor is employed. - model : [object] - The model to write, which is a list of features. + **Parameters:** - model_file : str or :py:class:`bob.io.base.HDF5File` - The file (open for writing) or a file name to write into. - """ - hdf5 = model_file if isinstance(model_file, bob.io.base.HDF5File) else bob.io.base.HDF5File(model_file, 'w') - for i, f in enumerate(model): - hdf5.create_group("Feature%d" % i) - hdf5.cd("Feature%d" % i) - self.write_function(f, hdf5) - hdf5.cd("..") - - - def read_model(self, model_file): - """read_model(model_file) -> model + model_file : str or :py:class:`bob.io.base.HDF5File` + The file (open for reading) or the name of an existing file to read from. - Reads all features of the model from the given HDF5 file. + **Returns:** - To read the features, the ``read_function`` specified in the constructor is employed. - - **Parameters:** + model : [object] + The read model, which is a list of features. + """ + hdf5 = bob.io.base.HDF5File(model_file) + i = 0 + model = [] + while hdf5.has_group("Feature%d" % i): + hdf5.cd("Feature%d" % i) + model.append(self.read_function(hdf5)) + hdf5.cd("..") + i += 1 + return model - model_file : str or :py:class:`bob.io.base.HDF5File` - The file (open for reading) or the name of an existing file to read from. + def read_probe(self, probe_file): + """read_probe(probe_file) -> probe - **Returns:** + Reads the probe feature from the given HDF5 file. - model : [object] - The read model, which is a list of features. 
- """ - hdf5 = bob.io.base.HDF5File(model_file) - i = 0 - model = [] - while hdf5.has_group("Feature%d" % i): - hdf5.cd("Feature%d" % i) - model.append(self.read_function(hdf5)) - hdf5.cd("..") - i += 1 - return model + To read the feature, the ``read_function`` specified in the constructor is employed. + **Parameters:** - def read_probe(self, probe_file): - """read_probe(probe_file) -> probe + probe_file : str or :py:class:`bob.io.base.HDF5File` + The file (open for reading) or the name of an existing file to read from. - Reads the probe feature from the given HDF5 file. + **Returns:** - To read the feature, the ``read_function`` specified in the constructor is employed. + probe : object + The read probe, which is a feature. + """ + return self.read_function(bob.io.base.HDF5File(probe_file)) - **Parameters:** + def score(self, model, probe): + """score(model, probe) -> float - probe_file : str or :py:class:`bob.io.base.HDF5File` - The file (open for reading) or the name of an existing file to read from. + Computes the BIC score between the model and the probe. + First, the ``comparison_function`` is used to create the comparison vectors between all model features and the probe feature. + Then, a BIC score is computed for each comparison vector, and the BIC scores are fused using the :py:func:`model_fusion_function` defined in the :py:class:`Algorithm` base class. - **Returns:** + **Parameters:** - probe : object - The read probe, which is a feature. - """ - return self.read_function(bob.io.base.HDF5File(probe_file)) + model : [object] + The model storing all model features. + probe : object + The probe feature. - def score(self, model, probe): - """score(model, probe) -> float + **Returns:** - Computes the BIC score between the model and the probe. - First, the ``comparison_function`` is used to create the comparison vectors between all model features and the probe feature. 
- Then, a BIC score is computed for each comparison vector, and the BIC scores are fused using the :py:func:`model_fusion_function` defined in the :py:class:`Algorithm` base class. + score : float + A fused BIC similarity value between ``model`` and ``probe``. + """ + # compute average score for the models + scores = [] + for i in range(len(model)): + diff = self.comparison_function(model[i], probe) + assert len(diff) == self.bic_machine.input_size + scores.append(self.bic_machine(diff)) + return self.model_fusion_function(scores) - **Parameters:** + # re-define unused functions, just so that they do not get documented + def train_projector(*args, **kwargs): + raise NotImplementedError() - model : [object] - The model storing all model features. + def load_projector(*args, **kwargs): + pass - probe : object - The probe feature. + def project(*args, **kwargs): + raise NotImplementedError() - **Returns:** + def write_feature(*args, **kwargs): + raise NotImplementedError() - score : float - A fused BIC similarity value between ``model`` and ``probe``. 
- """ - # compute average score for the models - scores = [] - for i in range(len(model)): - diff = self.comparison_function(model[i], probe) - assert len(diff) == self.bic_machine.input_size - scores.append(self.bic_machine(diff)) - return self.model_fusion_function(scores) - - # re-define unused functions, just so that they do not get documented - def train_projector(*args,**kwargs): raise NotImplementedError() - def load_projector(*args,**kwargs): pass - def project(*args,**kwargs): raise NotImplementedError() - def write_feature(*args,**kwargs): raise NotImplementedError() - def read_feature(*args,**kwargs): raise NotImplementedError() + def read_feature(*args, **kwargs): + raise NotImplementedError() diff --git a/bob/bio/base/database/database.py b/bob/bio/base/database/database.py index 7393ee4d1200479d138d3eefbbd775f8eb85380d..8e6f76f886b06384f9aa1131f2289972e8147dd9 100644 --- a/bob/bio/base/database/database.py +++ b/bob/bio/base/database/database.py @@ -10,7 +10,64 @@ import bob.db.base import bob.bio.base.database + class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): + """This class represents the basic API for database access. + Please use this class as a base class for your database access classes. + Do not forget to call the constructor of this base class in your derived class. + + **Parameters:** + + name : str + A unique name for the database. + + all_files_options : dict + Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query when retrieving all data. + + extractor_training_options : dict + Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the extractor training. + + projector_training_options : dict + Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the projector training. 
+ + enroller_training_options : dict + Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the enroller training. + + check_original_files_for_existence : bool + Enables to test for the original data files when querying the database. + + original_directory : str + The directory where the original data of the database are stored. + + original_extension : str + The file name extension of the original data. + + annotation_directory : str + The directory where the image annotations of the database are stored, if any. + + annotation_extension : str + The file name extension of the annotation files. + + annotation_type : str + The type of the annotation file to read, see `bob.db.base.annotations.read_annotation_file` for accepted formats. + + protocol : str or ``None`` + The name of the protocol that defines the default experimental setup for this database. + + .. todo:: Check if the ``None`` protocol is supported. + + training_depends_on_protocol : bool + Specifies, if the training set used for training the extractor and the projector depend on the protocol. + This flag is used to avoid re-computation of data when running on the different protocols of the same database. + + models_depend_on_protocol : bool + Specifies, if the models depend on the protocol. + This flag is used to avoid re-computation of models when running on the different protocols of the same database. + + kwargs : ``key=value`` pairs + The arguments of the :py:class:`Database` base class constructor. + + """ def __init__( self, name, @@ -32,62 +89,6 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): models_depend_on_protocol=False, **kwargs ): - """This class represents the basic API for database access. - Please use this class as a base class for your database access classes. - Do not forget to call the constructor of this base class in your derived class. 
- - **Parameters:** - - name : str - A unique name for the database. - - all_files_options : dict - Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query when retrieving all data. - - extractor_training_options : dict - Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the extractor training. - - projector_training_options : dict - Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the projector training. - - enroller_training_options : dict - Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the enroller training. - - check_original_files_for_existence : bool - Enables to test for the original data files when querying the database. - - original_directory : str - The directory where the original data of the database are stored. - - original_extension : str - The file name extension of the original data. - - annotation_directory : str - The directory where the image annotations of the database are stored, if any. - - annotation_extension : str - The file name extension of the annotation files. - - annotation_type : str - The type of the annotation file to read, see :py:func:`bob.bio.base.database.read_annotation_file` for accepted formats. - - protocol : str or ``None`` - The name of the protocol that defines the default experimental setup for this database. - - .. todo:: Check if the ``None`` protocol is supported. - - training_depends_on_protocol : bool - Specifies, if the training set used for training the extractor and the projector depend on the protocol. - This flag is used to avoid re-computation of data when running on the different protocols of the same database. - - models_depend_on_protocol : bool - Specifies, if the models depend on the protocol. 
- This flag is used to avoid re-computation of models when running on the different protocols of the same database. - - kwargs : ``key=value`` pairs - The arguments of the :py:class:`Database` base class constructor. - - """ assert isinstance(name, str) @@ -214,13 +215,13 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): **Parameters:** - files : [:py:class:`File`] + files : [:py:class:`BioFile`] The list of files to be uniquified and sorted. **Returns:** - sorted : [:py:class:`File`] - The sorted list of files, with duplicate :py:attr:`File.id`\s being removed. + sorted : [:py:class:`BioFile`] + The sorted list of files, with duplicate `BioFile.id`\s being removed. """ # sort files using their sort function sorted_files = sorted(files) @@ -242,13 +243,13 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): **Parameters:** - files : :py:class:`File` - A list of files that should be split up by :py:attr:`File.client_id`. + files : :py:class:`BioFile` + A list of files that should be split up by `BioFile.client_id`. **Returns:** - files_by_client : [[:py:class:`File`]] - The list of lists of files, where each sub-list groups the files with the same :py:attr:`File.client_id` + files_by_client : [[:py:class:`BioFile`]] + The list of lists of files, where each sub-list groups the files with the same `BioFile.client_id` """ client_files = {} for file in files: @@ -264,11 +265,11 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): def annotations(self, file): """ Returns the annotations for the given File object, if available. - It uses :py:func:`bob.bio.base.database.read_annotation_file` to load the annotations. + It uses `bob.db.base.annotations.read_annotation_file` to load the annotations. **Parameters:** - file : :py:class:`File` + file : :py:class:`BioFile` The file for which annotations should be returned. 
**Returns:** @@ -293,7 +294,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): **Parameters:** - files : [:py:class:`File`] + files : [:py:class:`BioFile`] The list of file object to retrieve the file names for. directory : str @@ -323,7 +324,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): **Parameters:** - files : [:py:class:`File`] + files : [:py:class:`BioFile`] The list of file object to retrieve the original data file names for. **Returns:** @@ -413,7 +414,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): Keyword parameters: - file : :py:class:`File` or a derivative + file : :py:class:`BioFile` or a derivative The File objects for which the file name should be retrieved Return value : str @@ -444,7 +445,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): **Returns:** - files : [:py:class:`File`] + files : [:py:class:`BioFile`] The sorted and unique list of all files of the database. """ return self.sort(self.objects(protocol=self.protocol, groups=groups, **self.all_files_options)) @@ -467,7 +468,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): **Returns:** - files : [:py:class:`File`] or [[:py:class:`File`]] + files : [:py:class:`BioFile`] or [[:py:class:`BioFile`]] The (arranged) list of files used for the training of the given step. """ if step is None: @@ -502,7 +503,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): **Returns:** - files : [:py:class:`File`] + files : [:py:class:`BioFile`] The sorted and unique list of test files of the database. """ return self.sort(self.objects(protocol=self.protocol, groups=groups, **self.all_files_options)) @@ -551,7 +552,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): **Returns:** - files : [:py:class:`File`] + files : [:py:class:`BioFile`] The list of files used for to probe the model with the given model id. 
""" if model_id is not None: @@ -605,7 +606,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)): **Returns:** - files : [:py:class:`FileSet`] or something similar + files : [:py:class:`BioFileSet`] or something similar The list of file sets used to probe the model with the given model id.""" if model_id is not None: file_sets = self.object_sets(protocol=self.protocol, groups=group, model_ids=(model_id,), purposes='probe', @@ -714,7 +715,7 @@ class ZTBioDatabase(BioDatabase): **Returns:** - files : [:py:class:`File`] + files : [:py:class:`BioFile`] The sorted and unique list of all files of the database. """ files = self.objects(protocol=self.protocol, groups=groups, **self.all_files_options) @@ -776,7 +777,7 @@ class ZTBioDatabase(BioDatabase): **Returns:** - files : [:py:class:`File`] + files : [:py:class:`BioFile`] The sorted list of files used for to enroll the model with the given model id. """ return self.sort(self.tobjects(protocol=self.protocol, groups=group, model_ids=(t_model_id,))) @@ -785,7 +786,7 @@ class ZTBioDatabase(BioDatabase): """z_probe_files(group = 'dev') -> files Returns a list of probe files used to compute the Z-Norm, respecting the current protocol. - The Z-probe files can be limited using the ``z_probe_options`` in the query to :py:meth:`ZTBioDatabase.z_probe_files` + The Z-probe files can be limited using the ``z_probe_options`` in the query to :py:meth:`bob.bio.base.database.ZTBioDatabase.z_probe_files` **Parameters:** @@ -794,7 +795,7 @@ class ZTBioDatabase(BioDatabase): **Returns:** - files : [:py:class:`File`] + files : [:py:class:`BioFile`] The unique list of files used to compute the Z-norm. """ return self.sort(self.zobjects(protocol=self.protocol, groups=group, **self.z_probe_options)) @@ -812,7 +813,7 @@ class ZTBioDatabase(BioDatabase): **Returns:** - files : [:py:class:`FileSet`] + files : [:py:class:`BioFileSet`] The unique list of file sets used to compute the Z-norm. 
""" raise NotImplementedError("Please implement this function in derived classes") @@ -822,12 +823,16 @@ class ZTBioDatabase(BioDatabase): Returns the client id for the given T-Norm model id. In this base class implementation, we just use the :py:meth:`client_id_from_model_id` function. Overload this function if you need another behavior. + **Parameters:** + t_model_id : int or str A unique ID that identifies the T-Norm model. group : one of ``('dev', 'eval')`` The group to get the client ids for. + **Returns:** + client_id : [int] or [str] A unique ID that identifies the client, to which the T-Norm model belongs. """ diff --git a/bob/bio/base/database/file.py b/bob/bio/base/database/file.py index 3dca312725b7e1cb231d22d43445aea4acbd14b2..d196203edc9089a9c61fdf5134dfe6b0b9d60245 100644 --- a/bob/bio/base/database/file.py +++ b/bob/bio/base/database/file.py @@ -18,7 +18,7 @@ class BioFile(bob.db.base.File): The id of the client this file belongs to. Its type depends on your implementation. If you use an SQL database, this should be an SQL type like Integer or String. - For path and file_id, please refer to :py:class:`bob.db.base.File` constructor + For path and file_id, please refer to :py:class:`bob.db.base.file.File` constructor """ bob.db.base.File.__init__(self, path, file_id) @@ -28,34 +28,36 @@ class BioFile(bob.db.base.File): class BioFileSet(BioFile): - """This class defines the minimum interface of a set of database files that needs to be exported. - Use this class, whenever the database provides several files that belong to the same probe. - Each file set has an id, and a list of associated files, which are of type :py:class:`BioFile` of the same client. - The file set id can be anything hashable, but needs to be unique all over the database. - **Parameters:** - file_set_id : str or int - A unique ID that identifies the file set. - files : [:py:class:`BioFile`] - A non-empty list of BioFile objects that should be stored inside this file. 
- All files of that list need to have the same client ID. - """ - - def __init__(self, file_set_id, files, path=None): - - # don't accept empty file lists - assert len(files), "Cannot create an empty BioFileSet" - - # call base class constructor - BioFile.__init__(self, files[0].client_id, "+".join(f.path for f in files) if path is None else path, file_set_id) - - # check that all files come from the same client - assert all(f.client_id == self.client_id for f in files) - - # The list of files contained in this set - self.files = files - """The list of :py:class:`BioFile` objects stored in this file set""" - - def __lt__(self, other): - """Defines an order between file sets by using the order of the file set ids.""" - # compare two BioFile set objects by comparing their IDs - return self.id < other.id + """This class defines the minimum interface of a set of database files that needs to be exported. + Use this class, whenever the database provides several files that belong to the same probe. + Each file set has an id, and a list of associated files, which are of type :py:class:`BioFile` of the same client. + The file set id can be anything hashable, but needs to be unique all over the database. + + **Parameters:** + + file_set_id : str or int + A unique ID that identifies the file set. + files : [:py:class:`BioFile`] + A non-empty list of BioFile objects that should be stored inside this file. + All files of that list need to have the same client ID. 
+ """ + + def __init__(self, file_set_id, files, path=None): + # don't accept empty file lists + assert len(files), "Cannot create an empty BioFileSet" + + # call base class constructor + BioFile.__init__(self, files[0].client_id, "+".join(f.path for f in files) if path is None else path, + file_set_id) + + # check that all files come from the same client + assert all(f.client_id == self.client_id for f in files) + + # The list of files contained in this set + self.files = files + """The list of :py:class:`BioFile` objects stored in this file set""" + + def __lt__(self, other): + """Defines an order between file sets by using the order of the file set ids.""" + # compare two BioFile set objects by comparing their IDs + return self.id < other.id diff --git a/bob/bio/base/extractor/Extractor.py b/bob/bio/base/extractor/Extractor.py index 4a0dbcd8f0c3aded41d11eecbbfafd44a8da4a67..b4c238d203e4a3111404a9c9ae2ce86aad2c73fa 100644 --- a/bob/bio/base/extractor/Extractor.py +++ b/bob/bio/base/extractor/Extractor.py @@ -24,7 +24,7 @@ class Extractor: Ignored, if ``requires_training`` is ``False`` kwargs : ``key=value`` pairs - A list of keyword arguments to be written in the :py:meth:`__str__` function. + A list of keyword arguments to be written in the `__str__` function. """ def __init__( @@ -89,7 +89,7 @@ class Extractor: **Parameters:** feature : object - The extracted feature, i.e., what is returned from :py:meth:`__call__`. + The extracted feature, i.e., what is returned from `__call__`. feature_file : str or :py:class:`bob.io.base.HDF5File` The file open for writing, or the name of the file to write. 
diff --git a/bob/bio/base/extractor/Linearize.py b/bob/bio/base/extractor/Linearize.py
index be71e0fe1ad621ffbb1c70029e297d2baff22325..e69de5d1b953120ab72191464a940ce5a80557ff 100644
--- a/bob/bio/base/extractor/Linearize.py
+++ b/bob/bio/base/extractor/Linearize.py
@@ -6,40 +6,41 @@
from .Extractor import Extractor
import numpy
-class Linearize (Extractor):
- """Extracts features by simply concatenating all elements of the data into one long vector.
- If a ``dtype`` is specified in the contructor, it is assured that the resulting
- """
+class Linearize(Extractor):
+ """Extracts features by simply concatenating all elements of the data into one long vector.
- def __init__(self, dtype=None):
- """If the ``dtype`` parameter is given, it specifies the data type that is enforced for the features."""
- Extractor.__init__(self, dtype = dtype)
- self.dtype = dtype
+ If a ``dtype`` is specified in the constructor, it is assured that the resulting feature vector will be of that data type.
+ """
- def __call__(self, data):
- """__call__(data) -> data
+ def __init__(self, dtype=None):
+ """If the ``dtype`` parameter is given, it specifies the data type that is enforced for the features."""
+ Extractor.__init__(self, dtype=dtype)
+ self.dtype = dtype
- Takes data of arbitrary dimensions and linearizes it into a 1D vector; enforcing the data type, if desired.
+ def __call__(self, data):
+ """__call__(data) -> data
- **Parameters:**
+ Takes data of arbitrary dimensions and linearizes it into a 1D vector; enforcing the data type, if desired.
- data : :py:class:`numpy.ndarray`
- The preprocessed data to be transformed into one vector.
+ **Parameters:**
- **Returns:**
+ data : :py:class:`numpy.ndarray`
+ The preprocessed data to be transformed into one vector.
- data : 1D :py:class:`numpy.ndarray`
- The extracted feature vector, of the desired ``dtype`` (if specified).
- """
- assert isinstance(data, numpy.ndarray)
+ **Returns:**
+
+ data : 1D :py:class:`numpy.ndarray`
+ The extracted feature vector, of the desired ``dtype`` (if specified).
+ """
+ assert isinstance(data, numpy.ndarray)
- linear = numpy.reshape(data, data.size)
- if self.dtype is not None:
- linear = linear.astype(self.dtype)
- return linear
+ linear = numpy.reshape(data, data.size)
+ if self.dtype is not None:
+ linear = linear.astype(self.dtype)
+ return linear
+ # re-define unused functions, just so that they do not get documented
+ def train(*args, **kwargs): raise NotImplementedError()
- # re-define unused functions, just so that they do not get documented
- def train(*args,**kwargs): raise NotImplementedError()
- def load(*args,**kwargs): pass
+ def load(*args, **kwargs): pass
diff --git a/bob/bio/base/preprocessor/Preprocessor.py b/bob/bio/base/preprocessor/Preprocessor.py
index 6c61cabd18c5831afae1757a1ca0c1015c23d12a..4ab8de669df7e60e19e48967cfdc08030f48290a 100644
--- a/bob/bio/base/preprocessor/Preprocessor.py
+++ b/bob/bio/base/preprocessor/Preprocessor.py
@@ -5,104 +5,101 @@
from .. import utils
-class Preprocessor:
- """This is the base class for all preprocessors.
- It defines the minimum requirements for all derived proprocessor classes.
-
- **Parameters:**
-
- writes_data : bool
- Select, if the preprocessor actually writes preprocessed images, or if it is simply returning values.
-
- read_original_data: callable
- This function is used to read the original data from file.
- It takes three inputs: A :py:class:`bob.bio.base.database.BioFile` (or one of its derivatives), the original directory (as ``str``) and the original extension (as ``str``).
- kwargs : ``key=value`` pairs
- A list of keyword arguments to be written in the :py:meth:`__str__` function.
- """
+class Preprocessor:
+ """This is the base class for all preprocessors.
+ It defines the minimum requirements for all derived preprocessor classes.
- def __init__(self, writes_data = True, read_original_data = utils.read_original_data, **kwargs):
- # Each class needs to have a constructor taking
- # all the parameters that are required for the preprocessing as arguments
- self.writes_data = writes_data
- self.read_original_data = read_original_data
- self._kwargs = kwargs
- pass
+ **Parameters:**
+ writes_data : bool
+ Select, if the preprocessor actually writes preprocessed images, or if it is simply returning values.
- # The call function (i.e. the operator() in C++ terms)
- def __call__(self, data, annotations):
- """__call__(data, annotations) -> dara
+ read_original_data: callable
+ This function is used to read the original data from file.
+ It takes three inputs: A :py:class:`bob.bio.base.database.BioFile` (or one of its derivatives), the original directory (as ``str``) and the original extension (as ``str``).
- This is the call function that you have to overwrite in the derived class.
- The parameters that this function will receive are:
+ kwargs : ``key=value`` pairs
+ A list of keyword arguments to be written in the `__str__` function.
+ """
- **Parameters:**
+ def __init__(self, writes_data=True, read_original_data=utils.read_original_data, **kwargs):
+ # Each class needs to have a constructor taking
+ # all the parameters that are required for the preprocessing as arguments
+ self.writes_data = writes_data
+ self.read_original_data = read_original_data
+ self._kwargs = kwargs
+ pass
- data : object
- The original data that needs preprocessing, usually a :py:class:`numpy.ndarray`, but might be different.
+ # The call function (i.e. the operator() in C++ terms)
+ def __call__(self, data, annotations):
- annotations : {} or None
- The annotations (if any) that belongs to the given ``data``; as a dictionary.
- The type of the annotation depends on your kind of problem.
+ """__call__(data, annotations) -> data
+ The parameters that this function will receive are: - **Returns:** + **Parameters:** - data : object - The *preprocessed* data, usually a :py:class:`numpy.ndarray`, but might be different. - """ - raise NotImplementedError("Please overwrite this function in your derived class") + data : object + The original data that needs preprocessing, usually a :py:class:`numpy.ndarray`, but might be different. + annotations : {} or None + The annotations (if any) that belongs to the given ``data``; as a dictionary. + The type of the annotation depends on your kind of problem. - def __str__(self): - """__str__() -> info + **Returns:** - This function returns all parameters of this class (and its derived class). + data : object + The *preprocessed* data, usually a :py:class:`numpy.ndarray`, but might be different. + """ + raise NotImplementedError("Please overwrite this function in your derived class") - **Returns:** + def __str__(self): + """__str__() -> info - info : str - A string containing the full information of all parameters of this (and the derived) class. - """ - return utils.pretty_print(self, self._kwargs) + This function returns all parameters of this class (and its derived class). - ############################################################ - ### Special functions that might be overwritten on need - ############################################################ + **Returns:** + info : str + A string containing the full information of all parameters of this (and the derived) class. + """ + return utils.pretty_print(self, self._kwargs) - def write_data(self, data, data_file): - """Writes the given *preprocessed* data to a file with the given name. - In this base class implementation, we simply use :py:func:`bob.bio.base.save` for that. - If you have a different format (e.g. not images), please overwrite this function. 
+ ############################################################ + ### Special functions that might be overwritten on need + ############################################################ - **Parameters:** + def write_data(self, data, data_file): + """Writes the given *preprocessed* data to a file with the given name. + In this base class implementation, we simply use :py:func:`bob.bio.base.save` for that. + If you have a different format (e.g. not images), please overwrite this function. - data : object - The preprocessed data, i.e., what is returned from :py:meth:`__call__`. + **Parameters:** - data_file : str or :py:class:`bob.io.base.HDF5File` - The file open for writing, or the name of the file to write. - """ - utils.save(data, data_file) + data : object + The preprocessed data, i.e., what is returned from `__call__`. + data_file : str or :py:class:`bob.io.base.HDF5File` + The file open for writing, or the name of the file to write. + """ + utils.save(data, data_file) - def read_data(self, data_file): - """read_data(data_file) -> data + def read_data(self, data_file): + """read_data(data_file) -> data - Reads the *preprocessed* data from file. - In this base class implementation, it uses :py:func:`bob.bio.base.load` to do that. - If you have different format, please overwrite this function. + Reads the *preprocessed* data from file. + In this base class implementation, it uses :py:func:`bob.bio.base.load` to do that. + If you have different format, please overwrite this function. - **Parameters:** + **Parameters:** - data_file : str or :py:class:`bob.io.base.HDF5File` - The file open for reading or the name of the file to read from. + data_file : str or :py:class:`bob.io.base.HDF5File` + The file open for reading or the name of the file to read from. - **Returns:** + **Returns:** - data : object (usually :py:class:`numpy.ndarray`) - The preprocessed data read from file. 
- """ - return utils.load(data_file) + data : object (usually :py:class:`numpy.ndarray`) + The preprocessed data read from file. + """ + return utils.load(data_file) diff --git a/bob/bio/base/tools/FileSelector.py b/bob/bio/base/tools/FileSelector.py index 16592e26b1fd04593eb3d84e442daf3260ff93ba..120724d902dbffec0d0ab3bc8b6a217aaa111b06 100644 --- a/bob/bio/base/tools/FileSelector.py +++ b/bob/bio/base/tools/FileSelector.py @@ -15,7 +15,7 @@ class FileSelector: **Parameters:** - database : :py:class:`bob.bio.base.database.Database` or derived + database : :py:class:`bob.bio.base.database.BioDatabase` or derived The database object that provides the list of files. preprocessed_directory : str diff --git a/bob/bio/base/tools/command_line.py b/bob/bio/base/tools/command_line.py index f88e960c01413e802a730d54b1e9e105733c64fe..4b992f396393ba906b3a68da10c7ac256d0a7815 100644 --- a/bob/bio/base/tools/command_line.py +++ b/bob/bio/base/tools/command_line.py @@ -8,7 +8,6 @@ logger = bob.core.log.setup("bob.bio.base") from .. import utils from . import FileSelector -from bob.bio.base.database import BioDatabase """Execute biometric recognition algorithms on a certain biometric database. """ @@ -201,6 +200,7 @@ def initialize(parsers, command_line_parameters = None, skips = []): .. note:: The database, preprocessor, extractor, algorithm and grid (if specified) are actual instances of the according classes. """ + from bob.bio.base.database import BioDatabase # add execute-only flags to command line options if skips is not None: diff --git a/bob/bio/base/utils/singleton.py b/bob/bio/base/utils/singleton.py index 9a2092c47024cff8271206db463ef6542629ebbe..9b316a539d401ddc3edf4e5562203453c32c7e9a 100644 --- a/bob/bio/base/utils/singleton.py +++ b/bob/bio/base/utils/singleton.py @@ -7,7 +7,7 @@ class Singleton: The decorated class can define one `__init__` function that takes an arbitrary list of parameters. - To get the singleton instance, use the :py:meth:`instance` method. 
Trying to use :py:meth:`__call__` will result in a :py:class:`TypeError` being raised. + To get the singleton instance, use the :py:meth:`instance` method. Trying to use `__call__` will result in a `TypeError` being raised. Limitations: diff --git a/doc/implementation.rst b/doc/implementation.rst index 77686756fe9aa5477da5b253ac8405f3cac1eeac..5b0e1fd31ecc4feb94cf72d08ecf04da63a1318c 100644 --- a/doc/implementation.rst +++ b/doc/implementation.rst @@ -219,7 +219,7 @@ Verification Database Interface For most of the data sets, we rely on the database interfaces from Bob_. -Particularly, all databases that are derived from the :py:class:`bob.bio.base.database.BioDatabase` (click :ref:`here <verification_databases>` for a list of implemented databases) are supported by a special derivation of the databases from above. +Particularly, all databases that are derived from the :py:class:`bob.bio.base.database.BioDatabase` (click `here <https://github.com/idiap/bob/wiki/Packages>`_ for a list of implemented databases) are supported by a special derivation of the databases from above. For these databases, the special :py:class:`bob.bio.base.database.BioDatabase` interface is provided, which takes the Bob_ database as parameter. Several such databases are defined in the according packages, i.e., :ref:`bob.bio.spear <bob.bio.spear>`, :ref:`bob.bio.face <bob.bio.face>` and :ref:`bob.bio.video <bob.bio.video>`. For Bob_'s ZT-norm databases, we provide the :py:class:`bob.bio.base.database.ZTBioDatabase` interface. @@ -315,7 +315,7 @@ The process of registering a resource is relatively easy. We use the SetupTools_ mechanism of registering so-called entry points in the ``setup.py`` file of the according ``bob.bio`` package. 
Particularly, we use a specific list of entry points, which are: -* ``bob.bio.database`` to register an instance of a (derivation of a) :py:class:`bob.bio.base.database.Database` +* ``bob.bio.database`` to register an instance of a (derivation of a) :py:class:`bob.bio.base.database.BioDatabase` * ``bob.bio.preprocessor`` to register an instance of a (derivation of a) :py:class:`bob.bio.base.preprocessor.Preprocessor` * ``bob.bio.extractor`` to register an instance of a (derivation of a) :py:class:`bob.bio.base.extractor.Extractor` * ``bob.bio.algorithm`` to register an instance of a (derivation of a) :py:class:`bob.bio.base.algorithm.Algorithm` diff --git a/doc/implemented.rst b/doc/implemented.rst index 1fb13eaf878e04d82dec8c3f42cea66a3fed98d6..d2a9489173cc214ea52ba072874f844f30ce74b6 100644 --- a/doc/implemented.rst +++ b/doc/implemented.rst @@ -57,7 +57,6 @@ Grid Configuration .. automodule:: bob.bio.base.grid - .. data:: PREDEFINED_QUEUES A dictionary of predefined queue keywords, which are adapted to the Idiap_ SGE. @@ -65,12 +64,5 @@ Grid Configuration .. adapted from http://stackoverflow.com/a/29789910/3301902 to ge a nice dictionary content view - .. exec:: - import json - from bob.bio.base.grid import PREDEFINED_QUEUES - json_obj = json.dumps(PREDEFINED_QUEUES, sort_keys=True, indent=2) - json_obj = json_obj.replace("\n", "\n ") - print ('.. code-block:: JavaScript\n\n PREDEFINED_QUEUES = %s\n\n' % json_obj) - .. include:: links.rst diff --git a/doc/installation.rst b/doc/installation.rst index 6fd14c3c3bb4314a6bfc089a93a9223c9153cb21..d2d7acd24068413dd722748a2dcb518a0eb31c83 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -47,7 +47,7 @@ There, in the ``eggs`` section of the ``buildout.cfg`` file, simply list the ``b gridtk in order to download and install all packages that are required for your experiments. 
-In the example above, you might want to run a video face recognition experiments using the :py:class:`bob.bio.face.preprocessor.FaceDetector` and the :py:class:`bob.bio.face.extractor.DCTBlocks` feature extractor defined in :ref:`bob.bio.face <bob.bio.face>`, the :py:class:`bob.bio.gmm.algorithm.IVector` algorithm defined in :ref:`bob.bio.gmm <bob.bio.gmm>` and the video extensions defined in :ref:`bob.bio.video <bob.bio.video>`, using the YouTube faces database interface defined in :ref:`bob.db.youtube <bob.db.youtube>`.
+In the example above, you might want to run a video face recognition experiment using the :py:class:`bob.bio.face.preprocessor.FaceDetect` and the :py:class:`bob.bio.face.extractor.DCTBlocks` feature extractor defined in :ref:`bob.bio.face <bob.bio.face>`, the :py:class:`bob.bio.gmm.algorithm.IVector` algorithm defined in :ref:`bob.bio.gmm <bob.bio.gmm>` and the video extensions defined in :ref:`bob.bio.video <bob.bio.video>`, using the YouTube faces database interface defined in :ref:`bob.db.youtube <bob.db.youtube>`.
Running the simple command line:
.. code-block:: sh
@@ -64,7 +64,7 @@ Databases
With ``bob.bio`` you will run biometric recognition experiments using some default biometric recognition databases.
Though the verification protocols are implemented in ``bob.bio``, the original data are **not included**.
To download the original data of the databases, please refer to the according Web-pages.
-For a list of supported databases including their download URLs, please refer to the :ref:`verification_databases`.
+For a list of supported databases including their download URLs, please refer to the `verification_databases <https://github.com/idiap/bob/wiki/Packages>`_.
After downloading the original data for the databases, you will need to tell ``bob.bio``, where these databases can be found.
For this purpose, we have decided to implement a special file, where you can set your directories.
diff --git a/test-requirements.txt b/test-requirements.txt index 89e63d6ac71191c42c61748db750825d0f213b62..179b931cd7d5cf46667d6214a2af16d6a0cb8b58 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,4 +1,9 @@ bob.db.atnt bob.io.image +bob.bio.face +bob.bio.gmm # For the docs +bob.bio.spear +bob.bio.video +bob.db.youtube matplotlib gridtk