Commit b88c4e60 authored by Tiago de Freitas Pereira

Fixing sphinx warnings

parent 303ed8f2
Pipeline #4233 passed with stages in 134 minutes and 6 seconds
@@ -47,7 +47,7 @@ class Algorithm:
See :py:func:`bob.bio.base.score_fusion_strategy` for possible values.
kwargs : ``key=value`` pairs
A list of keyword arguments to be written in the :py:meth:`__str__` function.
A list of keyword arguments to be written in the `__str__` function.
"""
......
This diff is collapsed.
@@ -10,7 +10,64 @@ import bob.db.base
import bob.bio.base.database
class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
"""This class represents the basic API for database access.
Please use this class as a base class for your database access classes.
Do not forget to call the constructor of this base class in your derived class.
**Parameters:**
name : str
A unique name for the database.
all_files_options : dict
Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query when retrieving all data.
extractor_training_options : dict
Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the extractor training.
projector_training_options : dict
Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the projector training.
enroller_training_options : dict
Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the enroller training.
check_original_files_for_existence : bool
If enabled, the existence of the original data files is checked when querying the database.
original_directory : str
The directory where the original data of the database are stored.
original_extension : str
The file name extension of the original data.
annotation_directory : str
The directory where the image annotations of the database are stored, if any.
annotation_extension : str
The file name extension of the annotation files.
annotation_type : str
The type of the annotation file to read; see `bob.db.base.annotations.read_annotation_file` for accepted formats.
protocol : str or ``None``
The name of the protocol that defines the default experimental setup for this database.
.. todo:: Check if the ``None`` protocol is supported.
training_depends_on_protocol : bool
Specifies whether the training set used for training the extractor and the projector depends on the protocol.
This flag is used to avoid re-computation of data when running on the different protocols of the same database.
models_depend_on_protocol : bool
Specifies whether the models depend on the protocol.
This flag is used to avoid re-computation of models when running on the different protocols of the same database.
kwargs : ``key=value`` pairs
The arguments of the :py:class:`Database` base class constructor.
"""
def __init__(
self,
name,
@@ -32,62 +89,6 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
models_depend_on_protocol=False,
**kwargs
):
"""This class represents the basic API for database access.
Please use this class as a base class for your database access classes.
Do not forget to call the constructor of this base class in your derived class.
**Parameters:**
name : str
A unique name for the database.
all_files_options : dict
Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query when retrieving all data.
extractor_training_options : dict
Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the extractor training.
projector_training_options : dict
Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the projector training.
enroller_training_options : dict
Dictionary of options passed to the :py:meth:`bob.bio.base.database.BioDatabase.objects` database query used to retrieve the files for the enroller training.
check_original_files_for_existence : bool
If enabled, the existence of the original data files is checked when querying the database.
original_directory : str
The directory where the original data of the database are stored.
original_extension : str
The file name extension of the original data.
annotation_directory : str
The directory where the image annotations of the database are stored, if any.
annotation_extension : str
The file name extension of the annotation files.
annotation_type : str
The type of the annotation file to read; see :py:func:`bob.bio.base.database.read_annotation_file` for accepted formats.
protocol : str or ``None``
The name of the protocol that defines the default experimental setup for this database.
.. todo:: Check if the ``None`` protocol is supported.
training_depends_on_protocol : bool
Specifies whether the training set used for training the extractor and the projector depends on the protocol.
This flag is used to avoid re-computation of data when running on the different protocols of the same database.
models_depend_on_protocol : bool
Specifies whether the models depend on the protocol.
This flag is used to avoid re-computation of models when running on the different protocols of the same database.
kwargs : ``key=value`` pairs
The arguments of the :py:class:`Database` base class constructor.
"""
assert isinstance(name, str)
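Since ``BioDatabase`` is abstract, a concrete interface mainly chains its options through to this constructor. Below is a minimal, hypothetical sketch; the class name, protocol, paths and extensions are invented, and a real subclass would additionally have to implement the abstract query methods such as ``objects()``.

```python
import bob.bio.base.database


class MyBioDatabase(bob.bio.base.database.BioDatabase):
    """Hypothetical database interface; all names and paths are illustrative."""

    def __init__(self, original_directory=None, **kwargs):
        # Forward the options documented above to the BioDatabase constructor
        super(MyBioDatabase, self).__init__(
            name='my-db',
            protocol='default',
            original_directory=original_directory,
            original_extension='.png',
            check_original_files_for_existence=True,
            training_depends_on_protocol=False,
            models_depend_on_protocol=False,
            **kwargs
        )

    def objects(self, groups=None, protocol=None, purposes=None, model_ids=None, **kwargs):
        # Query the underlying database here and return a list of BioFile objects
        raise NotImplementedError("implement the actual database query")
```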
@@ -214,13 +215,13 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
**Parameters:**
files : [:py:class:`File`]
files : [:py:class:`BioFile`]
The list of files to be uniquified and sorted.
**Returns:**
sorted : [:py:class:`File`]
The sorted list of files, with duplicate :py:attr:`File.id`\s being removed.
sorted : [:py:class:`BioFile`]
The sorted list of files, with duplicate `BioFile.id`\s being removed.
"""
# sort files using their sort function
sorted_files = sorted(files)
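The uniquify-and-sort behaviour described above can be pictured with a standalone sketch (illustrative only, not the actual base-class code): sort the files first, then keep only the first file seen for each ``BioFile.id``.

```python
def unique_sorted(files):
    # Sort the files (BioFile defines the ordering), then drop duplicates by id
    seen = set()
    result = []
    for f in sorted(files):
        if f.id not in seen:
            seen.add(f.id)
            result.append(f)
    return result
```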
@@ -242,13 +243,13 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
**Parameters:**
files : :py:class:`File`
A list of files that should be split up by :py:attr:`File.client_id`.
files : :py:class:`BioFile`
A list of files that should be split up by `BioFile.client_id`.
**Returns:**
files_by_client : [[:py:class:`File`]]
The list of lists of files, where each sub-list groups the files with the same :py:attr:`File.client_id`
files_by_client : [[:py:class:`BioFile`]]
The list of lists of files, where each sub-list groups the files with the same `BioFile.client_id`
"""
client_files = {}
for file in files:
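The grouping performed here can likewise be sketched with a small dictionary, mirroring the loop that starts above (illustrative only):

```python
def group_by_client(files):
    # Collect the files under their client_id, then return one sub-list per client
    client_files = {}
    for f in files:
        client_files.setdefault(f.client_id, []).append(f)
    return [client_files[client_id] for client_id in sorted(client_files)]
```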
@@ -264,11 +265,11 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
def annotations(self, file):
"""
Returns the annotations for the given File object, if available.
It uses :py:func:`bob.bio.base.database.read_annotation_file` to load the annotations.
It uses `bob.db.base.annotations.read_annotation_file` to load the annotations.
**Parameters:**
file : :py:class:`File`
file : :py:class:`BioFile`
The file for which annotations should be returned.
**Returns:**
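An override of ``annotations()`` in a derived class might look like the sketch below. It assumes that the constructor arguments ``annotation_directory``, ``annotation_extension`` and ``annotation_type`` are stored as attributes, and that ``read_annotation_file`` accepts a file path and the annotation type; treat both as assumptions rather than the documented API.

```python
import os

from bob.db.base.annotations import read_annotation_file


class MyAnnotatedDatabase(MyBioDatabase):  # the hypothetical class sketched earlier
    def annotations(self, file):
        if self.annotation_directory is None:
            return None
        # <annotation_directory>/<file.path><annotation_extension>
        annotation_path = os.path.join(
            self.annotation_directory, file.path + self.annotation_extension)
        return read_annotation_file(annotation_path, self.annotation_type)
```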
@@ -293,7 +294,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
**Parameters:**
files : [:py:class:`File`]
files : [:py:class:`BioFile`]
The list of file objects to retrieve the file names for.
directory : str
@@ -323,7 +324,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
**Parameters:**
files : [:py:class:`File`]
files : [:py:class:`BioFile`]
The list of file objects to retrieve the original data file names for.
**Returns:**
@@ -413,7 +414,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
Keyword parameters:
file : :py:class:`File` or a derivative
file : :py:class:`BioFile` or a derivative
The file object for which the file name should be retrieved.
Return value : str
@@ -444,7 +445,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
**Returns:**
files : [:py:class:`File`]
files : [:py:class:`BioFile`]
The sorted and unique list of all files of the database.
"""
return self.sort(self.objects(protocol=self.protocol, groups=groups, **self.all_files_options))
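Assuming a concrete implementation such as the hypothetical ``MyBioDatabase`` above, the query helper is used along these lines (the method name and the ``groups`` value follow the docstrings in this diff and are not guaranteed by it):

```python
db = MyBioDatabase(original_directory='/path/to/data')

# the sorted, duplicate-free list of all files of the default protocol
files = db.all_files(groups=('dev',))
```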
@@ -467,7 +468,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
**Returns:**
files : [:py:class:`File`] or [[:py:class:`File`]]
files : [:py:class:`BioFile`] or [[:py:class:`BioFile`]]
The (arranged) list of files used for the training of the given step.
"""
if step is None:
@@ -502,7 +503,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
**Returns:**
files : [:py:class:`File`]
files : [:py:class:`BioFile`]
The sorted and unique list of test files of the database.
"""
return self.sort(self.objects(protocol=self.protocol, groups=groups, **self.all_files_options))
@@ -551,7 +552,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
**Returns:**
files : [:py:class:`File`]
files : [:py:class:`BioFile`]
The list of files used to probe the model with the given model id.
"""
if model_id is not None:
@@ -605,7 +606,7 @@ class BioDatabase(six.with_metaclass(abc.ABCMeta, bob.db.base.Database)):
**Returns:**
files : [:py:class:`FileSet`] or something similar
files : [:py:class:`BioFileSet`] or something similar
The list of file sets used to probe the model with the given model id."""
if model_id is not None:
file_sets = self.object_sets(protocol=self.protocol, groups=group, model_ids=(model_id,), purposes='probe',
@@ -714,7 +715,7 @@ class ZTBioDatabase(BioDatabase):
**Returns:**
files : [:py:class:`File`]
files : [:py:class:`BioFile`]
The sorted and unique list of all files of the database.
"""
files = self.objects(protocol=self.protocol, groups=groups, **self.all_files_options)
@@ -776,7 +777,7 @@ class ZTBioDatabase(BioDatabase):
**Returns:**
files : [:py:class:`File`]
files : [:py:class:`BioFile`]
The sorted list of files used to enroll the model with the given model id.
"""
return self.sort(self.tobjects(protocol=self.protocol, groups=group, model_ids=(t_model_id,)))
@@ -785,7 +786,7 @@ class ZTBioDatabase(BioDatabase):
"""z_probe_files(group = 'dev') -> files
Returns a list of probe files used to compute the Z-Norm, respecting the current protocol.
The Z-probe files can be limited using the ``z_probe_options`` in the query to :py:meth:`ZTBioDatabase.z_probe_files`
The Z-probe files can be limited using the ``z_probe_options`` in the query to :py:meth:`bob.bio.base.database.ZTBioDatabase.z_probe_files`
**Parameters:**
@@ -794,7 +795,7 @@ class ZTBioDatabase(BioDatabase):
**Returns:**
files : [:py:class:`File`]
files : [:py:class:`BioFile`]
The unique list of files used to compute the Z-norm.
"""
return self.sort(self.zobjects(protocol=self.protocol, groups=group, **self.z_probe_options))
@@ -812,7 +813,7 @@ class ZTBioDatabase(BioDatabase):
**Returns:**
files : [:py:class:`FileSet`]
files : [:py:class:`BioFileSet`]
The unique list of file sets used to compute the Z-norm.
"""
raise NotImplementedError("Please implement this function in derived classes")
@@ -822,12 +823,16 @@ class ZTBioDatabase(BioDatabase):
Returns the client id for the given T-Norm model id.
In this base class implementation, we just use the :py:meth:`client_id_from_model_id` function.
Overload this function if you need another behavior.
**Parameters:**
t_model_id : int or str
A unique ID that identifies the T-Norm model.
group : one of ``('dev', 'eval')``
The group to get the client ids for.
**Returns:**
client_id : [int] or [str]
A unique ID that identifies the client to which the T-Norm model belongs.
"""
......
@@ -18,7 +18,7 @@ class BioFile(bob.db.base.File):
The id of the client this file belongs to.
Its type depends on your implementation.
If you use an SQL database, this should be an SQL type like Integer or String.
For path and file_id, please refer to the :py:class:`bob.db.base.File` constructor
For path and file_id, please refer to the :py:class:`bob.db.base.file.File` constructor
"""
bob.db.base.File.__init__(self, path, file_id)
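Following this constructor, a ``BioFile`` couples a client id with the ``path``/``file_id`` pair that is forwarded to :py:class:`bob.db.base.File`. A small, hypothetical usage sketch:

```python
from bob.bio.base.database import BioFile

# Two samples of the same (made-up) client; path and file_id are illustrative
sample_1 = BioFile(client_id=17, path='client17/sample-01', file_id=1)
sample_2 = BioFile(client_id=17, path='client17/sample-02', file_id=2)
```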
@@ -28,34 +28,36 @@ class BioFile(bob.db.base.File):
class BioFileSet(BioFile):
"""This class defines the minimum interface of a set of database files that needs to be exported.
Use this class whenever the database provides several files that belong to the same probe.
Each file set has an id and a list of associated files of the same client, which are of type :py:class:`BioFile`.
The file set id can be anything hashable, but needs to be unique across the database.
**Parameters:**
file_set_id : str or int
A unique ID that identifies the file set.
files : [:py:class:`BioFile`]
A non-empty list of BioFile objects that should be stored inside this file set.
All files of that list need to have the same client ID.
"""
def __init__(self, file_set_id, files, path=None):
# don't accept empty file lists
assert len(files), "Cannot create an empty BioFileSet"
# call base class constructor
BioFile.__init__(self, files[0].client_id, "+".join(f.path for f in files) if path is None else path, file_set_id)
# check that all files come from the same client
assert all(f.client_id == self.client_id for f in files)
# The list of files contained in this set
self.files = files
"""The list of :py:class:`BioFile` objects stored in this file set"""
def __lt__(self, other):
"""Defines an order between file sets by using the order of the file set ids."""
# compare two BioFile set objects by comparing their IDs
return self.id < other.id
"""This class defines the minimum interface of a set of database files that needs to be exported.
Use this class whenever the database provides several files that belong to the same probe.
Each file set has an id and a list of associated files of the same client, which are of type :py:class:`BioFile`.
The file set id can be anything hashable, but needs to be unique across the database.
**Parameters:**
file_set_id : str or int
A unique ID that identifies the file set.
files : [:py:class:`BioFile`]
A non-empty list of BioFile objects that should be stored inside this file set.
All files of that list need to have the same client ID.
"""
def __init__(self, file_set_id, files, path=None):
# don't accept empty file lists
assert len(files), "Cannot create an empty BioFileSet"
# call base class constructor
BioFile.__init__(self, files[0].client_id, "+".join(f.path for f in files) if path is None else path,
file_set_id)
# check that all files come from the same client
assert all(f.client_id == self.client_id for f in files)
# The list of files contained in this set
self.files = files
"""The list of :py:class:`BioFile` objects stored in this file set"""
def __lt__(self, other):
"""Defines an order between file sets by using the order of the file set ids."""
# compare two BioFile set objects by comparing their IDs
return self.id < other.id
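Based on the constructor above, a file set groups several ``BioFile`` objects of the same client, and when no ``path`` is given the member paths are joined with ``+``. Continuing the hypothetical samples from before:

```python
from bob.bio.base.database import BioFileSet

# Group the two samples of client 17 into one probe set
probe_set = BioFileSet(file_set_id='client17-probe', files=[sample_1, sample_2])

assert probe_set.client_id == 17
assert probe_set.path == 'client17/sample-01+client17/sample-02'
```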
@@ -24,7 +24,7 @@ class Extractor:
Ignored if ``requires_training`` is ``False``
kwargs : ``key=value`` pairs
A list of keyword arguments to be written in the :py:meth:`__str__` function.
A list of keyword arguments to be written in the `__str__` function.
"""
def __init__(
@@ -89,7 +89,7 @@ class Extractor:
**Parameters:**
feature : object
The extracted feature, i.e., what is returned from :py:meth:`__call__`.
The extracted feature, i.e., what is returned from `__call__`.
feature_file : str or :py:class:`bob.io.base.HDF5File`
The file open for writing, or the name of the file to write.
......
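For context, a derived extractor usually only needs to call the base constructor and implement ``__call__``; ``write_feature`` is overwritten only when the feature cannot be written by the base-class implementation. A minimal, hypothetical sketch (the ``requires_training`` flag is taken from the parameter list above; everything else is invented):

```python
import numpy
from bob.bio.base.extractor import Extractor


class MeanFeature(Extractor):
    """Hypothetical extractor returning the per-column mean of the input data."""

    def __init__(self):
        Extractor.__init__(self, requires_training=False)

    def __call__(self, data):
        # data is whatever the preprocessor returned; a 2D numpy array is assumed here
        return numpy.mean(numpy.asarray(data, dtype=numpy.float64), axis=0)
```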
@@ -6,40 +6,41 @@
from .Extractor import Extractor
import numpy
class Linearize (Extractor):
"""Extracts features by simply concatenating all elements of the data into one long vector.
If a ``dtype`` is specified in the constructor, it is assured that the resulting
"""
class Linearize(Extractor):
"""Extracts features by simply concatenating all elements of the data into one long vector.
def __init__(self, dtype=None):
"""If the ``dtype`` parameter is given, it specifies the data type that is enforced for the features."""
Extractor.__init__(self, dtype = dtype)
self.dtype = dtype
If a ``dtype`` is specified in the constructor, it is assured that the resulting
"""
def __call__(self, data):
"""__call__(data) -> data
def __init__(self, dtype=None):
"""If the ``dtype`` parameter is given, it specifies the data type that is enforced for the features."""
Extractor.__init__(self, dtype=dtype)
self.dtype = dtype
Takes data of arbitrary dimensions and linearizes it into a 1D vector, enforcing the data type if desired.
def __call__(self, data):
"""__call__(data) -> data
**Parameters:**
Takes data of arbitrary dimensions and linearizes it into a 1D vector, enforcing the data type if desired.
data : :py:class:`numpy.ndarray`
The preprocessed data to be transformed into one vector.
**Parameters:**
**Returns:**
data : :py:class:`numpy.ndarray`
The preprocessed data to be transformed into one vector.
data : 1D :py:class:`numpy.ndarray`
The extracted feature vector, of the desired ``dtype`` (if specified).
"""
assert isinstance(data, numpy.ndarray)
**Returns:**
data : 1D :py:class:`numpy.ndarray`
The extracted feature vector, of the desired ``dtype`` (if specified).
"""
assert isinstance(data, numpy.ndarray)
linear = numpy.reshape(data, data.size)
if self.dtype is not None:
linear = linear.astype(self.dtype)
return linear
linear = numpy.reshape(data, data.size)
if self.dtype is not None:
linear = linear.astype(self.dtype)
return linear
# re-define unused functions, just so that they do not get documented
def train(*args, **kwargs): raise NotImplementedError()
# re-define unused functions, just so that they do not get documented
def train(*args,**kwargs): raise NotImplementedError()
def load(*args,**kwargs): pass
def load(*args, **kwargs): pass
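The ``Linearize`` extractor above is self-contained, so its behaviour can be shown directly; the ``dtype`` value is just an example:

```python
import numpy
from bob.bio.base.extractor import Linearize

extractor = Linearize(dtype=numpy.float64)

data = numpy.arange(12, dtype=numpy.uint8).reshape(3, 4)
feature = extractor(data)

assert feature.shape == (12,)
assert feature.dtype == numpy.float64
```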
@@ -5,104 +5,101 @@
from .. import utils
class Preprocessor:
"""This is the base class for all preprocessors.
It defines the minimum requirements for all derived preprocessor classes.
**Parameters:**
writes_data : bool
Selects whether the preprocessor actually writes preprocessed images, or whether it simply returns values.
read_original_data: callable
This function is used to read the original data from file.
It takes three inputs: A :py:class:`bob.bio.base.database.BioFile` (or one of its derivatives), the original directory (as ``str``) and the original extension (as ``str``).
kwargs : ``key=value`` pairs
A list of keyword arguments to be written in the :py:meth:`__str__` function.
"""
class Preprocessor:
"""This is the base class for all preprocessors.
It defines the minimum requirements for all derived preprocessor classes.
def __init__(self, writes_data = True, read_original_data = utils.read_original_data, **kwargs):
# Each class needs to have a constructor taking
# all the parameters that are required for the preprocessing as arguments
self.writes_data = writes_data
self.read_original_data = read_original_data
self._kwargs = kwargs
pass
**Parameters:**
writes_data : bool
Selects whether the preprocessor actually writes preprocessed images, or whether it simply returns values.
# The call function (i.e. the operator() in C++ terms)
def __call__(self, data, annotations):
"""__call__(data, annotations) -> dara
read_original_data: callable
This function is used to read the original data from file.
It takes three inputs: A :py:class:`bob.bio.base.database.BioFile` (or one of its derivatives), the original directory (as ``str``) and the original extension (as ``str``).
This is the call function that you have to overwrite in the derived class.
The parameters that this function will receive are:
kwargs : ``key=value`` pairs
A list of keyword arguments to be written in the `__str__` function.
"""
**Parameters:**
def __init__(self, writes_data=True, read_original_data=utils.read_original_data, **kwargs):
# Each class needs to have a constructor taking
# all the parameters that are required for the preprocessing as arguments
self.writes_data = writes_data
self.read_original_data = read_original_data
self._kwargs = kwargs
pass
data : object
The original data that needs preprocessing, usually a :py:class:`numpy.ndarray`, but might be different.
# The call function (i.e. the operator() in C++ terms)
def __call__(self, data, annotations):
"""__call__(data, annotations) -> dara
annotations : {} or None
The annotations (if any) that belong to the given ``data``, as a dictionary.
The type of the annotation depends on your kind of problem.
This is the call function that you have to overwrite in the derived class.
The parameters that this function will receive are:
**Returns:**
**Parameters:**
data : object
The *preprocessed* data, usually a :py:class:`numpy.ndarray`, but might be different.
"""
raise NotImplementedError("Please overwrite this function in your derived class")
data : object
The original data that needs preprocessing, usually a :py:class:`numpy.ndarray`, but might be different.
annotations : {} or None
The annotations (if any) that belong to the given ``data``, as a dictionary.
The type of the annotation depends on your kind of problem.
def __str__(self):
"""__str__() -> info
**Returns:**
This function returns all parameters of this class (and its derived class).
data : object
The *preprocessed* data, usually a :py:class:`numpy.ndarray`, but might be different.
"""
raise NotImplementedError("Please overwrite this function in your derived class")
**Returns:**
def __str__(self):
"""__str__() -> info
info : str
A string containing the full information of all parameters of this (and the derived) class.
"""
return utils.pretty_print(self, self._kwargs)
This function returns all parameters of this class (and its derived class).
############################################################
### Special functions that might be overwritten on need
############################################################
**Returns:**
info : str
A string containing the full information of all parameters of this (and the derived) class.
"""
return utils.pretty_print(self, self._kwargs)
def write_data(self, data, data_file):
"""Writes the given *preprocessed* data to a file with the given name.
In this base class implementation, we simply use :py:func:`bob.bio.base.save` for that.
If you have a different format (e.g. not images), please overwrite this function.
############################################################
### Special functions that might be overwritten on need
############################################################
**Parameters:**
def write_data(self, data, data_file):
"""Writes the given *preprocessed* data to a file with the given name.
In this base class implementation, we simply use :py:func:`bob.bio.base.save` for that.
If you have a different format (e.g. not images), please overwrite this function.
data : object
The preprocessed data, i.e., what is returned from :py:meth:`__call__`.
**Parameters:**
data_file : str or :py:class:`bob.io.base.HDF5File`
The file open for writing, or the name of the file to write.
"""
utils.save(data, data_file)
data : object
The preprocessed data, i.e., what is returned from `__call__`.
data_file : str or :py:class:`bob.io.base.HDF5File`
The file open for writing, or the name of the file to write.
"""
utils.save(data, data_file)
def read_data(self, data_file):
"""read_data(data_file) -> data
def read_data(self, data_file):
"""read_data(data_file) -> data
Reads the *preprocessed* data from file.
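Putting the ``Preprocessor`` pieces together: a derived class chiefly calls the base constructor (so that ``__str__`` can report its parameters) and overwrites ``__call__``. A minimal, hypothetical sketch; the class name and the rescaling are invented for illustration:

```python
import numpy
from bob.bio.base.preprocessor import Preprocessor


class ScaleToUnitRange(Preprocessor):
    """Hypothetical preprocessor that rescales the data to [0, 1]; annotations are ignored."""

    def __init__(self, dtype=numpy.float64, **kwargs):
        # forward dtype so that it shows up in __str__()
        Preprocessor.__init__(self, dtype=dtype, **kwargs)
        self.dtype = dtype

    def __call__(self, data, annotations=None):
        data = numpy.asarray(data, dtype=self.dtype)
        span = data.max() - data.min()
        return (data - data.min()) / span if span else data
```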