Commit da89316e authored by Amir MOHAMMADI's avatar Amir MOHAMMADI
Browse files

multiple fixes

parent 07db91e9
......@@ -90,7 +90,7 @@ class Algorithm (object):
**kwargs # parameters from the derived class that should be reported in the __str__() function
):
warnings.warn("`bob.bio.base.algorithm.Algorithm` will be deprecated in 01/01/2021. "\
"Please, implement your biometric algorithm using `bob.pipelines` (https://gitlab.idiap.ch/bob/bob.pipelines).", DeprecationWarning)
......@@ -197,9 +197,9 @@ class Algorithm (object):
"""score_for_multiple_models(models, probe) -> score
This function computes the score between the given model list and the given probe.
In this base class implementation, it computes the scores for each model using the :py:meth:`score` method,
In this base class implementation, it computes the scores for each model using the ``score`` method,
and fuses the scores using the fusion method specified in the constructor of this class.
Usually this function is called from derived class :py:meth:`score` functions.
Usually this function is called from derived class ``score`` functions.
**Parameters:**
......@@ -227,7 +227,7 @@ class Algorithm (object):
"""score_for_multiple_probes(model, probes) -> score
This function computes the score between the given model and the given probe files.
In this base class implementation, it computes the scores for each probe file using the :py:meth:`score` method,
In this base class implementation, it computes the scores for each probe file using the ``score`` method,
and fuses the scores using the fusion method specified in the constructor of this class.
**Parameters:**
......@@ -312,7 +312,7 @@ class Algorithm (object):
**Parameters:**
model : object
A model as returned by the :py:meth:`enroll` function, which should be written.
A model as returned by the ``enroll`` function, which should be written.
model_file : str or :py:class:`bob.io.base.HDF5File`
The file open for writing, or the file name to write to.
......
......@@ -7,7 +7,7 @@ class Callable(Annotator):
Attributes
----------
callable : callable
callable
A callable with the following signature:
``annotations = callable(sample, **kwargs)`` that takes numpy array and
returns annotations in dictionary format for that biometric sample.
......
......@@ -51,7 +51,7 @@ class BioAlgorithm(metaclass=ABCMeta):
Parameters
----------
biometric_references : list
A list of :py:class:`SampleSet` objects to be used for
A list of :any:`bob.pipelines.SampleSet` objects to be used for
creating biometric references. The sets must be identified
with a unique id and a path, for eventual checkpointing.
"""
......@@ -107,11 +107,11 @@ class BioAlgorithm(metaclass=ABCMeta):
----------
probes : list
A list of :py:class:`SampleSet` objects to be used for
A list of :any:`bob.pipelines.SampleSet` objects to be used for
scoring the input references
biometric_references : list
A list of :py:class:`Sample` objects to be used for
A list of :any:`bob.pipelines.Sample` objects to be used for
scoring the input probes, must have an ``id`` attribute that
will be used to cross-reference which probes need to be scored.
......@@ -287,10 +287,15 @@ class BioAlgorithm(metaclass=ABCMeta):
class Database(metaclass=ABCMeta):
"""Base class for Vanilla Biometric pipeline"""
def __init__(self, name, protocol, allow_scoring_with_all_biometric_references, **kwargs) -> None:
super().__init__(**kwargs)
self.name = name
self.protocol = protocol
self.allow_scoring_with_all_biometric_references = allow_scoring_with_all_biometric_references
@abstractmethod
def background_model_samples(self):
"""Returns :py:class:`Sample`'s to train a background model
"""Returns :any:`bob.pipelines.Sample`'s to train a background model
Returns
......@@ -303,7 +308,7 @@ class Database(metaclass=ABCMeta):
@abstractmethod
def references(self, group="dev"):
"""Returns :py:class:`Reference`'s to enroll biometric references
"""Returns references to enroll biometric references
Parameters
......@@ -322,7 +327,7 @@ class Database(metaclass=ABCMeta):
@abstractmethod
def probes(self, group):
"""Returns :py:class:`Probe`'s to score biometric references
"""Returns probes to score biometric references
Parameters
......@@ -356,9 +361,14 @@ class Database(metaclass=ABCMeta):
"""
pass
@abstractmethod
def groups(self):
pass
@abstractmethod
def protocols(self):
pass
def reference_ids(self, group):
return [s.reference_id for s in self.references(group=group)]
......@@ -366,7 +376,7 @@ class Database(metaclass=ABCMeta):
class ScoreWriter(metaclass=ABCMeta):
"""
Defines base methods to read, write scores and concatenate scores
for :py:class:`BioAlgorithm`
for :any:`bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithm`
"""
def __init__(self, path, extension=".txt"):
......
......@@ -125,39 +125,33 @@ class DatabaseConnector(Database):
self.memory_demanding = memory_demanding
def background_model_samples(self):
"""Returns :py:class:`Sample`'s to train a background model (group
"""Returns :any:`bob.pipelines.Sample`'s to train a background model (group
``world``).
Returns
-------
samples : list
List of samples conforming the pipeline API for background
model training. See, e.g., :py:func:`.pipelines.first`.
samples : list
List of samples conforming the pipeline API for background
model training.
"""
objects = self.database.training_files()
return [_biofile_to_delayed_sample(k, self.database) for k in objects]
def references(self, group="dev"):
"""Returns :py:class:`Reference`'s to enroll biometric references
"""Returns references to enroll biometric references
Parameters
----------
group : :py:class:`str`, optional
A ``group`` to be plugged at
:py:meth:`bob.db.base.Database.objects`
group : :py:class:`str`, optional
A ``group`` to be plugged at ``database.objects``
Returns
-------
references : list
List of samples conforming the pipeline API for the creation of
biometric references. See, e.g., :py:func:`.pipelines.first`.
references : list
List of samples conforming the pipeline API for the creation of
biometric references. See, e.g., :py:func:`.pipelines.first`.
"""
......@@ -179,24 +173,20 @@ class DatabaseConnector(Database):
return retval
def probes(self, group="dev"):
"""Returns :py:class:`Probe`'s to score biometric references
"""Returns probes to score biometric references
Parameters
----------
group : str
A ``group`` to be plugged at
:py:meth:`bob.db.base.Database.objects`
group : str
A ``group`` to be plugged at ``database.objects``
Returns
-------
probes : list
List of samples conforming the pipeline API for the creation of
biometric probes. See, e.g., :py:func:`.pipelines.first`.
probes : list
List of samples conforming the pipeline API for the creation of
biometric probes.
"""
probes = dict()
......
......@@ -57,10 +57,10 @@ def execute_vanilla_biometrics(
Parameters
----------
pipeline: Instance of :py:class:`~bob.bio.base.pipelines.vanilla_biometrics.VanillaBiometricsPipeline`
pipeline: Instance of :py:class:`bob.bio.base.pipelines.vanilla_biometrics.VanillaBiometricsPipeline`
A constructed vanilla-biometrics pipeline.
database: Instance of :py:class:`~bob.bio.base.pipelines.vanilla_biometrics.abstract_class.Database`
database: Instance of :py:class:`bob.bio.base.pipelines.vanilla_biometrics.abstract_class.Database`
A database interface instance
dask_client: instance of :py:class:`dask.distributed.Client` or ``None``
......@@ -175,10 +175,10 @@ def execute_vanilla_biometrics_ztnorm(
Parameters
----------
pipeline: Instance of :py:class:`~bob.bio.base.pipelines.vanilla_biometrics.VanillaBiometricsPipeline`
pipeline: Instance of :py:class:`bob.bio.base.pipelines.vanilla_biometrics.VanillaBiometricsPipeline`
A constructed vanilla-biometrics pipeline.
database: Instance of :py:class:`~bob.bio.base.pipelines.vanilla_biometrics.abstract_class.Database`
database: Instance of :py:class:`bob.bio.base.pipelines.vanilla_biometrics.abstract_class.Database`
A database interface instance
dask_client: instance of :py:class:`dask.distributed.Client` or ``None``
......
......@@ -197,7 +197,7 @@ def vanilla_biometrics(
.. Note::
Refrain from calling this function directly from a script. Prefer
:py:func:`~bob.bio.base.pipelines.vanilla_biometrics.execute_vanilla_biometrics`
:py:func:`bob.bio.base.pipelines.vanilla_biometrics.execute_vanilla_biometrics`
instead.
"""
......
# see https://docs.python.org/3/library/pkgutil.html
from pkgutil import extend_path
from collections import defaultdict
__path__ = extend_path(__path__, __name__)
from collections import defaultdict
def split_X_by_y(X, y):
def split_X_by_y(X, y):
training_data = defaultdict(list)
for x1, y1 in zip(X, y):
training_data[y1].append(x1)
......@@ -12,7 +9,33 @@ def split_X_by_y(X, y):
return training_data
from .preprocessor import PreprocessorTransformer
from .extractor import ExtractorTransformer
from .algorithm import AlgorithmTransformer
# gets sphinx autodoc done right - don't remove it
def __appropriate__(*args):
"""Says object was actually declared here, and not in the import module.
Fixes Sphinx warnings about not being able to find classes when the
import path is shortened.
Parameters
----------
*args
An iterable of objects to modify
Resolves `Sphinx referencing issues
<https://github.com/sphinx-doc/sphinx/issues/3048>`_
"""
for obj in args:
obj.__module__ = __name__
__appropriate__(
PreprocessorTransformer,
ExtractorTransformer,
AlgorithmTransformer,
)
__all__ = [_ for _ in dir() if not _.startswith("_")]
......@@ -11,9 +11,9 @@ import os
class AlgorithmTransformer(TransformerMixin, BaseEstimator):
"""Class that wraps :py:class:`bob.bio.base.algorithm.Algorithm`
:any:`LegacyAlgorithmMixin.fit` maps to :py:meth:`bob.bio.base.algorithm.Algorithm.train_projector`
:any:`AlgorithmTransformer.fit` maps to :py:meth:`bob.bio.base.algorithm.Algorithm.train_projector`
:any:`LegacyAlgorithmMixin.transform` maps :py:meth:`bob.bio.base.algorithm.Algorithm.project`
:any:`AlgorithmTransformer.transform` maps :py:meth:`bob.bio.base.algorithm.Algorithm.project`
Example
-------
......@@ -26,8 +26,8 @@ class AlgorithmTransformer(TransformerMixin, BaseEstimator):
Parameters
----------
instance: object
An instance of bob.bio.base.algorithm.Algorithm
instance: object
An instance of bob.bio.base.algorithm.Algorithm
"""
......
......@@ -7,18 +7,15 @@ from . import split_X_by_y
class ExtractorTransformer(TransformerMixin, BaseEstimator):
"""
Scikit learn transformer for :py:class:`bob.bio.base.extractor.Extractor`.
"""Scikit learn transformer for :py:class:`bob.bio.base.extractor.Extractor`.
Parameters
----------
instance: object
An instance of :py:class:`bob.bio.base.extractor.Extractor`
instance: object
An instance of :py:class:`bob.bio.base.extractor.Extractor`
model_path: ``str``
Model path in case :any:`bob.bio.base.extractor.Extractor.requires_training` is equal to true
model_path: ``str``
Model path in case ``instance.requires_training`` is equal to ``True``.
"""
def __init__(
......
......@@ -6,16 +6,12 @@ from bob.bio.base.preprocessor import Preprocessor
class PreprocessorTransformer(TransformerMixin, BaseEstimator):
"""
Scikit learn transformer for :py:class:`bob.bio.base.preprocessor.Preprocessor`.
"""Scikit learn transformer for :py:class:`bob.bio.base.preprocessor.Preprocessor`.
Parameters
----------
instance: object
An instance of `bob.bio.base.preprocessor.Preprocessor`
instance: object
An instance of :py:class:`bob.bio.base.preprocessor.Preprocessor`
"""
def __init__(
......
......@@ -21,7 +21,6 @@ import logging
logger = logging.getLogger("bob.bio.base")
#: Keywords for which resources are defined.
valid_keywords = (
"database",
"preprocessor",
......@@ -32,6 +31,7 @@ valid_keywords = (
"annotator",
"pipeline",
)
"""Keywords for which resources are defined."""
def _collect_config(paths):
......@@ -60,9 +60,7 @@ def _collect_config(paths):
def read_config_file(filenames, keyword=None):
"""read_config_file(filenames, keyword = None) -> config
Use this function to read the given configuration file.
"""Use this function to read the given configuration file.
If a keyword is specified, only the configuration according to this keyword is returned.
Otherwise a dictionary of the configurations read from the configuration file is returned.
......@@ -118,9 +116,7 @@ def load_resource(
package_prefix="bob.bio.",
preferred_package=None,
):
"""load_resource(resource, keyword, imports = ['bob.bio.base'], package_prefix='bob.bio.', preferred_package = None) -> resource
Loads the given resource that is registered with the given keyword.
"""Loads the given resource that is registered with the given keyword.
The resource can be:
1. a resource as defined in the setup.py
......@@ -133,7 +129,7 @@ def load_resource(
Any string interpretable as a resource (see above).
keyword : str
A valid resource keyword, can be one of :py:attr:`valid_keywords`.
A valid resource keyword, can be one of :any:`bob.bio.base.utils.resources.valid_keywords`.
imports : [str]
A list of strings defining which modules to import, when constructing new objects (option 3).
......@@ -233,7 +229,7 @@ def extensions(keywords=valid_keywords, package_prefix="bob.bio."):
keywords : [str]
A list of keywords to load entry points for.
Defaults to all :py:attr:`valid_keywords`.
Defaults to all :any:`bob.bio.base.utils.resources.valid_keywords`.
package_prefix : str
Package namespace, in which we search for entry points, e.g., ``bob.bio``.
......
......@@ -16,17 +16,15 @@ a command-line script:
$ bob bio annotate --help
This script accepts two main parameters a database object that inherits from
:any:`bob.bio.base.database.BioDatabase` and an annotator object that inherits
from :any:`bob.bio.base.annotator.Annotator`. Please see the help message of
the script for more information.
:any:`bob.bio.base.pipelines.vanilla_biometrics.Database` and an annotator
object that inherits from :any:`bob.bio.base.annotator.Annotator`. Please see
the help message of the script for more information.
The script can also be run in parallel using :ref:`gridtk`:
The script can also be run in parallel using Dask:
.. code-block:: sh
$ jman submit --array 64 -- bob bio annotate /path/to/config.py --array 64
The number that is given to the ``--array`` options should match.
$ bob bio annotate /path/to/config.py --dask-client sge
You can find the list of readily available annotator configurations using the
``resources.py`` command:
......
......@@ -12,7 +12,7 @@ The transition to the pipeline concept changed the way data goes from the raw sa
However, a set of tools was implemented to support the older bob implementations (designated as *legacy*) of database, preprocessor, extractor, and algorithms.
This adaptation consists of wrapper classes that take a legacy bob class as input and constructs a Transformer or :py:class:`BiometricAlgorithm` out of it.
This adaptation consists of wrapper classes that take a legacy bob class as input and constructs a Transformer or :any:`bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithm` out of it.
.. WARNING::
......@@ -154,7 +154,10 @@ Legacy Database Connector
This *legacy database wrapper* is used to translate an old ``bob.db`` package functions into a bob pipelines database interface.
It uses :py:func:`~bob.db.base.objects` to retrieve a list of files for each role (``world``, ``references``, and ``probes``) and specified group (``dev`` and ``eval``) and creates the matching :py:class:`Sample` and :py:class:`SampleSet` lists.
It uses :any:`bob.bio.base.database.BioDatabase.objects` to retrieve a list of
files for each role (``world``, ``references``, and ``probes``) and specified
group (``dev`` and ``eval``) and creates the matching :any:`bob.pipelines.Sample` and
:any:`bob.pipelines.SampleSet` lists.
This example shows the creation of the Mobio database interface in the bob.pipelines format from the legacy bob.db:
......@@ -183,8 +186,12 @@ This example shows the creation of the Mobio database interface in the bob.pipel
Legacy Preprocessor wrapper
---------------------------
The :py:class:`~bob.bio.base.transformer.PreprocessorTransformer` wrapper takes a :py:class`bob.bio.base.preprocessor` from the old :py:mod:`bob.bio.base` as input and creates a Transformer out of it.
The :py:meth:`~bob.bio.base.preprocessor.__call__` method of the :py:class`~bob.bio.base.preprocessor` class is called when the :py:meth:`Transformer.transform` method is called.
The :py:class:`bob.bio.base.transformers.PreprocessorTransformer` wrapper takes
a :py:class:`bob.bio.base.preprocessor.Preprocessor` from the old :py:mod:`bob.bio.base` as
input and creates a Transformer out of it. The
``bob.bio.base.preprocessor.Preprocessor.__call__`` method of the
:py:class:`~bob.bio.base.preprocessor.Preprocessor` class is called when the
``Transformer.transform`` method is called.
This example shows how to create a Transformer out of a legacy preprocessor (FaceCrop, from bob.bio.face):
......@@ -208,8 +215,8 @@ This example shows how to create a Transformer out of a legacy preprocessor (Fac
Legacy Extractor wrapper
------------------------
A similar wrapper is available for the legacy :py:mod:`bob.bio.base` Extractor. It is the :py:class:`~bob.bio.base.transformer.ExtractorTransformer`.
It maps the :py:meth:`Transformer.transform` method to the :py:meth:`~bob.bio.base.extractor.__call__` of the legacy Extractor.
A similar wrapper is available for the legacy :py:mod:`bob.bio.base` Extractor. It is the :py:class:`bob.bio.base.transformers.ExtractorTransformer`.
It maps the ``Transformer.transform`` method to the ``bob.bio.base.extractor.Extractor.__call__`` of the legacy Extractor.
Here is an example showing how to create a Transformer from a legacy Extractor (Linearize, from bob.bio.base):
......@@ -225,12 +232,15 @@ Here is an example showing how to create a Transformer from a legacy Extractor (
Legacy Algorithm wrappers
-------------------------
Lastly, :py:class:`~bob.bio.base.transformer.AlgorithmTransformer` and :py:class:`~bob.bio.base.pipelines.vanilla_biometrics.legacy.BioAlgorithmLegacy` are available to map correctly a legacy Algorithm to a Transformer and a :py:class:`BioAlgorithm`.
Lastly, :py:class:`bob.bio.base.transformers.AlgorithmTransformer` and
:any:`bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithmLegacy`
are available to map correctly a legacy Algorithm to a Transformer and a
:any:`bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithm`.
Those two adaptors are needed as the legacy Algorithm could consist of a projector that could be trainable (with methods :py:meth:`~bob.bio.base.algorithm.Algorithm.project` and :py:meth:`~bob.bio.base.algorithm.Algorithm.train_projector`), which correspond to a Transformer in the new API.
The enrollment and scoring of the legacy algorithm were done using the :py:meth:`~bob.bio.base.algorithm.Algorithm.enroll` and :py:meth:`~bob.bio.base.algorithm.Algorithm.score` methods, which can be mapped to the same methods in a :py:class:`BioAlgorithm`.
Those two adaptors are needed as the legacy Algorithm could consist of a projector that could be trainable (with methods :py:meth:`bob.bio.base.algorithm.Algorithm.project` and :py:meth:`bob.bio.base.algorithm.Algorithm.train_projector`), which correspond to a Transformer in the new API.
The enrollment and scoring of the legacy algorithm were done using the :py:meth:`bob.bio.base.algorithm.Algorithm.enroll` and :py:meth:`bob.bio.base.algorithm.Algorithm.score` methods, which can be mapped to the same methods in a :any:`bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithm`.
Here is an example showing how to create the Transformer out of a bob.bio.base Algorithm (:py:class:`~bob.bio.base.Distance`):
Here is an example showing how to create the Transformer out of a bob.bio.base Algorithm (:py:class:`bob.bio.base.algorithm.Distance`):
.. code-block:: python
......@@ -246,8 +256,11 @@ Here is an example showing how to create the Transformer out of a bob.bio.base A
# Create the BioAlgorithm from the legacy Algorithm
algorithm_transformer = AlgorithmTransformer( legacy_algorithm )
And here is an example of the creation of the :py:class:`BioAlgorithm` from the bob.bio.base Algorithm (:py:class:`~bob.bio.base.Distance`) with the :py:class:`~bob.bio.base.pipelines.vanilla_biometrics.legacy.BioAlgorithmLegacy`.
This will map correctly the :py:meth:`enroll` and :py:meth:`score` methods:
And here is an example of the creation of the
:any:`bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithm` from the
bob.bio.base Algorithm (:py:class:`bob.bio.base.algorithm.Distance`) with the
:any:`bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithmLegacy`. This will
map correctly the ``enroll`` and ``score`` methods:
.. code-block:: python
......
......@@ -12,10 +12,10 @@ Database
.. autosummary::
bob.bio.base.pipelines.vanilla_biometrics.abstract_classes.Database
bob.bio.base.pipelines.vanilla_biometrics.abstract_classes.Database.background_model_samples
bob.bio.base.pipelines.vanilla_biometrics.abstract_classes.Database.references
bob.bio.base.pipelines.vanilla_biometrics.abstract_classes.Database.probes
bob.bio.base.pipelines.vanilla_biometrics.Database
bob.bio.base.pipelines.vanilla_biometrics.Database.background_model_samples
bob.bio.base.pipelines.vanilla_biometrics.Database.references
bob.bio.base.pipelines.vanilla_biometrics.Database.probes
Database implementations
........................
......@@ -30,18 +30,18 @@ Biometric Algorithm
.. autosummary::
bob.bio.base.pipelines.vanilla_biometrics.abstract_classes.BioAlgorithm
bob.bio.base.pipelines.vanilla_biometrics.abstract_classes.BioAlgorithm.score
bob.bio.base.pipelines.vanilla_biometrics.abstract_classes.BioAlgorithm.enroll
bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithm
bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithm.score
bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithm.enroll
Writing Scores
~~~~~~~~~~~~~~
.. autosummary::
bob.bio.base.pipelines.vanilla_biometrics.abstract_classes.ScoreWriter
bob.bio.base.pipelines.vanilla_biometrics.score_writers.FourColumnsScoreWriter
bob.bio.base.pipelines.vanilla_biometrics.score_writers.CSVScoreWriter
bob.bio.base.pipelines.vanilla_biometrics.ScoreWriter
bob.bio.base.pipelines.vanilla_biometrics.FourColumnsScoreWriter
bob.bio.base.pipelines.vanilla_biometrics.CSVScoreWriter
Assembling the pipeline
~~~~~~~~~~~~~~~~~~~~~~~
......@@ -59,23 +59,23 @@ Creating Database interfaces from legacy
.. autosummary::
bob.bio.base.pipelines.vanilla_biometrics.legacy.DatabaseConnector
bob.bio.base.pipelines.vanilla_biometrics.DatabaseConnector
Creating Transformers from legacy constructs
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autosummary::
bob.bio.base.transformers.preprocessor.PreprocessorTransformer
bob.bio.base.transformers.extractor.ExtractorTransformer
bob.bio.base.transformers.algorithm.AlgorithmTransformer
bob.bio.base.transformers.PreprocessorTransformer
bob.bio.base.transformers.ExtractorTransformer
bob.bio.base.transformers.AlgorithmTransformer
Creating BioAlgorithms from legacy Algorithm
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autosummary::
bob.bio.base.pipelines.vanilla_biometrics.legacy.BioAlgorithmLegacy
bob.bio.base.pipelines.vanilla_biometrics.BioAlgorithmLegacy
......@@ -113,11 +113,11 @@ Functions dealing with resources
.. autosummary::
bob.bio.base.load_resource
bob.bio.base.read_config_file
bob.bio.base.resource_keys
bob.bio.base.extensions
bob.bio.base.valid_keywords
bob.bio.base.utils.resources.load_resource
bob.bio.base.utils.resources.read_config_file
bob.bio.base.utils.resources.resource_keys
bob.bio.base.utils.resources.extensions
bob.bio.base.utils.resources.valid_keywords
Miscellaneous functions
......@@ -192,6 +192,9 @@ Details
.. automodule:: bob.bio.base.script.figure
.. automodule:: bob.bio.base.script.commands
.. automodule:: bob.bio.base.script.gen
.. automodule:: bob.bio.base.utils
.. automodule:: bob.bio.base.utils.resources
.. automodule:: bob.bio.base.utils.io
......
This diff is collapsed.
......@@ -46,7 +46,7 @@ It's composed of:
:py:class:`sklearn.base.BaseEstimator` and
:py:class:`sklearn.base.TransformerMixin`. A Transformer can be trained if
needed and applies one or several transformations on an input sample. It must
implement a :py:meth:`Transformer.transform` and a :py:meth:`Transformer.fit`
implement a ``Transformer.transform`` and a :py:meth:`Transformer.fit`
method. Multiple transformers can be chained together, each working on the
output of the previous one.
......@@ -66,7 +66,7 @@ Transformer
Following the structure of
`pipelines of scikit-learn <https://scikit-learn.org/stable/modules/generated/sklearn.pipeline.Pipeline.html>`__,
a Transformer is a class that must implement a :py:meth:`Transformer.transform`
a Transformer is a class that must implement a ``Transformer.transform``
and a :py:meth:`Transformer.fit` method. This class represents a simple
operation that can be applied to data, like preprocessing of a sample or
extraction of a feature vector from data.
......