Renamed CSVDatasetDevEval to CSVDataset

parent 517fd546
Pipeline #46404 passed with stage
in 10 minutes and 16 seconds
from .csv_dataset import (
CSVDatasetDevEval,
CSVDataset,
CSVToSampleLoader,
CSVDatasetCrossValidation,
CSVBaseSampleLoader,
AnnotationsLoader,
LSTToSampleLoader,
CSVDatasetDevEvalZTNorm,
CSVDatasetZTNorm,
)
from .file import BioFile
from .file import BioFileSet
......@@ -37,7 +37,7 @@ __appropriate__(
BioDatabase,
ZTBioDatabase,
CSVBaseSampleLoader,
CSVDatasetDevEval,
CSVDataset,
CSVToSampleLoader,
CSVDatasetCrossValidation,
)
......
......@@ -260,7 +260,7 @@ def path_discovery(dataset_protocol_path, option1, option2):
return op1 if op1 else find_element_in_tarball(dataset_protocol_path, option2)
class CSVDatasetDevEval(Database):
class CSVDataset(Database):
"""
Generic filelist dataset for :any:`bob.bio.base.pipelines.vanilla_biometrics.VanillaBiometricsPipeline` pipeline.
Check :any:`vanilla_biometrics_features` for more details about the Vanilla Biometrics Dataset
......@@ -570,14 +570,14 @@ class CSVDatasetDevEval(Database):
return groups
class CSVDatasetDevEvalZTNorm(Database):
class CSVDatasetZTNorm(Database):
"""
Generic filelist dataset for :any:`bob.bio.base.pipelines.vanilla_biometrics.ZTNormPipeline` pipelines.
Check :any:`vanilla_biometrics_features` for more details about the Vanilla Biometrics Dataset
interface.
This dataset interface takes a :any:`CSVDatasetDevEval` as input and has two extra methods:
:any:`CSVDatasetDevEvalZTNorm.zprobes` and :any:`CSVDatasetDevEvalZTNorm.treferences`.
This dataset interface takes a :any:`CSVDataset` as input and has two extra methods:
:any:`CSVDatasetZTNorm.zprobes` and :any:`CSVDatasetZTNorm.treferences`.
To create a new dataset, you need to provide a directory structure similar to the one below:
......@@ -595,8 +595,8 @@ class CSVDatasetDevEvalZTNorm(Database):
Parameters
----------
database: :any:`CSVDatasetDevEval`
:any:`CSVDatasetDevEval` to be aggregated
database: :any:`CSVDataset`
:any:`CSVDataset` to be aggregated
"""
......
......@@ -8,12 +8,12 @@ import os
import bob.io.base
import bob.io.base.test_utils
from bob.bio.base.database import (
CSVDatasetDevEval,
CSVDataset,
CSVToSampleLoader,
CSVDatasetCrossValidation,
AnnotationsLoader,
LSTToSampleLoader,
CSVDatasetDevEvalZTNorm,
CSVDatasetZTNorm,
)
import nose.tools
from bob.pipelines import DelayedSample, SampleSet
......@@ -62,7 +62,7 @@ def check_all_true(list_of_something, something):
def test_csv_file_list_dev_only():
dataset = CSVDatasetDevEval(example_dir, "protocol_only_dev")
dataset = CSVDataset(example_dir, "protocol_only_dev")
assert len(dataset.background_model_samples()) == 8
assert check_all_true(dataset.background_model_samples(), DelayedSample)
......@@ -75,7 +75,7 @@ def test_csv_file_list_dev_only():
def test_csv_file_list_dev_only_metadata():
dataset = CSVDatasetDevEval(example_dir, "protocol_only_dev_metadata")
dataset = CSVDataset(example_dir, "protocol_only_dev_metadata")
assert len(dataset.background_model_samples()) == 8
assert check_all_true(dataset.background_model_samples(), DelayedSample)
......@@ -107,7 +107,7 @@ def test_csv_file_list_dev_eval():
)
def run(filename):
dataset = CSVDatasetDevEval(
dataset = CSVDataset(
filename,
"protocol_dev_eval",
csv_to_sample_loader=CSVToSampleLoader(
......@@ -161,7 +161,7 @@ def test_csv_file_list_dev_eval_score_norm():
)
def run(filename):
dataset = CSVDatasetDevEval(
dataset = CSVDataset(
filename,
"protocol_dev_eval",
csv_to_sample_loader=CSVToSampleLoader(
......@@ -175,7 +175,7 @@ def test_csv_file_list_dev_eval_score_norm():
extension="",
),
)
znorm_dataset = CSVDatasetDevEvalZTNorm(dataset)
znorm_dataset = CSVDatasetZTNorm(dataset)
assert len(znorm_dataset.background_model_samples()) == 8
assert check_all_true(znorm_dataset.background_model_samples(), DelayedSample)
......@@ -222,7 +222,7 @@ def test_csv_file_list_dev_eval_sparse():
)
)
dataset = CSVDatasetDevEval(
dataset = CSVDataset(
example_dir,
"protocol_dev_eval_sparse",
csv_to_sample_loader=CSVToSampleLoader(
......@@ -280,7 +280,7 @@ def test_csv_file_list_dev_eval_sparse():
def test_lst_file_list_dev_eval():
dataset = CSVDatasetDevEval(
dataset = CSVDataset(
legacy_example_dir,
"",
csv_to_sample_loader=LSTToSampleLoader(
......@@ -315,7 +315,7 @@ def test_lst_file_list_dev_eval():
def test_lst_file_list_dev_eval_sparse():
dataset = CSVDatasetDevEval(
dataset = CSVDataset(
legacy_example_dir,
"",
csv_to_sample_loader=LSTToSampleLoader(
......@@ -351,7 +351,7 @@ def test_lst_file_list_dev_eval_sparse():
def test_lst_file_list_dev_sparse_filelist2():
dataset = CSVDatasetDevEval(
dataset = CSVDataset(
legacy2_example_dir,
"",
csv_to_sample_loader=LSTToSampleLoader(
......@@ -369,7 +369,7 @@ def test_lst_file_list_dev_sparse_filelist2():
def test_csv_file_list_atnt():
dataset = CSVDatasetDevEval(atnt_protocol_path, "idiap_protocol")
dataset = CSVDataset(atnt_protocol_path, "idiap_protocol")
assert len(dataset.background_model_samples()) == 200
assert len(dataset.references()) == 20
assert len(dataset.probes()) == 100
......@@ -419,7 +419,7 @@ def run_experiment(dataset):
def test_atnt_experiment():
dataset = CSVDatasetDevEval(
dataset = CSVDataset(
dataset_protocol_path=atnt_protocol_path,
protocol_name="idiap_protocol",
csv_to_sample_loader=CSVToSampleLoader(
......
......@@ -22,7 +22,7 @@ Database implementations
.. autosummary::
.. bob.bio.base.database.CSVDatasetDevEval
.. bob.bio.base.database.CSVDataset
.. bob.bio.base.database.CSVDatasetCrossValidation
Biometric Algorithm
......
......@@ -57,7 +57,7 @@ $ bob bio pipelines vanilla-biometrics -d my_database.py -p <pipeline_name>
The ``database`` object defined in ``my_database.py`` is an instance of either:
- A :py:class:`~bob.bio.base.database.CSVDatasetDevEval` (see :ref:`here <bob.bio.base.database.csv_file_interface>`),
- A :py:class:`~bob.bio.base.database.CSVDataset` (see :ref:`here <bob.bio.base.database.csv_file_interface>`),
- A :py:class:`~bob.bio.base.database.CSVDatasetCrossValidation` (see :ref:`here <bob.bio.base.database.csv_cross_validation>`),
- Your own implementation of a :ref:`Database Interface <bob.bio.base.database.interface_class>`,
- A :ref:`legacy Database connector <bob.bio.base.legacy.database_connector>`.
......@@ -73,7 +73,7 @@ This method is less complete and less flexible than implementing a :ref:`full in
Protocol definition is possible and a set of csv files (at least ``dev_enroll.csv`` and ``dev_probe.csv``) in a folder must be created for each protocol.
The interface is created with :py:class:`~bob.bio.base.database.CSVDatasetDevEval`.
The interface is created with :py:class:`~bob.bio.base.database.CSVDataset`.
This class takes as input the base directory, and the protocol sub-directory of the :ref:`csv file structure <bob.bio.base.database.csv_file_structure>`, and finally a ``csv_to_sample_loader`` that will load a sample data from a csv row read from the csv files.
This csv_to_sample_loader needs to know the dataset base path and the extension of the dataset files.
......@@ -151,11 +151,11 @@ The following file structure and file naming must be followed, in order for the
- The ``train.csv`` file (as shown in ``my_protocol_2``) is optional and contains the information of the *world* set.
- The ``eval_enroll.csv`` and ``eval_probe.csv`` files (as shown in ``my_protocol_2``) are optional and contain the information of the *eval* set.
In this example, ``my_dataset`` would be the base path given to the ``dataset_protocol_path`` parameter of :py:class:`~bob.bio.base.database.CSVDatasetDevEval`, and ``my_protocol_1`` the ``protocol_name`` parameter:
In this example, ``my_dataset`` would be the base path given to the ``dataset_protocol_path`` parameter of :py:class:`~bob.bio.base.database.CSVDataset`, and ``my_protocol_1`` the ``protocol_name`` parameter:
.. code-block:: python
from bob.bio.base.database import CSVDatasetDevEval, AnnotationsLoader
from bob.bio.base.database import CSVDataset, AnnotationsLoader
# Define a loading function called for each sample with its path
def my_load_function(full_path):
......@@ -172,7 +172,7 @@ In this example, ``my_dataset`` would be the base path given to the ``dataset_pr
)
# Create the csv interface
database = CSVDatasetDevEval("my_dataset", "my_protocol_1", csv_to_sample_loader=my_sample_loader)
database = CSVDataset("my_dataset", "my_protocol_1", csv_to_sample_loader=my_sample_loader)
This will create a database interface with:
......@@ -189,7 +189,7 @@ CSV file Cross-validation Database interface
The :py:class:`~bob.bio.base.database.CSVDatasetCrossValidation` takes only one CSV file of identities and creates the necessary sets pseudo-randomly.
The format of the CSV file is the same as in :py:class:`~bob.bio.base.database.CSVDatasetDevEval`, comma separated with a header:
The format of the CSV file is the same as in :py:class:`~bob.bio.base.database.CSVDataset`, comma separated with a header:
.. code-block:: text
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment