Commit 679b71ad authored by Amir MOHAMMADI

Porting to dask pipelines

parent 0b334636
Merge request !110: Porting to dask pipelines
Pipeline #45314 failed
Showing 25 additions and 961 deletions
#!/usr/bin/env python
from bob.pad.base.algorithm import SVMCascadePCA
#=======================================================================================
# Define instances here:
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.2}
N = 2
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n2_gamma_02 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.1}
N = 2
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n2_gamma_01 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.05}
N = 2
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n2_gamma_005 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.01}
N = 2
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n2_gamma_001 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
#=======================================================================================
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.1}
N = 10
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n10_gamma_01 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.05}
N = 10
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n10_gamma_005 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.01}
N = 10
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n10_gamma_001 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.005}
N = 10
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n10_gamma_0005 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
#=======================================================================================
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.5}
N = 20
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n20_gamma_05 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.2}
N = 20
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n20_gamma_02 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.1}
N = 20
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n20_gamma_01 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.05}
N = 20
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n20_gamma_005 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.01}
N = 20
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n20_gamma_001 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.005}
N = 20
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n20_gamma_0005 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.001}
N = 20
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n20_gamma_0001 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
#=======================================================================================
MACHINE_TYPE = 'ONE_CLASS'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'nu': 0.001, 'gamma': 0.1}
N = 2
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = False
algorithm_n2_gamma_01_video_level = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
#=======================================================================================
# Test the cascade of two-class SVMs.
MACHINE_TYPE = 'C_SVC'
KERNEL_TYPE = 'RBF'
SVM_KWARGS = {'cost': 1, 'gamma': 0.01}
N = 2
POS_SCORES_SLOPE = 0.01
FRAME_LEVEL_SCORES_FLAG = True
algorithm_n2_two_class_svm_c1_gamma_001 = SVMCascadePCA(
machine_type=MACHINE_TYPE,
kernel_type=KERNEL_TYPE,
svm_kwargs=SVM_KWARGS,
N=N,
pos_scores_slope=POS_SCORES_SLOPE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
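#=======================================================================================
# The one-class RBF instances above differ only in ``N`` and ``gamma``. The loop below is
# a sketch (not part of the original configuration) showing an equivalent, more compact
# way to build the same objects; they are collected in a dictionary keyed by the same
# names instead of being bound to module-level variables.
_one_class_grid = {
    2: [0.2, 0.1, 0.05, 0.01],
    10: [0.1, 0.05, 0.01, 0.005],
    20: [0.5, 0.2, 0.1, 0.05, 0.01, 0.005, 0.001],
}
one_class_algorithms = {}
for _n, _gammas in _one_class_grid.items():
    for _gamma in _gammas:
        _name = "algorithm_n{}_gamma_{}".format(_n, str(_gamma).replace("0.", "0"))
        one_class_algorithms[_name] = SVMCascadePCA(
            machine_type='ONE_CLASS',
            kernel_type='RBF',
            svm_kwargs={'nu': 0.001, 'gamma': _gamma},
            N=_n,
            pos_scores_slope=0.01,
            frame_level_scores_flag=True)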
#!/usr/bin/env python
from bob.pad.base.algorithm import OneClassGMM
#=======================================================================================
# Define instances here:
N_COMPONENTS = 2
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_2 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 3
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_3 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 4
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_4 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 5
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_5 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 6
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_6 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 7
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_7 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 8
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_8 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 9
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_9 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 10
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_10 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
#=======================================================================================
# above 10 Gaussians:
N_COMPONENTS = 12
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_12 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 14
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_14 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 16
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_16 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 18
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_18 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 20
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_20 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
#=======================================================================================
# above 20 Gaussians:
N_COMPONENTS = 25
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_25 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 30
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_30 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 35
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_35 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 40
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_40 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 45
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_45 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 50
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
#=======================================================================================
# above 50 Gaussians:
N_COMPONENTS = 60
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_60 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 70
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_70 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 80
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_80 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 90
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_90 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 100
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_100 = OneClassGMM(
n_components=N_COMPONENTS, frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
#=======================================================================================
# 50 Gaussians, different random seeds:
N_COMPONENTS = 50
RANDOM_STATE = 0
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50_0 = OneClassGMM(
n_components=N_COMPONENTS,
random_state=RANDOM_STATE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 50
RANDOM_STATE = 1
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50_1 = OneClassGMM(
n_components=N_COMPONENTS,
random_state=RANDOM_STATE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 50
RANDOM_STATE = 2
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50_2 = OneClassGMM(
n_components=N_COMPONENTS,
random_state=RANDOM_STATE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 50
RANDOM_STATE = 3
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50_3 = OneClassGMM(
n_components=N_COMPONENTS,
random_state=RANDOM_STATE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 50
RANDOM_STATE = 4
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50_4 = OneClassGMM(
n_components=N_COMPONENTS,
random_state=RANDOM_STATE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 50
RANDOM_STATE = 5
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50_5 = OneClassGMM(
n_components=N_COMPONENTS,
random_state=RANDOM_STATE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 50
RANDOM_STATE = 6
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50_6 = OneClassGMM(
n_components=N_COMPONENTS,
random_state=RANDOM_STATE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 50
RANDOM_STATE = 7
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50_7 = OneClassGMM(
n_components=N_COMPONENTS,
random_state=RANDOM_STATE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 50
RANDOM_STATE = 8
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50_8 = OneClassGMM(
n_components=N_COMPONENTS,
random_state=RANDOM_STATE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
N_COMPONENTS = 50
RANDOM_STATE = 9
FRAME_LEVEL_SCORES_FLAG = True
algorithm_gmm_50_9 = OneClassGMM(
n_components=N_COMPONENTS,
random_state=RANDOM_STATE,
frame_level_scores_flag=FRAME_LEVEL_SCORES_FLAG)
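#=======================================================================================
# The OneClassGMM instances above follow a single pattern: only ``n_components`` (and,
# for the last group, ``random_state``) changes. A minimal sketch of the same sweep
# written as comprehensions (not part of the original configuration):
gmm_algorithms = {
    "algorithm_gmm_{}".format(_n): OneClassGMM(
        n_components=_n, frame_level_scores_flag=True)
    for _n in [2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 14, 16, 18, 20,
               25, 30, 35, 40, 45, 50, 60, 70, 80, 90, 100]
}
gmm_50_seeded = {
    "algorithm_gmm_50_{}".format(_seed): OneClassGMM(
        n_components=50, random_state=_seed, frame_level_scores_flag=True)
    for _seed in range(10)
}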
#!/usr/bin/env python
from bob.pad.base.algorithm import SVM
#=======================================================================================
# Define instances here:
machine_type = 'C_SVC'
kernel_type = 'RBF'
n_samples = 10000
# trainer_grid_search_params = {'cost': [2**p for p in range(-5, 16, 2)], 'gamma': [2**p for p in range(-15, 4, 2)]}
trainer_grid_search_params = {
'cost': [2**p for p in range(-3, 14, 2)],
'gamma': [2**p for p in range(-15, 0, 2)]
}
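# For reference, the two comprehensions above expand to (approximately):
#   cost  = [0.125, 0.5, 2, 8, 32, 128, 512, 2048, 8192]   # 2**p, p = -3, -1, ..., 13
#   gamma = [3.05e-05, 1.22e-04, ..., 0.125, 0.5]           # 2**p, p = -15, -13, ..., -1
# i.e. a 9 x 8 grid of 72 (cost, gamma) combinations evaluated during training.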
mean_std_norm_flag = True
frame_level_scores_flag = False # one score per video(!) in this case
video_svm_pad_algorithm_10k_grid_mean_std = SVM(
machine_type=machine_type,
kernel_type=kernel_type,
n_samples=n_samples,
trainer_grid_search_params=trainer_grid_search_params,
mean_std_norm_flag=mean_std_norm_flag,
frame_level_scores_flag=frame_level_scores_flag)
frame_level_scores_flag = True # one score per frame(!) in this case
video_svm_pad_algorithm_10k_grid_mean_std_frame_level = SVM(
machine_type=machine_type,
kernel_type=kernel_type,
n_samples=n_samples,
trainer_grid_search_params=trainer_grid_search_params,
mean_std_norm_flag=mean_std_norm_flag,
frame_level_scores_flag=frame_level_scores_flag)
trainer_grid_search_params = {
'cost': [1],
'gamma': [0]
} # set the default LibSVM parameters
video_svm_pad_algorithm_default_svm_param_mean_std_frame_level = SVM(
machine_type=machine_type,
kernel_type=kernel_type,
n_samples=n_samples,
trainer_grid_search_params=trainer_grid_search_params,
mean_std_norm_flag=mean_std_norm_flag,
frame_level_scores_flag=frame_level_scores_flag)
#!/usr/bin/env python
# encoding: utf-8
from bob.pad.face.database import BRSUPadDatabase
from bob.pad.base.pipelines.vanilla_pad import DatabaseConnector
from bob.extension import rc
database = DatabaseConnector(
    BRSUPadDatabase(
        protocol="test",
        original_directory=rc["bob.db.brsu.directory"],
    )
)
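#=======================================================================================
# The connector above only adapts the legacy database to the new dask-based
# (vanilla-pad) pipelines; the raw data location still has to be configured once, e.g.
# with ``bob config set bob.db.brsu.directory /path/to/BRSU``. The check below is an
# optional sketch and is not part of the original configuration:
if rc.get("bob.db.brsu.directory") is None:
    raise RuntimeError(
        "bob.db.brsu.directory is not configured; run "
        "`bob config set bob.db.brsu.directory /path/to/BRSU` first."
    )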
#!/usr/bin/env python
"""Config file for the CASIA FASD dataset.
Please run ``bob config set bob.db.casia_fasd.directory /path/to/casia_fasd_files``
in terminal to point to the original files of the dataset on your computer."""
from bob.pad.face.database import CasiaFasdPadDatabase
from bob.pad.base.pipelines.vanilla_pad import DatabaseConnector
database = DatabaseConnector(CasiaFasdPadDatabase())
#!/usr/bin/env python
# encoding: utf-8
from bob.pad.face.database import CasiaSurfPadDatabase
from bob.pad.base.pipelines.vanilla_pad import DatabaseConnector
from bob.extension import rc
database = DatabaseConnector(
    CasiaSurfPadDatabase(
        protocol="color",
        original_directory=rc.get("bob.db.casiasurf.directory"),
        original_extension=".jpg",
    )
)
#!/usr/bin/env python
"""`CELEBA`_ is a face makeup spoofing database adapted for face PAD experiments.
"""
from bob.extension import rc
from bob.pad.base.pipelines.vanilla_pad import DatabaseConnector
from bob.pad.face.database.celeb_a import CELEBAPadDatabase
database = DatabaseConnector(
    CELEBAPadDatabase(
        protocol="grandtest",
        original_directory=rc.get("bob.db.celeba.directory"),
        original_extension="",
        training_depends_on_protocol=True,
    )
)
"""The :py:class:`bob.pad.base.database.PadDatabase` derivative with CELEBA
database settings.
.. warning::
This class only provides a programmatic interface to load data in an orderly
manner, respecting usage protocols. It does **not** contain the raw
data files. You should procure those yourself.
Notice that ``original_directory`` is set to ``[YOUR_CELEBA_DATABASE_DIRECTORY]``.
You must make sure to create ``${HOME}/.bob_bio_databases.txt`` setting this
value to the place where you actually installed the CELEBA Database, as
explained in the section :ref:`bob.pad.face.baselines`.
"""
protocol = 'grandtest'
"""The default protocol to use for reproducing the baselines.
You may modify this at runtime by specifying the option ``--protocol`` on the
command-line of ``spoof.py`` or using the keyword ``protocol`` on a
configuration file that is loaded **after** this configuration resource.
"""
groups = ["train", "dev", "eval"]
"""The default groups to use for reproducing the baselines.
You may modify this at runtime by specifying the option ``--groups`` on the
command-line of ``spoof.py`` or using the keyword ``groups`` on a
configuration file that is loaded **after** this configuration resource.
"""
#!/usr/bin/env python
"""
BATL Db is a database for face PAD experiments.
"""
from bob.pad.face.database import BatlPadDatabase
# Directory where the data files are stored.
# This directory is given in the .bob_bio_databases.txt file located in your home directory
ORIGINAL_DIRECTORY = "[YOUR_BATL_DB_DIRECTORY]"
"""Value of ``~/.bob_bio_databases.txt`` for this database"""
ORIGINAL_EXTENSION = ".h5" # extension of the data files
ANNOTATIONS_TEMP_DIR = ""
PROTOCOL = 'nowig-color-50'
database = BatlPadDatabase(
protocol=PROTOCOL,
original_directory=ORIGINAL_DIRECTORY,
original_extension=ORIGINAL_EXTENSION,
annotations_temp_dir=ANNOTATIONS_TEMP_DIR,
landmark_detect_method="mtcnn",
training_depends_on_protocol=True,
)
"""The :py:class:`bob.pad.base.database.BatlPadDatabase` derivative with BATL Db
database settings.
.. warning::
This class only provides a programmatic interface to load data in an orderly
manner, respecting usage protocols. It does **not** contain the raw
data files. You should procure those yourself.
Notice that ``original_directory`` is set to ``[YOUR_BATL_DB_DIRECTORY]``.
You must make sure to create the ``${HOME}/.bob_bio_databases.txt`` file, setting this
value to the place where you actually installed the BATL database.
"""
protocol = PROTOCOL
"""
You may modify this at runtime by specifying the option ``--protocol`` on the
command-line of ``spoof.py`` or using the keyword ``protocol`` on a
configuration file that is loaded **after** this configuration resource.
"""
groups = ["train", "dev", "eval"]
"""The default groups to use for reproducing the baselines.
You may modify this at runtime by specifying the option ``--groups`` on the
command-line of ``spoof.py`` or using the keyword ``groups`` on a
configuration file that is loaded **after** this configuration resource.
"""
#!/usr/bin/env python
"""
BATL Db is a database for face PAD experiments.
"""
from bob.pad.face.database import BatlPadDatabase
# Directory where the data files are stored.
# This directory is given in the .bob_bio_databases.txt file located in your home directory
ORIGINAL_DIRECTORY = "[YOUR_BATL_DB_DIRECTORY]"
"""Value of ``~/.bob_bio_databases.txt`` for this database"""
ORIGINAL_EXTENSION = ".h5" # extension of the data files
ANNOTATIONS_TEMP_DIR = ""
PROTOCOL = 'nowig-depth-50'
database = BatlPadDatabase(
protocol=PROTOCOL,
original_directory=ORIGINAL_DIRECTORY,
original_extension=ORIGINAL_EXTENSION,
annotations_temp_dir=ANNOTATIONS_TEMP_DIR,
landmark_detect_method="mtcnn",
training_depends_on_protocol=True,
)
"""The :py:class:`bob.pad.base.database.BatlPadDatabase` derivative with BATL Db
database settings.
.. warning::
This class only provides a programmatic interface to load data in an orderly
manner, respecting usage protocols. It does **not** contain the raw
data files. You should procure those yourself.
Notice that ``original_directory`` is set to ``[YOUR_BATL_DB_DIRECTORY]``.
You must make sure to create the ``${HOME}/.bob_bio_databases.txt`` file, setting this
value to the place where you actually installed the BATL database.
"""
protocol = PROTOCOL
"""
You may modify this at runtime by specifying the option ``--protocol`` on the
command-line of ``spoof.py`` or using the keyword ``protocol`` on a
configuration file that is loaded **after** this configuration resource.
"""
groups = ["train", "dev", "eval"]
"""The default groups to use for reproducing the baselines.
You may modify this at runtime by specifying the option ``--groups`` on the
command-line of ``spoof.py`` or using the keyword ``groups`` on a
configuration file that is loaded **after** this configuration resource.
"""
#!/usr/bin/env python
"""
BATL Db is a database for face PAD experiments.
"""
from bob.pad.face.database import BatlPadDatabase
# Directory where the data files are stored.
# This directory is given in the .bob_bio_databases.txt file located in your home directory
ORIGINAL_DIRECTORY = "[YOUR_BATL_DB_DIRECTORY]"
"""Value of ``~/.bob_bio_databases.txt`` for this database"""
ORIGINAL_EXTENSION = ".h5" # extension of the data files
ANNOTATIONS_TEMP_DIR = ""
PROTOCOL = 'nowig-infrared-50'
database = BatlPadDatabase(
protocol=PROTOCOL,
original_directory=ORIGINAL_DIRECTORY,
original_extension=ORIGINAL_EXTENSION,
annotations_temp_dir=ANNOTATIONS_TEMP_DIR,
landmark_detect_method="mtcnn",
training_depends_on_protocol=True,
)
"""The :py:class:`bob.pad.base.database.BatlPadDatabase` derivative with BATL Db
database settings.
.. warning::
This class only provides a programmatic interface to load data in an orderly
manner, respecting usage protocols. It does **not** contain the raw
data files. You should procure those yourself.
Notice that ``original_directory`` is set to ``[YOUR_BATL_DB_DIRECTORY]``.
You must make sure to create the ``${HOME}/.bob_bio_databases.txt`` file, setting this
value to the place where you actually installed the BATL database.
"""
protocol = PROTOCOL
"""
You may modify this at runtime by specifying the option ``--protocol`` on the
command-line of ``spoof.py`` or using the keyword ``protocol`` on a
configuration file that is loaded **after** this configuration resource.
"""
groups = ["train", "dev", "eval"]
"""The default groups to use for reproducing the baselines.
You may modify this at runtime by specifying the option ``--groups`` on the
command-line of ``spoof.py`` or using the keyword ``groups`` on a
configuration file that is loaded **after** this configuration resource.
"""
#!/usr/bin/env python
"""
Idiap BATL DB is a database for face PAD experiments.
"""
from bob.pad.face.database import BatlPadDatabase
# Directory where the data files are stored.
# This directory is given in the .bob_bio_databases.txt file located in your home directory
ORIGINAL_DIRECTORY = "[YOUR_BATL_DB_DIRECTORY]"
"""Value of ``~/.bob_bio_databases.txt`` for this database"""
ORIGINAL_EXTENSION = ".h5" # extension of the data files
ANNOTATIONS_TEMP_DIR = "" # NOTE: this variable is NOT assigned in the instance of the BatlPadDatabase, thus "rc" functionality defined in bob.extension will be involved
PROTOCOL = 'grandtest-color*infrared*depth-10' # use 10 frames for PAD experiments
database = BatlPadDatabase(
protocol=PROTOCOL,
original_directory=ORIGINAL_DIRECTORY,
original_extension=ORIGINAL_EXTENSION,
landmark_detect_method="mtcnn", # detect annotations using mtcnn
exclude_attacks_list=['makeup'],
exclude_pai_all_sets=True, # exclude makeup from all sets (the default behavior for the grandtest protocol)
append_color_face_roi_annot=False) # do not append the annotations defining the ROI of the cropped face image to the annotations dictionary
"""The :py:class:`bob.pad.base.database.BatlPadDatabase` derivative with BATL Db
database settings.
.. warning::
This class only provides a programmatic interface to load data in an orderly
manner, respecting usage protocols. It does **not** contain the raw
data files. You should procure those yourself.
Notice that ``original_directory`` is set to ``[YOUR_BATL_DB_DIRECTORY]``.
You must make sure to create the ``${HOME}/.bob_bio_databases.txt`` file, setting this
value to the place where you actually installed the BATL Govt database.
"""
protocol = PROTOCOL
"""
You may modify this at runtime by specifying the option ``--protocol`` on the
command-line of ``spoof.py`` or using the keyword ``protocol`` on a
configuration file that is loaded **after** this configuration resource.
"""
groups = ["train", "dev", "eval"]
"""The default groups to use for reproducing the baselines.
You may modify this at runtime by specifying the option ``--groups`` on the
command-line of ``spoof.py`` or using the keyword ``groups`` on a
configuration file that is loaded **after** this configuration resource.
"""
#!/usr/bin/env python
"""
BATL Db is a database for face PAD experiments.
"""
from bob.pad.face.database import BatlPadDatabase
# Directory where the data files are stored.
# This directory is given in the .bob_bio_databases.txt file located in your home directory
ORIGINAL_DIRECTORY = "[YOUR_BATL_DB_DIRECTORY]"
"""Value of ``~/.bob_bio_databases.txt`` for this database"""
ORIGINAL_EXTENSION = ".h5" # extension of the data files
ANNOTATIONS_TEMP_DIR = ""
PROTOCOL = 'nowig-thermal-50'
database = BatlPadDatabase(
protocol=PROTOCOL,
original_directory=ORIGINAL_DIRECTORY,
original_extension=ORIGINAL_EXTENSION,
annotations_temp_dir=ANNOTATIONS_TEMP_DIR,
landmark_detect_method="mtcnn",
training_depends_on_protocol=True,
)
"""The :py:class:`bob.pad.base.database.BatlPadDatabase` derivative with BATL Db
database settings.
.. warning::
This class only provides a programmatic interface to load data in an orderly
manner, respecting usage protocols. It does **not** contain the raw
data files. You should procure those yourself.
Notice that ``original_directory`` is set to ``[YOUR_BATL_DB_DIRECTORY]``.
You must make sure to create the ``${HOME}/.bob_bio_databases.txt`` file, setting this
value to the place where you actually installed the BATL database.
"""
protocol = PROTOCOL
"""
You may modify this at runtime by specifying the option ``--protocol`` on the
command-line of ``spoof.py`` or using the keyword ``protocol`` on a
configuration file that is loaded **after** this configuration resource.
"""
groups = ["train", "dev", "eval"]
"""The default groups to use for reproducing the baselines.
You may modify this at runtime by specifying the option ``--groups`` on the
command-line of ``spoof.py`` or using the keyword ``groups`` on a
configuration file that is loaded **after** this configuration resource.
"""
#!/usr/bin/env python
from bob.pad.face.extractor import FrameDiffFeatures
#=======================================================================================
# Define instances here:
window_size = 20
overlap = 0
frame_diff_feat_extr_w20_over0 = FrameDiffFeatures(
window_size=window_size, overlap=overlap)
from bob.bio.base.extractor import CallableExtractor
from bob.pad.face.extractor import OpticalFlow
extractor = CallableExtractor(OpticalFlow())
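# Note (added for clarity, not in the original file): CallableExtractor simply wraps a
# callable -- here the OpticalFlow estimator -- so it can be plugged in wherever an
# extractor object is expected; see bob.bio.base for the exact wrapper interface.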
#!/usr/bin/env python
from bob.pad.face.extractor import LBPHistogram
from bob.bio.video.extractor import Wrapper
#=======================================================================================
# Define instances here:
lbptype = 'uniform'
elbptype = 'regular'
rad = 1
neighbors = 8
circ = False
dtype = None
video_lbp_histogram_extractor_n8r1_uniform = Wrapper(LBPHistogram(
lbptype=lbptype,
elbptype=elbptype,
rad=rad,
neighbors=neighbors,
circ=circ,
dtype=dtype))
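# Note (added for clarity, not in the original file): with ``lbptype='uniform'`` and
# 8 neighbors the per-image histogram has 59 bins (58 uniform patterns plus one bin for
# all non-uniform patterns); the video Wrapper applies the extractor to each frame.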
#!/usr/bin/env python
from bob.pad.face.extractor import ImageQualityMeasure
import bob.bio.video
#=======================================================================================
# Define instances here:
galbally = True
msu = True
dtype = None
video_quality_measure_galbally_msu = bob.bio.video.extractor.Wrapper(
    ImageQualityMeasure(galbally=galbally, msu=msu, dtype=dtype))