diff --git a/.gitignore b/.gitignore
index 0e3948a5ced112eda7112d1b8c26f62df089a17d..084c8fd31722a3b5c9a22e13e930d67656e6fe6d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,3 +13,4 @@ sphinx
 dist
 build
 record.txt
+.DS_Store
diff --git a/bob/bio/base/config/baselines/pca_atnt_legacy.py b/bob/bio/base/config/baselines/pca_atnt_legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..8dbd0ddc302d736d5d53865fe28a8d2e6ad60555
--- /dev/null
+++ b/bob/bio/base/config/baselines/pca_atnt_legacy.py
@@ -0,0 +1,81 @@
+# from bob.bio.base.pipelines.vanilla_biometrics.legacy import DatabaseConnector, AlgorithmAdaptor
+
+import bob.db.atnt
+from bob.bio.base.pipelines.vanilla_biometrics.legacy import DatabaseConnector
+
+database = DatabaseConnector(bob.db.atnt.Database(), protocol="Default")
+
+from sklearn.pipeline import Pipeline, make_pipeline
+from sklearn.decomposition import PCA
+
+from bob.pipelines.mixins import CheckpointMixin, SampleMixin
+from bob.bio.base.mixins import CheckpointSampleLinearize
+from bob.bio.base.mixins.legacy import LegacyProcessorMixin
+
+
+class CheckpointSamplePCA(CheckpointMixin, SampleMixin, PCA):
+    """
+    Enables SAMPLE and CHECKPOINTING handling for https://scikit-learn.org/stable/modules/generated/sklearn.decomposition.PCA.html
+    """
+
+    pass
+
+
+### LEGACY PREPROCESSOR ###
+import functools
+
+# Cropping
+CROPPED_IMAGE_HEIGHT = 80
+CROPPED_IMAGE_WIDTH = CROPPED_IMAGE_HEIGHT * 4 // 5
+
+# eye positions for frontal images
+RIGHT_EYE_POS = (CROPPED_IMAGE_HEIGHT // 5, CROPPED_IMAGE_WIDTH // 4 - 1)
+LEFT_EYE_POS = (CROPPED_IMAGE_HEIGHT // 5, CROPPED_IMAGE_WIDTH // 4 * 3)
+
+
+# RANDOM EYES POSITIONS
+# I JUST MADE UP THESE NUMBERS
+FIXED_RIGHT_EYE_POS = (30, 30)
+FIXED_LEFT_EYE_POS = (20, 50)
+import bob.bio.face
+
+face_cropper = functools.partial(
+    bob.bio.face.preprocessor.FaceCrop,
+    cropped_image_size=(CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH),
+    cropped_positions={"leye": LEFT_EYE_POS, "reye": RIGHT_EYE_POS},
+    fixed_positions={"leye": FIXED_LEFT_EYE_POS, "reye": FIXED_RIGHT_EYE_POS},
+)
+
+
+from bob.pipelines.mixins import mix_me_up
+
+preprocessor = mix_me_up((CheckpointMixin, SampleMixin), LegacyProcessorMixin)
+
+from bob.pipelines.mixins import dask_it
+
+extractor = Pipeline(
+    steps=[
+        ("0", preprocessor(callable=face_cropper, features_dir="./example/extractor0")),
+        ("1", CheckpointSampleLinearize(features_dir="./example/extractor1")),
+        (
+            "2",
+            CheckpointSamplePCA(
+                features_dir="./example/extractor2", model_path="./example/pca.pkl"
+            ),
+        ),
+    ]
+)
+# extractor = dask_it(extractor)
+
+from bob.bio.base.pipelines.vanilla_biometrics.biometric_algorithm import (
+    Distance,
+    BiometricAlgorithmCheckpointMixin,
+)
+
+
+class CheckpointDistance(BiometricAlgorithmCheckpointMixin, Distance):
+    pass
+
+
+algorithm = CheckpointDistance(features_dir="./example/")
+# algorithm = Distance()
diff --git a/bob/bio/base/mixins/legacy.py b/bob/bio/base/mixins/legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c3cc47002bd84a0eb265c1a24d1b7625ab88876
--- /dev/null
+++ b/bob/bio/base/mixins/legacy.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# vim: set fileencoding=utf-8 :
+# @author: Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
+
+
+"""
+Mixins to handle legacy components
+"""
+
+from bob.pipelines.mixins import CheckpointMixin, SampleMixin
+from sklearn.base import TransformerMixin
+from sklearn.utils.validation import check_array
+
+
+class LegacyProcessorMixin(TransformerMixin):
+    """Class that wraps :py:class:`bob.bio.base.preprocessor.Preprocessor` and
+    :py:class:`bob.bio.base.extractor.Extractor` instances
+
+
+    Example
+    -------
+
+    Wrapping a preprocessor with functools
+    >>> from bob.bio.base.mixins.legacy import LegacyProcessorMixin
+    >>> from bob.bio.face.preprocessor import FaceCrop
+    >>> import functools
+    >>> transformer = LegacyProcessorMixin(functools.partial(FaceCrop, cropped_image_size=(10,10)))
+
+    Example
+    -------
+    Wrapping an extractor
+    >>> from bob.bio.base.mixins.legacy import LegacyProcessorMixin
+    >>> from bob.bio.face.extractor import Linearize
+    >>> transformer = LegacyProcessorMixin(Linearize)
+
+
+    Parameters
+    ----------
+    callable: callable
+        Callable that instantiates the legacy processor to be wrapped
+
+    """
+
+    def __init__(self, callable=None):
+        self.callable = callable
+        self.instance = None
+
+    def fit(self, X, y=None, **fit_params):
+        return self
+
+    def transform(self, X):
+
+        X = check_array(X, allow_nd=True)
+
+        # Instantiate the legacy processor once and do the "real" transform
+        if self.instance is None:
+            self.instance = self.callable()
+        return [self.instance(x) for x in X]
diff --git a/bob/bio/base/script/vanilla_biometrics.py b/bob/bio/base/script/vanilla_biometrics.py
index 574ee404f4a48fd7c1af3f802c54afd3b7bf7519..744df43001101859b11868b5d9a006c90350b780 100644
--- a/bob/bio/base/script/vanilla_biometrics.py
+++ b/bob/bio/base/script/vanilla_biometrics.py
@@ -201,8 +201,8 @@ def vanilla_biometrics(
     else:
         raise TypeError("The output of the pipeline is not writeble")
 
-    dask_client.shutdown()
-
+    if dask_client is not None:
+        dask_client.shutdown()
 
 
 @click.command()
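
Usage sketch (not part of the patch): a minimal example of how the new LegacyProcessorMixin could be exercised on its own, assuming bob.bio.base with this patch applied plus numpy and scikit-learn installed. The Flatten helper below is a hypothetical stand-in for a legacy bob.bio processor such as bob.bio.face.extractor.Linearize.

import numpy
from bob.bio.base.mixins.legacy import LegacyProcessorMixin


class Flatten:
    """Hypothetical legacy-style processor: a callable object that flattens one sample."""

    def __call__(self, data):
        return data.flatten()


# The mixin receives a callable that *instantiates* the processor;
# the instance is created lazily on the first call to transform().
transformer = LegacyProcessorMixin(callable=Flatten)

X = numpy.random.rand(4, 8, 8)              # four fake 8x8 "images"
features = transformer.fit(X).transform(X)  # list of four 64-dimensional vectors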