Commit cbe29954 authored by Olegs NIKISINS

Moved the mean-std normalizer class to utils

parent 57d0638e
1 merge request: !14 MLP class and config to train it
Pipeline #26632 passed
@@ -10,8 +10,6 @@ from bob.pad.face.database import BatlPadDatabase
from torch import nn
import numpy as np
#==============================================================================
# Define parameters here:
@@ -61,64 +59,9 @@ Transformations to be applied to the input data sample.
Note: the variable or function name ``transform`` must be the same in
all configuration files. This transformation is handled in DataFolder.
"""
from bob.learn.pytorch.utils import compute_mean_std_bf_class
from bob.learn.pytorch.utils import mean_std_normalize
import torch
class _MeanStdNormalizer():
    """
    The functionality of this class can be split into sub-tasks:

    1. When **first** called, the mean-std normalization parameters are
       pre-computed using **bona-fide** samples from the training set of the
       database defined above.

    2. In the next calls, the pre-computed mean-std normalizers are used
       for normalization of the input training feature vectors.
    """

    def __init__(self):
        self.features_mean = None
        self.features_std = None

    def __call__(self, x):
        """
        Pre-compute the normalizers and use them for mean-std normalization.
        Also converts the normalized features to a Tensor.

        Arguments
        ---------

        x : 1D :py:class:`numpy.ndarray`
            Feature vector to be normalized. The size is ``(n_features, )``.

        Returns
        -------

        x_norm : Tensor
            Normalized feature vector of size ``(1, n_features)``.
        """
        if self.features_mean is None or self.features_std is None:  # pre-compute normalization parameters
            print("Computing mean-std normalization parameters using real samples of the training set")
            # compute the normalization parameters on the fly:
            features_mean, features_std = compute_mean_std_bf_class(kwargs)

            # save the normalization parameters:
            print("Setting the normalization parameters")
            self.features_mean = features_mean
            self.features_std = features_std

        # normalize the sample
        x_norm, _, _ = mean_std_normalize(features=np.expand_dims(x, axis=0),
                                          features_mean=self.features_mean,
                                          features_std=self.features_std)
        x_norm = x_norm.squeeze()  # drop the leading singleton dimension

        return torch.Tensor(x_norm).unsqueeze(0)
from bob.learn.pytorch.utils import MeanStdNormalizer
transform = _MeanStdNormalizer()
transform = MeanStdNormalizer(kwargs)
"""
Set the kwargs of the "dataset" instance of the DataFolder class.
...
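As a quick illustration of what the configured ``transform`` does to a single sample, here is a minimal, self-contained sketch (not part of this commit). The mean, std and feature values below are made up; ``MeanStdNormalizer`` estimates them from the bona-fide samples of the training set on its first call.

import numpy as np
import torch

# illustrative statistics; in the library they are computed from the
# bona-fide training samples via compute_mean_std_bf_class()
features_mean = np.array([0.5, 1.0, 2.0])
features_std = np.array([0.1, 0.2, 0.4])

x = np.array([0.6, 1.2, 1.6])                # one (n_features, ) feature vector
x_norm = (x - features_mean) / features_std  # mean-std (z-score) normalization
x_norm = torch.Tensor(x_norm).unsqueeze(0)   # shape (1, n_features), as in the class below

print(x_norm.shape)  # torch.Size([1, 3])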
@@ -175,7 +175,13 @@ def compute_mean_std_bf_class(kwargs):
    Returns
    -------

    features_mean : numpy array
        1D numpy array containing the mean of the features, computed using the
        bona-fide samples of the training set.

    features_std : numpy array
        1D numpy array containing the std of the features, computed using the
        bona-fide samples of the training set.
    """

    kwargs_copy = kwargs.copy()
@@ -200,3 +206,63 @@ def compute_mean_std_bf_class(kwargs):
    return features_mean, features_std
# =============================================================================
class MeanStdNormalizer():
    """
    The functionality of this class can be split into sub-tasks:

    1. When **first** called, the mean-std normalization parameters are
       pre-computed using **bona-fide** samples from the training set of the
       database defined by the ``kwargs``.

    2. In the next calls, the pre-computed mean-std normalizers are used
       for normalization of the input training feature vectors.

    Arguments
    ---------

    kwargs : dict
        The kwargs used to initialize an instance of the DataFolder class.
    """

    def __init__(self, kwargs):
        self.kwargs = kwargs
        self.features_mean = None
        self.features_std = None

    def __call__(self, x):
        """
        Pre-compute the normalizers and use them for mean-std normalization.
        Also converts the normalized features to a Tensor.

        Arguments
        ---------

        x : 1D :py:class:`numpy.ndarray`
            Feature vector to be normalized. The size is ``(n_features, )``.

        Returns
        -------

        x_norm : Tensor
            Normalized feature vector of size ``(1, n_features)``.
        """
        if self.features_mean is None or self.features_std is None:  # pre-compute normalization parameters
            print("Computing mean-std normalization parameters using real samples of the training set")
            # compute the normalization parameters on the fly:
            features_mean, features_std = compute_mean_std_bf_class(self.kwargs)

            # save the normalization parameters:
            print("Setting the normalization parameters")
            self.features_mean = features_mean
            self.features_std = features_std

        # normalize the sample
        x_norm, _, _ = mean_std_normalize(features=np.expand_dims(x, axis=0),
                                          features_mean=self.features_mean,
                                          features_std=self.features_std)
        x_norm = x_norm.squeeze()  # drop the leading singleton dimension

        return torch.Tensor(x_norm).unsqueeze(0)
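The compute-once behaviour described in the class docstring can be sketched with the stand-alone toy below (illustrative only, not library code): the statistics come from an in-memory array rather than from a DataFolder defined by ``kwargs``, are estimated on the first call, and are reused afterwards.

import numpy as np
import torch

class _LazyMeanStdNormalizer():
    """Toy stand-in for MeanStdNormalizer using an in-memory training array."""

    def __init__(self, training_features):
        self.training_features = training_features
        self.features_mean = None
        self.features_std = None

    def __call__(self, x):
        if self.features_mean is None or self.features_std is None:
            # computed only on the first call, then cached
            self.features_mean = self.training_features.mean(axis=0)
            self.features_std = self.training_features.std(axis=0)
        x_norm = (x - self.features_mean) / self.features_std
        return torch.Tensor(x_norm).unsqueeze(0)

train = np.random.rand(100, 5)            # synthetic "bona-fide" training features
normalize = _LazyMeanStdNormalizer(train)
out = normalize(np.random.rand(5))        # first call: mean/std are computed here
out = normalize(np.random.rand(5))        # later calls reuse the cached statistics
print(out.shape)                          # torch.Size([1, 5])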