Commit 86aa5175 authored by Tiago de Freitas Pereira

Implemented to_dict and from_dict for the BICMachine

parent 9eba717e
Merge request !11: WIP: First attempt to approach the issue bob.bio.base#106
@@ -10,6 +10,7 @@ bob.extension.load_bob_library("bob.learn.linear", __file__)
from ._library import *
from .machine import Machine
from .bic import BICMachine
from . import version
from .version import module as __version__
from .version import api as __api_version__
...
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Tiago de Freitas Pereira <tiago.pereira@idiap.ch>

from ._library import BICMachine as _BICMachine_C
import bob.learn.activation
import numpy


class BICMachine(_BICMachine_C):
    __doc__ = _BICMachine_C.__doc__
    def to_dict(self):
        """
        Dumps the machine's content to a :py:class:`dict`

        **Returns**

        A :py:class:`dict` with the :py:class:`bob.learn.linear.BICMachine` variables
        """
        output_dict = dict()
        output_dict["project_data"] = self.project_data
        output_dict["intra_mean"] = self.intra_mean
        output_dict["intra_variance"] = self.intra_variance
        if self.project_data:
            # subspace-related attributes are only defined when projection data was trained
            output_dict["use_DFFS"] = self.use_DFFS
            output_dict["intra_subspace"] = self.intra_subspace
            output_dict["intra_rho"] = self.intra_rho
        output_dict["extra_mean"] = self.extra_mean
        output_dict["extra_variance"] = self.extra_variance
        if self.project_data:
            output_dict["extra_rho"] = self.extra_rho
            output_dict["extra_subspace"] = self.extra_subspace
        return output_dict
    @classmethod
    def from_dict(cls, input_dict):
        """
        Builds a new machine from a :py:class:`dict` created by :py:meth:`to_dict`
        """
        # use_DFFS is only stored when projection data is available
        machine = cls(input_dict.get("use_DFFS", False))
        machine.project_data = input_dict["project_data"]
        machine.intra_mean = input_dict["intra_mean"].astype("float64")
        machine.intra_variance = input_dict["intra_variance"].astype("float64")
        machine.extra_mean = input_dict["extra_mean"].astype("float64")
        machine.extra_variance = input_dict["extra_variance"].astype("float64")
        if machine.project_data:
            machine.intra_subspace = input_dict["intra_subspace"].astype("float64")
            machine.intra_rho = input_dict["intra_rho"]
            machine.extra_subspace = input_dict["extra_subspace"].astype("float64")
            machine.extra_rho = input_dict["extra_rho"]
        return machine
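
For reference, a minimal round-trip sketch of the two new methods (the toy data, dimensions, and variable names are made up for illustration; the training steps mirror the test below):

import numpy
import bob.learn.linear

# hypothetical toy data: 5-dimensional intra- and extrapersonal difference vectors
intra = numpy.random.randn(50, 5)
extra = numpy.random.randn(50, 5) + 2.0

# train a BIC machine with 2-dimensional intra/extra subspaces (cf. the test below)
trainer = bob.learn.linear.BICTrainer(2, 2)
machine = bob.learn.linear.BICMachine(False)
trainer.train(intra, extra, machine)

# dump to a plain dict and rebuild an equivalent machine from it
serialized = machine.to_dict()
restored = bob.learn.linear.BICMachine.from_dict(serialized)
assert restored == machine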
@@ -74,14 +74,14 @@ def test_raises():
def test_BIC():
  # Tests the BIC training of the BICTrainer
  intra_data, extra_data = training_data()

  # train BIC machine
  trainer = bob.learn.linear.BICTrainer(2,2)

  # So, now without rho...
  machine = bob.learn.linear.BICMachine(False)

  # First, train the machine with intrapersonal data only
  trainer.train(intra_data, intra_data, machine)
  assert machine.input_size == 5
@@ -102,6 +102,13 @@ def test_BIC():
  # assert machine == machine2
  # But, in fact the machines should be identical.
  assert machine.is_similar_to(machine2, 1e-10, 1e-15)
  # Test dumping to and loading from a dict
  input_dict = machine.to_dict()
  machine_from_dict = bob.learn.linear.BICMachine.from_dict(input_dict)
  assert machine_from_dict == machine
def test_bic_split():
  # Tests the auxiliary function bic_intra_extra_pairs
...
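
The new test covers an in-memory round trip. If the exported dict needs to be persisted to disk, one possible approach (not part of this commit; the file name is hypothetical) is to pickle it and rebuild the machine later:

import pickle
import bob.learn.linear

# 'machine' is assumed to be an already trained BICMachine, as in the sketch above
with open("bic_machine.pkl", "wb") as f:
    pickle.dump(machine.to_dict(), f)

# ... later, possibly in another process
with open("bic_machine.pkl", "rb") as f:
    restored = bob.learn.linear.BICMachine.from_dict(pickle.load(f))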