Commit 9234ab61 authored by Amir MOHAMMADI

Merge branch 'processors' into 'master'

Base stacked processors are moved to bob.extension

See merge request !136
parents 40ae4bb5 fd2e5a4f
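For downstream code, the practical effect of this merge is an import-path change: the stacked processor helpers no longer live in bob.bio.base.utils.processors but in bob.extension.processors, as the hunks below show. A minimal migration sketch (assuming bob.extension is installed and exports the same class names, which the updated imports below indicate):

# Old import, removed by this merge request:
# from bob.bio.base.utils.processors import SequentialProcessor, ParallelProcessor

# New import after this merge:
from bob.extension.processors import SequentialProcessor, ParallelProcessor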
-from ..utils.processors import SequentialProcessor, ParallelProcessor
+from bob.extension.processors import SequentialProcessor, ParallelProcessor
from .Extractor import Extractor
from bob.io.base import HDF5File
@@ -171,7 +171,7 @@ class ParallelExtractor(ParallelProcessor, MultipleExtractor):
[ 1., 2., 3.]]), array([[ 0.5, 1. , 1.5],
[ 0.5, 1. , 1.5]])]
-The data may be further processed using a :any:`SequentialProcessor`:
+The data may be further processed using a :any:`SequentialExtractor`:
>>> from bob.bio.base.extractor import SequentialExtractor
>>> total_extractor = SequentialExtractor(
......
-from ..utils.processors import SequentialProcessor, ParallelProcessor
+from bob.extension.processors import SequentialProcessor, ParallelProcessor
from .Preprocessor import Preprocessor
@@ -78,7 +78,7 @@ class ParallelPreprocessor(ParallelProcessor, Preprocessor):
[ 1., 2., 3.]]), array([[ 0.5, 1. , 1.5],
[ 0.5, 1. , 1.5]])]
-The data may be further processed using a :any:`SequentialProcessor`:
+The data may be further processed using a :any:`SequentialPreprocessor`:
>>> from bob.bio.base.preprocessor import SequentialPreprocessor
>>> total_preprocessor = SequentialPreprocessor(
......
from functools import partial
import numpy as np
import tempfile
from bob.bio.base.utils.processors import (
    SequentialProcessor, ParallelProcessor)
from bob.bio.base.preprocessor import (
    SequentialPreprocessor, ParallelPreprocessor, CallablePreprocessor)
from bob.bio.base.extractor import (
@@ -15,16 +13,6 @@ SEQ_DATA = PROCESSORS[1](PROCESSORS[0](DATA))
PAR_DATA = (PROCESSORS[0](DATA), PROCESSORS[1](DATA))
def test_processors():
    proc = SequentialProcessor(PROCESSORS)
    data = proc(DATA)
    assert np.allclose(data, SEQ_DATA)

    proc = ParallelProcessor(PROCESSORS)
    data = proc(DATA)
    assert all(np.allclose(x1, x2) for x1, x2 in zip(data, PAR_DATA))
def test_preprocessors():
    processors = [CallablePreprocessor(p, False) for p in PROCESSORS]
    proc = SequentialPreprocessor(processors)
......
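The deleted test_processors above covered the plain SequentialProcessor and ParallelProcessor classes, which now live in bob.extension. An equivalent standalone check against the new location might look like the following sketch; the DATA and PROCESSORS values are illustrative stand-ins, since their original definitions are truncated in this diff:

from functools import partial
import numpy as np
from bob.extension.processors import SequentialProcessor, ParallelProcessor

# Illustrative stand-ins for the truncated DATA / PROCESSORS definitions.
DATA = np.array([[1, 2, 3], [1, 2, 3]], dtype='float64')
PROCESSORS = [lambda x: x / 2.0, partial(np.mean, axis=1)]
SEQ_DATA = PROCESSORS[1](PROCESSORS[0](DATA))
PAR_DATA = (PROCESSORS[0](DATA), PROCESSORS[1](DATA))

# Sequential: the output of each processor feeds the next one.
assert np.allclose(SequentialProcessor(PROCESSORS)(DATA), SEQ_DATA)

# Parallel: every processor sees the original data; outputs come back one by one.
assert all(np.allclose(x1, x2)
           for x1, x2 in zip(ParallelProcessor(PROCESSORS)(DATA), PAR_DATA))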
@@ -6,7 +6,6 @@
from .resources import *
from .io import *
from .singleton import *
from . import processors
import six
import inspect
import numpy
......
class SequentialProcessor(object):
    """A helper class which takes several processors and applies them one by
    one sequentially.

    Attributes
    ----------
    processors : list
        A list of processors to apply.

    Examples
    --------
    You can use this class to apply a chain of processes on your data. For
    example:

    >>> import numpy as np
    >>> from functools import partial
    >>> from bob.bio.base.utils.processors import SequentialProcessor
    >>> raw_data = np.array([[1, 2, 3], [1, 2, 3]])
    >>> seq_processor = SequentialProcessor(
    ...     [np.cast['float64'], lambda x: x / 2, partial(np.mean, axis=1)])
    >>> seq_processor(raw_data)
    array([ 1., 1.])
    >>> np.all(seq_processor(raw_data) ==
    ...        np.mean(np.cast['float64'](raw_data) / 2, axis=1))
    True
    """

    def __init__(self, processors, **kwargs):
        super(SequentialProcessor, self).__init__(**kwargs)
        self.processors = processors

    def __call__(self, data, **kwargs):
        """Applies the processors on the data sequentially. The output of the
        first one goes as input to the next one.

        Parameters
        ----------
        data : object
            The data that needs to be processed.
        **kwargs
            Any kwargs are passed to the processors.

        Returns
        -------
        object
            The processed data.
        """
        for processor in self.processors:
            data = processor(data, **kwargs)
        return data


class ParallelProcessor(object):
    """A helper class which takes several processors and applies them on each
    processor separately and yields their outputs one by one.

    Attributes
    ----------
    processors : list
        A list of processors to apply.

    Examples
    --------
    You can use this class to apply several processes on your data and get all
    the results back. For example:

    >>> import numpy as np
    >>> from functools import partial
    >>> from bob.bio.base.utils.processors import ParallelProcessor
    >>> raw_data = np.array([[1, 2, 3], [1, 2, 3]])
    >>> parallel_processor = ParallelProcessor(
    ...     [np.cast['float64'], lambda x: x / 2.0])
    >>> list(parallel_processor(raw_data))
    [array([[ 1., 2., 3.],
           [ 1., 2., 3.]]), array([[ 0.5, 1. , 1.5],
           [ 0.5, 1. , 1.5]])]

    The data may be further processed using a :any:`SequentialProcessor`:

    >>> from bob.bio.base.utils.processors import SequentialProcessor
    >>> total_processor = SequentialProcessor(
    ...     [parallel_processor, list, partial(np.concatenate, axis=1)])
    >>> total_processor(raw_data)
    array([[ 1. , 2. , 3. , 0.5, 1. , 1.5],
           [ 1. , 2. , 3. , 0.5, 1. , 1.5]])
    """

    def __init__(self, processors, **kwargs):
        super(ParallelProcessor, self).__init__(**kwargs)
        self.processors = processors

    def __call__(self, data, **kwargs):
        """Applies the processors on the data independently and outputs a
        generator of their outputs.

        Parameters
        ----------
        data : object
            The data that needs to be processed.
        **kwargs
            Any kwargs are passed to the processors.

        Yields
        ------
        object
            The processed data from processors one by one.
        """
        for processor in self.processors:
            yield processor(data, **kwargs)
@@ -42,14 +42,6 @@ Miscellaneous functions
   bob.bio.base.selected_indices
Generic classes
---------------

.. autosummary::
   bob.bio.base.utils.processors.SequentialProcessor
   bob.bio.base.utils.processors.ParallelProcessor
Tools to run recognition experiments
------------------------------------
@@ -117,7 +109,4 @@ Details
.. autoclass:: FileSelector
.. automodule:: bob.bio.base.utils.processors
.. include:: links.rst