Commit e2a41e7c authored by Amir MOHAMMADI's avatar Amir MOHAMMADI
Browse files

Base stacked processors are moved to bob.extension

parent dd5f62bb
Pipeline #17511 failed with stage
in 38 minutes and 36 seconds
from ..utils.processors import SequentialProcessor, ParallelProcessor
from bob.extension.processors import SequentialProcessor, ParallelProcessor
from .Extractor import Extractor
from bob.io.base import HDF5File
......
from ..utils.processors import SequentialProcessor, ParallelProcessor
from bob.extension.processors import SequentialProcessor, ParallelProcessor
from .Preprocessor import Preprocessor
......
from functools import partial
import numpy as np
import tempfile
from bob.bio.base.utils.processors import (
SequentialProcessor, ParallelProcessor)
from bob.bio.base.preprocessor import (
SequentialPreprocessor, ParallelPreprocessor, CallablePreprocessor)
from bob.bio.base.extractor import (
......@@ -15,16 +13,6 @@ SEQ_DATA = PROCESSORS[1](PROCESSORS[0](DATA))
PAR_DATA = (PROCESSORS[0](DATA), PROCESSORS[1](DATA))
def test_processors():
    # Sequential chain: each processor consumes the previous one's output.
    sequential = SequentialProcessor(PROCESSORS)
    assert np.allclose(sequential(DATA), SEQ_DATA)
    # Parallel fan-out: every processor sees the original data independently.
    parallel = ParallelProcessor(PROCESSORS)
    outputs = list(parallel(DATA))
    assert all(np.allclose(got, ref) for got, ref in zip(outputs, PAR_DATA))
def test_preprocessors():
processors = [CallablePreprocessor(p, False) for p in PROCESSORS]
proc = SequentialPreprocessor(processors)
......
......@@ -6,7 +6,6 @@
from .resources import *
from .io import *
from .singleton import *
from . import processors
import six
import inspect
import numpy
......
class SequentialProcessor(object):
    """Chains several processors and runs them one after another.

    The output of each processor is fed as the input of the next one.

    Attributes
    ----------
    processors : list
        The callables to apply, in order.

    Examples
    --------
    Build a pipeline that casts, scales, then averages:

    >>> import numpy as np
    >>> from functools import partial
    >>> raw_data = np.array([[1, 2, 3], [1, 2, 3]])
    >>> seq_processor = SequentialProcessor(
    ...     [np.cast['float64'], lambda x: x / 2, partial(np.mean, axis=1)])
    >>> seq_processor(raw_data)
    array([ 1.,  1.])
    """

    def __init__(self, processors, **kwargs):
        super(SequentialProcessor, self).__init__(**kwargs)
        # Kept as a plain attribute; order defines the execution order.
        self.processors = processors

    def __call__(self, data, **kwargs):
        """Run all processors sequentially on ``data``.

        Parameters
        ----------
        data : object
            The input handed to the first processor.
        **kwargs
            Forwarded verbatim to every processor call.

        Returns
        -------
        object
            The output of the last processor in the chain.
        """
        result = data
        for step in self.processors:
            result = step(result, **kwargs)
        return result
class ParallelProcessor(object):
    """Applies several processors to the same input independently.

    Each processor receives the original (unmodified) data; the results are
    produced lazily, one per processor, in the order the processors were
    given.

    Attributes
    ----------
    processors : list
        The callables to apply to the input.

    Examples
    --------
    Run two independent transformations on the same array:

    >>> import numpy as np
    >>> from functools import partial
    >>> raw_data = np.array([[1, 2, 3], [1, 2, 3]])
    >>> parallel_processor = ParallelProcessor(
    ...     [np.cast['float64'], lambda x: x / 2.0])
    >>> list(parallel_processor(raw_data))
    [array([[ 1.,  2.,  3.],
           [ 1.,  2.,  3.]]), array([[ 0.5,  1. ,  1.5],
           [ 0.5,  1. ,  1.5]])]

    The outputs can be combined further with a :any:`SequentialProcessor`:

    >>> total_processor = SequentialProcessor(
    ...     [parallel_processor, list, partial(np.concatenate, axis=1)])
    >>> total_processor(raw_data)
    array([[ 1. ,  2. ,  3. ,  0.5,  1. ,  1.5],
           [ 1. ,  2. ,  3. ,  0.5,  1. ,  1.5]])
    """

    def __init__(self, processors, **kwargs):
        super(ParallelProcessor, self).__init__(**kwargs)
        # Order of this list defines the order of the yielded outputs.
        self.processors = processors

    def __call__(self, data, **kwargs):
        """Yield each processor's output for the same input ``data``.

        Parameters
        ----------
        data : object
            The input passed unchanged to every processor.
        **kwargs
            Forwarded verbatim to every processor call.

        Yields
        ------
        object
            One result per processor, in order.
        """
        for transform in self.processors:
            yield transform(data, **kwargs)
......@@ -42,14 +42,6 @@ Miscellaneous functions
bob.bio.base.selected_indices
Generic classes
---------------
.. autosummary::
bob.bio.base.utils.processors.SequentialProcessor
bob.bio.base.utils.processors.ParallelProcessor
Tools to run recognition experiments
------------------------------------
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment