Skip to content
GitLab
Menu
Projects
Groups
Snippets
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
bob
bob.bio.base
Commits
e2a41e7c
Commit
e2a41e7c
authored
Mar 09, 2018
by
Amir MOHAMMADI
Browse files
Base stacked processors are moved to bob.extension
parent
dd5f62bb
Pipeline
#17511
failed with stage
in 38 minutes and 36 seconds
Changes
6
Pipelines
1
Hide whitespace changes
Inline
Side-by-side
bob/bio/base/extractor/stacks.py
View file @
e2a41e7c
from
..utils
.processors
import
SequentialProcessor
,
ParallelProcessor
from
bob.extension
.processors
import
SequentialProcessor
,
ParallelProcessor
from
.Extractor
import
Extractor
from
bob.io.base
import
HDF5File
...
...
bob/bio/base/preprocessor/stacks.py
View file @
e2a41e7c
from
..utils
.processors
import
SequentialProcessor
,
ParallelProcessor
from
bob.extension
.processors
import
SequentialProcessor
,
ParallelProcessor
from
.Preprocessor
import
Preprocessor
...
...
bob/bio/base/test/test_stacks.py
View file @
e2a41e7c
from
functools
import
partial
import
numpy
as
np
import
tempfile
from
bob.bio.base.utils.processors
import
(
SequentialProcessor
,
ParallelProcessor
)
from
bob.bio.base.preprocessor
import
(
SequentialPreprocessor
,
ParallelPreprocessor
,
CallablePreprocessor
)
from
bob.bio.base.extractor
import
(
...
...
@@ -15,16 +13,6 @@ SEQ_DATA = PROCESSORS[1](PROCESSORS[0](DATA))
# Expected output of running each processor on DATA independently — one
# result per processor, in order.  This is what ParallelProcessor should
# yield when driven with the same PROCESSORS list (compared in
# test_processors below via np.allclose).
PAR_DATA = (PROCESSORS[0](DATA), PROCESSORS[1](DATA))
def test_processors():
    """Check SequentialProcessor and ParallelProcessor against the
    precomputed SEQ_DATA / PAR_DATA references."""
    # Sequential: chaining PROCESSORS must reproduce SEQ_DATA.
    sequential = SequentialProcessor(PROCESSORS)
    assert np.allclose(sequential(DATA), SEQ_DATA)

    # Parallel: each processor runs on DATA independently; the yielded
    # results must match PAR_DATA element-wise.
    parallel = ParallelProcessor(PROCESSORS)
    outputs = parallel(DATA)
    assert all(np.allclose(got, want) for got, want in zip(outputs, PAR_DATA))
def
test_preprocessors
():
processors
=
[
CallablePreprocessor
(
p
,
False
)
for
p
in
PROCESSORS
]
proc
=
SequentialPreprocessor
(
processors
)
...
...
bob/bio/base/utils/__init__.py
View file @
e2a41e7c
...
...
@@ -6,7 +6,6 @@
from
.resources
import
*
from
.io
import
*
from
.singleton
import
*
from
.
import
processors
import
six
import
inspect
import
numpy
...
...
bob/bio/base/utils/processors.py
deleted
100644 → 0
View file @
dd5f62bb
class SequentialProcessor(object):
    """Chains several processors, feeding each one's output into the next.

    Attributes
    ----------
    processors : list
        The callables that are applied, in order.

    Examples
    --------
    Build a pipeline that casts, scales, and then averages its input:

    >>> import numpy as np
    >>> from functools import partial
    >>> chain = SequentialProcessor(
    ...     [np.cast['float64'], lambda x: x / 2, partial(np.mean, axis=1)])
    >>> chain(np.array([[1, 2, 3], [1, 2, 3]]))
    array([ 1.,  1.])
    """

    def __init__(self, processors, **kwargs):
        # Cooperative super call so this class composes cleanly in
        # multiple-inheritance hierarchies.
        super(SequentialProcessor, self).__init__(**kwargs)
        self.processors = processors

    def __call__(self, data, **kwargs):
        """Apply the processors one after another.

        The output of each processor becomes the input of the next one.

        Parameters
        ----------
        data : object
            The value handed to the first processor.
        **kwargs
            Forwarded unchanged to every processor.

        Returns
        -------
        object
            Whatever the final processor returns.
        """
        result = data
        for step in self.processors:
            result = step(result, **kwargs)
        return result
class ParallelProcessor(object):
    """Runs several processors on the same input and yields each result.

    Unlike :any:`SequentialProcessor`, every processor receives the
    original, unmodified input; the outputs are produced lazily, one per
    processor.

    Attributes
    ----------
    processors : list
        The callables that are applied independently to the input.

    Examples
    --------
    >>> import numpy as np
    >>> fanout = ParallelProcessor([np.cast['float64'], lambda x: x / 2.0])
    >>> list(fanout(np.array([[1, 2, 3], [1, 2, 3]])))
    [array([[ 1.,  2.,  3.],
           [ 1.,  2.,  3.]]), array([[ 0.5,  1. ,  1.5],
           [ 0.5,  1. ,  1.5]])]

    The lazy outputs compose with a :any:`SequentialProcessor`, e.g. to
    concatenate all results along an axis.
    """

    def __init__(self, processors, **kwargs):
        # Cooperative super call so this class composes cleanly in
        # multiple-inheritance hierarchies.
        super(ParallelProcessor, self).__init__(**kwargs)
        self.processors = processors

    def __call__(self, data, **kwargs):
        """Apply each processor to ``data`` independently.

        Parameters
        ----------
        data : object
            The value handed to every processor.
        **kwargs
            Forwarded unchanged to every processor.

        Yields
        ------
        object
            The result of each processor, in order.
        """
        for step in self.processors:
            yield step(data, **kwargs)
doc/py_api.rst
View file @
e2a41e7c
...
...
@@ -42,14 +42,6 @@ Miscellaneous functions
bob.bio.base.selected_indices
Generic classes
---------------
.. autosummary::
bob.bio.base.utils.processors.SequentialProcessor
bob.bio.base.utils.processors.ParallelProcessor
Tools to run recognition experiments
------------------------------------
...
...
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment