diff --git a/bob/bio/base/extractor/stacks.py b/bob/bio/base/extractor/stacks.py
index 780dafdb47b211ad0b8aa70af2b0c559b9f636e1..1024a6bf45b1627d0f1119dde8b7b45c2ce1d7b2 100644
--- a/bob/bio/base/extractor/stacks.py
+++ b/bob/bio/base/extractor/stacks.py
@@ -1,4 +1,4 @@
-from ..utils.processors import SequentialProcessor, ParallelProcessor
+from bob.extension.processors import SequentialProcessor, ParallelProcessor
 from .Extractor import Extractor
 from bob.io.base import HDF5File
 
@@ -171,7 +171,7 @@ class ParallelExtractor(ParallelProcessor, MultipleExtractor):
            [ 1.,  2.,  3.]]), array([[ 0.5,  1. ,  1.5],
            [ 0.5,  1. ,  1.5]])]
 
-    The data may be further processed using a :any:`SequentialProcessor`:
+    The data may be further processed using a :any:`SequentialExtractor`:
 
     >>> from bob.bio.base.extractor import SequentialExtractor
     >>> total_extractor = SequentialExtractor(
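
The generic processor classes now come from ``bob.extension.processors``. Assuming they keep the interface of the removed ``bob.bio.base.utils.processors`` module, the sequential chaining pattern shown in the old doctests translates directly to the new import path (a sketch, not taken from the patched sources):

>>> import numpy as np
>>> from functools import partial
>>> from bob.extension.processors import SequentialProcessor
>>> raw_data = np.array([[1, 2, 3], [1, 2, 3]])
>>> seq_processor = SequentialProcessor(
...     [np.cast['float64'], lambda x: x / 2, partial(np.mean, axis=1)])
>>> seq_processor(raw_data)
array([ 1.,  1.])
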
diff --git a/bob/bio/base/preprocessor/stacks.py b/bob/bio/base/preprocessor/stacks.py
index 0ffc67ea9052a9e5a63cfe6950f7a2d235f7d0c4..a23e77b80232f9a694faabc7b1c667744b30d0b8 100644
--- a/bob/bio/base/preprocessor/stacks.py
+++ b/bob/bio/base/preprocessor/stacks.py
@@ -1,4 +1,4 @@
-from ..utils.processors import SequentialProcessor, ParallelProcessor
+from bob.extension.processors import SequentialProcessor, ParallelProcessor
 from .Preprocessor import Preprocessor
 
 
@@ -78,7 +78,7 @@ class ParallelPreprocessor(ParallelProcessor, Preprocessor):
            [ 1.,  2.,  3.]]), array([[ 0.5,  1. ,  1.5],
            [ 0.5,  1. ,  1.5]])]
 
-    The data may be further processed using a :any:`SequentialProcessor`:
+    The data may be further processed using a :any:`SequentialPreprocessor`:
 
     >>> from bob.bio.base.preprocessor import SequentialPreprocessor
     >>> total_preprocessor = SequentialPreprocessor(
diff --git a/bob/bio/base/test/test_stacks.py b/bob/bio/base/test/test_stacks.py
index a296a9a33ebed07b3176d5955c84a7e254f9ca71..6b27123a06cd6ecf913502641526f45cf1551860 100644
--- a/bob/bio/base/test/test_stacks.py
+++ b/bob/bio/base/test/test_stacks.py
@@ -1,8 +1,6 @@
 from functools import partial
 import numpy as np
 import tempfile
-from bob.bio.base.utils.processors import (
-    SequentialProcessor, ParallelProcessor)
 from bob.bio.base.preprocessor import (
     SequentialPreprocessor, ParallelPreprocessor, CallablePreprocessor)
 from bob.bio.base.extractor import (
@@ -15,16 +13,6 @@ SEQ_DATA = PROCESSORS[1](PROCESSORS[0](DATA))
 PAR_DATA = (PROCESSORS[0](DATA), PROCESSORS[1](DATA))
 
 
-def test_processors():
-  proc = SequentialProcessor(PROCESSORS)
-  data = proc(DATA)
-  assert np.allclose(data, SEQ_DATA)
-
-  proc = ParallelProcessor(PROCESSORS)
-  data = proc(DATA)
-  assert all(np.allclose(x1, x2) for x1, x2 in zip(data, PAR_DATA))
-
-
 def test_preprocessors():
   processors = [CallablePreprocessor(p, False) for p in PROCESSORS]
   proc = SequentialPreprocessor(processors)
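
The removed ``test_processors`` coverage presumably now belongs with the relocated classes in ``bob.extension``; a minimal equivalent against the new import path, reusing this module's ``PROCESSORS``, ``DATA``, ``SEQ_DATA`` and ``PAR_DATA`` fixtures and assuming unchanged call semantics, would be a sketch like:

>>> from bob.extension.processors import (
...     SequentialProcessor, ParallelProcessor)
>>> proc = SequentialProcessor(PROCESSORS)
>>> np.allclose(proc(DATA), SEQ_DATA)
True
>>> proc = ParallelProcessor(PROCESSORS)
>>> all(np.allclose(x1, x2) for x1, x2 in zip(proc(DATA), PAR_DATA))
True
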
diff --git a/bob/bio/base/utils/__init__.py b/bob/bio/base/utils/__init__.py
index 2cd7501a46599d3497387dc3c19ce2c32b3cf01b..75737bdcead5cabb6aecc3e39e438cb4ab492944 100644
--- a/bob/bio/base/utils/__init__.py
+++ b/bob/bio/base/utils/__init__.py
@@ -6,7 +6,6 @@
 from .resources import *
 from .io import *
 from .singleton import *
-from . import processors
 import six
 import inspect
 import numpy
diff --git a/bob/bio/base/utils/processors.py b/bob/bio/base/utils/processors.py
deleted file mode 100644
index b01a953dc34e5962bb74e03801d60f9227ed2838..0000000000000000000000000000000000000000
--- a/bob/bio/base/utils/processors.py
+++ /dev/null
@@ -1,109 +0,0 @@
-class SequentialProcessor(object):
-    """A helper class which takes several processors and applies them one by
-    one sequentially.
-
-    Attributes
-    ----------
-    processors : list
-        A list of processors to apply.
-
-    Examples
-    --------
-    You can use this class to apply a chain of processes on your data. For
-    example:
-
-    >>> import numpy as np
-    >>> from functools import  partial
-    >>> from bob.bio.base.utils.processors import SequentialProcessor
-    >>> raw_data = np.array([[1, 2, 3], [1, 2, 3]])
-    >>> seq_processor = SequentialProcessor(
-    ...     [np.cast['float64'], lambda x: x / 2, partial(np.mean, axis=1)])
-    >>> seq_processor(raw_data)
-    array([ 1.,  1.])
-    >>> np.all(seq_processor(raw_data) ==
-    ...        np.mean(np.cast['float64'](raw_data) / 2, axis=1))
-    True
-    """
-
-    def __init__(self, processors, **kwargs):
-        super(SequentialProcessor, self).__init__(**kwargs)
-        self.processors = processors
-
-    def __call__(self, data, **kwargs):
-        """Applies the processors on the data sequentially. The output of the
-        first one goes as input to the next one.
-
-        Parameters
-        ----------
-        data : object
-            The data that needs to be processed.
-        **kwargs
-            Any kwargs are passed to the processors.
-
-        Returns
-        -------
-        object
-            The processed data.
-        """
-        for processor in self.processors:
-            data = processor(data, **kwargs)
-        return data
-
-
-class ParallelProcessor(object):
-    """A helper class which takes several processors and applies them on each
-    processor separately and yields their outputs one by one.
-
-    Attributes
-    ----------
-    processors : list
-        A list of processors to apply.
-
-    Examples
-    --------
-    You can use this class to apply several processes on your data and get all
-    the results back. For example:
-
-    >>> import numpy as np
-    >>> from functools import  partial
-    >>> from bob.bio.base.utils.processors import ParallelProcessor
-    >>> raw_data = np.array([[1, 2, 3], [1, 2, 3]])
-    >>> parallel_processor = ParallelProcessor(
-    ...     [np.cast['float64'], lambda x: x / 2.0])
-    >>> list(parallel_processor(raw_data))
-    [array([[ 1.,  2.,  3.],
-           [ 1.,  2.,  3.]]), array([[ 0.5,  1. ,  1.5],
-           [ 0.5,  1. ,  1.5]])]
-
-    The data may be further processed using a :any:`SequentialProcessor`:
-
-    >>> from bob.bio.base.utils.processors import SequentialProcessor
-    >>> total_processor = SequentialProcessor(
-    ...     [parallel_processor, list, partial(np.concatenate, axis=1)])
-    >>> total_processor(raw_data)
-    array([[ 1. ,  2. ,  3. ,  0.5,  1. ,  1.5],
-           [ 1. ,  2. ,  3. ,  0.5,  1. ,  1.5]])
-    """
-
-    def __init__(self, processors, **kwargs):
-        super(ParallelProcessor, self).__init__(**kwargs)
-        self.processors = processors
-
-    def __call__(self, data, **kwargs):
-        """Applies the processors on the data independently and outputs a
-        generator of their outputs.
-
-        Parameters
-        ----------
-        data : object
-            The data that needs to be processed.
-        **kwargs
-            Any kwargs are passed to the processors.
-
-        Yields
-        ------
-        object
-            The processed data from processors one by one.
-        """
-        for processor in self.processors:
-            yield processor(data, **kwargs)
diff --git a/doc/py_api.rst b/doc/py_api.rst
index 13daaa8fe4d4b5327d2ac5c817dec2f99043af0b..c9841b2bcc2bd386e1cdf21b04a4e7a30a1dd806 100644
--- a/doc/py_api.rst
+++ b/doc/py_api.rst
@@ -42,14 +42,6 @@ Miscellaneous functions
    bob.bio.base.selected_indices
 
 
-Generic classes
----------------
-
-.. autosummary::
-   bob.bio.base.utils.processors.SequentialProcessor
-   bob.bio.base.utils.processors.ParallelProcessor
-
-
 Tools to run recognition experiments
 ------------------------------------
 
@@ -117,7 +109,4 @@ Details
    .. autoclass:: FileSelector
 
 
-.. automodule:: bob.bio.base.utils.processors
-
-
 .. include:: links.rst