diff --git a/beat/backend/python/data.py b/beat/backend/python/data.py
index c40ceff719ba5cb18a3dec47b04f57bfc5356d72..02b1b25232ab32a932d611e0c8d17c015a37bb25 100644
--- a/beat/backend/python/data.py
+++ b/beat/backend/python/data.py
@@ -26,33 +26,38 @@
 ###############################################################################
 
 
-"""Data I/O classes and functions"""
+"""
+====
+data
+====
+
+Data I/O classes and functions
+"""
 
 import os
 import re
 import glob
 import simplejson as json
-import select
 import time
-import tempfile
 import abc
 import zmq
+import logging
+import six
+
 from functools import reduce
 from collections import namedtuple
 
-import logging
-logger = logging.getLogger(__name__)
-
-import six
 from .hash import hashFileContents
 from .dataformat import DataFormat
 from .algorithm import Algorithm
 
+logger = logging.getLogger(__name__)
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class RemoteException(Exception):
+    """Exception raised on a remote location"""
 
     def __init__(self, kind, message):
         super(RemoteException, self).__init__()
@@ -71,7 +76,7 @@ class RemoteException(Exception):
             return '(usr) {}'.format(self.user_error)
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 def mixDataIndices(list_of_data_indices):
@@ -129,7 +134,7 @@ def mixDataIndices(list_of_data_indices):
     return result
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 def getAllFilenames(filename, start_index=None, end_index=None):
@@ -150,8 +155,8 @@ def getAllFilenames(filename, start_index=None, end_index=None):
 
     Returns:
 
-        (data_filenames, indices_filenames, data_checksum_filenames, indices_checksum_filenames)
-
+        (data_filenames, indices_filenames,
+         data_checksum_filenames, indices_checksum_filenames)
     """
 
     index_re = re.compile(r'^.*\.(\d+)\.(\d+)\.(data|index)(.checksum)?$')
@@ -191,7 +196,7 @@ def getAllFilenames(filename, start_index=None, end_index=None):
     return (data_filenames, indices_filenames, data_checksum_filenames, indices_checksum_filenames)
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class DataSource(object):
@@ -273,7 +278,7 @@ class DataSource(object):
         self.ready = True
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class CachedDataSource(DataSource):
@@ -360,8 +365,8 @@ class CachedDataSource(DataSource):
               3. Contiguous indices if they are present
             """
 
-            # Make sure that we have a perfect match between data files and checksum
-            # files
+            # Make sure that we have a perfect match between data files and
+            # checksum files
             checksum_filenames_noext = [os.path.splitext(f)[0] for f in checksum_filenames]
 
             if data_filenames != checksum_filenames_noext:
@@ -498,7 +503,7 @@ class CachedDataSource(DataSource):
         return (data, infos.start_index, infos.end_index)
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class DatabaseOutputDataSource(DataSource):
@@ -617,7 +622,7 @@ class DatabaseOutputDataSource(DataSource):
         return (data, infos.start_index, infos.end_index)
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class RemoteDataSource(DataSource):
@@ -748,7 +753,7 @@ class RemoteDataSource(DataSource):
         self.ready = True
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class DataSink(object):
@@ -777,14 +782,17 @@ class DataSink(object):
 
     @abc.abstractmethod
     def isConnected(self):
-        pass
+        """Returns whether the data sink is connected"""
 
+        pass
 
     def close(self):
+        """Closes the data sink"""
+
         pass
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class StdoutDataSink(DataSink):
@@ -834,7 +842,7 @@ class StdoutDataSink(DataSink):
         return True
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class CachedDataSink(DataSink):
@@ -867,12 +875,12 @@ class CachedDataSink(DataSink):
           filename (str): Name of the file to generate
 
           dataformat (dataformat.DataFormat): The dataformat to be used
-            inside this file. All objects stored inside this file will respect that
-            format.
+            inside this file. All objects stored inside this file will respect
+            that format.
 
-          encoding (str): String defining the encoding to be used for encoding the
-            data. Only a few options are supported: ``binary`` (the default) or
-            ``json`` (debugging purposes).
+          encoding (str): Encoding to be used for the data. Only a few
+            options are supported: ``binary`` (the default) or ``json``
+            (for debugging purposes).
 
         """
 
@@ -1016,7 +1024,7 @@ class CachedDataSink(DataSink):
         return (self.filename is not None)
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 def load_data_index(cache_root, hash_path):
@@ -1072,7 +1080,7 @@ def load_data_index(cache_root, hash_path):
     return sorted(retval) + [end_index + 1]
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 def load_data_index_db(cache_root, hash_path):
@@ -1105,7 +1113,7 @@ def load_data_index_db(cache_root, hash_path):
     return retval
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 def _foundCommonIndices(lst):
@@ -1119,7 +1127,7 @@ def _foundCommonIndices(lst):
     return common_indices
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 def foundSplitRanges(lst, n_split):