diff --git a/beat/web/backend/helpers.py b/beat/web/backend/helpers.py
index 0fd425d659e47aba534839700bc7fdcdfaaf0602..98f18efecd270eb8b07dde183ca51c9cdf6e0cec 100755
--- a/beat/web/backend/helpers.py
+++ b/beat/web/backend/helpers.py
@@ -943,7 +943,7 @@ def load_results_from_cache(block, cached_file):
                                    beat.core.hash.toPath(cached_file.hash)),
                       settings.PREFIX)
 
-    output_data = data_source.next()[0]
+    (output_data, start_index, end_index) = data_source[0]
     if output_data is not None:
         algorithm = beat.core.algorithm.Algorithm(
             settings.PREFIX, block.algorithm.fullname())
diff --git a/beat/web/backend/management/commands/qsetup.py b/beat/web/backend/management/commands/qsetup.py
index 9bb625a7dc38db23305412796feaf47570f068c4..2a0f01ce0d5b763201cb116faf2be84bf9619850 100755
--- a/beat/web/backend/management/commands/qsetup.py
+++ b/beat/web/backend/management/commands/qsetup.py
@@ -42,7 +42,7 @@ import socket
 
 CORES = psutil.cpu_count()
 RAM = psutil.virtual_memory().total/(1024*1024)
-ENVIRONMENT = {'name': 'Python 2.7', 'version': '1.2.0'}
+ENVIRONMENT = {'name': 'Python 2.7', 'version': '1.3.0'}
 CXX_ENVIRONMENT = {'name': 'Cxx backend', 'version': '1.1.0'}
 ENVKEY = '%(name)s (%(version)s)' % ENVIRONMENT
 CXX_ENVKEY = '%(name)s (%(version)s)' % CXX_ENVIRONMENT
diff --git a/beat/web/backend/models/job.py b/beat/web/backend/models/job.py
index 6b9d054eb7b51dd1b7923dd1893754f7711d326c..2ddd21cf477f322417bd4200beb3b5ca5ba1901d 100755
--- a/beat/web/backend/models/job.py
+++ b/beat/web/backend/models/job.py
@@ -125,8 +125,12 @@ class JobSplitManager(models.Manager):
 
 
         # Load the list of indices for each inputs
-        indices = [ beat.core.data.load_data_index(settings.CACHE_ROOT, x['path'])
-                    for x in inputs ]
+        indices = []
+        for input_cfg in inputs:
+            if 'database' in input_cfg:
+                indices.extend(beat.core.data.load_data_index_db(settings.CACHE_ROOT, input_cfg['path']))
+            else:
+                indices.append(beat.core.data.load_data_index(settings.CACHE_ROOT, input_cfg['path']))
 
 
         # Attempt to split the indices
diff --git a/beat/web/backend/tests/common.py b/beat/web/backend/tests/common.py
index a35016259d9f477f75dd5a3adf416c3d13ab7ec6..294024f994dfc2861f2940dfedf5df53d8ad75dc 100755
--- a/beat/web/backend/tests/common.py
+++ b/beat/web/backend/tests/common.py
@@ -45,6 +45,7 @@ from ..management.commands import qsetup
 
 from beat.core.dataformat import DataFormat
 from beat.core.data import CachedDataSink
+from beat.core.database import Database
 import beat.core.hash
 
 import os
@@ -62,7 +63,7 @@ ONE_QUEUE_TWO_WORKERS = {
             "cores-per-slot": 1,
             "max-slots-per-user": 2,
             "environments": [
-                'Python 2.7 (1.2.0)'
+                'Python 2.7 (1.3.0)'
             ],
             "slots": {
                 'node1': {
@@ -90,12 +91,12 @@ ONE_QUEUE_TWO_WORKERS = {
         }
     },
     "environments": {
-        "Python 2.7 (1.2.0)": {
+        "Python 2.7 (1.3.0)": {
             "name": 'Python 2.7',
-            "version": '1.2.0',
+            "version": '1.3.0',
             "short_description": "Test",
             "description": "Test environment",
-            "languages": "python",
+            "languages": ["python"],
         },
     },
 }
@@ -185,7 +186,11 @@ class BackendUtilitiesMixin(object):
 
         for index, split in enumerate(splits):
             sink = CachedDataSink()
-            sink.setup(path, dataformat, process_id=index)
+
+            start_data_index = split[0][0] if isinstance(split[0], tuple) else split[0]
+            end_data_index = split[-1][1] if isinstance(split[-1], tuple) else split[-1]
+
+            sink.setup(path, dataformat, start_data_index, end_data_index)
 
             for indices in split:
                 if not isinstance(indices, tuple):
@@ -196,8 +201,8 @@ class BackendUtilitiesMixin(object):
                     end = indices[1]
 
                 sink.write({
-                    'value': value,
-                },
+                        'value': value,
+                    },
                     start_data_index = start,
                     end_data_index = end
                 )
@@ -207,6 +212,17 @@ class BackendUtilitiesMixin(object):
             sink.close()
 
 
+    def prepare_databases(self, configuration):
+        for _, cfg in configuration['datasets'].items():
+            path = beat.core.hash.toPath(beat.core.hash.hashDataset(
+                    cfg['database'], cfg['protocol'], cfg['set']), suffix='.db')
+
+            if not os.path.exists(os.path.join(settings.CACHE_ROOT, path)):
+                database = Database(settings.PREFIX, cfg['database'])
+                view = database.view(cfg['protocol'], cfg['set'])
+                view.index(os.path.join(settings.CACHE_ROOT, path))
+
+
 #----------------------------------------------------------
 
 
diff --git a/beat/web/backend/tests/test_helpers.py b/beat/web/backend/tests/test_helpers.py
index a872ccb92c463d9392900cbc502f0582c24457dc..7833714c08f22e27cbcf092cc370ad8f85694bbe 100755
--- a/beat/web/backend/tests/test_helpers.py
+++ b/beat/web/backend/tests/test_helpers.py
@@ -663,6 +663,8 @@ class SplitNewJobsTest(BaseBackendTestCase):
 
         xp = Experiment.objects.get(name=fullname.split('/')[-1])
 
+        self.prepare_databases(xp.declaration)
+
         b0 = xp.blocks.all()[0]
 
         schedule_experiment(xp)
@@ -670,10 +672,6 @@ class SplitNewJobsTest(BaseBackendTestCase):
         self.assertEqual(Job.objects.count(), 2)
         self.assertEqual(JobSplit.objects.count(), 0)
 
-        self.generate_cached_files(b0.inputs.all()[0].database.hash, [
-            [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-        ])
-
         split_new_jobs()
 
         self.assertEqual(JobSplit.objects.count(), 2)
@@ -698,6 +696,9 @@ class SplitNewJobsTest(BaseBackendTestCase):
         xp1 = Experiment.objects.get(name=fullname1.split('/')[-1])
         xp2 = Experiment.objects.get(name=fullname2.split('/')[-1])
 
+        self.prepare_databases(xp1.declaration)
+        self.prepare_databases(xp2.declaration)
+
         schedule_experiment(xp1)
         schedule_experiment(xp2)
 
@@ -729,6 +730,9 @@ class SplitNewJobsTest(BaseBackendTestCase):
         xp1 = Experiment.objects.get(name=fullname1.split('/')[-1])
         xp2 = Experiment.objects.get(name=fullname2.split('/')[-1])
 
+        self.prepare_databases(xp1.declaration)
+        self.prepare_databases(xp2.declaration)
+
         schedule_experiment(xp1)
         schedule_experiment(xp2)
 
@@ -737,10 +741,6 @@ class SplitNewJobsTest(BaseBackendTestCase):
 
         b0 = xp1.blocks.all()[0]
 
-        self.generate_cached_files(b0.inputs.all()[0].database.hash, [
-            [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-        ])
-
         split_new_jobs()
 
         self.assertEqual(JobSplit.objects.count(), 3)
@@ -768,10 +768,12 @@ class SplitNewJobsTest(BaseBackendTestCase):
 
 
     def test_one_experiment_two_uneven_slots(self):
-        fullname = 'user/user/single/1/single_split_2'
+        fullname = 'user/user/duo/1/split_2'
 
         xp = Experiment.objects.get(name=fullname.split('/')[-1])
 
+        self.prepare_databases(xp.declaration)
+
         b0 = xp.blocks.all()[0]
 
         schedule_experiment(xp)
@@ -779,10 +781,6 @@ class SplitNewJobsTest(BaseBackendTestCase):
         self.assertEqual(Job.objects.count(), 2)
         self.assertEqual(JobSplit.objects.count(), 0)
 
-        self.generate_cached_files(b0.inputs.all()[0].database.hash, [
-            [(0, 2), (3, 5), (6, 8)]
-        ])
-
         split_new_jobs()
 
         self.assertEqual(JobSplit.objects.count(), 2)
@@ -805,6 +806,8 @@ class SplitNewJobsTest(BaseBackendTestCase):
 
         xp = Experiment.objects.get(name=fullname.split('/')[-1])
 
+        self.prepare_databases(xp.declaration)
+
         b0 = xp.blocks.all()[0]
 
         schedule_experiment(xp)
@@ -812,27 +815,27 @@ class SplitNewJobsTest(BaseBackendTestCase):
         self.assertEqual(Job.objects.count(), 2)
         self.assertEqual(JobSplit.objects.count(), 0)
 
-        self.generate_cached_files(b0.inputs.all()[0].database.hash, [
-            [(0, 2), (3, 5), (6, 8)]
-        ])
-
         split_new_jobs()
 
-        self.assertEqual(JobSplit.objects.count(), 3)
+        self.assertEqual(JobSplit.objects.count(), 5)
 
         xp.refresh_from_db()
 
         b0 = xp.blocks.all()[0]
 
-        self.assertEqual(b0.job.splits.count(), 3)
+        self.assertEqual(b0.job.splits.count(), 5)
 
         split1 = b0.job.splits.all()[0]
         split2 = b0.job.splits.all()[1]
         split3 = b0.job.splits.all()[2]
+        split4 = b0.job.splits.all()[3]
+        split5 = b0.job.splits.all()[4]
 
-        self.check_split(split1, split_index=0, start_index=0, end_index=2)
-        self.check_split(split2, split_index=1, start_index=3, end_index=5)
-        self.check_split(split3, split_index=2, start_index=6, end_index=8)
+        self.check_split(split1, split_index=0, start_index=0, end_index=0)
+        self.check_split(split2, split_index=1, start_index=1, end_index=1)
+        self.check_split(split3, split_index=2, start_index=2, end_index=2)
+        self.check_split(split4, split_index=3, start_index=3, end_index=3)
+        self.check_split(split5, split_index=4, start_index=4, end_index=4)
 
 
     def test_similar_experiment_after_assignation(self):
@@ -844,6 +847,8 @@ class SplitNewJobsTest(BaseBackendTestCase):
         xp1 = Experiment.objects.get(name=fullname.split('/')[-1])
         xp2 = xp1.fork(name='single_fork')
 
+        self.prepare_databases(xp1.declaration)
+
         schedule_experiment(xp1)
         split_new_jobs()
 
@@ -882,6 +887,8 @@ class SplitNewJobsTest(BaseBackendTestCase):
         xp1 = Experiment.objects.get(name=fullname.split('/')[-1])
         xp2 = xp1.fork(name='single_fork')
 
+        self.prepare_databases(xp1.declaration)
+
         schedule_experiment(xp1)
         split_new_jobs()
 
@@ -921,6 +928,8 @@ class SplitNewJobsTest(BaseBackendTestCase):
 
         xp = Experiment.objects.get(name=fullname.split('/')[-1])
 
+        self.prepare_databases(xp.declaration)
+
         schedule_experiment(xp)
 
         self.assertEqual(Job.objects.count(), 2)
@@ -962,6 +971,8 @@ class SplitNewJobsTest(BaseBackendTestCase):
 
         xp = Experiment.objects.get(name=fullname.split('/')[-1])
 
+        self.prepare_databases(xp.declaration)
+
         schedule_experiment(xp)
 
         self.assertEqual(Job.objects.count(), 2)
@@ -1013,6 +1024,8 @@ class AssignSplitsToWorkersTest(BaseBackendTestCase):
 
         xp = Experiment.objects.get(name=fullname.split('/')[-1])
 
+        self.prepare_databases(xp.declaration)
+
         schedule_experiment(xp)
         split_new_jobs()
 
@@ -1040,6 +1053,9 @@ class AssignSplitsToWorkersTest(BaseBackendTestCase):
         xp1 = Experiment.objects.get(name=fullname1.split('/')[-1])
         xp2 = Experiment.objects.get(name=fullname2.split('/')[-1])
 
+        self.prepare_databases(xp1.declaration)
+        self.prepare_databases(xp2.declaration)
+
         schedule_experiment(xp1)
         schedule_experiment(xp2)
         split_new_jobs()
@@ -1068,6 +1084,9 @@ class AssignSplitsToWorkersTest(BaseBackendTestCase):
         xp1 = Experiment.objects.get(name=fullname1.split('/')[-1])
         xp2 = Experiment.objects.get(name=fullname2.split('/')[-1])
 
+        self.prepare_databases(xp1.declaration)
+        self.prepare_databases(xp2.declaration)
+
         schedule_experiment(xp1)
         schedule_experiment(xp2)
         split_new_jobs()
@@ -1105,6 +1124,9 @@ class AssignSplitsToWorkersTest(BaseBackendTestCase):
         xp1 = Experiment.objects.get(name=fullname1.split('/')[-1])
         xp2 = Experiment.objects.get(name=fullname2.split('/')[-1])
 
+        self.prepare_databases(xp1.declaration)
+        self.prepare_databases(xp2.declaration)
+
         schedule_experiment(xp1)
         schedule_experiment(xp2)
         split_new_jobs()
@@ -1132,6 +1154,9 @@ class AssignSplitsToWorkersTest(BaseBackendTestCase):
         xp1 = Experiment.objects.get(name=fullname.split('/')[-1])
         xp2 = xp1.fork(name='single_fork')
 
+        self.prepare_databases(xp1.declaration)
+        self.prepare_databases(xp2.declaration)
+
         schedule_experiment(xp1)
         schedule_experiment(xp2)
         split_new_jobs()
@@ -1180,14 +1205,12 @@ class GetConfigurationForSplitTest(BaseBackendTestCase):
     def prepare_experiment(self, name):
         xp = Experiment.objects.get(name=name.split('/')[-1])
 
+        self.prepare_databases(xp.declaration)
+
         schedule_experiment(xp)
 
         b0 = xp.blocks.all()[0]
 
-        self.generate_cached_files(b0.inputs.all()[0].database.hash, [
-            [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-        ])
-
         split_new_jobs()
 
         xp.refresh_from_db()
@@ -1277,15 +1300,13 @@ class SplitHelpersBaseTest(BaseBackendTestCase):
     def prepare_experiment(self, name, generate_cache=True):
         xp = Experiment.objects.get(name=name.split('/')[-1])
 
+        if generate_cache:
+            self.prepare_databases(xp.declaration)
+
         schedule_experiment(xp)
 
         b0 = xp.blocks.all()[0]
 
-        if generate_cache:
-            self.generate_cached_files(b0.inputs.all()[0].database.hash, [
-                [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-            ])
-
         split_new_jobs()
 
         xp.refresh_from_db()
@@ -1717,6 +1738,7 @@ class OnSplitStartedTest(SplitHelpersBaseTest):
 
     def test_one_split(self):
         xp = self.prepare_experiment('user/user/single/1/single')
+
         assigned_splits = assign_splits_to_workers()
 
         split = assigned_splits[0]
@@ -1730,6 +1752,7 @@ class OnSplitStartedTest(SplitHelpersBaseTest):
 
     def test_two_splits(self):
         xp = self.prepare_experiment('user/user/single/1/single_split_2')
+
         assigned_splits = assign_splits_to_workers()
 
         split0 = assigned_splits[0]
diff --git a/beat/web/backend/tests/test_scheduler.py b/beat/web/backend/tests/test_scheduler.py
index 67578f0d7559a2e96ef44595cd536f69e211bd8d..3be4a970983bc44d23f5022e4e6497c38a30db4c 100755
--- a/beat/web/backend/tests/test_scheduler.py
+++ b/beat/web/backend/tests/test_scheduler.py
@@ -93,7 +93,7 @@ class TestSchedulerBase(TransactionTestCase, BackendUtilitiesMixin):
   def start_scheduler(self):
     (pid, self.scheduler_thread) = start_scheduler(settings_module='beat.web.settings.test',
                                                    interval=1, address='127.0.0.1',
-                                                   port=52000)
+                                                   port=50800)
 
 
   def stop_scheduler(self):
@@ -105,7 +105,7 @@ class TestSchedulerBase(TransactionTestCase, BackendUtilitiesMixin):
 
   def start_worker(self, name):
     (pid, worker_thread) = start_worker(name, settings.PREFIX, settings.CACHE_ROOT,
-                                        'tcp://127.0.0.1:52000')
+                                        'tcp://127.0.0.1:50800')
 
     self.worker_threads[name] = worker_thread
 
@@ -258,6 +258,8 @@ class TestExecution(TestSchedulerBase):
 
     xp = Experiment.objects.get(name=fullname.split('/')[-1])
 
+    self.prepare_databases(xp.declaration)
+
     schedule_experiment(xp)
     xp.refresh_from_db()
 
@@ -280,6 +282,8 @@ class TestExecution(TestSchedulerBase):
 
     xp = Experiment.objects.get(name=fullname.split('/')[-1])
 
+    self.prepare_databases(xp.declaration)
+
     schedule_experiment(xp)
     xp.refresh_from_db()
 
@@ -304,6 +308,9 @@ class TestExecution(TestSchedulerBase):
     xp1 = Experiment.objects.get(name=fullname1.split('/')[-1])
     xp2 = Experiment.objects.get(name=fullname2.split('/')[-1])
 
+    self.prepare_databases(xp1.declaration)
+    self.prepare_databases(xp2.declaration)
+
     schedule_experiment(xp1)
     schedule_experiment(xp2)
 
@@ -351,6 +358,9 @@ class TestExecution(TestSchedulerBase):
     xp1 = Experiment.objects.get(name=fullname1.split('/')[-1])
     xp2 = Experiment.objects.get(name=fullname2.split('/')[-1])
 
+    self.prepare_databases(xp1.declaration)
+    self.prepare_databases(xp2.declaration)
+
     schedule_experiment(xp1)
     schedule_experiment(xp2)
 
@@ -374,6 +384,9 @@ class TestExecution(TestSchedulerBase):
     xp1 = Experiment.objects.get(name=fullname1.split('/')[-1])
     xp2 = Experiment.objects.get(name=fullname2.split('/')[-1])
 
+    self.prepare_databases(xp1.declaration)
+    self.prepare_databases(xp2.declaration)
+
     schedule_experiment(xp1)
 
     xp1.refresh_from_db()
@@ -398,6 +411,8 @@ class TestExecution(TestSchedulerBase):
 
     xp = Experiment.objects.get(name=fullname.split('/')[-1])
 
+    self.prepare_databases(xp.declaration)
+
     schedule_experiment(xp)
     xp.refresh_from_db()
 
@@ -418,6 +433,8 @@ class TestExecution(TestSchedulerBase):
 
     xp = Experiment.objects.get(name=fullname.split('/')[-1])
 
+    self.prepare_databases(xp.declaration)
+
     schedule_experiment(xp)
     xp.refresh_from_db()
 
@@ -442,6 +459,9 @@ class TestExecution(TestSchedulerBase):
     xp1 = Experiment.objects.get(name=fullname1.split('/')[-1])
     xp2 = Experiment.objects.get(name=fullname2.split('/')[-1])
 
+    self.prepare_databases(xp1.declaration)
+    self.prepare_databases(xp2.declaration)
+
     schedule_experiment(xp1)
     schedule_experiment(xp2)
 
@@ -490,6 +510,8 @@ class TestCancellation(TestSchedulerBase):
   def process(self, experiment_name, block_name=None):
     xp = Experiment.objects.get(name=experiment_name.split('/')[-1])
 
+    self.prepare_databases(xp.declaration)
+
     schedule_experiment(xp)
     xp.refresh_from_db()
 
@@ -523,6 +545,9 @@ class TestCancellation(TestSchedulerBase):
     xp1 = Experiment.objects.get(name=experiment_name1.split('/')[-1])
     xp2 = Experiment.objects.get(name=experiment_name2.split('/')[-1])
 
+    self.prepare_databases(xp1.declaration)
+    self.prepare_databases(xp2.declaration)
+
     schedule_experiment(xp1)
     schedule_experiment(xp2)
     xp1.refresh_from_db()
diff --git a/beat/web/backend/tests/test_setup.py b/beat/web/backend/tests/test_setup.py
index 5f953ad9ca26aa42ff45ef5d98b33fb52d1ebcfb..10001f2be8e72013098e3fdc230a379b3d5991a9 100755
--- a/beat/web/backend/tests/test_setup.py
+++ b/beat/web/backend/tests/test_setup.py
@@ -52,7 +52,7 @@ QUEUES_WITHOUT_PRIORITY = {
           "time-limit": 180, #3 hours
           "cores-per-slot": 1,
           "max-slots-per-user": 4,
-          "environments": ['Python 2.7 (1.2.0)'],
+          "environments": ['Python 2.7 (1.3.0)'],
           "groups": [
               "Default",
           ],
@@ -69,7 +69,7 @@ QUEUES_WITHOUT_PRIORITY = {
             "time-limit": 360, #6 hours
             "cores-per-slot": 2,
             "max-slots-per-user": 2,
-            "environments": ['Python 2.7 (1.2.0)'],
+            "environments": ['Python 2.7 (1.3.0)'],
             "groups": [
                 "Default",
             ],
@@ -86,7 +86,7 @@ QUEUES_WITHOUT_PRIORITY = {
             "time-limit": 720, #12 hours
             "cores-per-slot": 4,
             "max-slots-per-user": 1,
-            "environments": ['Python 2.7 (1.2.0)'],
+            "environments": ['Python 2.7 (1.3.0)'],
             "groups": [
                 "Default",
             ],
@@ -106,9 +106,9 @@ QUEUES_WITHOUT_PRIORITY = {
       }
     },
   "environments": {
-      'Python 2.7 (1.2.0)': {
+      'Python 2.7 (1.3.0)': {
           "name": 'Python 2.7',
-          "version": '1.2.0',
+          "version": '1.3.0',
           "short_description": "Test",
           "description": "Test environment",
           "languages": "python",
@@ -124,7 +124,7 @@ PRIORITY_QUEUES = {
             "time-limit": 180, #3 hours
             "cores-per-slot": 1,
             "max-slots-per-user": 2,
-            "environments": ['Python 2.7 (1.2.0)'],
+            "environments": ['Python 2.7 (1.3.0)'],
             "groups": [
                 "Default",
             ],
@@ -145,7 +145,7 @@ PRIORITY_QUEUES = {
         "time-limit": 360, #6 hours
         "cores-per-slot": 2,
         "max-slots-per-user": 1,
-        "environments": ['Python 2.7 (1.2.0)'],
+        "environments": ['Python 2.7 (1.3.0)'],
         "groups": [
             "Default",
         ],
@@ -166,7 +166,7 @@ PRIORITY_QUEUES = {
         "time-limit": 180, #3 hours
         "cores-per-slot": 1,
         "max-slots-per-user": 8,
-        "environments": ['Python 2.7 (1.2.0)'],
+        "environments": ['Python 2.7 (1.3.0)'],
         "groups": [
             "Default",
         ],
@@ -196,9 +196,9 @@ PRIORITY_QUEUES = {
         )
     ]),
     "environments": {
-        'Python 2.7 (1.2.0)': {
+        'Python 2.7 (1.3.0)': {
             "name": 'Python 2.7',
-          "version": '1.2.0',
+          "version": '1.3.0',
           "short_description": "Test",
           "description": "Test environment",
           "languages": "python",
@@ -303,7 +303,7 @@ class BackendSetup(BaseBackendTestCase):
         env = q1.environments.first()
 
         self.assertEqual(env.name, 'Python 2.7')
-        self.assertEqual(env.version, '1.2.0')
+        self.assertEqual(env.version, '1.3.0')
 
         self.assertEqual(q1.slots.count(), 1)
         self.assertEqual(q2.slots.count(), 1)
@@ -412,7 +412,7 @@ class BackendSetup(BaseBackendTestCase):
         env = q1.environments.first()
 
         self.assertEqual(env.name, 'Python 2.7')
-        self.assertEqual(env.version, '1.2.0')
+        self.assertEqual(env.version, '1.3.0')
 
         self.assertEqual(q1.slots.count(), 2)
         self.assertEqual(q1_special.slots.count(), 2)
diff --git a/beat/web/databases/admin.py b/beat/web/databases/admin.py
old mode 100644
new mode 100755
index 233cc12f9adc4b27ab8d72c7bb56aab010ce7e77..2d1b556e9244b896de3052cbc37ad6d42477eaae
--- a/beat/web/databases/admin.py
+++ b/beat/web/databases/admin.py
@@ -277,8 +277,8 @@ class DatabaseSetOutputInline(admin.TabularInline):
 
     model           = DatabaseSetOutputModel
     extra           = 0
-    ordering        = ('hash',)
-    readonly_fields = ('hash', 'template')
+    ordering        = ('template__name',)
+    readonly_fields = ('template',)
 
     def has_delete_permission(self, request, obj=None):
         return False
@@ -289,13 +289,13 @@ class DatabaseSetOutputInline(admin.TabularInline):
 
 class DatabaseSet(admin.ModelAdmin):
 
-    list_display        = ('id', 'protocol', 'name', 'template')
+    list_display        = ('id', 'protocol', 'name', 'template', 'hash')
     search_fields       = ['name',
                            'template__name',
                            'protocol__database__name',
                            'protocol__name']
     list_display_links  = ('id', 'name')
-    readonly_fields = ('name', 'template', 'protocol')
+    readonly_fields = ('name', 'template', 'protocol', 'hash')
 
     inlines = [
         DatabaseSetOutputInline,
diff --git a/beat/web/databases/migrations/0004_beat_backend_python_1_5_x.py b/beat/web/databases/migrations/0004_beat_backend_python_1_5_x.py
new file mode 100644
index 0000000000000000000000000000000000000000..0701c6b8644423ce03367535207600da14ca678c
--- /dev/null
+++ b/beat/web/databases/migrations/0004_beat_backend_python_1_5_x.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.9.13 on 2018-01-25 09:06
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+from beat.backend.python.hash import hashDataset
+
+
+def compute_hashes(apps, schema_editor):
+    '''Refreshes each database so datasets/outputs are recreated'''
+
+    DatabaseSet = apps.get_model("databases", "DatabaseSet")
+
+    if DatabaseSet.objects.count():
+        print('')
+
+    for db_set in DatabaseSet.objects.order_by('id'):
+        print("Computing hashes for database set '%s/%d/%s/%s'..." % \
+                (db_set.protocol.database.name, db_set.protocol.database.version,
+                 db_set.protocol.name, db_set.name))
+
+        db_set.hash = hashDataset('%s/%d' % (db_set.protocol.database.name, db_set.protocol.database.version),
+                                  db_set.protocol.name, db_set.name)
+        db_set.save()
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('databases', '0003_auto_20160704_1316'),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name='databasesetoutput',
+            name='hash',
+        ),
+        migrations.AddField(
+            model_name='databaseset',
+            name='hash',
+            field=models.CharField(default='', max_length=64),
+            preserve_default=False,
+        ),
+        migrations.RunPython(compute_hashes),
+        migrations.AlterField(
+            model_name='databaseset',
+            name='hash',
+            field=models.CharField(max_length=64, unique=True),
+            preserve_default=False,
+        ),
+    ]
diff --git a/beat/web/databases/models.py b/beat/web/databases/models.py
index d70d8f3dce290075ad7736a0d6dc69cf0aebb4f0..3d8f5447568ec3fbc772800c3ef1529c5559b23d 100755
--- a/beat/web/databases/models.py
+++ b/beat/web/databases/models.py
@@ -34,6 +34,7 @@ from django.conf import settings
 from django.core.urlresolvers import reverse
 
 import beat.core.database
+from beat.backend.python.hash import hashDataset
 
 from ..dataformats.models import DataFormat
 
@@ -352,6 +353,18 @@ class DatabaseSetTemplate(models.Model):
 
 class DatabaseSetManager(models.Manager):
 
+    def create(self, protocol, template, name):
+        dataset = DatabaseSet(
+            name = name,
+            template = template,
+            protocol = protocol,
+            hash = hashDataset(protocol.database.fullname(), protocol.name, name)
+        )
+
+        dataset.save()
+        return dataset
+
+
     def get_by_natural_key(self, database_name, database_version, protocol_name, name, template_name):
         return self.get(
             protocol__database__name=database_name,
@@ -371,6 +384,7 @@ class DatabaseSet(models.Model):
     name        = models.CharField(max_length=200, blank=True)
     template    = models.ForeignKey(DatabaseSetTemplate, related_name='sets',
                                     on_delete=models.CASCADE)
+    hash = models.CharField(max_length=64, unique=True)
 
     class Meta:
         unique_together = ('protocol', 'name', 'template')
@@ -424,8 +438,16 @@ class DatabaseSetTemplateOutput(models.Model):
 
 class DatabaseSetOutputManager(models.Manager):
 
-    def get_by_natural_key(self, hash):
-        return self.get(hash=hash)
+    def get_by_natural_key(self, database_name, database_version, protocol_name, name,
+                           template_name, output_name):
+        return self.get(
+            set__protocol__database__name=database_name,
+            set__protocol__database__version=database_version,
+            set__protocol__name=protocol_name,
+            set__name=name,
+            set__template__name=template_name,
+            template__name=output_name,
+        )
 
 
 class DatabaseSetOutput(models.Model):
@@ -433,7 +455,6 @@ class DatabaseSetOutput(models.Model):
                                  related_name='instances', on_delete=models.CASCADE)
     set = models.ForeignKey(DatabaseSet, related_name='outputs',
                             on_delete=models.CASCADE)
-    hash = models.CharField(max_length=64, unique=True)
 
     objects = DatabaseSetOutputManager()
 
@@ -455,4 +476,4 @@ class DatabaseSetOutput(models.Model):
         return self.template.all_needed_dataformats()
 
     def natural_key(self):
-        return (self.hash,)
+        return self.set.natural_key() + (self.template.name,)
diff --git a/beat/web/databases/signals.py b/beat/web/databases/signals.py
old mode 100644
new mode 100755
index e440eb6e29d9326168f1034c29fa4f8a7045e300..8732721847a19858f54ae6762b378a4d4b2e8705
--- a/beat/web/databases/signals.py
+++ b/beat/web/databases/signals.py
@@ -133,12 +133,11 @@ def refresh_protocols(sender, instance, **kwargs):
                 )
 
                 if not dataset: #create
-                    dataset = DatabaseSet(
+                    dataset = DatabaseSet.objects.create(
                         name = set_attr['name'],
                         template = dataset_template,
                         protocol = protocol,
                     )
-                    dataset.save()
 
                 # Create the database set template output
                 for output_name, format_name in set_attr['outputs'].items():
@@ -191,16 +190,15 @@ def refresh_protocols(sender, instance, **kwargs):
                             database_template_output[0]
 
                     # Create the database set output
-                    hash = core.hash_output(protocol.name,
-                                            dataset.name, output_name)
                     dataset_output = \
-                        DatabaseSetOutput.objects.filter(hash=hash)
+                        DatabaseSetOutput.objects.filter(template=database_template_output,
+                                                         set=dataset,
+                        )
 
                     if not dataset_output: # create
                         dataset_output = DatabaseSetOutput(
                             template=database_template_output,
                             set=dataset,
-                            hash=hash,
                         )
                         dataset_output.save()
 
diff --git a/beat/web/experiments/models/experiment.py b/beat/web/experiments/models/experiment.py
index 85ad7621f84d5358e2b27351c313a218593f9962..db9371fd8801134e297cc49178fd5362a658badd 100755
--- a/beat/web/experiments/models/experiment.py
+++ b/beat/web/experiments/models/experiment.py
@@ -587,7 +587,8 @@ class Experiment(Shareable):
             b.inputs.clear()
             for v in job_description['inputs'].values():
                 if 'database' in v: #database input
-                    db = DatabaseSetOutput.objects.get(hash=v['hash'])
+                    db = DatabaseSetOutput.objects.get(set__hash=v['hash'],
+                                                       template__name=v['output'])
                     BlockInput.objects.get_or_create(block=b,
                                                      channel=v['channel'], database=db)
                 else:
diff --git a/buildout.cfg b/buildout.cfg
index a3d4671da9f65a02a6e5973821193619a80fe235..c2a0400dfa8629fa26202520df6480b8a8954077 100644
--- a/buildout.cfg
+++ b/buildout.cfg
@@ -103,8 +103,8 @@ on_update = true
 
 [cxx_algorithms]
 recipe = collective.recipe.cmd
-cmds = ./src/beat.core//buildout_compile_cxx_algorithm.sh build
-uninstall_cmds = ./src/beat.core/buildout_compile_cxx_algorithm cleanup
+cmds = ./src/beat.core/buildout_compile_cxx_algorithm.sh build
+uninstall_cmds = ./src/beat.core/buildout_compile_cxx_algorithm.sh cleanup
 on_install = true
 on_update = true