diff --git a/beat/web/experiments/migrations/0001_initial.py b/beat/web/experiments/migrations/0001_initial.py
index b5cbfb1c7bee401ae7e7cf88fd5f361308096519..d343f1368919099f1e0b22f50d856c3e268bd11f 100644
--- a/beat/web/experiments/migrations/0001_initial.py
+++ b/beat/web/experiments/migrations/0001_initial.py
@@ -27,104 +27,272 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, models
 from django.conf import settings
-import beat.web.experiments.models
+from django.db import migrations
+from django.db import models
+
 import beat.web.common.models
+import beat.web.experiments.models
 
 
 class Migration(migrations.Migration):
 
     dependencies = [
         migrations.swappable_dependency(settings.AUTH_USER_MODEL),
-        ('databases', '0001_initial'),
-        ('algorithms', '0001_initial'),
-        ('toolchains', '0001_initial'),
-        ('backend', '0001_initial'),
-        ('team', '0001_initial'),
+        ("databases", "0001_initial"),
+        ("algorithms", "0001_initial"),
+        ("toolchains", "0001_initial"),
+        ("backend", "0001_initial"),
+        ("team", "0001_initial"),
     ]
 
     operations = [
         migrations.CreateModel(
-            name='Block',
+            name="Block",
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('name', models.CharField(max_length=200)),
-                ('status', models.CharField(default='N', max_length=1, choices=[('N', 'Not cached'), ('P', 'Processing'), ('C', 'Cached'), ('F', 'Failed')])),
-                ('analyzer', models.BooleanField(default=False)),
-                ('creation_date', models.DateTimeField(auto_now_add=True, null=True)),
-                ('start_date', models.DateTimeField(null=True, blank=True)),
-                ('end_date', models.DateTimeField(null=True, blank=True)),
-                ('algorithm', models.ForeignKey(related_name='blocks', to='algorithms.Algorithm', on_delete=models.CASCADE)),
-                ('environment', models.ForeignKey(related_name='blocks', to='backend.Environment', null=True, on_delete=models.SET_NULL)),
+                (
+                    "id",
+                    models.AutoField(
+                        verbose_name="ID",
+                        serialize=False,
+                        auto_created=True,
+                        primary_key=True,
+                    ),
+                ),
+                ("name", models.CharField(max_length=200)),
+                (
+                    "status",
+                    models.CharField(
+                        default="N",
+                        max_length=1,
+                        choices=[
+                            ("N", "Not cached"),
+                            ("P", "Processing"),
+                            ("C", "Cached"),
+                            ("F", "Failed"),
+                        ],
+                    ),
+                ),
+                ("analyzer", models.BooleanField(default=False)),
+                ("creation_date", models.DateTimeField(auto_now_add=True, null=True)),
+                ("start_date", models.DateTimeField(null=True, blank=True)),
+                ("end_date", models.DateTimeField(null=True, blank=True)),
+                (
+                    "algorithm",
+                    models.ForeignKey(
+                        related_name="blocks",
+                        to="algorithms.Algorithm",
+                        on_delete=models.CASCADE,
+                    ),
+                ),
+                (
+                    "environment",
+                    models.ForeignKey(
+                        related_name="blocks",
+                        to="backend.Environment",
+                        null=True,
+                        on_delete=models.SET_NULL,
+                    ),
+                ),
             ],
         ),
         migrations.CreateModel(
-            name='CachedFile',
+            name="CachedFile",
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('hash', models.CharField(unique=True, max_length=64)),
-                ('linear_execution_time', models.FloatField(default=0.0)),
-                ('speed_up_real', models.FloatField(default=0.0)),
-                ('speed_up_maximal', models.FloatField(default=0.0)),
-                ('queuing_time', models.FloatField(default=0.0)),
-                ('stdout', models.TextField(null=True, blank=True)),
-                ('stderr', models.TextField(null=True, blank=True)),
-                ('error_report', models.TextField(null=True, blank=True)),
-                ('cpu_time', models.FloatField(default=0.0)),
-                ('max_memory', models.BigIntegerField(default=0)),
-                ('data_read_size', models.BigIntegerField(default=0)),
-                ('data_read_nb_blocks', models.IntegerField(default=0)),
-                ('data_read_time', models.FloatField(default=0.0)),
-                ('data_written_size', models.BigIntegerField(default=0)),
-                ('data_written_nb_blocks', models.IntegerField(default=0)),
-                ('data_written_time', models.FloatField(default=0.0)),
-                ('blocks', models.ManyToManyField(related_name='hashes', to='experiments.Block', blank=True)),
+                (
+                    "id",
+                    models.AutoField(
+                        verbose_name="ID",
+                        serialize=False,
+                        auto_created=True,
+                        primary_key=True,
+                    ),
+                ),
+                ("hash", models.CharField(unique=True, max_length=64)),
+                ("linear_execution_time", models.FloatField(default=0.0)),
+                ("speed_up_real", models.FloatField(default=0.0)),
+                ("speed_up_maximal", models.FloatField(default=0.0)),
+                ("queuing_time", models.FloatField(default=0.0)),
+                ("stdout", models.TextField(null=True, blank=True)),
+                ("stderr", models.TextField(null=True, blank=True)),
+                ("error_report", models.TextField(null=True, blank=True)),
+                ("cpu_time", models.FloatField(default=0.0)),
+                ("max_memory", models.BigIntegerField(default=0)),
+                ("data_read_size", models.BigIntegerField(default=0)),
+                ("data_read_nb_blocks", models.IntegerField(default=0)),
+                ("data_read_time", models.FloatField(default=0.0)),
+                ("data_written_size", models.BigIntegerField(default=0)),
+                ("data_written_nb_blocks", models.IntegerField(default=0)),
+                ("data_written_time", models.FloatField(default=0.0)),
+                (
+                    "blocks",
+                    models.ManyToManyField(
+                        related_name="hashes", to="experiments.Block", blank=True
+                    ),
+                ),
             ],
         ),
         migrations.CreateModel(
-            name='Experiment',
+            name="Experiment",
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('sharing', models.CharField(default='P', max_length=1, choices=[('P', 'Private'), ('S', 'Shared'), ('A', 'Public'), ('U', 'Usable')])),
-                ('name', models.CharField(max_length=200)),
-                ('short_description', models.CharField(default='', help_text='Describe the object succinctly (try to keep it under 80 characters)', max_length=100, blank=True)),
-                ('status', models.CharField(default='P', max_length=1, choices=[('P', 'Pending'), ('S', 'Scheduled'), ('R', 'Running'), ('D', 'Done'), ('F', 'Failed'), ('C', 'Canceling')])),
-                ('creation_date', models.DateTimeField(auto_now_add=True, null=True)),
-                ('start_date', models.DateTimeField(null=True, blank=True)),
-                ('end_date', models.DateTimeField(null=True, blank=True)),
-                ('declaration_file', models.FileField(db_column='declaration', upload_to=beat.web.common.models.get_contribution_declaration_filename, storage=beat.web.experiments.models.DeclarationStorage(), max_length=300, blank=True, null=True)),
-                ('description_file', models.FileField(db_column='description', upload_to=beat.web.common.models.get_contribution_description_filename, storage=beat.web.experiments.models.DeclarationStorage(), max_length=300, blank=True, null=True)),
-                ('hash', models.CharField(max_length=64)),
-                ('author', models.ForeignKey(related_name='experiments', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
-                ('referenced_algorithms', models.ManyToManyField(related_name='experiments', to='algorithms.Algorithm', blank=True)),
-                ('referenced_datasets', models.ManyToManyField(related_name='experiments', to='databases.DatabaseSet', blank=True)),
-                ('shared_with', models.ManyToManyField(related_name='shared_experiments', to=settings.AUTH_USER_MODEL, blank=True)),
-                ('shared_with_team', models.ManyToManyField(related_name='shared_experiments', to='team.Team', blank=True)),
-                ('toolchain', models.ForeignKey(related_name='experiments', to='toolchains.Toolchain', on_delete=models.CASCADE)),
+                (
+                    "id",
+                    models.AutoField(
+                        verbose_name="ID",
+                        serialize=False,
+                        auto_created=True,
+                        primary_key=True,
+                    ),
+                ),
+                (
+                    "sharing",
+                    models.CharField(
+                        default="P",
+                        max_length=1,
+                        choices=[
+                            ("P", "Private"),
+                            ("S", "Shared"),
+                            ("A", "Public"),
+                            ("U", "Usable"),
+                        ],
+                    ),
+                ),
+                ("name", models.CharField(max_length=200)),
+                (
+                    "short_description",
+                    models.CharField(
+                        default="",
+                        help_text="Describe the object succinctly (try to keep it under 80 characters)",
+                        max_length=100,
+                        blank=True,
+                    ),
+                ),
+                (
+                    "status",
+                    models.CharField(
+                        default="P",
+                        max_length=1,
+                        choices=[
+                            ("P", "Pending"),
+                            ("S", "Scheduled"),
+                            ("R", "Running"),
+                            ("D", "Done"),
+                            ("F", "Failed"),
+                            ("C", "Canceling"),
+                        ],
+                    ),
+                ),
+                ("creation_date", models.DateTimeField(auto_now_add=True, null=True)),
+                ("start_date", models.DateTimeField(null=True, blank=True)),
+                ("end_date", models.DateTimeField(null=True, blank=True)),
+                (
+                    "declaration_file",
+                    models.FileField(
+                        db_column="declaration",
+                        upload_to=beat.web.common.models.get_contribution_declaration_filename,
+                        storage=beat.web.experiments.models.DeclarationStorage(),
+                        max_length=300,
+                        blank=True,
+                        null=True,
+                    ),
+                ),
+                (
+                    "description_file",
+                    models.FileField(
+                        db_column="description",
+                        upload_to=beat.web.common.models.get_contribution_description_filename,
+                        storage=beat.web.experiments.models.DeclarationStorage(),
+                        max_length=300,
+                        blank=True,
+                        null=True,
+                    ),
+                ),
+                ("hash", models.CharField(max_length=64)),
+                (
+                    "author",
+                    models.ForeignKey(
+                        related_name="experiments",
+                        to=settings.AUTH_USER_MODEL,
+                        on_delete=models.CASCADE,
+                    ),
+                ),
+                (
+                    "referenced_algorithms",
+                    models.ManyToManyField(
+                        related_name="experiments",
+                        to="algorithms.Algorithm",
+                        blank=True,
+                    ),
+                ),
+                (
+                    "referenced_datasets",
+                    models.ManyToManyField(
+                        related_name="experiments",
+                        to="databases.DatabaseSet",
+                        blank=True,
+                    ),
+                ),
+                (
+                    "shared_with",
+                    models.ManyToManyField(
+                        related_name="shared_experiments",
+                        to=settings.AUTH_USER_MODEL,
+                        blank=True,
+                    ),
+                ),
+                (
+                    "shared_with_team",
+                    models.ManyToManyField(
+                        related_name="shared_experiments", to="team.Team", blank=True
+                    ),
+                ),
+                (
+                    "toolchain",
+                    models.ForeignKey(
+                        related_name="experiments",
+                        to="toolchains.Toolchain",
+                        on_delete=models.CASCADE,
+                    ),
+                ),
             ],
-            options={
-                'ordering': ['-creation_date'],
-            },
+            options={"ordering": ["-creation_date"]},
         ),
         migrations.CreateModel(
-            name='Result',
+            name="Result",
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('name', models.CharField(max_length=200)),
-                ('type', models.CharField(max_length=200)),
-                ('primary', models.BooleanField(default=False)),
-                ('data_value', models.TextField(null=True, blank=True)),
-                ('block', models.ForeignKey(related_name='results', to='experiments.Block', on_delete=models.CASCADE)),
+                (
+                    "id",
+                    models.AutoField(
+                        verbose_name="ID",
+                        serialize=False,
+                        auto_created=True,
+                        primary_key=True,
+                    ),
+                ),
+                ("name", models.CharField(max_length=200)),
+                ("type", models.CharField(max_length=200)),
+                ("primary", models.BooleanField(default=False)),
+                ("data_value", models.TextField(null=True, blank=True)),
+                (
+                    "block",
+                    models.ForeignKey(
+                        related_name="results",
+                        to="experiments.Block",
+                        on_delete=models.CASCADE,
+                    ),
+                ),
             ],
         ),
         migrations.AddField(
-            model_name='block',
-            name='experiment',
-            field=models.ForeignKey(related_name='blocks', to='experiments.Experiment', on_delete=models.CASCADE),
+            model_name="block",
+            name="experiment",
+            field=models.ForeignKey(
+                related_name="blocks",
+                to="experiments.Experiment",
+                on_delete=models.CASCADE,
+            ),
         ),
         migrations.AlterUniqueTogether(
-            name='experiment',
-            unique_together=set([('author', 'toolchain', 'name')]),
+            name="experiment", unique_together=set([("author", "toolchain", "name")]),
         ),
     ]
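
Since the hunks in this series are meant to be behaviour-preserving clean-ups (re-wrapping, re-quoting, import sorting, plus a few lint markers), a quick sanity check is that Django detects no drift between the models and the reformatted migrations afterwards. A minimal sketch, assuming the project's settings module is configured and a Django version with `makemigrations --check` (1.10+):

    # Hedged sketch: ask Django whether the reformatted migrations still
    # match the model state; raises SystemExit(1) if new operations would
    # be generated (equivalent to `manage.py makemigrations experiments
    # --check --dry-run` on the command line).
    import django
    from django.core.management import call_command

    django.setup()  # assumes DJANGO_SETTINGS_MODULE is already exported
    call_command("makemigrations", "experiments", "--check", "--dry-run")
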
diff --git a/beat/web/experiments/migrations/0002_scheduler_addons.py b/beat/web/experiments/migrations/0002_scheduler_addons.py
index 9d0394066bdc33a363f1f6b30592d02d6b5fe197..24a10cd729f08fc6a73bdafbb91a9559a4606376 100644
--- a/beat/web/experiments/migrations/0002_scheduler_addons.py
+++ b/beat/web/experiments/migrations/0002_scheduler_addons.py
@@ -28,18 +28,20 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, models
+from django.db import migrations
+from django.db import models
 
 
 def move_result_to_cache(apps, schema_editor):
-    '''Moves the result association from the block to the related cache file'''
+    """Moves the result association from the block to the related cache file"""
 
     Result = apps.get_model("experiments", "Result")
 
     total = Result.objects.count()
-    if total: print('')
-    for i, r in enumerate(Result.objects.order_by('-id')):
-        print("Resetting result (%d) %d/%d..." % (r.id, i+1, total))
+    if total:
+        print("")
+    for i, r in enumerate(Result.objects.order_by("-id")):
+        print("Resetting result (%d) %d/%d..." % (r.id, i + 1, total))
         r.cache = r.block.hashes.first()
         r.save()
 
@@ -47,15 +49,19 @@ def move_result_to_cache(apps, schema_editor):
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0001_initial'),
+        ("experiments", "0001_initial"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='result',
-            name='cache',
-            field=models.ForeignKey(related_name='results',
-                                    to='experiments.CachedFile', null=True, on_delete=models.SET_NULL),
+            model_name="result",
+            name="cache",
+            field=models.ForeignKey(
+                related_name="results",
+                to="experiments.CachedFile",
+                null=True,
+                on_delete=models.SET_NULL,
+            ),
         ),
         migrations.RunPython(move_result_to_cache),
     ]
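
Several migrations in this series (0002 here, 0006 and 0007 below, plus the data-fix functions in 0003 and 0005) pair schema operations with `RunPython` data migrations. For orientation, a minimal sketch of that pattern using the names from 0002 above; the comments carry the points worth knowing, the real operations are in the diff:

    # Minimal sketch of the RunPython data-migration pattern used above.
    from django.db import migrations


    def move_result_to_cache(apps, schema_editor):
        # apps.get_model() returns the *historical* model as it exists at
        # this point of the migration graph; importing
        # beat.web.experiments.models directly would break once the live
        # models drift away from this state.
        Result = apps.get_model("experiments", "Result")
        for result in Result.objects.order_by("-id"):
            result.cache = result.block.hashes.first()
            result.save()


    class Migration(migrations.Migration):

        dependencies = [("experiments", "0001_initial")]

        operations = [
            # In 0002 an AddField for Result.cache precedes this entry, so
            # the column exists before the function runs.  No reverse_code
            # is passed, which makes the migration irreversible; a rollback
            # placeholder would be migrations.RunPython.noop.
            migrations.RunPython(move_result_to_cache),
        ]
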
diff --git a/beat/web/experiments/migrations/0003_scheduler_addons_2.py b/beat/web/experiments/migrations/0003_scheduler_addons_2.py
index ac59b9bec5b370a50a805f3d02fd68e6c0c54996..cd8cf49984fe92edc50e89d329e18e03a7910b4d 100644
--- a/beat/web/experiments/migrations/0003_scheduler_addons_2.py
+++ b/beat/web/experiments/migrations/0003_scheduler_addons_2.py
@@ -32,25 +32,28 @@ from django.db import migrations
 
 
 def dedup_resuls(apps, schema_editor):
-    '''Deletes duplicated results (older ones)'''
+    """Deletes duplicated results (older ones)"""
 
     Result = apps.get_model("experiments", "Result")
 
-    for i, r in enumerate(Result.objects.order_by('-id')):
-        older = Result.objects.filter(name=r.name, id__lt=r.id,
-                                      cache=r.block.hashes.first())
+    for i, r in enumerate(Result.objects.order_by("-id")):
+        older = Result.objects.filter(
+            name=r.name, id__lt=r.id, cache=r.block.hashes.first()
+        )
         if older:
-            print("Cache %s already contains Result `%s' - keeping " \
-                  "newest (out of %d)..." % (r.block.hashes.first().hash, r.name,
-                                             older.count()+1))
+            print(
+                "Cache %s already contains Result `%s' - keeping "
+                "newest (out of %d)..."
+                % (r.block.hashes.first().hash, r.name, older.count() + 1)
+            )
             older.delete()
 
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0002_scheduler_addons'),
-        ('search', '0002_scheduler_addons'),
+        ("experiments", "0002_scheduler_addons"),
+        ("search", "0002_scheduler_addons"),
     ]
 
     operations = [
diff --git a/beat/web/experiments/migrations/0004_scheduler_addons_3.py b/beat/web/experiments/migrations/0004_scheduler_addons_3.py
index 2e573ad2c03f602f614a91959614dd8f3f8b237a..b8341bfbfe305b887dedb735caaec95d9e4b0a33 100644
--- a/beat/web/experiments/migrations/0004_scheduler_addons_3.py
+++ b/beat/web/experiments/migrations/0004_scheduler_addons_3.py
@@ -28,100 +28,144 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, models
+from django.db import migrations
+from django.db import models
+
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('backend', '0002_scheduler_addons'),
-        ('databases', '0002_scheduler_addons'),
-        ('experiments', '0003_scheduler_addons_2'),
+        ("backend", "0002_scheduler_addons"),
+        ("databases", "0002_scheduler_addons"),
+        ("experiments", "0003_scheduler_addons_2"),
     ]
 
     operations = [
         migrations.AlterUniqueTogether(
-            name='result',
-            unique_together=set([('cache', 'name')]),
-        ),
-        migrations.RemoveField(
-            model_name='result',
-            name='block',
+            name="result", unique_together=set([("cache", "name")]),
         ),
+        migrations.RemoveField(model_name="result", name="block",),
         migrations.CreateModel(
-            name='BlockInput',
+            name="BlockInput",
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False,
-                                        auto_created=True, primary_key=True)),
-                ('channel', models.CharField(default=b'',
-                                             help_text=b'Synchronization channel within the toolchain',
-                                             max_length=200, blank=True)),
-                ('block', models.ForeignKey(related_name='inputs',
-                                            to='experiments.Block', null=True, on_delete=models.CASCADE)),
-                ('cache', models.ForeignKey(related_name='inputs',
-                                            to='experiments.CachedFile', null=True, on_delete=models.CASCADE)),
-                ('database', models.ForeignKey(related_name='blocks',
-                                               to='databases.DatabaseSetOutput', null=True, on_delete=models.CASCADE)),
+                (
+                    "id",
+                    models.AutoField(
+                        verbose_name="ID",
+                        serialize=False,
+                        auto_created=True,
+                        primary_key=True,
+                    ),
+                ),
+                (
+                    "channel",
+                    models.CharField(
+                        default=b"",
+                        help_text=b"Synchronization channel within the toolchain",
+                        max_length=200,
+                        blank=True,
+                    ),
+                ),
+                (
+                    "block",
+                    models.ForeignKey(
+                        related_name="inputs",
+                        to="experiments.Block",
+                        null=True,
+                        on_delete=models.CASCADE,
+                    ),
+                ),
+                (
+                    "cache",
+                    models.ForeignKey(
+                        related_name="inputs",
+                        to="experiments.CachedFile",
+                        null=True,
+                        on_delete=models.CASCADE,
+                    ),
+                ),
+                (
+                    "database",
+                    models.ForeignKey(
+                        related_name="blocks",
+                        to="databases.DatabaseSetOutput",
+                        null=True,
+                        on_delete=models.CASCADE,
+                    ),
+                ),
             ],
         ),
         migrations.AddField(
-            model_name='block',
-            name='channel',
-            field=models.CharField(default=b'',
-                                   help_text=b'Synchronization channel within the toolchain',
-                                   max_length=200, blank=True),
+            model_name="block",
+            name="channel",
+            field=models.CharField(
+                default=b"",
+                help_text=b"Synchronization channel within the toolchain",
+                max_length=200,
+                blank=True,
+            ),
         ),
         migrations.AddField(
-            model_name='block',
-            name='command',
+            model_name="block",
+            name="command",
             field=models.TextField(null=True, blank=True),
         ),
         migrations.AddField(
-            model_name='block',
-            name='dependencies',
-            field=models.ManyToManyField(related_name='dependents',
-                                         to='experiments.Block', blank=True),
+            model_name="block",
+            name="dependencies",
+            field=models.ManyToManyField(
+                related_name="dependents", to="experiments.Block", blank=True
+            ),
         ),
         migrations.AlterField(
-            model_name='block',
-            name='environment',
-            field=models.ForeignKey(related_name='blocks',
-                                    on_delete=models.deletion.SET_NULL, to='backend.Environment',
-                                    null=True),
+            model_name="block",
+            name="environment",
+            field=models.ForeignKey(
+                related_name="blocks",
+                on_delete=models.deletion.SET_NULL,
+                to="backend.Environment",
+                null=True,
+            ),
         ),
         migrations.AddField(
-            model_name='block',
-            name='queue',
-            field=models.ForeignKey(related_name='blocks',
-                                    on_delete=models.deletion.SET_NULL, to='backend.Queue',
-                                    null=True),
+            model_name="block",
+            name="queue",
+            field=models.ForeignKey(
+                related_name="blocks",
+                on_delete=models.deletion.SET_NULL,
+                to="backend.Queue",
+                null=True,
+            ),
         ),
         migrations.AddField(
-            model_name='block',
-            name='required_slots',
+            model_name="block",
+            name="required_slots",
             field=models.PositiveIntegerField(default=1),
         ),
         migrations.AlterField(
-            model_name='block',
-            name='status',
-            field=models.CharField(default=b'N', max_length=1,
-                                   choices=[
-                                       (b'N', b'Not cached'),
-                                       (b'P', b'Processing'),
-                                       (b'C', b'Cached'),
-                                       (b'F', b'Failed'),
-                                       (b'S', b'Skipped'),
-                                       (b'L', b'Cancelled'),
-                                   ]
+            model_name="block",
+            name="status",
+            field=models.CharField(
+                default=b"N",
+                max_length=1,
+                choices=[
+                    (b"N", b"Not cached"),
+                    (b"P", b"Processing"),
+                    (b"C", b"Cached"),
+                    (b"F", b"Failed"),
+                    (b"S", b"Skipped"),
+                    (b"L", b"Cancelled"),
+                ],
             ),
         ),
         migrations.AlterUniqueTogether(
-            name='block',
-            unique_together=set([('experiment', 'name')]),
+            name="block", unique_together=set([("experiment", "name")]),
         ),
         migrations.AlterField(
-            model_name='cachedfile',
-            name='blocks',
-            field=models.ManyToManyField(related_name='outputs',
-                                         to='experiments.Block', blank=True),
+            model_name="cachedfile",
+            name="blocks",
+            field=models.ManyToManyField(
+                related_name="outputs", to="experiments.Block", blank=True
+            ),
         ),
     ]
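
Read together, 0001 and this migration leave the Block model with roughly the shape below. This is a reconstruction from the operations for orientation only (the authoritative definition lives in beat/web/experiments/models.py), with the b"..." bytes literals of the migrations, a Python 2 leftover, written here as plain strings:

    # Reconstruction of the Block model state after 0001 + 0004; field
    # names and options are read off the migration operations above.
    from django.db import models


    class Block(models.Model):
        experiment = models.ForeignKey(
            "experiments.Experiment", related_name="blocks", on_delete=models.CASCADE
        )
        name = models.CharField(max_length=200)
        status = models.CharField(
            max_length=1,
            default="N",
            choices=[
                ("N", "Not cached"),
                ("P", "Processing"),
                ("C", "Cached"),
                ("F", "Failed"),
                ("S", "Skipped"),
                ("L", "Cancelled"),
            ],
        )
        analyzer = models.BooleanField(default=False)
        algorithm = models.ForeignKey(
            "algorithms.Algorithm", related_name="blocks", on_delete=models.CASCADE
        )
        environment = models.ForeignKey(
            "backend.Environment",
            related_name="blocks",
            null=True,
            on_delete=models.SET_NULL,
        )
        queue = models.ForeignKey(
            "backend.Queue",
            related_name="blocks",
            null=True,
            on_delete=models.SET_NULL,
        )
        command = models.TextField(null=True, blank=True)
        channel = models.CharField(
            max_length=200,
            default="",
            blank=True,
            help_text="Synchronization channel within the toolchain",
        )
        required_slots = models.PositiveIntegerField(default=1)
        dependencies = models.ManyToManyField(  # self-referential, see AddField above
            "experiments.Block", related_name="dependents", blank=True
        )
        creation_date = models.DateTimeField(auto_now_add=True, null=True)
        start_date = models.DateTimeField(null=True, blank=True)
        end_date = models.DateTimeField(null=True, blank=True)

        class Meta:
            unique_together = {("experiment", "name")}
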
diff --git a/beat/web/experiments/migrations/0005_scheduler_addons_4.py b/beat/web/experiments/migrations/0005_scheduler_addons_4.py
index 26100a727a81649acfc007e74a68a68c33a8be98..a40c7a871848ad82816b147cf0b222dd3602e256 100644
--- a/beat/web/experiments/migrations/0005_scheduler_addons_4.py
+++ b/beat/web/experiments/migrations/0005_scheduler_addons_4.py
@@ -28,16 +28,18 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, utils
+import simplejson
 from django.conf import settings
+from django.db import migrations
+from django.db import utils
 
-import simplejson
 import beat.core.experiment
+
 from ...common import storage
 
 
 def reset_blocks(apps, schema_editor):
-    '''Resets block dependencies and queue relationship'''
+    """Resets block dependencies and queue relationship"""
 
     Experiment = apps.get_model("experiments", "Experiment")
     Block = apps.get_model("experiments", "Block")
@@ -50,9 +52,9 @@ def reset_blocks(apps, schema_editor):
     Result = apps.get_model("experiments", "Result")
 
     total = Experiment.objects.count()
-    for i, e in enumerate(Experiment.objects.order_by('id')):
+    for i, e in enumerate(Experiment.objects.order_by("id")):
 
-        fullname = '%s/%s/%s/%d/%s' % (
+        fullname = "%s/%s/%s/%d/%s" % (
             e.author.username,
             e.toolchain.author.username,
             e.toolchain.name,
@@ -60,21 +62,24 @@ def reset_blocks(apps, schema_editor):
             e.name,
         )
 
-        print("Updating blocks for experiment %d/%d (%s, id=%d)..." % \
-              (i+1, total, fullname, e.id))
+        print(
+            "Updating blocks for experiment %d/%d (%s, id=%d)..."
+            % (i + 1, total, fullname, e.id)
+        )
 
-        xp_decl = simplejson.loads(storage.get_file_content(e,
-                                                            'declaration_file'))
-        tc_decl = simplejson.loads(storage.get_file_content(e.toolchain,
-                                                            'declaration_file'))
+        xp_decl = simplejson.loads(storage.get_file_content(e, "declaration_file"))
+        tc_decl = simplejson.loads(
+            storage.get_file_content(e.toolchain, "declaration_file")
+        )
 
-        xp = beat.core.experiment.Experiment(settings.PREFIX, (xp_decl,
-                                                               tc_decl))
+        xp = beat.core.experiment.Experiment(settings.PREFIX, (xp_decl, tc_decl))
 
         if xp.errors:
-            message = "The experiment `%s' isn't valid (skipping " \
+            message = (
+                "The experiment `%s' isn't valid (skipping "
                 "block update), due to the following errors:\n  * %s"
-            print(message % (fullname, '\n * '.join(xp.errors)))
+            )
+            print(message % (fullname, "\n * ".join(xp.errors)))
             continue
 
         # Loads the experiment execution description, creating the Block's,
@@ -82,17 +87,21 @@ def reset_blocks(apps, schema_editor):
         for block_name, description in xp.setup().items():
 
             # Checks that the Queue/Environment exists
-            job_description = description['configuration']
+            job_description = description["configuration"]
 
             env = Environment.objects.filter(
-                name=job_description['environment']['name'],
-                version=job_description['environment']['version'],
+                name=job_description["environment"]["name"],
+                version=job_description["environment"]["version"],
             )
 
             if not env:
-                print("Cannot find environment `%s (%s)' - not setting" % \
-                      (job_description['environment']['name'],
-                       job_description['environment']['version']))
+                print(
+                    "Cannot find environment `%s (%s)' - not setting"
+                    % (
+                        job_description["environment"]["name"],
+                        job_description["environment"]["version"],
+                    )
+                )
                 env = None
             else:
                 env = env[0]
@@ -100,69 +109,70 @@ def reset_blocks(apps, schema_editor):
             # Search for queue that contains a specific environment
             # notice we don't require environment to exist in relation to
             # the queue as it may have been removed already.
-            queue = Queue.objects.filter(name=job_description['queue'])
+            queue = Queue.objects.filter(name=job_description["queue"])
             if not queue:
-                print("Cannot find queue `%s'" % job_description['queue'])
+                print("Cannot find queue `%s'" % job_description["queue"])
                 queue = None
             else:
                 queue = queue[0]
 
-            parts = job_description['algorithm'].split('/')
+            parts = job_description["algorithm"].split("/")
             algorithm = Algorithm.objects.get(
-                author__username=parts[0],
-                name=parts[1],
-                version=parts[2],
+                author__username=parts[0], name=parts[1], version=parts[2],
             )
 
             # Ties the block in
-            slots = job_description.get('nb_slots')
-
             try:
-                b, _ = Block.objects.get_or_create(experiment=e,
-                                                   name=block_name, algorithm=algorithm)
-            except utils.IntegrityError as exc:
-                print("Block `%s' for experiment `%s' already exists - " \
-                      "modifying entry for migration purposes. This " \
-                      "issue is due a misconnection on the toolchain level " \
-                      "(known case: tpereira/full_isv/2)" % \
-                      (block_name, fullname))
+                b, _ = Block.objects.get_or_create(
+                    experiment=e, name=block_name, algorithm=algorithm
+                )
+            except utils.IntegrityError:
+                print(
+                    "Block `%s' for experiment `%s' already exists - "
+                    "modifying entry for migration purposes. This "
+                    "issue is due a misconnection on the toolchain level "
+                    "(known case: tpereira/full_isv/2)" % (block_name, fullname)
+                )
                 b = Block.objects.get(experiment=e, name=block_name)
 
-            b.command=simplejson.dumps(job_description, indent=4)
-            b.status='N' if (e.status == 'P') else b.status
-            b.environment=env
-            b.queue=queue
+            b.command = simplejson.dumps(job_description, indent=4)
+            b.status = "N" if (e.status == "P") else b.status
+            b.environment = env
+            b.queue = queue
             b.algorithm = algorithm
-            b.analyzer = (algorithm.result_dataformat is not None)
-            b.required_slots=job_description['nb_slots']
-            b.channel=job_description['channel']
+            b.analyzer = algorithm.result_dataformat is not None
+            b.required_slots = job_description["nb_slots"]
+            b.channel = job_description["channel"]
             b.save()
 
             # from this point: requires block to have an assigned id
-            b.dependencies.add(*[e.blocks.get(name=k) \
-                                 for k in description['dependencies']])
+            b.dependencies.add(
+                *[e.blocks.get(name=k) for k in description["dependencies"]]
+            )
 
             # reset inputs and outputs - creates if necessary only
-            for v in job_description['inputs'].values():
-                if 'database' in v: #database input
-                    db = DatabaseSetOutput.objects.get(hash=v['hash'])
-                    BlockInput.objects.get_or_create(block=b,
-                                                     channel=v['channel'], database=db)
+            for v in job_description["inputs"].values():
+                if "database" in v:  # database input
+                    db = DatabaseSetOutput.objects.get(hash=v["hash"])
+                    BlockInput.objects.get_or_create(
+                        block=b, channel=v["channel"], database=db
+                    )
                 else:
-                    cache = CachedFile.objects.get(hash=v['hash'])
-                    BlockInput.objects.get_or_create(block=b,
-                                                     channel=v['channel'], cache=cache)
+                    cache = CachedFile.objects.get(hash=v["hash"])
+                    BlockInput.objects.get_or_create(
+                        block=b, channel=v["channel"], cache=cache
+                    )
 
             current = list(b.outputs.all())
-            b.outputs.clear() #dissociates all current outputs
-            outputs = job_description.get('outputs',
-                                          {'': job_description.get('result')})
+            b.outputs.clear()  # dissociates all current outputs
+            outputs = job_description.get(
+                "outputs", {"": job_description.get("result")}
+            )
             for v in outputs.values():
-                cache, cr = CachedFile.objects.get_or_create(hash=v['hash'])
+                cache, cr = CachedFile.objects.get_or_create(hash=v["hash"])
                 if cr:
-                    if len(current) == len(outputs): #copy
-                        cache.linear_exedution_time = \
-                            current[0].linear_execution_time
+                    if len(current) == len(outputs):  # copy
+                        cache.linear_exedution_time = current[0].linear_execution_time
                         cache.speed_up_real = current[0].speed_up_real
                         cache.speed_up_maximal = current[0].speed_up_maximal
                         cache.queuing_time = current[0].queuing_time
@@ -172,36 +182,38 @@ def reset_blocks(apps, schema_editor):
                         cache.cpu_time = current[0].cpu_time
                         cache.max_memory = current[0].max_memory
                         cache.data_read_size = current[0].data_read_size
-                        cache.data_read_nb_blocks = \
-                            current[0].data_read_nb_blocks
+                        cache.data_read_nb_blocks = current[0].data_read_nb_blocks
                         cache.data_read_time = current[0].data_read_time
                         cache.data_written_size = current[0].data_written_size
-                        cache.data_written_nb_blocks = \
-                            current[0].data_written_nb_blocks
+                        cache.data_written_nb_blocks = current[0].data_written_nb_blocks
                         cache.data_written_time = current[0].data_written_time
                         if current[0].results.count():
                             for r in current[0].results.all():
                                 r.cache = cache
                                 r.save()
-                        print("CachedFile data `%s' MOVED from `%s'" % \
-                              (cache.hash, current[0].hash))
+                        print(
+                            "CachedFile data `%s' MOVED from `%s'"
+                            % (cache.hash, current[0].hash)
+                        )
                     else:
-                        print("CachedFile (hash=%s) CREATED for block `%s' " \
-                              "of experiment `%s' which is in state `%s'" % \
-                              (cache.hash, block_name, fullname,
-                               b.get_status_display()))
+                        print(
+                            "CachedFile (hash=%s) CREATED for block `%s' "
+                            "of experiment `%s' which is in state `%s'"
+                            % (cache.hash, block_name, fullname, b.get_status_display())
+                        )
                 cache.blocks.add(b)
 
-        #asserts all blocks (except analysis blocks have dependents)
+        # asserts all blocks (except analysis blocks) have dependents
         for b in e.blocks.all():
-            assert (b.analyzer and b.dependents.count() == 0) or \
-                b.dependents.count() > 0
+            assert (  # nosec
+                b.analyzer and b.dependents.count() == 0
+            ) or b.dependents.count() > 0
 
-        #asserts all analysis blocks have only one output
+        # asserts all analysis blocks have only one output
         for b in e.blocks.filter(analyzer=True):
-            assert b.outputs.count() == 1
+            assert b.outputs.count() == 1  # nosec
 
-        #removes results without caches
+        # removes results without caches
         for r in Result.objects.filter(cache=None):
             print("Removing result %d (no associated cache)" % r.id)
             r.delete()
@@ -210,7 +222,7 @@ def reset_blocks(apps, schema_editor):
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0004_scheduler_addons_3'),
+        ("experiments", "0004_scheduler_addons_3"),
     ]
 
     operations = [
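
One thing worth flagging in reset_blocks above, and left untouched by this formatting-only pass: the stats copy assigns to `cache.linear_exedution_time`, while the field created in 0001 is `linear_execution_time`, so that particular value lands on a throw-away Python attribute and is never persisted. Were that ever fixed separately, a field-list copy makes such slips impossible to miss; a hedged sketch with a hypothetical helper:

    # Hypothetical helper (not part of this diff): copy the CachedFile
    # accounting fields declared in 0001 from one row to another.  Looking
    # each name up via _meta first means a misspelling such as
    # "linear_exedution_time" raises FieldDoesNotExist instead of silently
    # setting a plain attribute.
    CACHEDFILE_STATS = (
        "linear_execution_time",
        "speed_up_real",
        "speed_up_maximal",
        "queuing_time",
        "stdout",
        "stderr",
        "error_report",
        "cpu_time",
        "max_memory",
        "data_read_size",
        "data_read_nb_blocks",
        "data_read_time",
        "data_written_size",
        "data_written_nb_blocks",
        "data_written_time",
    )


    def copy_cache_stats(source, target):
        for name in CACHEDFILE_STATS:
            target._meta.get_field(name)  # raises if the name is not a field
            setattr(target, name, getattr(source, name))
        target.save()
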
diff --git a/beat/web/experiments/migrations/0006_block_order.py b/beat/web/experiments/migrations/0006_block_order.py
index bbdd26ebabc2c25abb274ba40bb618006296e590..ba3963299270cfb9a13b3672ae8fa19b7b76348d 100755
--- a/beat/web/experiments/migrations/0006_block_order.py
+++ b/beat/web/experiments/migrations/0006_block_order.py
@@ -27,24 +27,26 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, models
+import simplejson
 from django.conf import settings
+from django.db import migrations
+from django.db import models
 
-import simplejson
 import beat.core.experiment
+
 from ...common import storage
 
 
 def set_block_order(apps, schema_editor):
-    '''Set block order for existing experiments'''
+    """Set block order for existing experiments"""
 
     Experiment = apps.get_model("experiments", "Experiment")
     Block = apps.get_model("experiments", "Block")
 
     total = Experiment.objects.count()
-    for i, e in enumerate(Experiment.objects.order_by('id')):
+    for i, e in enumerate(Experiment.objects.order_by("id")):
 
-        fullname = '%s/%s/%s/%d/%s' % (
+        fullname = "%s/%s/%s/%d/%s" % (
             e.author.username,
             e.toolchain.author.username,
             e.toolchain.name,
@@ -52,21 +54,24 @@ def set_block_order(apps, schema_editor):
             e.name,
         )
 
-        print("Updating blocks for experiment %d/%d (%s, id=%d)..." % \
-              (i+1, total, fullname, e.id))
+        print(
+            "Updating blocks for experiment %d/%d (%s, id=%d)..."
+            % (i + 1, total, fullname, e.id)
+        )
 
-        xp_decl = simplejson.loads(storage.get_file_content(e,
-                                                            'declaration_file'))
-        tc_decl = simplejson.loads(storage.get_file_content(e.toolchain,
-                                                            'declaration_file'))
+        xp_decl = simplejson.loads(storage.get_file_content(e, "declaration_file"))
+        tc_decl = simplejson.loads(
+            storage.get_file_content(e.toolchain, "declaration_file")
+        )
 
-        xp = beat.core.experiment.Experiment(settings.PREFIX, (xp_decl,
-                                                               tc_decl))
+        xp = beat.core.experiment.Experiment(settings.PREFIX, (xp_decl, tc_decl))
 
         if xp.errors:
-            message = "The experiment `%s' isn't valid (skipping " \
+            message = (
+                "The experiment `%s' isn't valid (skipping "
                 "block update), due to the following errors:\n  * %s"
-            print(message % (fullname, '\n * '.join(xp.errors)))
+            )
+            print(message % (fullname, "\n * ".join(xp.errors)))
             continue
 
         # Goes, in order, setting block inner order
@@ -80,18 +85,17 @@ def set_block_order(apps, schema_editor):
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0005_scheduler_addons_4'),
+        ("experiments", "0005_scheduler_addons_4"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='block',
-            name='execution_order',
+            model_name="block",
+            name="execution_order",
             field=models.PositiveIntegerField(blank=True, null=True),
         ),
         migrations.AlterModelOptions(
-            name='block',
-            options={'ordering': ['experiment_id', 'execution_order']},
+            name="block", options={"ordering": ["experiment_id", "execution_order"]},
         ),
         migrations.RunPython(set_block_order),
     ]
diff --git a/beat/web/experiments/migrations/0007_cachedfile_status.py b/beat/web/experiments/migrations/0007_cachedfile_status.py
index e022eb725f7414f0a903c8c4a98de61462f804e0..d016d49d98d77433b5367ffb07413c3a74388b73 100755
--- a/beat/web/experiments/migrations/0007_cachedfile_status.py
+++ b/beat/web/experiments/migrations/0007_cachedfile_status.py
@@ -27,11 +27,12 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, models
-from django.conf import settings
-
-import os
 import glob
+import os
+
+from django.conf import settings
+from django.db import migrations
+from django.db import models
 
 import beat.core.hash
 
@@ -40,18 +41,23 @@ def set_status(apps, schema_editor):
     CachedFile = apps.get_model("experiments", "CachedFile")
 
     total = CachedFile.objects.count()
-    for i, c in enumerate(CachedFile.objects.order_by('id')):
+    for i, c in enumerate(CachedFile.objects.order_by("id")):
 
-        print("Updating cached file %d/%d (%s, id=%d)..." % \
-              (i+1, total, c.hash, c.id))
+        print(
+            "Updating cached file %d/%d (%s, id=%d)..." % (i + 1, total, c.hash, c.id)
+        )
 
-        abs_path = os.path.join(settings.CACHE_ROOT, beat.core.hash.toPath(c.hash, suffix=''))
+        abs_path = os.path.join(
+            settings.CACHE_ROOT, beat.core.hash.toPath(c.hash, suffix="")
+        )
 
-        data_files = sorted(glob.glob(abs_path + '*.index')) + \
-            sorted(glob.glob(abs_path + '*.data'))
+        data_files = sorted(glob.glob(abs_path + "*.index")) + sorted(
+            glob.glob(abs_path + "*.data")
+        )
 
-        checksum_files = sorted(glob.glob(abs_path + '*.index.checksum')) + \
-            sorted(glob.glob(abs_path + '*.data.checksum'))
+        checksum_files = sorted(glob.glob(abs_path + "*.index.checksum")) + sorted(
+            glob.glob(abs_path + "*.data.checksum")
+        )
 
         if len(data_files) == 0:
             continue
@@ -61,7 +67,7 @@ def set_status(apps, schema_editor):
 
         cached = True
         for data_file, checksum_file in zip(data_files, checksum_files):
-            with open(checksum_file, 'rt') as f:
+            with open(checksum_file, "rt") as f:
                 recorded = f.read().strip()
 
             actual = beat.core.hash.hashFileContents(data_file)
@@ -71,22 +77,29 @@ def set_status(apps, schema_editor):
                 break
 
         if cached:
-            c.status = 'C'
+            c.status = "C"
             c.save()
 
 
-
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0006_block_order'),
+        ("experiments", "0006_block_order"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='cachedfile',
-            name='status',
-            field=models.CharField(choices=[(b'N', b'Not cached'), (b'P', b'Processing'), (b'C', b'Cached')], default=b'N', max_length=1),
+            model_name="cachedfile",
+            name="status",
+            field=models.CharField(
+                choices=[
+                    (b"N", b"Not cached"),
+                    (b"P", b"Processing"),
+                    (b"C", b"Cached"),
+                ],
+                default=b"N",
+                max_length=1,
+            ),
         ),
         migrations.RunPython(set_status),
     ]
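
set_status above re-hashes every *.data/*.index file and compares the result with the digest recorded in the companion *.checksum file, flipping the CachedFile to "C" only when everything matches. A generic, self-contained sketch of that check, using hashlib.sha256 as a stand-in for beat.core.hash.hashFileContents (whose actual algorithm is not shown in this diff):

    # Hedged stand-in for the verification loop in set_status above.
    import glob
    import hashlib
    import os


    def cache_is_valid(cache_root, hash_prefix_path):
        """True if every *.data/*.index file under the cache entry matches
        the digest recorded in its companion *.checksum file."""
        base = os.path.join(cache_root, hash_prefix_path)
        data_files = sorted(glob.glob(base + "*.index")) + sorted(
            glob.glob(base + "*.data")
        )
        for data_file in data_files:
            with open(data_file + ".checksum", "rt") as f:
                recorded = f.read().strip()
            with open(data_file, "rb") as f:
                actual = hashlib.sha256(f.read()).hexdigest()
            if actual != recorded:
                return False
        # mirror the original's behaviour of not marking empty entries cached
        return bool(data_files)
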
diff --git a/beat/web/experiments/migrations/0008_block_status.py b/beat/web/experiments/migrations/0008_block_status.py
index e5a3d74d559c75dc29a41e1904f26e87e317eaad..adce99a8e4823bf5721f2d14950b94890863b68c 100644
--- a/beat/web/experiments/migrations/0008_block_status.py
+++ b/beat/web/experiments/migrations/0008_block_status.py
@@ -2,19 +2,31 @@
 # Generated by Django 1.9.13 on 2017-09-27 16:48
 from __future__ import unicode_literals
 
-from django.db import migrations, models
+from django.db import migrations
+from django.db import models
 
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0007_cachedfile_status'),
+        ("experiments", "0007_cachedfile_status"),
     ]
 
     operations = [
         migrations.AlterField(
-            model_name='block',
-            name='status',
-            field=models.CharField(choices=[(b'N', b'Pending'), (b'P', b'Processing'), (b'C', b'Done'), (b'F', b'Failed'), (b'S', b'Skipped'), (b'L', b'Cancelled')], default=b'N', max_length=1),
+            model_name="block",
+            name="status",
+            field=models.CharField(
+                choices=[
+                    (b"N", b"Pending"),
+                    (b"P", b"Processing"),
+                    (b"C", b"Done"),
+                    (b"F", b"Failed"),
+                    (b"S", b"Skipped"),
+                    (b"L", b"Cancelled"),
+                ],
+                default=b"N",
+                max_length=1,
+            ),
         ),
     ]
diff --git a/beat/web/experiments/migrations/0009_block_status.py b/beat/web/experiments/migrations/0009_block_status.py
index db3f2c5398a4ad6405a2afa3a5771eeac390a27c..b4ab55d90a7f11a3dc32ecbf3be8e689352b2392 100644
--- a/beat/web/experiments/migrations/0009_block_status.py
+++ b/beat/web/experiments/migrations/0009_block_status.py
@@ -2,19 +2,30 @@
 # Generated by Django 1.9.13 on 2017-09-29 08:42
 from __future__ import unicode_literals
 
-from django.db import migrations, models
+from django.db import migrations
+from django.db import models
 
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0008_block_status'),
+        ("experiments", "0008_block_status"),
     ]
 
     operations = [
         migrations.AlterField(
-            model_name='block',
-            name='status',
-            field=models.CharField(choices=[(b'N', b'Pending'), (b'P', b'Processing'), (b'C', b'Done'), (b'F', b'Failed'), (b'L', b'Cancelled')], default=b'N', max_length=1),
+            model_name="block",
+            name="status",
+            field=models.CharField(
+                choices=[
+                    (b"N", b"Pending"),
+                    (b"P", b"Processing"),
+                    (b"C", b"Done"),
+                    (b"F", b"Failed"),
+                    (b"L", b"Cancelled"),
+                ],
+                default=b"N",
+                max_length=1,
+            ),
         ),
     ]