diff --git a/beat/web/experiments/__init__.py b/beat/web/experiments/__init__.py
index b168e0d6250e467dd371da63c155f5dedfae6f57..f6a944674862a2ca7de1b7ce9a0bba2469f88a6f 100644
--- a/beat/web/experiments/__init__.py
+++ b/beat/web/experiments/__init__.py
@@ -25,4 +25,4 @@
 #                                                                             #
 ###############################################################################
 
-default_app_config = 'beat.web.experiments.apps.ExperimentsConfig'
+default_app_config = "beat.web.experiments.apps.ExperimentsConfig"
diff --git a/beat/web/experiments/admin.py b/beat/web/experiments/admin.py
index 313e38e6939adb0d4393f72b69317e1a93fd36c3..6d453ef13bb732e0df20ff89e8a53f6699cc8c30 100755
--- a/beat/web/experiments/admin.py
+++ b/beat/web/experiments/admin.py
@@ -26,77 +26,72 @@
 ###############################################################################
 
 import simplejson as json
-
 from django import forms
 from django.contrib import admin
 from django.core.files.base import ContentFile
-
+from django.db.models import Count
+from django.db.models import Max
+from django.urls import reverse
 from django.utils.html import format_html
 from django.utils.safestring import mark_safe
-from django.urls import reverse
-from django.db.models import Max, Count
 
-from .models import Experiment as ExperimentModel
+from ..common.texts import Messages
+from ..ui.forms import CodeMirrorJSONCharField
+from ..ui.forms import CodeMirrorJSONFileField
+from ..ui.forms import CodeMirrorRSTFileField
+from ..ui.forms import NameField
 from .models import Block as BlockModel
-from .models import Result as ResultModel
-from .models import CachedFile as CachedFileModel
 from .models import BlockInput as BlockInputModel
+from .models import CachedFile as CachedFileModel
+from .models import Experiment as ExperimentModel
+from .models import Result as ResultModel
 from .models import validate_experiment
 
-from ..ui.forms import CodeMirrorJSONFileField, CodeMirrorRSTFileField, \
-    NameField, CodeMirrorJSONCharField
+# ----------------------------------------------------------
 
-from ..common.texts import Messages
-
-
-#----------------------------------------------------------
 
 class ExperimentModelForm(forms.ModelForm):
 
     name = NameField(
-        widget=forms.TextInput(attrs=dict(size=80)),
-        help_text=Messages['name'],
+        widget=forms.TextInput(attrs=dict(size=80)), help_text=Messages["name"],
     )
 
     declaration_file = CodeMirrorJSONFileField(
-        label='Declaration',
-        help_text=Messages['json'],
+        label="Declaration", help_text=Messages["json"],
     )
 
     description_file = CodeMirrorRSTFileField(
-        label='Description',
+        label="Description",
         required=False,
         allow_empty_file=True,
-        help_text=Messages['description'],
+        help_text=Messages["description"],
     )
 
     class Meta:
         model = ExperimentModel
         exclude = []
         widgets = {
-            'short_description': forms.TextInput(
-                attrs=dict(size=100),
-            ),
+            "short_description": forms.TextInput(attrs=dict(size=100),),
         }
 
     def clean_declaration_file(self):
         """Cleans-up the declaration_file data, make sure it is really new"""
 
-        new_declaration = self.cleaned_data['declaration_file'].read()
-        old_declaration = ''
+        new_declaration = self.cleaned_data["declaration_file"].read()
+        old_declaration = ""
 
         if self.instance and self.instance.declaration_file.name is not None:
             old_declaration = self.instance.declaration_string
             if new_declaration == old_declaration:
-                self.changed_data.remove('declaration_file')
+                self.changed_data.remove("declaration_file")
                 content_file = ContentFile(old_declaration)
                 content_file.name = self.instance.declaration_file.name
                 return content_file
 
         try:
-            core_experiment, errors = \
-                validate_experiment(json.loads(new_declaration),
-                                    self.cleaned_data['toolchain'].declaration)
+            core_experiment, errors = validate_experiment(
+                json.loads(new_declaration), self.cleaned_data["toolchain"].declaration
+            )
         except SyntaxError as e:
             raise forms.ValidationError(str(e))
 
@@ -104,84 +99,98 @@ class ExperimentModelForm(forms.ModelForm):
             all_errors = [forms.ValidationError(k) for k in errors]
             raise forms.ValidationError(all_errors)
 
-        self.cleaned_data['declaration_file'].seek(0) #reset ContentFile readout
-        return self.cleaned_data['declaration_file']
+        self.cleaned_data["declaration_file"].seek(0)  # reset ContentFile readout
+        return self.cleaned_data["declaration_file"]
 
     def clean(self):
         """Cleans-up the input data, make sure it overall validates"""
 
-        if 'declaration_file' in self.data and \
-                isinstance(self.data['declaration_file'], str):
+        if "declaration_file" in self.data and isinstance(
+            self.data["declaration_file"], str
+        ):
             mutable_data = self.data.copy()
-            mutable_data['declaration_file'] = ContentFile(self.data['declaration_file'], name='unsaved')
+            mutable_data["declaration_file"] = ContentFile(
+                self.data["declaration_file"], name="unsaved"
+            )
             self.data = mutable_data
 
 
-
 class BlockInline(admin.TabularInline):
 
     model = BlockModel
     extra = 0
 
-    readonly_fields = ['execution_order', 'link', 'algorithm', 'analyzer',
-                       'status']
-    ordering = ['execution_order']
+    readonly_fields = ["execution_order", "link", "algorithm", "analyzer", "status"]
+    ordering = ["execution_order"]
     fields = readonly_fields
 
     def link(self, obj):
-        url = reverse('admin:experiments_block_change', args=(obj.pk,))
-        return mark_safe('<a href="%s">%s</a>' % (url, obj.name))
-    link.short_description = 'name'
+        url = reverse("admin:experiments_block_change", args=(obj.pk,))
+        return mark_safe('<a href="%s">%s</a>' % (url, obj.name))  # nosec
+
+    link.short_description = "name"
 
     def has_delete_permission(self, request, obj=None):
         return False
 
     def has_add_permission(self, request):
-            return False
+        return False
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 def reset_experiment(modeladmin, request, queryset):
-    for q in queryset: q.reset()
-reset_experiment.short_description = 'Reset selected experiments'
+    for q in queryset:
+        q.reset()
+
+
+reset_experiment.short_description = "Reset selected experiments"
 
 
 def cancel_experiment(modeladmin, request, queryset):
-    for q in queryset: q.cancel()
-cancel_experiment.short_description = 'Cancel selected experiments'
+    for q in queryset:
+        q.cancel()
+
+
+cancel_experiment.short_description = "Cancel selected experiments"
 
 
 def rehash_experiment(modeladmin, request, queryset):
-    for q in queryset: q.save()
-rehash_experiment.short_description = 'Rehash selected experiments'
+    for q in queryset:
+        q.save()
+
+
+rehash_experiment.short_description = "Rehash selected experiments"
+
 
 class Experiment(admin.ModelAdmin):
 
-    list_display       = ('id',
-                          'author',
-                          'toolchain',
-                          'name',
-                          'creation_date',
-                          'start_date',
-                          'end_date',
-                          'status',
-                          'sharing',
-                         )
-    search_fields      = ['author__username',
-                          'toolchain__name',
-                          'toolchain__author__username',
-                          'name',
-                          'short_description',
-                         ]
-    readonly_fields    = (
-        'hash',
-        'referenced_datasets',
-        'referenced_algorithms',
-        'short_description',
+    list_display = (
+        "id",
+        "author",
+        "toolchain",
+        "name",
+        "creation_date",
+        "start_date",
+        "end_date",
+        "status",
+        "sharing",
+    )
+    search_fields = [
+        "author__username",
+        "toolchain__name",
+        "toolchain__author__username",
+        "name",
+        "short_description",
+    ]
+    readonly_fields = (
+        "hash",
+        "referenced_datasets",
+        "referenced_algorithms",
+        "short_description",
     )
-    list_display_links = ('id', )
+    list_display_links = ("id",)
 
     actions = [
         rehash_experiment,
@@ -191,135 +200,126 @@ class Experiment(admin.ModelAdmin):
 
     form = ExperimentModelForm
 
-    filter_horizontal = [
-        'shared_with',
-        'shared_with_team'
-    ]
+    filter_horizontal = ["shared_with", "shared_with_team"]
 
     inlines = [
         BlockInline,
     ]
 
     fieldsets = (
-        (None,
-         dict(
-             fields=('name', 'author', 'toolchain'),
-         ),
-          ),
-        ('Status and dates',
-         dict(
-             classes=('collapse',),
-             fields=('start_date', 'end_date', 'status'),
-         ),
-          ),
-        ('Documentation',
-         dict(
-             classes=('collapse',),
-             fields=('short_description', 'description_file',),
-         ),
-          ),
-        ('References (read-only)',
-         dict(
-             classes=('collapse',),
-             fields=('referenced_datasets', 'referenced_algorithms',),
-         ),
-          ),
-        ('Sharing',
-         dict(
-             classes=('collapse',),
-             fields=('sharing', 'shared_with', 'shared_with_team'),
-         ),
-          ),
-        ('Source code',
-         dict(
-             fields=('hash', 'declaration_file'),
-         ),
-          ),
+        (None, dict(fields=("name", "author", "toolchain"),),),
+        (
+            "Status and dates",
+            dict(classes=("collapse",), fields=("start_date", "end_date", "status"),),
+        ),
+        (
+            "Documentation",
+            dict(
+                classes=("collapse",),
+                fields=("short_description", "description_file",),
+            ),
+        ),
+        (
+            "References (read-only)",
+            dict(
+                classes=("collapse",),
+                fields=("referenced_datasets", "referenced_algorithms",),
+            ),
+        ),
+        (
+            "Sharing",
+            dict(
+                classes=("collapse",),
+                fields=("sharing", "shared_with", "shared_with_team"),
+            ),
+        ),
+        ("Source code", dict(fields=("hash", "declaration_file"),),),
     )
 
+
 admin.site.register(ExperimentModel, Experiment)
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class BlockInputInline(admin.TabularInline):
 
-    model           = BlockInputModel
-    verbose_name = 'Input'
-    verbose_name_plural = 'Inputs'
-    extra           = 0
-    ordering        = ['database', 'cache']
-    readonly_fields = ['input', 'channel']
+    model = BlockInputModel
+    verbose_name = "Input"
+    verbose_name_plural = "Inputs"
+    extra = 0
+    ordering = ["database", "cache"]
+    readonly_fields = ["input", "channel"]
     fields = readonly_fields
 
     def input(self, obj):
         if obj.database:
-            url = reverse('admin:databases_databaseset_change',
-                          args=(obj.database.set.pk,))
-            text = '%s (%s)' % (obj.database, obj.database.hash)
-            what = 'Dataset Output'
+            url = reverse(
+                "admin:databases_databaseset_change", args=(obj.database.set.pk,)
+            )
+            text = "%s (%s)" % (obj.database, obj.database.hash)
+            what = "Dataset Output"
         else:
-            url = reverse('admin:experiments_cachedfile_change',
-                          args=(obj.cache.pk,))
+            url = reverse("admin:experiments_cachedfile_change", args=(obj.cache.pk,))
             text = obj.cache.hash
-            what = 'Cached File'
-        return mark_safe('%s: <a href="%s">%s</a>' % (what, url, text))
+            what = "Cached File"
+        return mark_safe('%s: <a href="%s">%s</a>' % (what, url, text))  # nosec
 
     def has_delete_permission(self, request, obj=None):
         return False
 
     def has_add_permission(self, request):
-            return False
+        return False
 
 
 class CachedFileInline(admin.TabularInline):
 
     model = CachedFileModel.blocks.through
-    verbose_name = 'Output'
-    verbose_name_plural = 'Outputs'
+    verbose_name = "Output"
+    verbose_name_plural = "Outputs"
     extra = 0
 
-    readonly_fields = ['output']
+    readonly_fields = ["output"]
     fields = readonly_fields
 
     def output(self, obj):
-        url = reverse('admin:experiments_cachedfile_change', args=(obj.cachedfile.pk,))
+        url = reverse("admin:experiments_cachedfile_change", args=(obj.cachedfile.pk,))
         text = obj.cachedfile.hash
-        what = 'Cached File'
-        return mark_safe('%s: <a href="%s">%s</a>' % (what, url, text))
+        what = "Cached File"
+        return mark_safe('%s: <a href="%s">%s</a>' % (what, url, text))  # nosec
 
     def has_delete_permission(self, request, obj=None):
         return False
 
     def has_add_permission(self, request):
-            return False
+        return False
 
 
 class BlockDependentsInline(admin.TabularInline):
 
     model = BlockModel.dependencies.through
-    verbose_name = 'Dependent'
-    verbose_name_plural = 'Dependents'
-    fk_name = 'to_block'
+    verbose_name = "Dependent"
+    verbose_name_plural = "Dependents"
+    fk_name = "to_block"
     extra = 0
 
-    readonly_fields = ['order', 'name', 'algorithm', 'analyzer',
-                       'status']
-    ordering = ['id']
+    readonly_fields = ["order", "name", "algorithm", "analyzer", "status"]
+    ordering = ["id"]
     fields = readonly_fields
 
     def order(self, obj):
         return obj.from_block.execution_order
 
     def name(self, obj):
-        url = reverse('admin:experiments_block_change', args=(obj.from_block.pk,))
-        return mark_safe('<a href="%s">%s</a>' % (url, obj.from_block.name))
+        url = reverse("admin:experiments_block_change", args=(obj.from_block.pk,))
+        return mark_safe('<a href="%s">%s</a>' % (url, obj.from_block.name))  # nosec
 
     def algorithm(self, obj):
         return obj.from_block.algorithm
 
     def analyzer(self, obj):
         return obj.from_block.analyzer
+
     analyzer.boolean = True
 
     def status(self, obj):
@@ -329,33 +329,34 @@ class BlockDependentsInline(admin.TabularInline):
         return False
 
     def has_add_permission(self, request):
-            return False
+        return False
 
 
 class BlockDependenciesInline(admin.TabularInline):
 
     model = BlockModel.dependencies.through
-    verbose_name = 'Dependency'
-    verbose_name_plural = 'Dependencies'
-    fk_name = 'from_block'
+    verbose_name = "Dependency"
+    verbose_name_plural = "Dependencies"
+    fk_name = "from_block"
     extra = 0
 
-    readonly_fields = ['order', 'name', 'algorithm', 'analyzer', 'status']
-    ordering = ['id']
+    readonly_fields = ["order", "name", "algorithm", "analyzer", "status"]
+    ordering = ["id"]
     fields = readonly_fields
 
     def order(self, obj):
         return obj.to_block.execution_order
 
     def name(self, obj):
-        url = reverse('admin:experiments_block_change', args=(obj.to_block.pk,))
-        return mark_safe('<a href="%s">%s</a>' % (url, obj.to_block.name))
+        url = reverse("admin:experiments_block_change", args=(obj.to_block.pk,))
+        return mark_safe('<a href="%s">%s</a>' % (url, obj.to_block.name))  # nosec
 
     def algorithm(self, obj):
         return obj.to_block.algorithm
 
     def analyzer(self, obj):
         return obj.to_block.analyzer
+
     analyzer.boolean = True
 
     def status(self, obj):
@@ -365,15 +366,12 @@ class BlockDependenciesInline(admin.TabularInline):
         return False
 
     def has_add_permission(self, request):
-            return False
+        return False
 
 
 class BlockModelForm(forms.ModelForm):
 
-    command = CodeMirrorJSONCharField(
-        help_text=Messages['json'],
-        readonly=True,
-    )
+    command = CodeMirrorJSONCharField(help_text=Messages["json"], readonly=True,)
 
     class Meta:
         model = BlockModel
@@ -383,34 +381,34 @@ class BlockModelForm(forms.ModelForm):
 class Block(admin.ModelAdmin):
 
     list_display = (
-        'id',
-        'author',
-        'toolchain',
-        'xp',
-        'execution_order',
-        'name',
-        'algorithm',
-        'analyzer',
-        'status',
-        'ins',
-        'outs',
-        'environment',
-        'q'
+        "id",
+        "author",
+        "toolchain",
+        "xp",
+        "execution_order",
+        "name",
+        "algorithm",
+        "analyzer",
+        "status",
+        "ins",
+        "outs",
+        "environment",
+        "q",
     )
 
     search_fields = [
-        'name',
-        'experiment__author__username',
-        'experiment__toolchain__author__username',
-        'experiment__toolchain__name',
-        'experiment__name',
-        'algorithm__author__username',
-        'algorithm__name',
-        'environment__name',
-        'environment__version',
+        "name",
+        "experiment__author__username",
+        "experiment__toolchain__author__username",
+        "experiment__toolchain__name",
+        "experiment__name",
+        "algorithm__author__username",
+        "algorithm__name",
+        "environment__name",
+        "environment__version",
     ]
 
-    list_display_links  = ('id', 'name')
+    list_display_links = ("id", "name")
 
     inlines = [
         BlockDependenciesInline,
@@ -419,11 +417,11 @@ class Block(admin.ModelAdmin):
         BlockDependentsInline,
     ]
 
-    exclude = ['dependencies']
+    exclude = ["dependencies"]
 
     def get_queryset(self, request):
         qs = super(Block, self).get_queryset(request)
-        return qs.annotate(Count('outputs'))
+        return qs.annotate(Count("outputs"))
 
     def author(self, obj):
         return obj.experiment.author
@@ -433,116 +431,106 @@ class Block(admin.ModelAdmin):
 
     def xp(self, obj):
         return obj.experiment.name
-    xp.short_description = 'experiment'
+
+    xp.short_description = "experiment"
 
     def ins(self, obj):
         return obj.inputs.count()
 
     def outs(self, obj):
         return obj.outputs__count
-    outs.admin_order_field = 'outputs__count'
+
+    outs.admin_order_field = "outputs__count"
 
     def q(self, obj):
-        if obj.queue: return obj.queue.name
+        if obj.queue:
+            return obj.queue.name
         return None
-    q.short_description = 'queue'
+
+    q.short_description = "queue"
 
     def get_readonly_fields(self, request, obj=None):
-        return list(self.readonly_fields) + \
-            [field.name for field in obj._meta.fields if field.name != 'command']
+        return list(self.readonly_fields) + [
+            field.name for field in obj._meta.fields if field.name != "command"
+        ]
 
     def has_delete_permission(self, request, obj=None):
         return False
 
     def has_add_permission(self, request):
-            return False
+        return False
 
     form = BlockModelForm
 
     fieldsets = (
-        (None,
-         dict(
-             fields=('id', 'name', 'experiment'),
-         ),
-          ),
-        ('Status and dates',
-         dict(
-             fields=('creation_date', 'start_date', 'end_date', 'status'),
-         ),
-          ),
-        ('Code',
-         dict(
-             classes=('collapse',),
-             fields=('algorithm', 'analyzer',),
-         ),
-          ),
-        ('Backend',
-         dict(
-             classes=('collapse',),
-             fields=('environment', 'queue', 'required_slots', 'channel'),
-         ),
-          ),
-        ('Command',
-         dict(
-             classes=('collapse',),
-             fields=('command',),
-         ),
-          ),
+        (None, dict(fields=("id", "name", "experiment"),),),
+        (
+            "Status and dates",
+            dict(fields=("creation_date", "start_date", "end_date", "status"),),
+        ),
+        ("Code", dict(classes=("collapse",), fields=("algorithm", "analyzer",),),),
+        (
+            "Backend",
+            dict(
+                classes=("collapse",),
+                fields=("environment", "queue", "required_slots", "channel"),
+            ),
+        ),
+        ("Command", dict(classes=("collapse",), fields=("command",),),),
     )
 
+
 admin.site.register(BlockModel, Block)
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class Result(admin.ModelAdmin):
 
-    list_display = ('id', 'cache', 'name', 'type', 'primary', 'data_value')
+    list_display = ("id", "cache", "name", "type", "primary", "data_value")
 
     search_fields = [
-        'name',
-        'cache__hash',
+        "name",
+        "cache__hash",
     ]
 
-    list_display_links  = ('id', 'name')
+    list_display_links = ("id", "name")
 
-    list_select_related = (
-        'cache',
-    )
+    list_select_related = ("cache",)
 
     def get_readonly_fields(self, request, obj=None):
-        return list(self.readonly_fields) + \
-            [field.name for field in obj._meta.fields]
+        return list(self.readonly_fields) + [field.name for field in obj._meta.fields]
 
     def has_delete_permission(self, request, obj=None):
         return False
 
     def has_add_permission(self, request):
-            return False
+        return False
+
 
 admin.site.register(ResultModel, Result)
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 def delete_file_on_fs(modeladmin, request, queryset):
-    '''
+    """
     Delete the files contained in the cache
-    '''
+    """
 
     for obj in queryset:
         obj.delete_files()
 
 
-delete_file_on_fs.short_description = 'Delete files from the cache'
+delete_file_on_fs.short_description = "Delete files from the cache"
 
 
 def cascading_delete_file_on_fs(modeladmin, request, queryset):
-    '''
+    """
     Delete the files contained in the cache
-    '''
+    """
 
     for obj in queryset:
         for block in obj.blocks.all():
@@ -556,108 +544,104 @@ def cascading_delete_file_on_fs(modeladmin, request, queryset):
                         input_.cache.delete_files()
 
 
-cascading_delete_file_on_fs.short_description = 'Delete files from the ' \
-                                                'selected and related caches'
+cascading_delete_file_on_fs.short_description = (
+    "Delete files from the " "selected and related caches"
+)
 
 
 class CachedFile(admin.ModelAdmin):
 
     search_fields = [
-        'hash',
-        'blocks__name',
-        'blocks__experiment__name',
+        "hash",
+        "blocks__name",
+        "blocks__experiment__name",
     ]
 
     list_display = (
-        'id',
-        'hash',
-        'status',
-        'date',
-        'blocks_url',
+        "id",
+        "hash",
+        "status",
+        "date",
+        "blocks_url",
     )
 
-    list_display_links  = ('id', 'hash')
+    list_display_links = ("id", "hash")
 
-    list_filter = ('status', )
+    list_filter = ("status",)
 
     # to avoid very slow loading of cached files
-    raw_id_fields = ('blocks',)
+    raw_id_fields = ("blocks",)
 
     actions = [delete_file_on_fs, cascading_delete_file_on_fs]
 
     def get_queryset(self, request):
         qs = super(CachedFile, self).get_queryset(request)
-        return qs.annotate(date=Max('blocks__start_date'))
+        return qs.annotate(date=Max("blocks__start_date"))
 
     def get_actions(self, request):
         actions = super(CachedFile, self).get_actions(request)
-        if 'delete_selected' in actions:
-            del actions['delete_selected']
+        if "delete_selected" in actions:
+            del actions["delete_selected"]
         return actions
 
     def date(self, obj):
         return obj.date
 
-    date.admin_order_field = '-date'
+    date.admin_order_field = "-date"
 
     def blocks_url(self, obj):
-        retval = '<ul>'
+        retval = "<ul>"
         for block in obj.blocks.all():
-            retval += format_html("<li><a href='{block_url}'>{block_name}</a> @ <a href='{experiment_url}'>{experiment_name}</a> ({block_status})</li>",
-                                  experiment_url=reverse('admin:experiments_experiment_change', args=(block.experiment.id,)),
-                                  experiment_name=block.experiment.fullname(),
-                                  block_url=reverse('admin:experiments_block_change', args=(block.id,)),
-                                  block_name=block.name,
-                                  block_status=block.get_status_display(),
+            retval += format_html(
+                "<li><a href='{block_url}'>{block_name}</a> @ <a href='{experiment_url}'>{experiment_name}</a> ({block_status})</li>",
+                experiment_url=reverse(
+                    "admin:experiments_experiment_change", args=(block.experiment.id,)
+                ),
+                experiment_name=block.experiment.fullname(),
+                block_url=reverse("admin:experiments_block_change", args=(block.id,)),
+                block_name=block.name,
+                block_status=block.get_status_display(),
             )
-        return retval + '</ul>'
+        return retval + "</ul>"
 
     blocks_url.short_description = "Blocks"
     blocks_url.allow_tags = True
 
-
     fieldsets = (
-        (None,
-         dict(
-             fields=('hash', 'status', 'blocks',)
-         ),
-          ),
-        ('Logging',
-         dict(
-             fields=('error_report', 'stderr', 'stdout'),
-         ),
-          ),
-        ('Performance',
-         dict(
-             classes=('collapse',),
-             fields=(
-                 'linear_execution_time',
-                  'speed_up_real',
-                  'speed_up_maximal',
-                  'cpu_time',
-                  'max_memory',
-                  'queuing_time',
-                  'data_read_time',
-                  'data_read_size',
-                  'data_read_nb_blocks',
-                  'data_written_time',
-                  'data_written_size',
-                  'data_written_nb_blocks',
-             ),
-         ),
-          ),
+        (None, dict(fields=("hash", "status", "blocks",)),),
+        ("Logging", dict(fields=("error_report", "stderr", "stdout"),),),
+        (
+            "Performance",
+            dict(
+                classes=("collapse",),
+                fields=(
+                    "linear_execution_time",
+                    "speed_up_real",
+                    "speed_up_maximal",
+                    "cpu_time",
+                    "max_memory",
+                    "queuing_time",
+                    "data_read_time",
+                    "data_read_size",
+                    "data_read_nb_blocks",
+                    "data_written_time",
+                    "data_written_size",
+                    "data_written_nb_blocks",
+                ),
+            ),
+        ),
     )
 
-    readonly_fields = ['blocks']
+    readonly_fields = ["blocks"]
 
     def get_readonly_fields(self, request, obj=None):
-        return list(self.readonly_fields) + \
-            [field.name for field in obj._meta.fields]
+        return list(self.readonly_fields) + [field.name for field in obj._meta.fields]
 
     def has_delete_permission(self, request, obj=None):
         return False
 
     def has_add_permission(self, request):
-            return False
+        return False
+
 
 admin.site.register(CachedFileModel, CachedFile)
diff --git a/beat/web/experiments/api.py b/beat/web/experiments/api.py
index aa947acb3fe0a2ecfceb8f008a5c4ebe0e439e00..fcd84927251d7adb4090c0bc34eec9bde63f0ebd 100755
--- a/beat/web/experiments/api.py
+++ b/beat/web/experiments/api.py
@@ -25,48 +25,42 @@
 #                                                                             #
 ###############################################################################
 
-import simplejson
 import functools
 
+import simplejson
 from django.conf import settings
-from django.shortcuts import get_object_or_404
-
 from django.core.exceptions import ValidationError
-
-from rest_framework.response import Response
-from rest_framework import permissions
+from django.shortcuts import get_object_or_404
 from rest_framework import generics
+from rest_framework import permissions
 from rest_framework import serializers
-from rest_framework.views import APIView
-from rest_framework.reverse import reverse
 from rest_framework.exceptions import ParseError
+from rest_framework.response import Response
+from rest_framework.reverse import reverse
+from rest_framework.views import APIView
 
-import beat.core.hash
 import beat.core.algorithm
+import beat.core.hash
 import beat.core.toolchain
 
-from .models import Experiment
-from .serializers import (
-    ExperimentSerializer,
-    ExperimentResultsSerializer,
-    ExperimentCreationSerializer,
-)
-from .permissions import IsDatabaseAccessible
-
-from ..common.responses import BadRequestResponse, ForbiddenResponse
-from ..common.api import (
-    ShareView,
-    ListContributionView,
-    ListCreateContributionView,
-    RetrieveUpdateDestroyContributionView,
-)
-from ..common.mixins import CommonContextMixin
+from ..common.api import ListContributionView
+from ..common.api import ListCreateContributionView
+from ..common.api import RetrieveUpdateDestroyContributionView
+from ..common.api import ShareView
 from ..common.exceptions import ShareError
+from ..common.mixins import CommonContextMixin
+from ..common.responses import BadRequestResponse
+from ..common.responses import ForbiddenResponse
 from ..common.serializers import SharingSerializer
-from ..common.utils import validate_restructuredtext, ensure_html, py3_cmp
-
+from ..common.utils import ensure_html
+from ..common.utils import py3_cmp
+from ..common.utils import validate_restructuredtext
 from ..toolchains.models import Toolchain
-
+from .models import Experiment
+from .permissions import IsDatabaseAccessible
+from .serializers import ExperimentCreationSerializer
+from .serializers import ExperimentResultsSerializer
+from .serializers import ExperimentSerializer
 
 # ----------------------------------------------------------
 
diff --git a/beat/web/experiments/apps.py b/beat/web/experiments/apps.py
index 6392aa6ee0362a37b0b5a503548e552ac0fe0e93..1c188d63b8310e0475a2b732a8568bc2dde27a3a 100644
--- a/beat/web/experiments/apps.py
+++ b/beat/web/experiments/apps.py
@@ -25,15 +25,19 @@
 #                                                                             #
 ###############################################################################
 
-from ..common.apps import CommonAppConfig
 from django.utils.translation import ugettext_lazy as _
 
+from ..common.apps import CommonAppConfig
+
+
 class ExperimentsConfig(CommonAppConfig):
-    name = 'beat.web.experiments'
-    verbose_name = _('Experiments')
+    name = "beat.web.experiments"
+    verbose_name = _("Experiments")
 
     def ready(self):
         super(ExperimentsConfig, self).ready()
-        from .signals import on_team_delete
         from actstream import registry
-        registry.register(self.get_model('Experiment'))
+
+        from .signals import on_team_delete  # noqa: F401
+
+        registry.register(self.get_model("Experiment"))
diff --git a/beat/web/experiments/management/commands/cleanup_orphaned_caches.py b/beat/web/experiments/management/commands/cleanup_orphaned_caches.py
index 8e02872caa49699cb2c1400dd3ca8c9f1a10a316..e19698014c21249e5936865116dbbab2b9046385 100644
--- a/beat/web/experiments/management/commands/cleanup_orphaned_caches.py
+++ b/beat/web/experiments/management/commands/cleanup_orphaned_caches.py
@@ -27,37 +27,45 @@
 
 
 import logging
-logger = logging.getLogger(__name__)
 
 from django.core.management.base import BaseCommand
 
 from ... import utils
 
+logger = logging.getLogger(__name__)
+
 
 class Command(BaseCommand):
 
-    help = 'Sets and resets queue configurations'
-
+    help = "Sets and resets queue configurations"
 
     def add_arguments(self, parser):
 
-        parser.add_argument('--delete', action='store_true', dest='delete',
-                            default=False, help='Really deletes the CachedFiles - ' \
-                            'otherwise only displays what would be deleted')
+        parser.add_argument(
+            "--delete",
+            action="store_true",
+            dest="delete",
+            default=False,
+            help="Really deletes the CachedFiles - "
+            "otherwise only displays what would be deleted",
+        )
 
     def handle(self, *ignored, **arguments):
 
         # Setup this command's logging level
         global logger
-        arguments['verbosity'] = int(arguments['verbosity'])
-        if arguments['verbosity'] >= 1:
-            if arguments['verbosity'] == 1: logger.setLevel(logging.INFO)
-            elif arguments['verbosity'] >= 2: logger.setLevel(logging.DEBUG)
+        arguments["verbosity"] = int(arguments["verbosity"])
+        if arguments["verbosity"] >= 1:
+            if arguments["verbosity"] == 1:
+                logger.setLevel(logging.INFO)
+            elif arguments["verbosity"] >= 2:
+                logger.setLevel(logging.DEBUG)
 
-        if arguments['delete']:
+        if arguments["delete"]:
             utils.cleanup_orphaned_cachedfiles()
 
         else:
-            l = utils.list_orphaned_cachedfiles()
-            for c in l: print(c)
-            print('%d CachedFiles are unreacheable' % len(l))
+            orphaned_files = utils.list_orphaned_cachedfiles()
+            for file_ in orphaned_files:
+                print(file_)
+            print("%d CachedFiles are unreacheable" % len(orphaned_files))
diff --git a/beat/web/experiments/migrations/0001_initial.py b/beat/web/experiments/migrations/0001_initial.py
index b5cbfb1c7bee401ae7e7cf88fd5f361308096519..d343f1368919099f1e0b22f50d856c3e268bd11f 100644
--- a/beat/web/experiments/migrations/0001_initial.py
+++ b/beat/web/experiments/migrations/0001_initial.py
@@ -27,104 +27,272 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, models
 from django.conf import settings
-import beat.web.experiments.models
+from django.db import migrations
+from django.db import models
+
 import beat.web.common.models
+import beat.web.experiments.models
 
 
 class Migration(migrations.Migration):
 
     dependencies = [
         migrations.swappable_dependency(settings.AUTH_USER_MODEL),
-        ('databases', '0001_initial'),
-        ('algorithms', '0001_initial'),
-        ('toolchains', '0001_initial'),
-        ('backend', '0001_initial'),
-        ('team', '0001_initial'),
+        ("databases", "0001_initial"),
+        ("algorithms", "0001_initial"),
+        ("toolchains", "0001_initial"),
+        ("backend", "0001_initial"),
+        ("team", "0001_initial"),
     ]
 
     operations = [
         migrations.CreateModel(
-            name='Block',
+            name="Block",
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('name', models.CharField(max_length=200)),
-                ('status', models.CharField(default='N', max_length=1, choices=[('N', 'Not cached'), ('P', 'Processing'), ('C', 'Cached'), ('F', 'Failed')])),
-                ('analyzer', models.BooleanField(default=False)),
-                ('creation_date', models.DateTimeField(auto_now_add=True, null=True)),
-                ('start_date', models.DateTimeField(null=True, blank=True)),
-                ('end_date', models.DateTimeField(null=True, blank=True)),
-                ('algorithm', models.ForeignKey(related_name='blocks', to='algorithms.Algorithm', on_delete=models.CASCADE)),
-                ('environment', models.ForeignKey(related_name='blocks', to='backend.Environment', null=True, on_delete=models.SET_NULL)),
+                (
+                    "id",
+                    models.AutoField(
+                        verbose_name="ID",
+                        serialize=False,
+                        auto_created=True,
+                        primary_key=True,
+                    ),
+                ),
+                ("name", models.CharField(max_length=200)),
+                (
+                    "status",
+                    models.CharField(
+                        default="N",
+                        max_length=1,
+                        choices=[
+                            ("N", "Not cached"),
+                            ("P", "Processing"),
+                            ("C", "Cached"),
+                            ("F", "Failed"),
+                        ],
+                    ),
+                ),
+                ("analyzer", models.BooleanField(default=False)),
+                ("creation_date", models.DateTimeField(auto_now_add=True, null=True)),
+                ("start_date", models.DateTimeField(null=True, blank=True)),
+                ("end_date", models.DateTimeField(null=True, blank=True)),
+                (
+                    "algorithm",
+                    models.ForeignKey(
+                        related_name="blocks",
+                        to="algorithms.Algorithm",
+                        on_delete=models.CASCADE,
+                    ),
+                ),
+                (
+                    "environment",
+                    models.ForeignKey(
+                        related_name="blocks",
+                        to="backend.Environment",
+                        null=True,
+                        on_delete=models.SET_NULL,
+                    ),
+                ),
             ],
         ),
         migrations.CreateModel(
-            name='CachedFile',
+            name="CachedFile",
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('hash', models.CharField(unique=True, max_length=64)),
-                ('linear_execution_time', models.FloatField(default=0.0)),
-                ('speed_up_real', models.FloatField(default=0.0)),
-                ('speed_up_maximal', models.FloatField(default=0.0)),
-                ('queuing_time', models.FloatField(default=0.0)),
-                ('stdout', models.TextField(null=True, blank=True)),
-                ('stderr', models.TextField(null=True, blank=True)),
-                ('error_report', models.TextField(null=True, blank=True)),
-                ('cpu_time', models.FloatField(default=0.0)),
-                ('max_memory', models.BigIntegerField(default=0)),
-                ('data_read_size', models.BigIntegerField(default=0)),
-                ('data_read_nb_blocks', models.IntegerField(default=0)),
-                ('data_read_time', models.FloatField(default=0.0)),
-                ('data_written_size', models.BigIntegerField(default=0)),
-                ('data_written_nb_blocks', models.IntegerField(default=0)),
-                ('data_written_time', models.FloatField(default=0.0)),
-                ('blocks', models.ManyToManyField(related_name='hashes', to='experiments.Block', blank=True)),
+                (
+                    "id",
+                    models.AutoField(
+                        verbose_name="ID",
+                        serialize=False,
+                        auto_created=True,
+                        primary_key=True,
+                    ),
+                ),
+                ("hash", models.CharField(unique=True, max_length=64)),
+                ("linear_execution_time", models.FloatField(default=0.0)),
+                ("speed_up_real", models.FloatField(default=0.0)),
+                ("speed_up_maximal", models.FloatField(default=0.0)),
+                ("queuing_time", models.FloatField(default=0.0)),
+                ("stdout", models.TextField(null=True, blank=True)),
+                ("stderr", models.TextField(null=True, blank=True)),
+                ("error_report", models.TextField(null=True, blank=True)),
+                ("cpu_time", models.FloatField(default=0.0)),
+                ("max_memory", models.BigIntegerField(default=0)),
+                ("data_read_size", models.BigIntegerField(default=0)),
+                ("data_read_nb_blocks", models.IntegerField(default=0)),
+                ("data_read_time", models.FloatField(default=0.0)),
+                ("data_written_size", models.BigIntegerField(default=0)),
+                ("data_written_nb_blocks", models.IntegerField(default=0)),
+                ("data_written_time", models.FloatField(default=0.0)),
+                (
+                    "blocks",
+                    models.ManyToManyField(
+                        related_name="hashes", to="experiments.Block", blank=True
+                    ),
+                ),
             ],
         ),
         migrations.CreateModel(
-            name='Experiment',
+            name="Experiment",
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('sharing', models.CharField(default='P', max_length=1, choices=[('P', 'Private'), ('S', 'Shared'), ('A', 'Public'), ('U', 'Usable')])),
-                ('name', models.CharField(max_length=200)),
-                ('short_description', models.CharField(default='', help_text='Describe the object succinctly (try to keep it under 80 characters)', max_length=100, blank=True)),
-                ('status', models.CharField(default='P', max_length=1, choices=[('P', 'Pending'), ('S', 'Scheduled'), ('R', 'Running'), ('D', 'Done'), ('F', 'Failed'), ('C', 'Canceling')])),
-                ('creation_date', models.DateTimeField(auto_now_add=True, null=True)),
-                ('start_date', models.DateTimeField(null=True, blank=True)),
-                ('end_date', models.DateTimeField(null=True, blank=True)),
-                ('declaration_file', models.FileField(db_column='declaration', upload_to=beat.web.common.models.get_contribution_declaration_filename, storage=beat.web.experiments.models.DeclarationStorage(), max_length=300, blank=True, null=True)),
-                ('description_file', models.FileField(db_column='description', upload_to=beat.web.common.models.get_contribution_description_filename, storage=beat.web.experiments.models.DeclarationStorage(), max_length=300, blank=True, null=True)),
-                ('hash', models.CharField(max_length=64)),
-                ('author', models.ForeignKey(related_name='experiments', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
-                ('referenced_algorithms', models.ManyToManyField(related_name='experiments', to='algorithms.Algorithm', blank=True)),
-                ('referenced_datasets', models.ManyToManyField(related_name='experiments', to='databases.DatabaseSet', blank=True)),
-                ('shared_with', models.ManyToManyField(related_name='shared_experiments', to=settings.AUTH_USER_MODEL, blank=True)),
-                ('shared_with_team', models.ManyToManyField(related_name='shared_experiments', to='team.Team', blank=True)),
-                ('toolchain', models.ForeignKey(related_name='experiments', to='toolchains.Toolchain', on_delete=models.CASCADE)),
+                (
+                    "id",
+                    models.AutoField(
+                        verbose_name="ID",
+                        serialize=False,
+                        auto_created=True,
+                        primary_key=True,
+                    ),
+                ),
+                (
+                    "sharing",
+                    models.CharField(
+                        default="P",
+                        max_length=1,
+                        choices=[
+                            ("P", "Private"),
+                            ("S", "Shared"),
+                            ("A", "Public"),
+                            ("U", "Usable"),
+                        ],
+                    ),
+                ),
+                ("name", models.CharField(max_length=200)),
+                (
+                    "short_description",
+                    models.CharField(
+                        default="",
+                        help_text="Describe the object succinctly (try to keep it under 80 characters)",
+                        max_length=100,
+                        blank=True,
+                    ),
+                ),
+                (
+                    "status",
+                    models.CharField(
+                        default="P",
+                        max_length=1,
+                        choices=[
+                            ("P", "Pending"),
+                            ("S", "Scheduled"),
+                            ("R", "Running"),
+                            ("D", "Done"),
+                            ("F", "Failed"),
+                            ("C", "Canceling"),
+                        ],
+                    ),
+                ),
+                ("creation_date", models.DateTimeField(auto_now_add=True, null=True)),
+                ("start_date", models.DateTimeField(null=True, blank=True)),
+                ("end_date", models.DateTimeField(null=True, blank=True)),
+                (
+                    "declaration_file",
+                    models.FileField(
+                        db_column="declaration",
+                        upload_to=beat.web.common.models.get_contribution_declaration_filename,
+                        storage=beat.web.experiments.models.DeclarationStorage(),
+                        max_length=300,
+                        blank=True,
+                        null=True,
+                    ),
+                ),
+                (
+                    "description_file",
+                    models.FileField(
+                        db_column="description",
+                        upload_to=beat.web.common.models.get_contribution_description_filename,
+                        storage=beat.web.experiments.models.DeclarationStorage(),
+                        max_length=300,
+                        blank=True,
+                        null=True,
+                    ),
+                ),
+                ("hash", models.CharField(max_length=64)),
+                (
+                    "author",
+                    models.ForeignKey(
+                        related_name="experiments",
+                        to=settings.AUTH_USER_MODEL,
+                        on_delete=models.CASCADE,
+                    ),
+                ),
+                (
+                    "referenced_algorithms",
+                    models.ManyToManyField(
+                        related_name="experiments",
+                        to="algorithms.Algorithm",
+                        blank=True,
+                    ),
+                ),
+                (
+                    "referenced_datasets",
+                    models.ManyToManyField(
+                        related_name="experiments",
+                        to="databases.DatabaseSet",
+                        blank=True,
+                    ),
+                ),
+                (
+                    "shared_with",
+                    models.ManyToManyField(
+                        related_name="shared_experiments",
+                        to=settings.AUTH_USER_MODEL,
+                        blank=True,
+                    ),
+                ),
+                (
+                    "shared_with_team",
+                    models.ManyToManyField(
+                        related_name="shared_experiments", to="team.Team", blank=True
+                    ),
+                ),
+                (
+                    "toolchain",
+                    models.ForeignKey(
+                        related_name="experiments",
+                        to="toolchains.Toolchain",
+                        on_delete=models.CASCADE,
+                    ),
+                ),
             ],
-            options={
-                'ordering': ['-creation_date'],
-            },
+            options={"ordering": ["-creation_date"]},
         ),
         migrations.CreateModel(
-            name='Result',
+            name="Result",
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('name', models.CharField(max_length=200)),
-                ('type', models.CharField(max_length=200)),
-                ('primary', models.BooleanField(default=False)),
-                ('data_value', models.TextField(null=True, blank=True)),
-                ('block', models.ForeignKey(related_name='results', to='experiments.Block', on_delete=models.CASCADE)),
+                (
+                    "id",
+                    models.AutoField(
+                        verbose_name="ID",
+                        serialize=False,
+                        auto_created=True,
+                        primary_key=True,
+                    ),
+                ),
+                ("name", models.CharField(max_length=200)),
+                ("type", models.CharField(max_length=200)),
+                ("primary", models.BooleanField(default=False)),
+                ("data_value", models.TextField(null=True, blank=True)),
+                (
+                    "block",
+                    models.ForeignKey(
+                        related_name="results",
+                        to="experiments.Block",
+                        on_delete=models.CASCADE,
+                    ),
+                ),
             ],
         ),
         migrations.AddField(
-            model_name='block',
-            name='experiment',
-            field=models.ForeignKey(related_name='blocks', to='experiments.Experiment', on_delete=models.CASCADE),
+            model_name="block",
+            name="experiment",
+            field=models.ForeignKey(
+                related_name="blocks",
+                to="experiments.Experiment",
+                on_delete=models.CASCADE,
+            ),
         ),
         migrations.AlterUniqueTogether(
-            name='experiment',
-            unique_together=set([('author', 'toolchain', 'name')]),
+            name="experiment", unique_together=set([("author", "toolchain", "name")]),
         ),
     ]
diff --git a/beat/web/experiments/migrations/0002_scheduler_addons.py b/beat/web/experiments/migrations/0002_scheduler_addons.py
index 9d0394066bdc33a363f1f6b30592d02d6b5fe197..24a10cd729f08fc6a73bdafbb91a9559a4606376 100644
--- a/beat/web/experiments/migrations/0002_scheduler_addons.py
+++ b/beat/web/experiments/migrations/0002_scheduler_addons.py
@@ -28,18 +28,20 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, models
+from django.db import migrations
+from django.db import models
 
 
 def move_result_to_cache(apps, schema_editor):
-    '''Moves the result association from the block to the related cache file'''
+    """Moves the result association from the block to the related cache file"""
 
     Result = apps.get_model("experiments", "Result")
 
     total = Result.objects.count()
-    if total: print('')
-    for i, r in enumerate(Result.objects.order_by('-id')):
-        print("Resetting result (%d) %d/%d..." % (r.id, i+1, total))
+    if total:
+        print("")
+    for i, r in enumerate(Result.objects.order_by("-id")):
+        print("Resetting result (%d) %d/%d..." % (r.id, i + 1, total))
         r.cache = r.block.hashes.first()
         r.save()
 
@@ -47,15 +49,19 @@ def move_result_to_cache(apps, schema_editor):
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0001_initial'),
+        ("experiments", "0001_initial"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='result',
-            name='cache',
-            field=models.ForeignKey(related_name='results',
-                                    to='experiments.CachedFile', null=True, on_delete=models.SET_NULL),
+            model_name="result",
+            name="cache",
+            field=models.ForeignKey(
+                related_name="results",
+                to="experiments.CachedFile",
+                null=True,
+                on_delete=models.SET_NULL,
+            ),
         ),
         migrations.RunPython(move_result_to_cache),
     ]
diff --git a/beat/web/experiments/migrations/0003_scheduler_addons_2.py b/beat/web/experiments/migrations/0003_scheduler_addons_2.py
index ac59b9bec5b370a50a805f3d02fd68e6c0c54996..cd8cf49984fe92edc50e89d329e18e03a7910b4d 100644
--- a/beat/web/experiments/migrations/0003_scheduler_addons_2.py
+++ b/beat/web/experiments/migrations/0003_scheduler_addons_2.py
@@ -32,25 +32,28 @@ from django.db import migrations
 
 
 def dedup_resuls(apps, schema_editor):
-    '''Deletes duplicated results (older ones)'''
+    """Deletes duplicated results (older ones)"""
 
     Result = apps.get_model("experiments", "Result")
 
-    for i, r in enumerate(Result.objects.order_by('-id')):
-        older = Result.objects.filter(name=r.name, id__lt=r.id,
-                                      cache=r.block.hashes.first())
+    for i, r in enumerate(Result.objects.order_by("-id")):
+        older = Result.objects.filter(
+            name=r.name, id__lt=r.id, cache=r.block.hashes.first()
+        )
         if older:
-            print("Cache %s already contains Result `%s' - keeping " \
-                  "newest (out of %d)..." % (r.block.hashes.first().hash, r.name,
-                                             older.count()+1))
+            print(
+                "Cache %s already contains Result `%s' - keeping "
+                "newest (out of %d)..."
+                % (r.block.hashes.first().hash, r.name, older.count() + 1)
+            )
             older.delete()
 
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0002_scheduler_addons'),
-        ('search', '0002_scheduler_addons'),
+        ("experiments", "0002_scheduler_addons"),
+        ("search", "0002_scheduler_addons"),
     ]
 
     operations = [
diff --git a/beat/web/experiments/migrations/0004_scheduler_addons_3.py b/beat/web/experiments/migrations/0004_scheduler_addons_3.py
index 2e573ad2c03f602f614a91959614dd8f3f8b237a..b8341bfbfe305b887dedb735caaec95d9e4b0a33 100644
--- a/beat/web/experiments/migrations/0004_scheduler_addons_3.py
+++ b/beat/web/experiments/migrations/0004_scheduler_addons_3.py
@@ -28,100 +28,144 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, models
+from django.db import migrations
+from django.db import models
+
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('backend', '0002_scheduler_addons'),
-        ('databases', '0002_scheduler_addons'),
-        ('experiments', '0003_scheduler_addons_2'),
+        ("backend", "0002_scheduler_addons"),
+        ("databases", "0002_scheduler_addons"),
+        ("experiments", "0003_scheduler_addons_2"),
     ]
 
     operations = [
         migrations.AlterUniqueTogether(
-            name='result',
-            unique_together=set([('cache', 'name')]),
-        ),
-        migrations.RemoveField(
-            model_name='result',
-            name='block',
+            name="result", unique_together=set([("cache", "name")]),
         ),
+        migrations.RemoveField(model_name="result", name="block",),
         migrations.CreateModel(
-            name='BlockInput',
+            name="BlockInput",
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False,
-                                        auto_created=True, primary_key=True)),
-                ('channel', models.CharField(default=b'',
-                                             help_text=b'Synchronization channel within the toolchain',
-                                             max_length=200, blank=True)),
-                ('block', models.ForeignKey(related_name='inputs',
-                                            to='experiments.Block', null=True, on_delete=models.CASCADE)),
-                ('cache', models.ForeignKey(related_name='inputs',
-                                            to='experiments.CachedFile', null=True, on_delete=models.CASCADE)),
-                ('database', models.ForeignKey(related_name='blocks',
-                                               to='databases.DatabaseSetOutput', null=True, on_delete=models.CASCADE)),
+                (
+                    "id",
+                    models.AutoField(
+                        verbose_name="ID",
+                        serialize=False,
+                        auto_created=True,
+                        primary_key=True,
+                    ),
+                ),
+                (
+                    "channel",
+                    models.CharField(
+                        default=b"",
+                        help_text=b"Synchronization channel within the toolchain",
+                        max_length=200,
+                        blank=True,
+                    ),
+                ),
+                (
+                    "block",
+                    models.ForeignKey(
+                        related_name="inputs",
+                        to="experiments.Block",
+                        null=True,
+                        on_delete=models.CASCADE,
+                    ),
+                ),
+                (
+                    "cache",
+                    models.ForeignKey(
+                        related_name="inputs",
+                        to="experiments.CachedFile",
+                        null=True,
+                        on_delete=models.CASCADE,
+                    ),
+                ),
+                (
+                    "database",
+                    models.ForeignKey(
+                        related_name="blocks",
+                        to="databases.DatabaseSetOutput",
+                        null=True,
+                        on_delete=models.CASCADE,
+                    ),
+                ),
             ],
         ),
         migrations.AddField(
-            model_name='block',
-            name='channel',
-            field=models.CharField(default=b'',
-                                   help_text=b'Synchronization channel within the toolchain',
-                                   max_length=200, blank=True),
+            model_name="block",
+            name="channel",
+            field=models.CharField(
+                default=b"",
+                help_text=b"Synchronization channel within the toolchain",
+                max_length=200,
+                blank=True,
+            ),
         ),
         migrations.AddField(
-            model_name='block',
-            name='command',
+            model_name="block",
+            name="command",
             field=models.TextField(null=True, blank=True),
         ),
         migrations.AddField(
-            model_name='block',
-            name='dependencies',
-            field=models.ManyToManyField(related_name='dependents',
-                                         to='experiments.Block', blank=True),
+            model_name="block",
+            name="dependencies",
+            field=models.ManyToManyField(
+                related_name="dependents", to="experiments.Block", blank=True
+            ),
         ),
         migrations.AlterField(
-            model_name='block',
-            name='environment',
-            field=models.ForeignKey(related_name='blocks',
-                                    on_delete=models.deletion.SET_NULL, to='backend.Environment',
-                                    null=True),
+            model_name="block",
+            name="environment",
+            field=models.ForeignKey(
+                related_name="blocks",
+                on_delete=models.deletion.SET_NULL,
+                to="backend.Environment",
+                null=True,
+            ),
         ),
         migrations.AddField(
-            model_name='block',
-            name='queue',
-            field=models.ForeignKey(related_name='blocks',
-                                    on_delete=models.deletion.SET_NULL, to='backend.Queue',
-                                    null=True),
+            model_name="block",
+            name="queue",
+            field=models.ForeignKey(
+                related_name="blocks",
+                on_delete=models.deletion.SET_NULL,
+                to="backend.Queue",
+                null=True,
+            ),
         ),
         migrations.AddField(
-            model_name='block',
-            name='required_slots',
+            model_name="block",
+            name="required_slots",
             field=models.PositiveIntegerField(default=1),
         ),
         migrations.AlterField(
-            model_name='block',
-            name='status',
-            field=models.CharField(default=b'N', max_length=1,
-                                   choices=[
-                                       (b'N', b'Not cached'),
-                                       (b'P', b'Processing'),
-                                       (b'C', b'Cached'),
-                                       (b'F', b'Failed'),
-                                       (b'S', b'Skipped'),
-                                       (b'L', b'Cancelled'),
-                                   ]
+            model_name="block",
+            name="status",
+            field=models.CharField(
+                default=b"N",
+                max_length=1,
+                choices=[
+                    (b"N", b"Not cached"),
+                    (b"P", b"Processing"),
+                    (b"C", b"Cached"),
+                    (b"F", b"Failed"),
+                    (b"S", b"Skipped"),
+                    (b"L", b"Cancelled"),
+                ],
             ),
         ),
         migrations.AlterUniqueTogether(
-            name='block',
-            unique_together=set([('experiment', 'name')]),
+            name="block", unique_together=set([("experiment", "name")]),
         ),
         migrations.AlterField(
-            model_name='cachedfile',
-            name='blocks',
-            field=models.ManyToManyField(related_name='outputs',
-                                         to='experiments.Block', blank=True),
+            model_name="cachedfile",
+            name="blocks",
+            field=models.ManyToManyField(
+                related_name="outputs", to="experiments.Block", blank=True
+            ),
         ),
     ]
diff --git a/beat/web/experiments/migrations/0005_scheduler_addons_4.py b/beat/web/experiments/migrations/0005_scheduler_addons_4.py
index 26100a727a81649acfc007e74a68a68c33a8be98..a40c7a871848ad82816b147cf0b222dd3602e256 100644
--- a/beat/web/experiments/migrations/0005_scheduler_addons_4.py
+++ b/beat/web/experiments/migrations/0005_scheduler_addons_4.py
@@ -28,16 +28,18 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, utils
+import simplejson
 from django.conf import settings
+from django.db import migrations
+from django.db import utils
 
-import simplejson
 import beat.core.experiment
+
 from ...common import storage
 
 
 def reset_blocks(apps, schema_editor):
-    '''Resets block dependencies and queue relationship'''
+    """Resets block dependencies and queue relationship"""
 
     Experiment = apps.get_model("experiments", "Experiment")
     Block = apps.get_model("experiments", "Block")
@@ -50,9 +52,9 @@ def reset_blocks(apps, schema_editor):
     Result = apps.get_model("experiments", "Result")
 
     total = Experiment.objects.count()
-    for i, e in enumerate(Experiment.objects.order_by('id')):
+    for i, e in enumerate(Experiment.objects.order_by("id")):
 
-        fullname = '%s/%s/%s/%d/%s' % (
+        fullname = "%s/%s/%s/%d/%s" % (
             e.author.username,
             e.toolchain.author.username,
             e.toolchain.name,
@@ -60,21 +62,24 @@ def reset_blocks(apps, schema_editor):
             e.name,
         )
 
-        print("Updating blocks for experiment %d/%d (%s, id=%d)..." % \
-              (i+1, total, fullname, e.id))
+        print(
+            "Updating blocks for experiment %d/%d (%s, id=%d)..."
+            % (i + 1, total, fullname, e.id)
+        )
 
-        xp_decl = simplejson.loads(storage.get_file_content(e,
-                                                            'declaration_file'))
-        tc_decl = simplejson.loads(storage.get_file_content(e.toolchain,
-                                                            'declaration_file'))
+        xp_decl = simplejson.loads(storage.get_file_content(e, "declaration_file"))
+        tc_decl = simplejson.loads(
+            storage.get_file_content(e.toolchain, "declaration_file")
+        )
 
-        xp = beat.core.experiment.Experiment(settings.PREFIX, (xp_decl,
-                                                               tc_decl))
+        xp = beat.core.experiment.Experiment(settings.PREFIX, (xp_decl, tc_decl))
 
         if xp.errors:
-            message = "The experiment `%s' isn't valid (skipping " \
+            message = (
+                "The experiment `%s' isn't valid (skipping "
                 "block update), due to the following errors:\n  * %s"
-            print(message % (fullname, '\n * '.join(xp.errors)))
+            )
+            print(message % (fullname, "\n * ".join(xp.errors)))
             continue
 
         # Loads the experiment execution description, creating the Block's,
@@ -82,17 +87,21 @@ def reset_blocks(apps, schema_editor):
         for block_name, description in xp.setup().items():
 
             # Checks that the Queue/Environment exists
-            job_description = description['configuration']
+            job_description = description["configuration"]
 
             env = Environment.objects.filter(
-                name=job_description['environment']['name'],
-                version=job_description['environment']['version'],
+                name=job_description["environment"]["name"],
+                version=job_description["environment"]["version"],
             )
 
             if not env:
-                print("Cannot find environment `%s (%s)' - not setting" % \
-                      (job_description['environment']['name'],
-                       job_description['environment']['version']))
+                print(
+                    "Cannot find environment `%s (%s)' - not setting"
+                    % (
+                        job_description["environment"]["name"],
+                        job_description["environment"]["version"],
+                    )
+                )
                 env = None
             else:
                 env = env[0]
@@ -100,69 +109,70 @@ def reset_blocks(apps, schema_editor):
             # Search for queue that contains a specific environment
             # notice we don't require environment to exist in relation to
             # the queue as it may have been removed already.
-            queue = Queue.objects.filter(name=job_description['queue'])
+            queue = Queue.objects.filter(name=job_description["queue"])
             if not queue:
-                print("Cannot find queue `%s'" % job_description['queue'])
+                print("Cannot find queue `%s'" % job_description["queue"])
                 queue = None
             else:
                 queue = queue[0]
 
-            parts = job_description['algorithm'].split('/')
+            parts = job_description["algorithm"].split("/")
             algorithm = Algorithm.objects.get(
-                author__username=parts[0],
-                name=parts[1],
-                version=parts[2],
+                author__username=parts[0], name=parts[1], version=parts[2],
             )
 
             # Ties the block in
-            slots = job_description.get('nb_slots')
-
             try:
-                b, _ = Block.objects.get_or_create(experiment=e,
-                                                   name=block_name, algorithm=algorithm)
-            except utils.IntegrityError as exc:
-                print("Block `%s' for experiment `%s' already exists - " \
-                      "modifying entry for migration purposes. This " \
-                      "issue is due a misconnection on the toolchain level " \
-                      "(known case: tpereira/full_isv/2)" % \
-                      (block_name, fullname))
+                b, _ = Block.objects.get_or_create(
+                    experiment=e, name=block_name, algorithm=algorithm
+                )
+            except utils.IntegrityError:
+                print(
+                    "Block `%s' for experiment `%s' already exists - "
+                    "modifying entry for migration purposes. This "
+                    "issue is due to a misconnection on the toolchain level "
+                    "(known case: tpereira/full_isv/2)" % (block_name, fullname)
+                )
                 b = Block.objects.get(experiment=e, name=block_name)
 
-            b.command=simplejson.dumps(job_description, indent=4)
-            b.status='N' if (e.status == 'P') else b.status
-            b.environment=env
-            b.queue=queue
+            b.command = simplejson.dumps(job_description, indent=4)
+            b.status = "N" if (e.status == "P") else b.status
+            b.environment = env
+            b.queue = queue
             b.algorithm = algorithm
-            b.analyzer = (algorithm.result_dataformat is not None)
-            b.required_slots=job_description['nb_slots']
-            b.channel=job_description['channel']
+            b.analyzer = algorithm.result_dataformat is not None
+            b.required_slots = job_description["nb_slots"]
+            b.channel = job_description["channel"]
             b.save()
 
             # from this point: requires block to have an assigned id
-            b.dependencies.add(*[e.blocks.get(name=k) \
-                                 for k in description['dependencies']])
+            b.dependencies.add(
+                *[e.blocks.get(name=k) for k in description["dependencies"]]
+            )
 
             # reset inputs and outputs - creates if necessary only
-            for v in job_description['inputs'].values():
-                if 'database' in v: #database input
-                    db = DatabaseSetOutput.objects.get(hash=v['hash'])
-                    BlockInput.objects.get_or_create(block=b,
-                                                     channel=v['channel'], database=db)
+            for v in job_description["inputs"].values():
+                if "database" in v:  # database input
+                    db = DatabaseSetOutput.objects.get(hash=v["hash"])
+                    BlockInput.objects.get_or_create(
+                        block=b, channel=v["channel"], database=db
+                    )
                 else:
-                    cache = CachedFile.objects.get(hash=v['hash'])
-                    BlockInput.objects.get_or_create(block=b,
-                                                     channel=v['channel'], cache=cache)
+                    cache = CachedFile.objects.get(hash=v["hash"])
+                    BlockInput.objects.get_or_create(
+                        block=b, channel=v["channel"], cache=cache
+                    )
 
             current = list(b.outputs.all())
-            b.outputs.clear() #dissociates all current outputs
-            outputs = job_description.get('outputs',
-                                          {'': job_description.get('result')})
+            b.outputs.clear()  # dissociates all current outputs
+            outputs = job_description.get(
+                "outputs", {"": job_description.get("result")}
+            )
             for v in outputs.values():
-                cache, cr = CachedFile.objects.get_or_create(hash=v['hash'])
+                cache, cr = CachedFile.objects.get_or_create(hash=v["hash"])
                 if cr:
-                    if len(current) == len(outputs): #copy
-                        cache.linear_exedution_time = \
-                            current[0].linear_execution_time
+                    if len(current) == len(outputs):  # copy
+                        cache.linear_execution_time = current[0].linear_execution_time
                         cache.speed_up_real = current[0].speed_up_real
                         cache.speed_up_maximal = current[0].speed_up_maximal
                         cache.queuing_time = current[0].queuing_time
@@ -172,36 +182,38 @@ def reset_blocks(apps, schema_editor):
                         cache.cpu_time = current[0].cpu_time
                         cache.max_memory = current[0].max_memory
                         cache.data_read_size = current[0].data_read_size
-                        cache.data_read_nb_blocks = \
-                            current[0].data_read_nb_blocks
+                        cache.data_read_nb_blocks = current[0].data_read_nb_blocks
                         cache.data_read_time = current[0].data_read_time
                         cache.data_written_size = current[0].data_written_size
-                        cache.data_written_nb_blocks = \
-                            current[0].data_written_nb_blocks
+                        cache.data_written_nb_blocks = current[0].data_written_nb_blocks
                         cache.data_written_time = current[0].data_written_time
                         if current[0].results.count():
                             for r in current[0].results.all():
                                 r.cache = cache
                                 r.save()
-                        print("CachedFile data `%s' MOVED from `%s'" % \
-                              (cache.hash, current[0].hash))
+                        print(
+                            "CachedFile data `%s' MOVED from `%s'"
+                            % (cache.hash, current[0].hash)
+                        )
                     else:
-                        print("CachedFile (hash=%s) CREATED for block `%s' " \
-                              "of experiment `%s' which is in state `%s'" % \
-                              (cache.hash, block_name, fullname,
-                               b.get_status_display()))
+                        print(
+                            "CachedFile (hash=%s) CREATED for block `%s' "
+                            "of experiment `%s' which is in state `%s'"
+                            % (cache.hash, block_name, fullname, b.get_status_display())
+                        )
                 cache.blocks.add(b)
 
-        #asserts all blocks (except analysis blocks have dependents)
+        # asserts all blocks (except analysis blocks) have dependents
         for b in e.blocks.all():
-            assert (b.analyzer and b.dependents.count() == 0) or \
-                b.dependents.count() > 0
+            assert (  # nosec
+                b.analyzer and b.dependents.count() == 0
+            ) or b.dependents.count() > 0
 
-        #asserts all analysis blocks have only one output
+        # asserts all analysis blocks have only one output
         for b in e.blocks.filter(analyzer=True):
-            assert b.outputs.count() == 1
+            assert b.outputs.count() == 1  # nosec
 
-        #removes results without caches
+        # removes results without caches
         for r in Result.objects.filter(cache=None):
             print("Removing result %d (no associated cache)" % r.id)
             r.delete()
@@ -210,7 +222,7 @@ def reset_blocks(apps, schema_editor):
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0004_scheduler_addons_3'),
+        ("experiments", "0004_scheduler_addons_3"),
     ]
 
     operations = [
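
The block update above leans on get_or_create() together with an IntegrityError fallback for the known case where a toolchain misconnection produced two blocks with the same (experiment, name) pair. Condensed into a standalone sketch (the helper name and signature are illustrative):

from django.db import utils


def get_or_reuse_block(Block, experiment, name, algorithm):
    # Mirrors the try/except in reset_blocks() above.
    try:
        block, _ = Block.objects.get_or_create(
            experiment=experiment, name=name, algorithm=algorithm
        )
    except utils.IntegrityError:
        # unique_together on (experiment, name) already holds a row created
        # with a different algorithm - reuse it instead of failing.
        block = Block.objects.get(experiment=experiment, name=name)
    return block
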
diff --git a/beat/web/experiments/migrations/0006_block_order.py b/beat/web/experiments/migrations/0006_block_order.py
index bbdd26ebabc2c25abb274ba40bb618006296e590..ba3963299270cfb9a13b3672ae8fa19b7b76348d 100755
--- a/beat/web/experiments/migrations/0006_block_order.py
+++ b/beat/web/experiments/migrations/0006_block_order.py
@@ -27,24 +27,26 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, models
+import simplejson
 from django.conf import settings
+from django.db import migrations
+from django.db import models
 
-import simplejson
 import beat.core.experiment
+
 from ...common import storage
 
 
 def set_block_order(apps, schema_editor):
-    '''Set block order for existing experiments'''
+    """Set block order for existing experiments"""
 
     Experiment = apps.get_model("experiments", "Experiment")
     Block = apps.get_model("experiments", "Block")
 
     total = Experiment.objects.count()
-    for i, e in enumerate(Experiment.objects.order_by('id')):
+    for i, e in enumerate(Experiment.objects.order_by("id")):
 
-        fullname = '%s/%s/%s/%d/%s' % (
+        fullname = "%s/%s/%s/%d/%s" % (
             e.author.username,
             e.toolchain.author.username,
             e.toolchain.name,
@@ -52,21 +54,24 @@ def set_block_order(apps, schema_editor):
             e.name,
         )
 
-        print("Updating blocks for experiment %d/%d (%s, id=%d)..." % \
-              (i+1, total, fullname, e.id))
+        print(
+            "Updating blocks for experiment %d/%d (%s, id=%d)..."
+            % (i + 1, total, fullname, e.id)
+        )
 
-        xp_decl = simplejson.loads(storage.get_file_content(e,
-                                                            'declaration_file'))
-        tc_decl = simplejson.loads(storage.get_file_content(e.toolchain,
-                                                            'declaration_file'))
+        xp_decl = simplejson.loads(storage.get_file_content(e, "declaration_file"))
+        tc_decl = simplejson.loads(
+            storage.get_file_content(e.toolchain, "declaration_file")
+        )
 
-        xp = beat.core.experiment.Experiment(settings.PREFIX, (xp_decl,
-                                                               tc_decl))
+        xp = beat.core.experiment.Experiment(settings.PREFIX, (xp_decl, tc_decl))
 
         if xp.errors:
-            message = "The experiment `%s' isn't valid (skipping " \
+            message = (
+                "The experiment `%s' isn't valid (skipping "
                 "block update), due to the following errors:\n  * %s"
-            print(message % (fullname, '\n * '.join(xp.errors)))
+            )
+            print(message % (fullname, "\n * ".join(xp.errors)))
             continue
 
         # Goes, in order, setting block inner order
@@ -80,18 +85,17 @@ def set_block_order(apps, schema_editor):
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0005_scheduler_addons_4'),
+        ("experiments", "0005_scheduler_addons_4"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='block',
-            name='execution_order',
+            model_name="block",
+            name="execution_order",
             field=models.PositiveIntegerField(blank=True, null=True),
         ),
         migrations.AlterModelOptions(
-            name='block',
-            options={'ordering': ['experiment_id', 'execution_order']},
+            name="block", options={"ordering": ["experiment_id", "execution_order"]},
         ),
         migrations.RunPython(set_block_order),
     ]
diff --git a/beat/web/experiments/migrations/0007_cachedfile_status.py b/beat/web/experiments/migrations/0007_cachedfile_status.py
index e022eb725f7414f0a903c8c4a98de61462f804e0..d016d49d98d77433b5367ffb07413c3a74388b73 100755
--- a/beat/web/experiments/migrations/0007_cachedfile_status.py
+++ b/beat/web/experiments/migrations/0007_cachedfile_status.py
@@ -27,11 +27,12 @@
 
 from __future__ import unicode_literals
 
-from django.db import migrations, models
-from django.conf import settings
-
-import os
 import glob
+import os
+
+from django.conf import settings
+from django.db import migrations
+from django.db import models
 
 import beat.core.hash
 
@@ -40,18 +41,23 @@ def set_status(apps, schema_editor):
     CachedFile = apps.get_model("experiments", "CachedFile")
 
     total = CachedFile.objects.count()
-    for i, c in enumerate(CachedFile.objects.order_by('id')):
+    for i, c in enumerate(CachedFile.objects.order_by("id")):
 
-        print("Updating cached file %d/%d (%s, id=%d)..." % \
-              (i+1, total, c.hash, c.id))
+        print(
+            "Updating cached file %d/%d (%s, id=%d)..." % (i + 1, total, c.hash, c.id)
+        )
 
-        abs_path = os.path.join(settings.CACHE_ROOT, beat.core.hash.toPath(c.hash, suffix=''))
+        abs_path = os.path.join(
+            settings.CACHE_ROOT, beat.core.hash.toPath(c.hash, suffix="")
+        )
 
-        data_files = sorted(glob.glob(abs_path + '*.index')) + \
-            sorted(glob.glob(abs_path + '*.data'))
+        data_files = sorted(glob.glob(abs_path + "*.index")) + sorted(
+            glob.glob(abs_path + "*.data")
+        )
 
-        checksum_files = sorted(glob.glob(abs_path + '*.index.checksum')) + \
-            sorted(glob.glob(abs_path + '*.data.checksum'))
+        checksum_files = sorted(glob.glob(abs_path + "*.index.checksum")) + sorted(
+            glob.glob(abs_path + "*.data.checksum")
+        )
 
         if len(data_files) == 0:
             continue
@@ -61,7 +67,7 @@ def set_status(apps, schema_editor):
 
         cached = True
         for data_file, checksum_file in zip(data_files, checksum_files):
-            with open(checksum_file, 'rt') as f:
+            with open(checksum_file, "rt") as f:
                 recorded = f.read().strip()
 
             actual = beat.core.hash.hashFileContents(data_file)
@@ -71,22 +77,29 @@ def set_status(apps, schema_editor):
                 break
 
         if cached:
-            c.status = 'C'
+            c.status = "C"
             c.save()
 
 
-
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0006_block_order'),
+        ("experiments", "0006_block_order"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='cachedfile',
-            name='status',
-            field=models.CharField(choices=[(b'N', b'Not cached'), (b'P', b'Processing'), (b'C', b'Cached')], default=b'N', max_length=1),
+            model_name="cachedfile",
+            name="status",
+            field=models.CharField(
+                choices=[
+                    (b"N", b"Not cached"),
+                    (b"P", b"Processing"),
+                    (b"C", b"Cached"),
+                ],
+                default=b"N",
+                max_length=1,
+            ),
         ),
         migrations.RunPython(set_status),
     ]
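
set_status() above only flips a CachedFile to "C" (cached) when every .index/.data file agrees with its recorded .checksum companion. The verification reduces to the sketch below, which reuses beat.core.hash.hashFileContents exactly as the migration does (the standalone function name is illustrative):

import glob

import beat.core.hash


def cache_is_complete(abs_path):
    # abs_path is the cache prefix, i.e. CACHE_ROOT joined with toPath(hash).
    data_files = sorted(glob.glob(abs_path + "*.index")) + sorted(
        glob.glob(abs_path + "*.data")
    )
    checksum_files = sorted(glob.glob(abs_path + "*.index.checksum")) + sorted(
        glob.glob(abs_path + "*.data.checksum")
    )
    if not data_files or len(data_files) != len(checksum_files):
        return False
    for data_file, checksum_file in zip(data_files, checksum_files):
        with open(checksum_file, "rt") as f:
            recorded = f.read().strip()
        if beat.core.hash.hashFileContents(data_file) != recorded:
            return False
    return True
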
diff --git a/beat/web/experiments/migrations/0008_block_status.py b/beat/web/experiments/migrations/0008_block_status.py
index e5a3d74d559c75dc29a41e1904f26e87e317eaad..adce99a8e4823bf5721f2d14950b94890863b68c 100644
--- a/beat/web/experiments/migrations/0008_block_status.py
+++ b/beat/web/experiments/migrations/0008_block_status.py
@@ -2,19 +2,31 @@
 # Generated by Django 1.9.13 on 2017-09-27 16:48
 from __future__ import unicode_literals
 
-from django.db import migrations, models
+from django.db import migrations
+from django.db import models
 
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0007_cachedfile_status'),
+        ("experiments", "0007_cachedfile_status"),
     ]
 
     operations = [
         migrations.AlterField(
-            model_name='block',
-            name='status',
-            field=models.CharField(choices=[(b'N', b'Pending'), (b'P', b'Processing'), (b'C', b'Done'), (b'F', b'Failed'), (b'S', b'Skipped'), (b'L', b'Cancelled')], default=b'N', max_length=1),
+            model_name="block",
+            name="status",
+            field=models.CharField(
+                choices=[
+                    (b"N", b"Pending"),
+                    (b"P", b"Processing"),
+                    (b"C", b"Done"),
+                    (b"F", b"Failed"),
+                    (b"S", b"Skipped"),
+                    (b"L", b"Cancelled"),
+                ],
+                default=b"N",
+                max_length=1,
+            ),
         ),
     ]
diff --git a/beat/web/experiments/migrations/0009_block_status.py b/beat/web/experiments/migrations/0009_block_status.py
index db3f2c5398a4ad6405a2afa3a5771eeac390a27c..b4ab55d90a7f11a3dc32ecbf3be8e689352b2392 100644
--- a/beat/web/experiments/migrations/0009_block_status.py
+++ b/beat/web/experiments/migrations/0009_block_status.py
@@ -2,19 +2,30 @@
 # Generated by Django 1.9.13 on 2017-09-29 08:42
 from __future__ import unicode_literals
 
-from django.db import migrations, models
+from django.db import migrations
+from django.db import models
 
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('experiments', '0008_block_status'),
+        ("experiments", "0008_block_status"),
     ]
 
     operations = [
         migrations.AlterField(
-            model_name='block',
-            name='status',
-            field=models.CharField(choices=[(b'N', b'Pending'), (b'P', b'Processing'), (b'C', b'Done'), (b'F', b'Failed'), (b'L', b'Cancelled')], default=b'N', max_length=1),
+            model_name="block",
+            name="status",
+            field=models.CharField(
+                choices=[
+                    (b"N", b"Pending"),
+                    (b"P", b"Processing"),
+                    (b"C", b"Done"),
+                    (b"F", b"Failed"),
+                    (b"L", b"Cancelled"),
+                ],
+                default=b"N",
+                max_length=1,
+            ),
         ),
     ]
diff --git a/beat/web/experiments/models/__init__.py b/beat/web/experiments/models/__init__.py
index be1bb8322812df530c67dbf6e4164aee9ac44e40..ee68b128299b3e9a43ca7c730a7bb6e31b043e71 100644
--- a/beat/web/experiments/models/__init__.py
+++ b/beat/web/experiments/models/__init__.py
@@ -1,7 +1,17 @@
 from .block import Block
-from .cached_file import CachedFile
 from .block_input import BlockInput
-from .result import Result
-from .experiment import validate_experiment
+from .cached_file import CachedFile
 from .experiment import DeclarationStorage
 from .experiment import Experiment
+from .experiment import validate_experiment
+from .result import Result
+
+__all__ = [
+    "Block",
+    "BlockInput",
+    "CachedFile",
+    "DeclarationStorage",
+    "Experiment",
+    "validate_experiment",
+    "Result",
+]
diff --git a/beat/web/experiments/models/block.py b/beat/web/experiments/models/block.py
index 9de128ce3ebba812bf230743efd0457e5585e2eb..b5a70cd8a83a410beed472e84bb111feae7ff9ec 100755
--- a/beat/web/experiments/models/block.py
+++ b/beat/web/experiments/models/block.py
@@ -25,35 +25,28 @@
 #                                                                             #
 ###############################################################################
 
-from django.db import models
-from django.conf import settings
 
-import beat.core.hash
-import beat.core.data
-import beat.core.algorithm
+from datetime import datetime
 
-from beat.core.utils import NumpyJSONEncoder
+from django.db import models
 
 from ...algorithms.models import Algorithm
-from ...backend.models import Queue
 from ...backend.models import Environment
-from ...backend.models import Job
-
-from .result import Result
-
-import os
-import simplejson
-from datetime import datetime
-
+from ...backend.models import Queue
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class BlockManager(models.Manager):
-
-    def get_by_natural_key(self, name, experiment_author,
-                           toolchain_author, toolchain_name,
-                           toolchain_version, experiment_name):
+    def get_by_natural_key(
+        self,
+        name,
+        experiment_author,
+        toolchain_author,
+        toolchain_name,
+        toolchain_version,
+        experiment_name,
+    ):
         return self.get(
             name=name,
             experiment__author__username=experiment_author,
@@ -64,52 +57,57 @@ class BlockManager(models.Manager):
         )
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class Block(models.Model):
 
-    PENDING    = 'N'
-    PROCESSING = 'P'
-    DONE       = 'C'
-    FAILED     = 'F'
-    CANCELLED  = 'L'
+    PENDING = "N"
+    PROCESSING = "P"
+    DONE = "C"
+    FAILED = "F"
+    CANCELLED = "L"
 
     STATUS = (
-        (PENDING,    'Pending'),
-        (PROCESSING, 'Processing'),
-        (DONE,       'Done'),
-        (FAILED,     'Failed'),
-        (CANCELLED,  'Cancelled'),
+        (PENDING, "Pending"),
+        (PROCESSING, "Processing"),
+        (DONE, "Done"),
+        (FAILED, "Failed"),
+        (CANCELLED, "Cancelled"),
     )
 
-    experiment = models.ForeignKey('Experiment', related_name='blocks',
-                                   on_delete=models.CASCADE)
+    experiment = models.ForeignKey(
+        "Experiment", related_name="blocks", on_delete=models.CASCADE
+    )
     name = models.CharField(max_length=200)
     command = models.TextField(null=True, blank=True)
     status = models.CharField(max_length=1, choices=STATUS, default=PENDING)
     analyzer = models.BooleanField(default=False)
-    algorithm = models.ForeignKey(Algorithm, related_name='blocks',
-                                  on_delete=models.CASCADE)
-    creation_date = models.DateTimeField(null=True, blank=True,
-                                         auto_now_add=True)
+    algorithm = models.ForeignKey(
+        Algorithm, related_name="blocks", on_delete=models.CASCADE
+    )
+    creation_date = models.DateTimeField(null=True, blank=True, auto_now_add=True)
     start_date = models.DateTimeField(null=True, blank=True)
     end_date = models.DateTimeField(null=True, blank=True)
-    environment = models.ForeignKey(Environment, related_name='blocks',
-                                    null=True, on_delete=models.SET_NULL)
-    queue = models.ForeignKey(Queue, related_name='blocks', null=True,
-                              on_delete=models.SET_NULL)
+    environment = models.ForeignKey(
+        Environment, related_name="blocks", null=True, on_delete=models.SET_NULL
+    )
+    queue = models.ForeignKey(
+        Queue, related_name="blocks", null=True, on_delete=models.SET_NULL
+    )
 
     required_slots = models.PositiveIntegerField(default=1)
-    channel = models.CharField(max_length=200, default='', blank=True,
-                               help_text="Synchronization channel within the toolchain")
+    channel = models.CharField(
+        max_length=200,
+        default="",
+        blank=True,
+        help_text="Synchronization channel within the toolchain",
+    )
 
     # relationship to blocks to which this block depends on
-    dependencies = models.ManyToManyField('self',
-                                          related_name='dependents',
-                                          blank=True,
-                                          symmetrical=False,
-                                         )
+    dependencies = models.ManyToManyField(
+        "self", related_name="dependents", blank=True, symmetrical=False,
+    )
 
     # order of this block within the experiment - useful for the `backup'
     # command, so we can dump the blocks in the right dependence order
@@ -117,17 +115,19 @@ class Block(models.Model):
 
     objects = BlockManager()
 
-
     class Meta:
-        unique_together = ('experiment', 'name')
+        unique_together = ("experiment", "name")
 
         # setup ordering so that the dump order respects self dependencies
-        ordering = ['experiment_id', 'execution_order']
-
+        ordering = ["experiment_id", "execution_order"]
 
     def __str__(self):
-        return self.experiment.fullname() + ', ' + self.name + ' (%s)' % self.get_status_display()
-
+        return (
+            self.experiment.fullname()
+            + ", "
+            + self.name
+            + " (%s)" % self.get_status_display()
+        )
 
     def natural_key(self):
         return (
@@ -138,8 +138,8 @@ class Block(models.Model):
             self.experiment.toolchain.version,
             self.experiment.name,
         )
-    natural_key.dependencies = ['experiments.experiment']
 
+    natural_key.dependencies = ["experiments.experiment"]
 
     def save(self, *args, **kwargs):
         # Ensure that the state of the block is consistent, just in case, but
@@ -147,7 +147,7 @@ class Block(models.Model):
         if self.status == Block.PENDING:
             try:
                 self.results.all().delete()
-            except:
+            except Exception:  # nosec
                 pass
 
             self.start_date = None
@@ -165,90 +165,69 @@ class Block(models.Model):
 
         super(Block, self).save(*args, **kwargs)
 
-
     # Accessors for statistics
     def __return_first__(self, field, default=None):
         return getattr(self.outputs.first(), field, default)
 
-
     def first_cache(self):
         return self.outputs.first()
 
-
     def error_report(self):
-        return self.__return_first__('error_report')
-
+        return self.__return_first__("error_report")
 
     def stdout(self):
-        return self.__return_first__('stdout')
-
+        return self.__return_first__("stdout")
 
     def stderr(self):
-        return self.__return_first__('stderr')
-
+        return self.__return_first__("stderr")
 
     def speed_up_real(self):
-        return self.__return_first__('speed_up_real')
-
+        return self.__return_first__("speed_up_real")
 
     def speed_up_maximal(self):
-        return self.__return_first__('speed_up_maximal')
-
+        return self.__return_first__("speed_up_maximal")
 
     def linear_execution_time(self):
-        return self.__return_first__('linear_execution_time')
-
+        return self.__return_first__("linear_execution_time")
 
     def queuing_time(self):
-        return self.__return_first__('queuing_time')
-
+        return self.__return_first__("queuing_time")
 
     def cpu_time(self):
-        return self.__return_first__('cpu_time')
-
+        return self.__return_first__("cpu_time")
 
     def max_memory(self):
-        return self.__return_first__('max_memory')
-
+        return self.__return_first__("max_memory")
 
     def data_read_size(self):
-        return self.__return_first__('data_read_size')
-
+        return self.__return_first__("data_read_size")
 
     def data_read_nb_blocks(self):
-        return self.__return_first__('data_read_nb_blocks')
-
+        return self.__return_first__("data_read_nb_blocks")
 
     def data_read_time(self):
-        return self.__return_first__('data_read_time')
-
+        return self.__return_first__("data_read_time")
 
     def data_written_size(self):
-        return self.__return_first__('data_written_size')
-
+        return self.__return_first__("data_written_size")
 
     def data_written_nb_blocks(self):
-        return self.__return_first__('data_written_nb_blocks')
-
+        return self.__return_first__("data_written_nb_blocks")
 
     def data_written_time(self):
-        return self.__return_first__('data_written_time')
-
+        return self.__return_first__("data_written_time")
 
     # Accessor for results
-    results = property(lambda self: self.__return_first__('results'))
-
+    results = property(lambda self: self.__return_first__("results"))
 
     def done(self):
-      '''Says whether the block has finished or not'''
-
-      return self.status not in (Block.PENDING, Block.PROCESSING)
+        """Says whether the block has finished or not"""
 
+        return self.status not in (Block.PENDING, Block.PROCESSING)
 
     def is_runnable(self):
-        '''Checks if a block is runnable presently'''
-        return all([ k.status == Block.DONE for k in self.dependencies.all() ])
-
+        """Checks if a block is runnable presently"""
+        return all([k.status == Block.DONE for k in self.dependencies.all()])
 
     def set_canceled(self, end_date=None):
         """Update the block state to canceled
@@ -270,7 +249,6 @@ class Block(models.Model):
 
         self.save()
 
-
     def set_failed(self, end_date):
         """Update the block state to failed
 
diff --git a/beat/web/experiments/models/block_input.py b/beat/web/experiments/models/block_input.py
index e48adb4a2b2bcf749cb375c26493cd942c9ee4e6..09824b26db9f663f30ad672abdad14082ca0cf85 100755
--- a/beat/web/experiments/models/block_input.py
+++ b/beat/web/experiments/models/block_input.py
@@ -31,14 +31,11 @@ from beat.web.databases.models import DatabaseSetOutput
 
 from .block import Block
 
-
 # ----------------------------------------------------------
 
 
 class BlockInputManager(models.Manager):
-
-    def get_by_natural_key(self, block_natural_key,
-                           cache_hash, database_natural_key):
+    def get_by_natural_key(self, block_natural_key, cache_hash, database_natural_key):
         if block_natural_key:
             block = Block.objects.get_by_natural_key(*block_natural_key)
         else:
@@ -48,28 +45,36 @@ class BlockInputManager(models.Manager):
             return self.get(cache__hash=cache_hash, block=block)
         else:
             database = DatabaseSetOutput.objects.get_by_natural_key(
-                *database_natural_key)
+                *database_natural_key
+            )
             return self.get(database=database, block=block)
 
+
 # ----------------------------------------------------------
 
 
 class BlockInput(models.Model):
 
-    block = models.ForeignKey(Block, related_name='inputs', null=True,
-                              on_delete=models.CASCADE)
+    block = models.ForeignKey(
+        Block, related_name="inputs", null=True, on_delete=models.CASCADE
+    )
 
     # if the input comes from another block, then this one is set
-    cache = models.ForeignKey('CachedFile', related_name='inputs', null=True,
-                              on_delete=models.CASCADE)
+    cache = models.ForeignKey(
+        "CachedFile", related_name="inputs", null=True, on_delete=models.CASCADE
+    )
 
     # if the input comes from a dataset, then this one is set
-    database = models.ForeignKey(DatabaseSetOutput, related_name='blocks',
-                                 null=True, on_delete=models.CASCADE)
+    database = models.ForeignKey(
+        DatabaseSetOutput, related_name="blocks", null=True, on_delete=models.CASCADE
+    )
 
-    channel = models.CharField(max_length=200, default='', blank=True,
-                               help_text="Synchronization channel within "
-                                         "the toolchain")
+    channel = models.CharField(
+        max_length=200,
+        default="",
+        blank=True,
+        help_text="Synchronization channel within the toolchain",
+    )
 
     objects = BlockInputManager()
 
@@ -81,6 +86,9 @@ class BlockInput(models.Model):
         cache_hash = self.cache and self.cache.hash
         database_natural_key = self.database and self.database.natural_key()
         return (block_natural_key, cache_hash, database_natural_key)
-    natural_key.dependencies = ['experiments.block',
-                                'experiments.cachedfile',
-                                'databases.databasesetoutput']
+
+    natural_key.dependencies = [
+        "experiments.block",
+        "experiments.cachedfile",
+        "databases.databasesetoutput",
+    ]
diff --git a/beat/web/experiments/models/cached_file.py b/beat/web/experiments/models/cached_file.py
index 13ad9a240ac6e54d1aef8e9533f123d5552e955a..7d3deb0e11a6109ab1a86a043e847000bda7e92c 100755
--- a/beat/web/experiments/models/cached_file.py
+++ b/beat/web/experiments/models/cached_file.py
@@ -25,106 +25,100 @@
 #                                                                             #
 ###############################################################################
 
-from django.db import models
+import glob
+import logging
+import os
+
 from django.conf import settings
+from django.db import models
 
 import beat.core.hash
 
-import os
-import glob
-
-import logging
 logger = logging.getLogger(__name__)
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class CachedFileManager(models.Manager):
-
     def get_by_natural_key(self, hash):
         return self.get(hash=hash)
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class CachedFile(models.Model):
 
-    NOT_CACHED = 'N'
-    PROCESSING = 'P'
-    CACHED     = 'C'
+    NOT_CACHED = "N"
+    PROCESSING = "P"
+    CACHED = "C"
 
     STATUS = (
-        (NOT_CACHED, 'Not cached'),
-        (PROCESSING, 'Processing'),
-        (CACHED,     'Cached'),
+        (NOT_CACHED, "Not cached"),
+        (PROCESSING, "Processing"),
+        (CACHED, "Cached"),
     )
 
-
-    blocks = models.ManyToManyField('Block', related_name='outputs', blank=True)
+    blocks = models.ManyToManyField("Block", related_name="outputs", blank=True)
     hash = models.CharField(max_length=64, unique=True)
     status = models.CharField(max_length=1, choices=STATUS, default=NOT_CACHED)
 
     # the total amount of time this block took to run considering the
     # wall-clock time.
-    linear_execution_time = models.FloatField(default=0.)
+    linear_execution_time = models.FloatField(default=0.0)
 
     # the real speed-up obtained by running this block using X slots
-    speed_up_real = models.FloatField(default=0.)
+    speed_up_real = models.FloatField(default=0.0)
 
     # the maximum obtainable speed-up that could be achieved if all slots
     # were running in parallel. Essentially linear_execution_time /
     # maximum_slot_time
-    speed_up_maximal = models.FloatField(default=0.)
+    speed_up_maximal = models.FloatField(default=0.0)
 
     # the time this block waited to be executed
-    queuing_time = models.FloatField(default=0.)
+    queuing_time = models.FloatField(default=0.0)
 
     stdout = models.TextField(null=True, blank=True)
     stderr = models.TextField(null=True, blank=True)
     error_report = models.TextField(null=True, blank=True)
 
     # other statistics of interest
-    cpu_time = models.FloatField(default=0.)
+    cpu_time = models.FloatField(default=0.0)
     max_memory = models.BigIntegerField(default=0)
     data_read_size = models.BigIntegerField(default=0)
     data_read_nb_blocks = models.IntegerField(default=0)
-    data_read_time = models.FloatField(default=0.)
+    data_read_time = models.FloatField(default=0.0)
     data_written_size = models.BigIntegerField(default=0)
     data_written_nb_blocks = models.IntegerField(default=0)
-    data_written_time = models.FloatField(default=0.)
+    data_written_time = models.FloatField(default=0.0)
 
     objects = CachedFileManager()
 
-
     def __str__(self):
-        return 'CachedFile(%s, %s, %d blocks)' % (
-            self.hash, self.get_status_display(), self.blocks.count()
+        return "CachedFile(%s, %s, %d blocks)" % (
+            self.hash,
+            self.get_status_display(),
+            self.blocks.count(),
         )
 
-
     def natural_key(self):
         return (self.hash,)
 
-
     def path(self):
-        '''Returns the full path prefix to the cached file on disk'''
-
-        return beat.core.hash.toPath(self.hash, suffix='')
+        """Returns the path prefix to the cached file, relative to the cache root"""
 
+        return beat.core.hash.toPath(self.hash, suffix="")
 
     def absolute_path(self, cache=settings.CACHE_ROOT):
-        '''Returns the full path prefix to the cached file on disk'''
+        """Returns the full path prefix to the cached file on disk"""
 
         return os.path.join(cache, self.path())
 
-
     def files(self, cache=settings.CACHE_ROOT):
-        '''Checks if any file belonging to this cache exist on disk'''
-
-        return glob.glob(self.absolute_path(cache) + '*')
+        """Returns the list of files belonging to this cache on disk"""
 
+        return glob.glob(self.absolute_path(cache) + "*")
 
     def update(self, block_status):
         from . import Block
@@ -133,7 +127,9 @@ class CachedFile(models.Model):
             self.status = CachedFile.CACHED
             self.save()
 
-        elif (block_status == Block.PROCESSING) and (self.status != CachedFile.PROCESSING):
+        elif (block_status == Block.PROCESSING) and (
+            self.status != CachedFile.PROCESSING
+        ):
             self.status = CachedFile.PROCESSING
             self.save()
 
@@ -141,48 +137,55 @@ class CachedFile(models.Model):
             self.status = CachedFile.NOT_CACHED
             self.save()
 
-
     def exists(self, cache=settings.CACHE_ROOT):
-        '''Checks if any file belonging to this cache exist on disk'''
+        """Checks if any file belonging to this cache exists on disk"""
 
         return bool(self.files(cache))
 
-
     def check_checksums(self, cache=settings.CACHE_ROOT):
-        '''Checks if the cached files checksum properly'''
+        """Checks if the cached files checksum properly"""
 
         abs_path = self.absolute_path(cache)
 
-        data_files = sorted(glob.glob(abs_path + '*.index')) + \
-            sorted(glob.glob(abs_path + '*.data'))
+        data_files = sorted(glob.glob(abs_path + "*.index")) + sorted(
+            glob.glob(abs_path + "*.data")
+        )
 
-        checksum_files = sorted(glob.glob(abs_path + '*.index.checksum')) + \
-            sorted(glob.glob(abs_path + '*.data.checksum'))
+        checksum_files = sorted(glob.glob(abs_path + "*.index.checksum")) + sorted(
+            glob.glob(abs_path + "*.data.checksum")
+        )
 
         if len(data_files) != len(checksum_files):
-            logger.warn("Number of files (%d) is different from checksums (%d) " \
-                        "for cache `%s'", len(data_files), len(checksum_files),
-                        abs_path)
+            logger.warn(
+                "Number of files (%d) is different from checksums (%d) "
+                "for cache `%s'",
+                len(data_files),
+                len(checksum_files),
+                abs_path,
+            )
             return False
 
         for data_file, checksum_file in zip(data_files, checksum_files):
-            with open(checksum_file, 'rt') as f:
+            with open(checksum_file, "rt") as f:
                 recorded = f.read().strip()
 
             actual = beat.core.hash.hashFileContents(data_file)
 
             if actual != recorded:
-                logger.warn("Checksum for file `%s' does not match (%s != %s)",
-                            data_file, actual, recorded)
+                logger.warn(
+                    "Checksum for file `%s' does not match (%s != %s)",
+                    data_file,
+                    actual,
+                    recorded,
+                )
                 return False
 
         return True
 
-
     def delete_files(self, cache=settings.CACHE_ROOT):
-        '''
+        """
         Delete the files contained in this cache
-        '''
+        """
         files = self.files()
         for file in files:
             os.remove(file)
diff --git a/beat/web/experiments/models/result.py b/beat/web/experiments/models/result.py
index eb0f5edb572b9442a1e2d55f4db313208a215f20..1db3f2bbecc6ee070f12268c0af2e8b5f2dc1b9c 100755
--- a/beat/web/experiments/models/result.py
+++ b/beat/web/experiments/models/result.py
@@ -25,73 +25,68 @@
 #                                                                             #
 ###############################################################################
 
-from django.db import models
-from django.conf import settings
-
 import simplejson
+from django.conf import settings
+from django.db import models
 
-
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class ResultManager(models.Manager):
-
     def get_by_natural_key(self, name, hash):
         return self.get(name=name, cache__hash=hash)
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 class Result(models.Model):
 
-    SIMPLE_TYPE_NAMES  = ('int32', 'float32', 'bool', 'string')
+    SIMPLE_TYPE_NAMES = ("int32", "float32", "bool", "string")
 
-    cache = models.ForeignKey('CachedFile', related_name='results', null=True,
-                              on_delete=models.CASCADE)
+    cache = models.ForeignKey(
+        "CachedFile", related_name="results", null=True, on_delete=models.CASCADE
+    )
     name = models.CharField(max_length=200)
     type = models.CharField(max_length=200)
     primary = models.BooleanField(default=False)
     data_value = models.TextField(null=True, blank=True)
 
-
     objects = ResultManager()
 
-
-    #_____ Meta parameters __________
+    # _____ Meta parameters __________
 
     class Meta:
-        unique_together = ('cache', 'name')
-
+        unique_together = ("cache", "name")
 
     def __str__(self):
-        return '%s - %s' % (self.cache, self.name)
-
+        return "%s - %s" % (self.cache, self.name)
 
     def natural_key(self):
         return (
             self.name,
             self.cache.hash,
         )
-    natural_key.dependencies = ['experiments.cachedfile']
 
+    natural_key.dependencies = ["experiments.cachedfile"]
 
     def value(self):
-        if self.data_value in ['+inf', '-inf', 'NaN']:
+        if self.data_value in ["+inf", "-inf", "NaN"]:
             return self.data_value
-        elif self.type == 'int32':
+        elif self.type == "int32":
             return int(self.data_value)
-        elif self.type == 'float32':
+        elif self.type == "float32":
             return float(self.data_value)
-        elif self.type == 'bool':
+        elif self.type == "bool":
             return bool(self.data_value)
-        elif self.type == 'string':
+        elif self.type == "string":
             return str(self.data_value)
-        elif self.type.startswith('%s/' % settings.PLOT_ACCOUNT) or (self.type in Result.SIMPLE_TYPE_NAMES):
+        elif self.type.startswith("%s/" % settings.PLOT_ACCOUNT) or (
+            self.type in Result.SIMPLE_TYPE_NAMES
+        ):
             return simplejson.loads(self.data_value)
 
         return None
 
-
     def is_chart(self):
         return self.type not in Result.SIMPLE_TYPE_NAMES
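
Result.value() converts the text stored in data_value back into a typed Python value, handing anything beyond the four simple types to simplejson (plot data). A standalone mirror of that dispatch, with PLOT_ACCOUNT standing in for the value configured in settings:

import simplejson

SIMPLE_TYPE_NAMES = ("int32", "float32", "bool", "string")
PLOT_ACCOUNT = "plot"  # assumption: placeholder for settings.PLOT_ACCOUNT


def result_value(type_name, data_value):
    # Mirrors Result.value() above; note that, as in the model, bool() is
    # applied to the raw string, so any non-empty value evaluates to True.
    if data_value in ["+inf", "-inf", "NaN"]:
        return data_value
    if type_name == "int32":
        return int(data_value)
    if type_name == "float32":
        return float(data_value)
    if type_name == "bool":
        return bool(data_value)
    if type_name == "string":
        return str(data_value)
    if type_name.startswith("%s/" % PLOT_ACCOUNT) or type_name in SIMPLE_TYPE_NAMES:
        return simplejson.loads(data_value)
    return None
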
diff --git a/beat/web/experiments/serializers.py b/beat/web/experiments/serializers.py
index ec7e24bc3990d16667814e35b0b07a8d90a6d8ca..7221a8bc2557af5158003459bccc59ffd659eaf5 100755
--- a/beat/web/experiments/serializers.py
+++ b/beat/web/experiments/serializers.py
@@ -25,31 +25,24 @@
 #                                                                             #
 ###############################################################################
 
-import simplejson as json
-
-import beat.core
-
 from datetime import datetime
 
-from rest_framework import serializers
+import simplejson as json
+from django.contrib.humanize.templatetags.humanize import naturaltime
 from rest_framework import exceptions as drf_exceptions
+from rest_framework import serializers
 
-from django.contrib.humanize.templatetags.humanize import naturaltime
+import beat.core
 
+from ..common import fields as beat_fields
 from ..common.serializers import ShareableSerializer
-from ..common.utils import validate_restructuredtext
 from ..common.utils import annotate_full_name
-from ..common import fields as beat_fields
-
-from ..ui.templatetags.markup import restructuredtext
-
+from ..common.utils import validate_restructuredtext
 from ..toolchains.models import Toolchain
-
-from .models.experiment import validate_experiment
-
-from .models import Experiment
+from ..ui.templatetags.markup import restructuredtext
 from .models import Block
-
+from .models import Experiment
+from .models.experiment import validate_experiment
 
 # ----------------------------------------------------------
 
diff --git a/beat/web/experiments/signals.py b/beat/web/experiments/signals.py
index e05055c70e8db1f42e6c438ee17aa62e7ed79f62..01e199baeff1bc1fb3fb09e6586ec7a321998f67 100755
--- a/beat/web/experiments/signals.py
+++ b/beat/web/experiments/signals.py
@@ -25,13 +25,12 @@
 #                                                                             #
 ###############################################################################
 
+
 from django.db import models
 from django.dispatch import receiver
 
 from ..team.models import Team
-from .models import Experiment, Block
-
-from datetime import datetime
+from .models import Experiment
 
 
 # These two auto-delete files from the filesystem when they are unneeded:
@@ -70,7 +69,8 @@ def auto_delete_file_on_change(sender, instance, **kwargs):
         old_descr.delete(save=False)
 
 
-#_________ Algorithms _________
+# _________ Algorithms _________
+
 
 def build_user_algorithm_set(user):
     all_algorithms = []
@@ -80,6 +80,7 @@ def build_user_algorithm_set(user):
 
     return set(all_algorithms)
 
+
 def process_algorithms(team):
     team_algorithms = set(team.shared_algorithms.all() | team.usable_algorithms.all())
 
@@ -90,7 +91,8 @@ def process_algorithms(team):
             algorithm.share(public=False, users=[member])
 
 
-#_________ Toolchains _________
+# _________ Toolchains _________
+
 
 def build_user_toolchain_set(user):
     all_toolchains = []
@@ -113,6 +115,6 @@ def process_toolchains(team):
 
 @receiver(models.signals.pre_delete, sender=Team)
 def on_team_delete(sender, **kwargs):
-    team = kwargs.get('instance')
+    team = kwargs.get("instance")
     process_algorithms(team)
     process_toolchains(team)
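
The on_team_delete hook above uses the standard Django signal/receiver wiring: just before a Team row is deleted, its shared algorithms and toolchains are re-shared with the individual members. A minimal self-contained sketch of that wiring, using a plain Signal so it runs without a configured project; the names here are illustrative, not the project's:

    from django.dispatch import Signal, receiver

    team_pre_delete = Signal()  # stand-in for models.signals.pre_delete

    @receiver(team_pre_delete)
    def handle_team_delete(sender, **kwargs):
        # The object about to be deleted travels in the signal's keyword args.
        team = kwargs.get("instance")
        print("re-sharing objects that were shared through", team)

    # Sending the signal invokes every connected receiver.
    team_pre_delete.send(sender=None, instance="example-team")
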
diff --git a/beat/web/experiments/templates/experiments/list.html b/beat/web/experiments/templates/experiments/list.html
index 0a1d1090e1b07340827e27b00a9f4c5adbf07f57..d0fa45821d57edc7326be59a703c953b497d4860 100644
--- a/beat/web/experiments/templates/experiments/list.html
+++ b/beat/web/experiments/templates/experiments/list.html
@@ -2,21 +2,21 @@
 {% comment %}
  * Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
  * Contact: beat.support@idiap.ch
- * 
+ *
  * This file is part of the beat.web module of the BEAT platform.
- * 
+ *
  * Commercial License Usage
  * Licensees holding valid commercial BEAT licenses may use this file in
  * accordance with the terms contained in a written agreement between you
  * and Idiap. For further information contact tto@idiap.ch
- * 
+ *
  * Alternatively, this file may be used under the terms of the GNU Affero
  * Public License version 3 as published by the Free Software and appearing
  * in the file LICENSE.AGPL included in the packaging of this file.
  * The BEAT platform is distributed in the hope that it will be useful, but
  * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  * or FITNESS FOR A PARTICULAR PURPOSE.
- * 
+ *
  * You should have received a copy of the GNU Affero Public License along
  * with the BEAT platform. If not, see http://www.gnu.org/licenses/.
 {% endcomment %}
diff --git a/beat/web/experiments/templates/experiments/panels/actions.html b/beat/web/experiments/templates/experiments/panels/actions.html
index 51d4ab1b65926c9e99b74102cf9c0cd0de34269f..4515e0e6c8b7f5baff010cdb2d8f3468f31813f0 100644
--- a/beat/web/experiments/templates/experiments/panels/actions.html
+++ b/beat/web/experiments/templates/experiments/panels/actions.html
@@ -1,21 +1,21 @@
 {% comment %}
  * Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
  * Contact: beat.support@idiap.ch
- * 
+ *
  * This file is part of the beat.web module of the BEAT platform.
- * 
+ *
  * Commercial License Usage
  * Licensees holding valid commercial BEAT licenses may use this file in
  * accordance with the terms contained in a written agreement between you
  * and Idiap. For further information contact tto@idiap.ch
- * 
+ *
  * Alternatively, this file may be used under the terms of the GNU Affero
  * Public License version 3 as published by the Free Software and appearing
  * in the file LICENSE.AGPL included in the packaging of this file.
  * The BEAT platform is distributed in the hope that it will be useful, but
  * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  * or FITNESS FOR A PARTICULAR PURPOSE.
- * 
+ *
  * You should have received a copy of the GNU Affero Public License along
  * with the BEAT platform. If not, see http://www.gnu.org/licenses/.
 {% endcomment %}
diff --git a/beat/web/experiments/templates/experiments/panels/breadcrumb.html b/beat/web/experiments/templates/experiments/panels/breadcrumb.html
index ea93b2213352f617b41a9db201b0ccb2e4206585..f5a3e3cff1fb74de33e496b5c11659057469907f 100644
--- a/beat/web/experiments/templates/experiments/panels/breadcrumb.html
+++ b/beat/web/experiments/templates/experiments/panels/breadcrumb.html
@@ -1,21 +1,21 @@
 {% comment %}
  * Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
  * Contact: beat.support@idiap.ch
- * 
+ *
  * This file is part of the beat.web module of the BEAT platform.
- * 
+ *
  * Commercial License Usage
  * Licensees holding valid commercial BEAT licenses may use this file in
  * accordance with the terms contained in a written agreement between you
  * and Idiap. For further information contact tto@idiap.ch
- * 
+ *
  * Alternatively, this file may be used under the terms of the GNU Affero
  * Public License version 3 as published by the Free Software and appearing
  * in the file LICENSE.AGPL included in the packaging of this file.
  * The BEAT platform is distributed in the hope that it will be useful, but
  * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  * or FITNESS FOR A PARTICULAR PURPOSE.
- * 
+ *
  * You should have received a copy of the GNU Affero Public License along
  * with the BEAT platform. If not, see http://www.gnu.org/licenses/.
 {% endcomment %}
diff --git a/beat/web/experiments/templates/experiments/panels/filter_script.html b/beat/web/experiments/templates/experiments/panels/filter_script.html
index d76949f0b5846fd2b14580a1f3cd0709b2a6ab45..c831105ea2a25e2e4bde60fa1b63d51fd703e295 100644
--- a/beat/web/experiments/templates/experiments/panels/filter_script.html
+++ b/beat/web/experiments/templates/experiments/panels/filter_script.html
@@ -1,21 +1,21 @@
 {% comment %}
  * Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
  * Contact: beat.support@idiap.ch
- * 
+ *
  * This file is part of the beat.web module of the BEAT platform.
- * 
+ *
  * Commercial License Usage
  * Licensees holding valid commercial BEAT licenses may use this file in
  * accordance with the terms contained in a written agreement between you
  * and Idiap. For further information contact tto@idiap.ch
- * 
+ *
  * Alternatively, this file may be used under the terms of the GNU Affero
  * Public License version 3 as published by the Free Software and appearing
  * in the file LICENSE.AGPL included in the packaging of this file.
  * The BEAT platform is distributed in the hope that it will be useful, but
  * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  * or FITNESS FOR A PARTICULAR PURPOSE.
- * 
+ *
  * You should have received a copy of the GNU Affero Public License along
  * with the BEAT platform. If not, see http://www.gnu.org/licenses/.
 {% endcomment %}
diff --git a/beat/web/experiments/templates/experiments/panels/report_script.html b/beat/web/experiments/templates/experiments/panels/report_script.html
index 2ff0abf3e21278000f48c749321ac826c6a6d02c..93280d822745ae36be3f316c2d44d76cfc88c039 100644
--- a/beat/web/experiments/templates/experiments/panels/report_script.html
+++ b/beat/web/experiments/templates/experiments/panels/report_script.html
@@ -1,21 +1,21 @@
 {% comment %}
  * Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
  * Contact: beat.support@idiap.ch
- * 
+ *
  * This file is part of the beat.web module of the BEAT platform.
- * 
+ *
  * Commercial License Usage
  * Licensees holding valid commercial BEAT licenses may use this file in
  * accordance with the terms contained in a written agreement between you
  * and Idiap. For further information contact tto@idiap.ch
- * 
+ *
  * Alternatively, this file may be used under the terms of the GNU Affero
  * Public License version 3 as published by the Free Software and appearing
  * in the file LICENSE.AGPL included in the packaging of this file.
  * The BEAT platform is distributed in the hope that it will be useful, but
  * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  * or FITNESS FOR A PARTICULAR PURPOSE.
- * 
+ *
  * You should have received a copy of the GNU Affero Public License along
  * with the BEAT platform. If not, see http://www.gnu.org/licenses/.
 {% endcomment %}
diff --git a/beat/web/experiments/templates/experiments/panels/results.html b/beat/web/experiments/templates/experiments/panels/results.html
index ac39b71a011e2222dfe49d94d52d3f5cef0a4608..f2212a22a1461ba77ec6f424961a95893daf0fb7 100644
--- a/beat/web/experiments/templates/experiments/panels/results.html
+++ b/beat/web/experiments/templates/experiments/panels/results.html
@@ -1,21 +1,21 @@
 {% comment %}
  * Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
  * Contact: beat.support@idiap.ch
- * 
+ *
  * This file is part of the beat.web module of the BEAT platform.
- * 
+ *
  * Commercial License Usage
  * Licensees holding valid commercial BEAT licenses may use this file in
  * accordance with the terms contained in a written agreement between you
  * and Idiap. For further information contact tto@idiap.ch
- * 
+ *
  * Alternatively, this file may be used under the terms of the GNU Affero
  * Public License version 3 as published by the Free Software and appearing
  * in the file LICENSE.AGPL included in the packaging of this file.
  * The BEAT platform is distributed in the hope that it will be useful, but
  * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  * or FITNESS FOR A PARTICULAR PURPOSE.
- * 
+ *
  * You should have received a copy of the GNU Affero Public License along
  * with the BEAT platform. If not, see http://www.gnu.org/licenses/.
 {% endcomment %}
diff --git a/beat/web/experiments/templates/experiments/panels/sharing.html b/beat/web/experiments/templates/experiments/panels/sharing.html
index a9eca7ca407ccb9fba1842851862f3ab7b00dc01..21ac9c46bde279e257d7956437521d6971a80408 100644
--- a/beat/web/experiments/templates/experiments/panels/sharing.html
+++ b/beat/web/experiments/templates/experiments/panels/sharing.html
@@ -1,21 +1,21 @@
 {% comment %}
  * Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
  * Contact: beat.support@idiap.ch
- * 
+ *
  * This file is part of the beat.web module of the BEAT platform.
- * 
+ *
  * Commercial License Usage
  * Licensees holding valid commercial BEAT licenses may use this file in
  * accordance with the terms contained in a written agreement between you
  * and Idiap. For further information contact tto@idiap.ch
- * 
+ *
  * Alternatively, this file may be used under the terms of the GNU Affero
  * Public License version 3 as published by the Free Software and appearing
  * in the file LICENSE.AGPL included in the packaging of this file.
  * The BEAT platform is distributed in the hope that it will be useful, but
  * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  * or FITNESS FOR A PARTICULAR PURPOSE.
- * 
+ *
  * You should have received a copy of the GNU Affero Public License along
  * with the BEAT platform. If not, see http://www.gnu.org/licenses/.
 {% endcomment %}
diff --git a/beat/web/experiments/templates/experiments/panels/table.html b/beat/web/experiments/templates/experiments/panels/table.html
index 14812386bd407ba58d715eea00a4283ddc530b43..9dea3ac38649363a16c015cebf09ab98b8c009e9 100644
--- a/beat/web/experiments/templates/experiments/panels/table.html
+++ b/beat/web/experiments/templates/experiments/panels/table.html
@@ -1,21 +1,21 @@
 {% comment %}
  * Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
  * Contact: beat.support@idiap.ch
- * 
+ *
  * This file is part of the beat.web module of the BEAT platform.
- * 
+ *
  * Commercial License Usage
  * Licensees holding valid commercial BEAT licenses may use this file in
  * accordance with the terms contained in a written agreement between you
  * and Idiap. For further information contact tto@idiap.ch
- * 
+ *
  * Alternatively, this file may be used under the terms of the GNU Affero
  * Public License version 3 as published by the Free Software and appearing
  * in the file LICENSE.AGPL included in the packaging of this file.
  * The BEAT platform is distributed in the hope that it will be useful, but
  * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  * or FITNESS FOR A PARTICULAR PURPOSE.
- * 
+ *
  * You should have received a copy of the GNU Affero Public License along
  * with the BEAT platform. If not, see http://www.gnu.org/licenses/.
 {% endcomment %}
diff --git a/beat/web/experiments/templatetags/experiment_tags.py b/beat/web/experiments/templatetags/experiment_tags.py
index bf56b42346a1b741f070b244c4dee52b74e2d44c..6bb7f314bd28b9155d4b448e6c3e8960b9d22587 100755
--- a/beat/web/experiments/templatetags/experiment_tags.py
+++ b/beat/web/experiments/templatetags/experiment_tags.py
@@ -26,50 +26,40 @@
 ###############################################################################
 
 
-
+import simplejson as json
 from django import template
 from django.conf import settings
-from django.db.models import Count
 from django.db.models import Q
 
-import simplejson as json
-
-from ..models import Result
 from ...plotters.models import Plotter
-
+from ..models import Result
 
 register = template.Library()
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
-@register.inclusion_tag('experiments/panels/table.html', takes_context=True)
+@register.inclusion_tag("experiments/panels/table.html", takes_context=True)
 def experiment_table(context, objects, owner, id):
-    return dict(
-        request=context['request'],
-        objects=objects,
-        owner=owner,
-        panel_id=id,
-    )
+    return dict(request=context["request"], objects=objects, owner=owner, panel_id=id,)
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
-@register.inclusion_tag('experiments/panels/breadcrumb.html', takes_context=True)
+@register.inclusion_tag("experiments/panels/breadcrumb.html", takes_context=True)
 def experiment_breadcrumb(context, obj):
-    return dict(
-        request=context['request'],
-        object=obj,
-    )
+    return dict(request=context["request"], object=obj,)
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
-@register.inclusion_tag('experiments/panels/filter_script.html')
-def filter_script(panel_id, text_filter_id, attestation_filter_id, privacy_filter_id, status_filter_id):
+@register.inclusion_tag("experiments/panels/filter_script.html")
+def filter_script(
+    panel_id, text_filter_id, attestation_filter_id, privacy_filter_id, status_filter_id
+):
     return dict(
         panel_id=panel_id,
         text_filter_id=text_filter_id,
@@ -79,15 +69,13 @@ def filter_script(panel_id, text_filter_id, attestation_filter_id, privacy_filte
     )
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
-@register.inclusion_tag('experiments/panels/report_script.html',
-                        takes_context=True)
-def report_script(context, panel_id, master_checkbox_id, checkbox_class,
-                  form_id):
+@register.inclusion_tag("experiments/panels/report_script.html", takes_context=True)
+def report_script(context, panel_id, master_checkbox_id, checkbox_class, form_id):
     return dict(
-        request=context['request'],
+        request=context["request"],
         panel_id=panel_id,
         master_checkbox_id=master_checkbox_id,
         checkbox_class=checkbox_class,
@@ -95,34 +83,28 @@ def report_script(context, panel_id, master_checkbox_id, checkbox_class,
     )
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
-@register.inclusion_tag('experiments/panels/execution_infos.html')
+@register.inclusion_tag("experiments/panels/execution_infos.html")
 def experiment_execution_infos(id):
-    return dict(
-        panel_id=id,
-        URL_PREFIX=settings.URL_PREFIX,
-    )
+    return dict(panel_id=id, URL_PREFIX=settings.URL_PREFIX,)
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
-@register.inclusion_tag('experiments/dialogs/algorithm_mapping.html')
+@register.inclusion_tag("experiments/dialogs/algorithm_mapping.html")
 def algorithm_mapping(id):
-    return dict(
-        dialog_id=id,
-        URL_PREFIX=settings.URL_PREFIX,
-    )
+    return dict(dialog_id=id, URL_PREFIX=settings.URL_PREFIX,)
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
-@register.inclusion_tag('experiments/panels/actions.html', takes_context=True)
+@register.inclusion_tag("experiments/panels/actions.html", takes_context=True)
 def experiment_actions(context, object, display_count):
-    '''Composes the action buttons for a particular experiment
+    """Composes the action buttons for a particular experiment
 
     This panel primarily exists for showing action buttons for a given
     experiment taking into consideration it is being displayed for a given
@@ -135,59 +117,66 @@ def experiment_actions(context, object, display_count):
         display_count (bool): If the set of buttons should include one with the
           number of forks based on this experiment.
 
-    '''
-    return dict(
-        request=context['request'],
-        object=object,
-        display_count=display_count,
-    )
+    """
+    return dict(request=context["request"], object=object, display_count=display_count,)
 
 
-@register.inclusion_tag('experiments/panels/sharing.html', takes_context=True)
+@register.inclusion_tag("experiments/panels/sharing.html", takes_context=True)
 def experiment_sharing(context, obj):
-    '''Composes the current sharing properties and a form to change them
+    """Composes the current sharing properties and a form to change them
 
     Parameters:
 
         object (Experiment): The experiment for which the buttons will
           be drawn.
 
-    '''
+    """
     return {
-        'request': context['request'],
-            'object': obj,
-            'users': context['users'],
-            'teams': context['teams'],
+        "request": context["request"],
+        "object": obj,
+        "users": context["users"],
+        "teams": context["teams"],
     }
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
 @register.simple_tag
 def experiment_list_of_referenced_formats(experiment):
-    dataformats = filter(lambda x: x.author == experiment.author, experiment.all_needed_dataformats())
+    dataformats = filter(
+        lambda x: x.author == experiment.author, experiment.all_needed_dataformats()
+    )
+
+    return "[%s]" % ",".join(
+        map(
+            lambda x: '{ "name": "%s", "accessibility": "%s" }'
+            % (x.fullname(), x.get_sharing_display().lower()),
+            dataformats,
+        )
+    )
 
-    return '[%s]' % ','.join(map(lambda x: '{ "name": "%s", "accessibility": "%s" }' % (x.fullname(), x.get_sharing_display().lower()),
-                                 dataformats))
 
+# --------------------------------------------------
 
-#--------------------------------------------------
 
 @register.simple_tag
 def experiment_list_of_referenced_algorithms(experiment, user):
-    algorithms = filter(lambda x: x.author == experiment.author, experiment.referenced_algorithms.iterator())
+    algorithms = filter(
+        lambda x: x.author == experiment.author,
+        experiment.referenced_algorithms.iterator(),
+    )
 
     result = []
     for algorithm in algorithms:
         (has_access, open_source, accessibility) = algorithm.accessibility_for(user)
-        if not(has_access):
+        if not (has_access):
             continue
 
         entry = {
-            'name':          algorithm.fullname(),
-            'accessibility': accessibility,
-            'opensource':    open_source,
+            "name": algorithm.fullname(),
+            "accessibility": accessibility,
+            "opensource": open_source,
         }
 
         result.append(entry)
@@ -195,10 +184,10 @@ def experiment_list_of_referenced_algorithms(experiment, user):
     return json.dumps(result)
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
-@register.inclusion_tag('experiments/panels/results.html')
+@register.inclusion_tag("experiments/panels/results.html")
 def experiment_results(panel_id, experiment):
     """Called to display the experiment result panel.
 
@@ -209,21 +198,25 @@ def experiment_results(panel_id, experiment):
 
     # create doubles of analyzer -> result pairs
     analyzers = experiment.blocks.filter(analyzer=True)
-    contents = [(k, k.results.order_by('type', 'name')) for k in analyzers]
+    contents = [(k, k.results.order_by("type", "name")) for k in analyzers]
 
     # calculates plotter/parameter associations for the required types
-    formats = [k.type for a in analyzers for k in a.results.exclude(type__in=Result.SIMPLE_TYPE_NAMES)]
+    formats = [
+        k.type
+        for a in analyzers
+        for k in a.results.exclude(type__in=Result.SIMPLE_TYPE_NAMES)
+    ]
 
     # get capable plotters and parameters associated
     plotting = dict([(k, Plotter.objects.for_strformat(k)) for k in formats])
 
     json_plotting = dict()
     for fmt in plotting:
-        plotter, param = plotting[fmt]['default']
-        parameters = plotting[fmt]['options'][plotter]
+        plotter, param = plotting[fmt]["default"]
+        parameters = plotting[fmt]["options"][plotter]
         json_plotting[fmt] = dict(
             plotter=plotter.fullname(),
-            parameter=param.fullname() if param else '',
+            parameter=param.fullname() if param else "",
             parameters=json.dumps([k.fullname() for k in parameters]),
         )
 
@@ -235,7 +228,7 @@ def experiment_results(panel_id, experiment):
     )
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
 @register.simple_tag
@@ -246,55 +239,63 @@ def ordered_blocks(xp):
     blocks = []
     analyzers = []
     for key in block_order:
-      b = xp.blocks.get(name=key)
-      if b.analyzer: analyzers.append(b)
-      else: blocks.append(b)
+        b = xp.blocks.get(name=key)
+        if b.analyzer:
+            analyzers.append(b)
+        else:
+            blocks.append(b)
     return blocks + analyzers
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
 @register.simple_tag(takes_context=True)
 def visible_reports(context, xp):
     """Calculates the visible reports to an experiment"""
     from ...reports.models import Report
-    user = context['request'].user
+
+    user = context["request"].user
     if user.is_anonymous:
         return xp.reports.filter(status=Report.PUBLISHED)
     else:
-        return xp.reports.filter(Q(author=context['request'].user) |
-                                 Q(status=Report.PUBLISHED))
+        return xp.reports.filter(
+            Q(author=context["request"].user) | Q(status=Report.PUBLISHED)
+        )
 
 
-#--------------------------------------------------
+# --------------------------------------------------
 
 
 @register.simple_tag
 def owner_algorithms(obj):
-    '''Calculates the user algorithms for a given experiment'''
+    """Calculates the user algorithms for a given experiment"""
 
     return obj.referenced_algorithms.filter(author=obj.author)
 
 
 @register.simple_tag
 def owner_source_algorithms(obj):
-    '''Calculates the user algorithms in source code form for a given experiment'''
+    """Calculates the user algorithms in source code form for a given experiment"""
 
-    return [ x for x in obj.referenced_algorithms.filter(author=obj.author)
-             if not x.is_binary() ]
+    return [
+        x
+        for x in obj.referenced_algorithms.filter(author=obj.author)
+        if not x.is_binary()
+    ]
 
 
 @register.simple_tag
 def owner_binary_algorithms(obj):
-    '''Calculates the user algorithms in binary form for a given experiment'''
+    """Calculates the user algorithms in binary form for a given experiment"""
 
-    return [ x for x in obj.referenced_algorithms.filter(author=obj.author)
-             if x.is_binary() ]
+    return [
+        x for x in obj.referenced_algorithms.filter(author=obj.author) if x.is_binary()
+    ]
 
 
 @register.simple_tag
 def owner_dataformats(obj):
-    '''Calculates the user dataformats and algorithms for a given experiment'''
+    """Calculates the user dataformats and algorithms for a given experiment"""
 
     return [k for k in obj.all_needed_dataformats() if k.author == obj.author]
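
The two experiment_list_of_referenced_* tags above share one shape: keep only the experiment author's own objects, drop anything the viewing user cannot access, and emit a compact JSON summary for the page's JavaScript. A standalone sketch of that shape follows; the Algo record and its fields are invented for illustration and do not exist in the codebase.

    import simplejson as json
    from collections import namedtuple

    # Hypothetical stand-in for an Algorithm row.
    Algo = namedtuple("Algo", "author fullname accessibility opensource has_access")

    def referenced_algorithms_json(algorithms, author):
        summary = []
        for algo in (a for a in algorithms if a.author == author):
            if not algo.has_access:
                continue  # skip anything the viewing user cannot access
            summary.append(
                {
                    "name": algo.fullname,
                    "accessibility": algo.accessibility,
                    "opensource": algo.opensource,
                }
            )
        return json.dumps(summary)

    print(referenced_algorithms_json(
        [Algo("alice", "alice/cnn/1", "public", True, True)], "alice"
    ))
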
diff --git a/beat/web/experiments/tests/tests_api.py b/beat/web/experiments/tests/tests_api.py
index 997656999e4c491a242d57ff21dadbb6ea18bd58..a72021de48c6c8cbb8fa02b1e7b1a7eea922d930 100755
--- a/beat/web/experiments/tests/tests_api.py
+++ b/beat/web/experiments/tests/tests_api.py
@@ -25,37 +25,30 @@
 #                                                                             #
 ###############################################################################
 
+import copy
 import os
-import simplejson as json
 import shutil
-import copy
-
 from datetime import datetime
 from datetime import timedelta
 
+import simplejson as json
 from django.conf import settings
 from django.contrib.auth.models import User
 from django.urls import reverse
 
-from ...dataformats.models import DataFormat
 from ...algorithms.models import Algorithm
-from ...toolchains.models import Toolchain
-from ...team.models import Team
-
+from ...attestations.models import Attestation
 from ...backend.models import Environment
 from ...backend.models import Queue
-
-from ...attestations.models import Attestation
-from ...databases.models import Database
-
 from ...common.testutils import BaseTestCase
-from ...common.testutils import (  # noqa test runner will call it, tearDownModule
-    tearDownModule,
-)
-
-from ..models import Experiment
-from ..models import CachedFile
+from ...common.testutils import tearDownModule  # noqa test runner will call it
+from ...databases.models import Database
+from ...dataformats.models import DataFormat
+from ...team.models import Team
+from ...toolchains.models import Toolchain
 from ..models import Block
+from ..models import CachedFile
+from ..models import Experiment
 from ..models import Result
 
 TEST_PWD = "1234"  # nosec
diff --git a/beat/web/experiments/utils.py b/beat/web/experiments/utils.py
index 1b25bd01fa924bdc285a0d969a5d6e2779d8aed3..bed0fb6cceadeaf214ed707bcc6892af549d7aad 100644
--- a/beat/web/experiments/utils.py
+++ b/beat/web/experiments/utils.py
@@ -26,26 +26,25 @@
 ###############################################################################
 
 
-'''Utilities for experiment management'''
-
+"""Utilities for experiment management"""
+import logging
 
 from django.db.models import Count
 
 from .models import CachedFile
 
-import logging
 logger = logging.getLogger(__name__)
 
 
 def list_orphaned_cachedfiles():
-    '''Lists orphaned cache files that do not exist in the disk either'''
+    """Lists orphaned cache files that do not exist in the disk either"""
 
-    q = CachedFile.objects.annotate(Count('blocks')).filter(blocks__count__lt=1)
+    q = CachedFile.objects.annotate(Count("blocks")).filter(blocks__count__lt=1)
     return [c for c in q if not c.exists()]
 
 
 def cleanup_orphaned_cachedfiles():
-    '''Cleans-up orphaned cache files that do not exist in the disk either'''
+    """Cleans-up orphaned cache files that do not exist in the disk either"""
 
     for c in list_orphaned_cachedfiles():
         logger.info("Removing orphaned CachedFile object `%s'..." % c.hash)
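
cleanup_orphaned_cachedfiles() above is a natural candidate for periodic maintenance. One way to expose it, sketched as a hypothetical management command that is not part of this patch:

    from django.core.management.base import BaseCommand

    from beat.web.experiments.utils import cleanup_orphaned_cachedfiles


    class Command(BaseCommand):
        """Hypothetical command: drop CachedFile rows with no blocks and no data on disk."""

        help = "Clean up orphaned experiment cache files"

        def handle(self, *args, **options):
            cleanup_orphaned_cachedfiles()
            self.stdout.write(self.style.SUCCESS("orphaned cache files removed"))

Placed under some app's management/commands/ directory, it could then be scheduled externally, for example from cron.
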
diff --git a/beat/web/experiments/views.py b/beat/web/experiments/views.py
index 33079c9bdb65a6f5ccb019d89202abfd46a1f0f4..9a24fcaa34325067450f0020ab7a47382763964b 100644
--- a/beat/web/experiments/views.py
+++ b/beat/web/experiments/views.py
@@ -25,26 +25,27 @@
 #                                                                             #
 ###############################################################################
 
-from django.shortcuts import get_object_or_404
-from django.shortcuts import render, redirect
-from django.http import Http404
+from django.conf import settings
 from django.contrib.auth.decorators import login_required
 from django.contrib.auth.models import User
-from django.conf import settings
 from django.db.models.functions import Coalesce
+from django.http import Http404
+from django.shortcuts import get_object_or_404
+from django.shortcuts import redirect
+from django.shortcuts import render
 
-from .models import Experiment
-from ..toolchains.models import Toolchain
 from ..team.models import Team
+from ..toolchains.models import Toolchain
+from .models import Experiment
 
-
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 @login_required
-def new_from_toolchain(request, toolchain_author_name, toolchain_name,
-                       toolchain_version):
-    '''Sets up a new experiment from a toolchain name'''
+def new_from_toolchain(
+    request, toolchain_author_name, toolchain_name, toolchain_version
+):
+    """Sets up a new experiment from a toolchain name"""
 
     # Retrieve the toolchain
     toolchain = get_object_or_404(
@@ -56,23 +57,22 @@ def new_from_toolchain(request, toolchain_author_name, toolchain_name,
 
     # Check that the user can access it
     has_access = toolchain.accessibility_for(request.user)[0]
-    if not(has_access): raise Http404()
+    if not (has_access):
+        raise Http404()
 
-    return render(request,
-                  'experiments/setup.html',
-                  {
-                      'toolchain': toolchain,
-                      'action': 'new',
-                  })
+    return render(
+        request, "experiments/setup.html", {"toolchain": toolchain, "action": "new"}
+    )
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 @login_required
-def fork(request, author_name, toolchain_author_name,
-         toolchain_name, toolchain_version, name):
-    '''Sets up a new experiment from an experiment fork'''
+def fork(
+    request, author_name, toolchain_author_name, toolchain_name, toolchain_version, name
+):
+    """Sets up a new experiment from an experiment fork"""
 
     # Retrieve the experiment
     experiment = get_object_or_404(
@@ -81,28 +81,31 @@ def fork(request, author_name, toolchain_author_name,
         toolchain__author__username=toolchain_author_name,
         toolchain__name=toolchain_name,
         toolchain__version=toolchain_version,
-        name=name
+        name=name,
     )
 
     # Check that the user can access it
     (has_access, accessibility) = experiment.accessibility_for(request.user)
-    if not(has_access): raise Http404()
+    if not (has_access):
+        raise Http404()
+
+    return render(
+        request,
+        "experiments/setup.html",
+        {
+            "toolchain": experiment.toolchain,
+            "experiment": experiment,
+            "action": "fork",
+        },
+    )
 
-    return render(request,
-                  'experiments/setup.html',
-                  {
-                      'toolchain': experiment.toolchain,
-                      'experiment': experiment,
-                      'action': 'fork',
-                  })
 
+# ----------------------------------------------------------
 
-#----------------------------------------------------------
 
 @login_required
-def reset(request, toolchain_author_name,
-          toolchain_name, toolchain_version, name):
-    '''Resets the current experiment so it can run again'''
+def reset(request, toolchain_author_name, toolchain_name, toolchain_version, name):
+    """Resets the current experiment so it can run again"""
 
     # Retrieve the experiment
     experiment = get_object_or_404(
@@ -111,19 +114,21 @@ def reset(request, toolchain_author_name,
         toolchain__author__username=toolchain_author_name,
         toolchain__name=toolchain_name,
         toolchain__version=toolchain_version,
-        name=name
+        name=name,
     )
 
-    if not experiment.deletable(): raise Http404()
+    if not experiment.deletable():
+        raise Http404()
 
     experiment.reset()
 
     return redirect(experiment)
 
 
-def view(request, author_name, toolchain_author_name, toolchain_name,
-         toolchain_version, name):
-    '''Views an experiment no matter its present state'''
+def view(
+    request, author_name, toolchain_author_name, toolchain_name, toolchain_version, name
+):
+    """Views an experiment no matter its present state"""
 
     # Retrieve the experiment
     experiment = get_object_or_404(
@@ -132,80 +137,92 @@ def view(request, author_name, toolchain_author_name, toolchain_name,
         toolchain__author__username=toolchain_author_name,
         toolchain__name=toolchain_name,
         toolchain__version=toolchain_version,
-        name=name
+        name=name,
     )
 
     # Check that the user can access it
     (has_access, accessibility) = experiment.accessibility_for(request.user)
-    if not(has_access): raise Http404()
+    if not (has_access):
+        raise Http404()
 
     if experiment.status == Experiment.PENDING:
-        if request.user.is_anonymous: raise Http404()
-        return render(request,
-                      'experiments/setup.html',
-                      {
-                          'toolchain': experiment.toolchain,
-                          'experiment': experiment,
-                          'action': 'pending',
-                      })
+        if request.user.is_anonymous:
+            raise Http404()
+        return render(
+            request,
+            "experiments/setup.html",
+            {
+                "toolchain": experiment.toolchain,
+                "experiment": experiment,
+                "action": "pending",
+            },
+        )
 
     # Users the object can be shared with
-    users = User.objects.exclude(username__in=settings.ACCOUNTS_TO_EXCLUDE_FROM_TEAMS).order_by('username')
+    users = User.objects.exclude(
+        username__in=settings.ACCOUNTS_TO_EXCLUDE_FROM_TEAMS
+    ).order_by("username")
 
     # The experiment was already done, show results
-    return render(request,
-                  'experiments/view.html',
-                  {
-                      'experiment': experiment,
-                      'owner': experiment.author == request.user,
-                      'users': users,
-                      'teams': Team.objects.for_user(request.user, True)
-                  })
+    return render(
+        request,
+        "experiments/view.html",
+        {
+            "experiment": experiment,
+            "owner": experiment.author == request.user,
+            "users": users,
+            "teams": Team.objects.for_user(request.user, True),
+        },
+    )
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 def ls(request, author_name):
-    '''List all accessible experiments to the request user'''
+    """List all accessible experiments to the request user"""
 
-    if not author_name: return public_ls(request)
+    if not author_name:
+        return public_ls(request)
 
     # check that the user exists on the system
     author = get_object_or_404(User, username=author_name)
 
     # orders so that experiments with the latest activity are displayed first
-    objects = Experiment.objects.from_author_and_public(request.user,
-                                                        author_name).annotate(updated=Coalesce('end_date', 'start_date',
-                                                                                               'creation_date')).order_by('-updated')
+    objects = (
+        Experiment.objects.from_author_and_public(request.user, author_name)
+        .annotate(updated=Coalesce("end_date", "start_date", "creation_date"))
+        .order_by("-updated")
+    )
 
     if request.user.is_anonymous:
         objects = objects.exclude(status=Experiment.PENDING)
 
-    owner = (request.user == author)
+    owner = request.user == author
 
-    return render(request,
-                  'experiments/list.html',
-                  dict(
-                      objects=objects,
-                      author=author,
-                      owner=owner,
-                  ))
+    return render(
+        request,
+        "experiments/list.html",
+        dict(objects=objects, author=author, owner=owner,),
+    )
 
 
-#----------------------------------------------------------
+# ----------------------------------------------------------
 
 
 def public_ls(request):
-    '''List all publicly accessible experiments'''
+    """List all publicly accessible experiments"""
 
     # orders so that recent objects are displayed first
-    objects = Experiment.objects.public().exclude(status=Experiment.PENDING).annotate(updated=Coalesce('end_date', 'start_date', 'creation_date')).order_by('-updated')
-
-    return render(request,
-                  'experiments/list.html',
-                  dict(
-                      objects=objects,
-                      author=request.user, #anonymous
-                      owner=False,
-                  ))
+    objects = (
+        Experiment.objects.public()
+        .exclude(status=Experiment.PENDING)
+        .annotate(updated=Coalesce("end_date", "start_date", "creation_date"))
+        .order_by("-updated")
+    )
+
+    return render(
+        request,
+        "experiments/list.html",
+        dict(objects=objects, author=request.user, owner=False,),  # anonymous
+    )
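
Both ls() and public_ls() above now share the same ordering recipe: annotate each experiment with its most recent activity timestamp, falling back from end_date to start_date to creation_date, and sort descending. A hypothetical helper, not present in the patch, that captures just that recipe:

    from django.db.models.functions import Coalesce


    def order_by_latest_activity(queryset):
        # Newest activity first; Coalesce picks the first non-null timestamp.
        return queryset.annotate(
            updated=Coalesce("end_date", "start_date", "creation_date")
        ).order_by("-updated")

For instance, order_by_latest_activity(Experiment.objects.public().exclude(status=Experiment.PENDING)) reproduces the queryset built in public_ls().
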