From 107c61c37cee3f2cce19a9111899941b0e725b52 Mon Sep 17 00:00:00 2001
From: Samuel Gaist <samuel.gaist@idiap.ch>
Date: Fri, 11 Sep 2020 15:14:14 +0200
Subject: [PATCH] [utils][management][commands] Pre-commit cleanup

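Apply the project's pre-commit hooks (import sorting and black-style
formatting) to the utils management commands: imports are regrouped and
split one per line, string literals are normalised to double quotes,
long statements and argument lists are reflowed, and module-level
loggers are now defined after the imports rather than between them.

Two hunks go slightly beyond cosmetics and are worth calling out:
backup.py now imports __version__ from ..version instead of the package
root, and detemplatize() in install.py now constructs its Jinja2
Environment with autoescape=True.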
---
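Note for reviewers (ignored by git-am): the autoescape=True change in
detemplatize() is behavioural rather than cosmetic — interpolated
variables become HTML-escaped. A minimal sketch of the difference,
assuming only that jinja2 is available; the template and data below are
made up for illustration:

    from jinja2 import DictLoader, Environment

    template = "Hello {{ name }}"    # stand-in for a real object template
    data = {"name": "<b>world</b>"}  # stand-in for the template_data dict

    # Old behaviour: values are interpolated verbatim.
    env = Environment(loader=DictLoader({"object": template}))
    print(env.get_template("object").render(**data))  # Hello <b>world</b>

    # New behaviour: markup inside interpolated values is escaped.
    env = Environment(loader=DictLoader({"object": template}), autoescape=True)
    print(env.get_template("object").render(**data))  # Hello &lt;b&gt;world&lt;/b&gt;

Presumably this was driven by a linter warning (Bandit flags Jinja2
environments without autoescaping), but since the templates rendered
here are JSON declarations, it is worth double-checking that quote
characters in substituted values are not now emitted as HTML entities.
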
 beat/web/utils/management/commands/backup.py  | 180 +++--
 beat/web/utils/management/commands/broker.py  |   2 +-
 .../commands/change_databases_root_folder.py  |  59 +-
 .../commands/clean_public_actions.py          |  14 +-
 .../management/commands/full_scheduling.py    |   6 +-
 beat/web/utils/management/commands/install.py | 645 ++++++++++--------
 .../management/commands/list_active_users.py  |  34 +-
 beat/web/utils/management/commands/restore.py |  16 +-
 .../utils/management/commands/scheduler.py    |  14 +-
 .../commands/update_installed_databases.py    | 119 ++--
 beat/web/utils/management/commands/worker.py  |   2 +-
 .../utils/management/commands/xdumpdata.py    | 178 +++--
 12 files changed, 743 insertions(+), 526 deletions(-)

diff --git a/beat/web/utils/management/commands/backup.py b/beat/web/utils/management/commands/backup.py
index 2875e8f19..0258b7d03 100644
--- a/beat/web/utils/management/commands/backup.py
+++ b/beat/web/utils/management/commands/backup.py
@@ -26,40 +26,41 @@
 ###############################################################################
 
 
+import copy
+import datetime
 import logging
-logger = logging.getLogger(__name__)
-
 import os
-import time
-import copy
 import shutil
 import tarfile
 import tempfile
-import datetime
+import time
 
+from django.apps import apps
+from django.apps import registry
+from django.conf import settings
 from django.core.management import call_command
 from django.core.management.base import BaseCommand
-from django.conf import settings
-from django.apps import apps, registry
-
-from ....import __version__
-
-APPS = [ #dump and load order are respected
-    'authtoken',
-        'backend',
-        'statistics',
-        'dataformats',
-        'databases',
-        'libraries',
-        'algorithms',
-        'plotters',
-        'toolchains',
-        'experiments',
-        'attestations',
-        'search',
-        'reports',
-        'actstream',
-        'post_office',
+
+from ..version import __version__
+
+logger = logging.getLogger(__name__)
+
+APPS = [  # dump and load order are respected
+    "authtoken",
+    "backend",
+    "statistics",
+    "dataformats",
+    "databases",
+    "libraries",
+    "algorithms",
+    "plotters",
+    "toolchains",
+    "experiments",
+    "attestations",
+    "search",
+    "reports",
+    "actstream",
+    "post_office",
 ]
 
 
@@ -75,63 +76,87 @@ def _check(app, queryset):
 
         # checks the forks
         if k.fork_of and k.fork_of.id not in declared:
-            logger.warn("** Inconsistent creation date - %s `%s' (%s) which is a fork of `%s' (%s) must be dumped after, but was created before", app, k, k.creation_date, k.fork_of, k.fork_of.creation_date)
-            logger.warn("   -> Correcting creation date of %s to be (a second after) %s", k, k.fork_of)
+            logger.warn(
+                "** Inconsistent creation date - %s `%s' (%s) which is a fork of `%s' (%s) must be dumped after, but was created before",
+                app,
+                k,
+                k.creation_date,
+                k.fork_of,
+                k.fork_of.creation_date,
+            )
+            logger.warn(
+                "   -> Correcting creation date of %s to be (a second after) %s",
+                k,
+                k.fork_of,
+            )
             k.creation_date = k.fork_of.creation_date + datetime.timedelta(seconds=1)
             k.save()
             errors += 1
 
         # checks previous versions
         if k.previous_version and k.previous_version.id not in declared:
-            logger.warn("** Inconsistent creation date - %s `%s' (%s) which has `%s' (%s) as a previous version must be dumped after, but was created before", app, k, k.creation_date, k.previous_version, k.previous_version.creation_date)
-            logger.warn("   -> Correcting creation date of %s to be (a second after) %s", k, k.previous_version)
-            k.creation_date = k.previous_version.creation_date + datetime.timedelta(seconds=1)
+            logger.warn(
+                "** Inconsistent creation date - %s `%s' (%s) which has `%s' (%s) as a previous version must be dumped after, but was created before",
+                app,
+                k,
+                k.creation_date,
+                k.previous_version,
+                k.previous_version.creation_date,
+            )
+            logger.warn(
+                "   -> Correcting creation date of %s to be (a second after) %s",
+                k,
+                k.previous_version,
+            )
+            k.creation_date = k.previous_version.creation_date + datetime.timedelta(
+                seconds=1
+            )
             k.save()
             errors += 1
 
         declared.append(k.id)
 
-
     return errors
 
 
 class Command(BaseCommand):
 
-    help = 'Backs-up current database and prefix in a single tarball'
-
+    help = "Backs-up current database and prefix in a single tarball"
 
     def handle(self, *ignored, **arguments):
 
         # Setup this command's logging level
         global logger
-        arguments['verbosity'] = int(arguments['verbosity'])
-        if arguments['verbosity'] >= 1:
-            if arguments['verbosity'] == 1: logger.setLevel(logging.INFO)
-            elif arguments['verbosity'] >= 2: logger.setLevel(logging.DEBUG)
+        arguments["verbosity"] = int(arguments["verbosity"])
+        if arguments["verbosity"] >= 1:
+            if arguments["verbosity"] == 1:
+                logger.setLevel(logging.INFO)
+            elif arguments["verbosity"] >= 2:
+                logger.setLevel(logging.DEBUG)
 
         # for these apps, only backs-up these particular object types
         only = dict(
-            dataformats = 'dataformats.DataFormat',
-            libraries = 'libraries.Library',
-            algorithms = 'algorithms.Algorithm',
-            databases = 'databases.Database',
-            toolchains = 'toolchains.Toolchain',
+            dataformats="dataformats.DataFormat",
+            libraries="libraries.Library",
+            algorithms="algorithms.Algorithm",
+            databases="databases.Database",
+            toolchains="toolchains.Toolchain",
         )
 
         dump_arguments = dict(
-            indent = 2,
-            verbosity=arguments.get('verbosity'),
+            indent=2,
+            verbosity=arguments.get("verbosity"),
             interactive=False,
-            use_natural_primary_keys = True,
-            use_natural_foreign_keys = True,
-            format = 'json',
-            exclude = [
-                'sessions',
-                    'admin',
-                    'contenttypes',
-                    'auth.Permission',
-                    'backend.Job',
-                    'backend.JobSplit',
+            use_natural_primary_keys=True,
+            use_natural_foreign_keys=True,
+            format="json",
+            exclude=[
+                "sessions",
+                "admin",
+                "contenttypes",
+                "auth.Permission",
+                "backend.Job",
+                "backend.JobSplit",
             ],
         )
 
@@ -141,59 +166,64 @@ class Command(BaseCommand):
         use_apps = [app for app in APPS if app in installed_apps]
 
         try:
-            tmpdir = tempfile.mkdtemp('.backup', 'beat.web-')
+            tmpdir = tempfile.mkdtemp(".backup", "beat.web-")
 
             # backs-up everything else first
             arguments = copy.deepcopy(dump_arguments)
-            arguments['exclude'] += use_apps
-            destfile = os.path.join(tmpdir, 'initial.json')
+            arguments["exclude"] += use_apps
+            destfile = os.path.join(tmpdir, "initial.json")
             logger.info("Dumping initial (unspecified) data -> `%s'", destfile)
-            arguments['output'] = destfile #new in Django-1.8.x
-            call_command('xdumpdata', **arguments)
+            arguments["output"] = destfile  # new in Django-1.8.x
+            call_command("xdumpdata", **arguments)
 
             # and backs-up the apps respecting the imposed order
             for app in use_apps:
 
-                destfile = os.path.join(tmpdir, '%s.json' % app)
+                destfile = os.path.join(tmpdir, "%s.json" % app)
                 arguments = copy.deepcopy(dump_arguments)
                 logger.info("Dumping data for `%s' -> `%s'", app, destfile)
 
                 if app in only:
 
-                    app, model = only[app].split('.')
+                    app, model = only[app].split(".")
                     model = apps.get_model(app, model)
-                    order = ('creation_date',)
+                    order = ("creation_date",)
 
                     # This will check and correct objects with weird creation
                     # dates so that the dump order is consistent
                     while True:
                         queryset = model.objects.order_by(*order)
                         err = _check(app, queryset)
-                        if not err: break
+                        if not err:
+                            break
 
-                    arguments['primary_keys'] = \
-                        ','.join([str(k.id) for k in queryset])
+                    arguments["primary_keys"] = ",".join([str(k.id) for k in queryset])
 
-                arguments['output'] = destfile #new in Django-1.8.x
-                call_command('xdumpdata', only.get(app, app), **arguments)
+                arguments["output"] = destfile  # new in Django-1.8.x
+                call_command("xdumpdata", only.get(app, app), **arguments)
 
                 # copy prefix data
                 path = os.path.join(settings.PREFIX, app)
                 if os.path.exists(path):
-                    destdir = os.path.join(tmpdir, 'prefix', app)
-                    logger.info("Backing up core objects for `%s' -> `%s'",
-                                app, destdir)
+                    destdir = os.path.join(tmpdir, "prefix", app)
+                    logger.info(
+                        "Backing up core objects for `%s' -> `%s'", app, destdir
+                    )
                     shutil.copytree(path, destdir)
                 else:
                     logger.info("No disk presence found for `%s'", app)
 
             # tarball and remove directory
-            compress = 'bz2'
-            tarball = time.strftime('%Y.%m.%d-%Hh%Mm%S') + \
-                ('-v%s' % __version__) + '.tar.' + compress
+            compress = "bz2"
+            tarball = (
+                time.strftime("%Y.%m.%d-%Hh%Mm%S")
+                + ("-v%s" % __version__)
+                + ".tar."
+                + compress
+            )
             logger.info("Writing archive `%s'", tarball)
             with tarfile.open(tarball, "w:%s" % compress) as tar:
-                tar.add(tmpdir, arcname='')
+                tar.add(tmpdir, arcname="")
 
         finally:
 
diff --git a/beat/web/utils/management/commands/broker.py b/beat/web/utils/management/commands/broker.py
index 282cc35dd..94d86bdc5 100644
--- a/beat/web/utils/management/commands/broker.py
+++ b/beat/web/utils/management/commands/broker.py
@@ -31,9 +31,9 @@ import logging
 from django.core.management.base import BaseCommand
 from django.db import transaction
 
-from beat.web.backend.models import Worker
 from beat.core.bcpapi.broker import BeatComputationBroker
 from beat.core.utils import setup_logging
+from beat.web.backend.models import Worker
 
 logger = logging.getLogger(__name__)
 
diff --git a/beat/web/utils/management/commands/change_databases_root_folder.py b/beat/web/utils/management/commands/change_databases_root_folder.py
index 0fbc6faef..d34a55064 100644
--- a/beat/web/utils/management/commands/change_databases_root_folder.py
+++ b/beat/web/utils/management/commands/change_databases_root_folder.py
@@ -46,73 +46,76 @@ Examples:
 
       $ manage.py update_installed_databases -v1 --dry-run
 """
-
-import os
-import sys
 import logging
 
 from django.core.management.base import BaseCommand
-from django.conf import settings
 
 from beat.web.databases.models import Database
 
 from .install import load_database_folders
 
-
 logger = logging.getLogger(__name__)
 
 
 class Command(BaseCommand):
 
-    help = 'Change the root path of the databases listed in the given conf file'
-
+    help = "Change the root path of the databases listed in the given conf file"
 
     def add_arguments(self, parser):
 
         from argparse import RawDescriptionHelpFormatter
+
         parser.epilog = __doc__
         parser.formatter_class = RawDescriptionHelpFormatter
 
-
-        parser.add_argument('database_root_file', type=str,
-                            help='The JSON file containing ' \
-                            'the root directories of the databases installed ' \
-                            'on the platform.')
-
-        parser.add_argument('--dry-run', '-d', action='store_true',
-                            dest='dry_run', default=False, help='Set this flag to ' \
-                            'simulate a run.')
-
+        parser.add_argument(
+            "database_root_file",
+            type=str,
+            help="The JSON file containing "
+            "the root directories of the databases installed "
+            "on the platform.",
+        )
+
+        parser.add_argument(
+            "--dry-run",
+            "-d",
+            action="store_true",
+            dest="dry_run",
+            default=False,
+            help="Set this flag to " "simulate a run.",
+        )
 
     def handle(self, *ignored, **arguments):
         # Setup this command's logging level
         global logger
-        arguments['verbosity'] = int(arguments['verbosity'])
-        if arguments['verbosity'] >= 1:
-            if arguments['verbosity'] == 1:
+        arguments["verbosity"] = int(arguments["verbosity"])
+        if arguments["verbosity"] >= 1:
+            if arguments["verbosity"] == 1:
                 logger.setLevel(logging.INFO)
-            elif arguments['verbosity'] >= 2:
+            elif arguments["verbosity"] >= 2:
                 logger.setLevel(logging.DEBUG)
 
-        dry_run = arguments['dry_run']
+        dry_run = arguments["dry_run"]
 
         # Reads database root file, if provided
-        db_root_file = arguments['database_root_file']
+        db_root_file = arguments["database_root_file"]
         db_root = load_database_folders(db_root_file)
 
         for db, path in db_root.items():
-            name, version = db.split('/')
+            name, version = db.split("/")
             try:
                 database = Database.objects.get(name=name, version=int(version))
             except Database.DoesNotExist:
                 logger.error("Failed to find %s", db)
             else:
                 if dry_run:
-                    logger.info("Would change %s for %s" %(database.declaration['root_folder'],
-                                                           path))
+                    logger.info(
+                        "Would change %s for %s"
+                        % (database.declaration["root_folder"], path)
+                    )
                 else:
-                    logger.info("Changing %s path for %s" %(db, path))
+                    logger.info("Changing %s path for %s" % (db, path))
                     declaration = database.declaration
-                    declaration['root_folder'] = path
+                    declaration["root_folder"] = path
                     database.declaration = declaration
                     database.save()
diff --git a/beat/web/utils/management/commands/clean_public_actions.py b/beat/web/utils/management/commands/clean_public_actions.py
index 89726187c..e392027c6 100644
--- a/beat/web/utils/management/commands/clean_public_actions.py
+++ b/beat/web/utils/management/commands/clean_public_actions.py
@@ -27,22 +27,24 @@
 ###############################################################################
 
 
-from django.core.management.base import BaseCommand, CommandError
+from actstream.models import Action
 from django.contrib.auth.models import User
 from django.contrib.contenttypes.models import ContentType
-
-from actstream.models import Action
+from django.core.management.base import BaseCommand
 
 from beat.web.common.models import Shareable
 
+
 class Command(BaseCommand):
 
-    help = 'Cleanup wrongly made public actions'
+    help = "Cleanup wrongly made public actions"
 
     def handle(self, *args, **options):
         ctype = ContentType.objects.get_for_model(User)
-        for action in Action.objects.filter(public=True).filter(actor_content_type=ctype):
-            if action.action_object and hasattr(action.action_object, 'sharing'):
+        for action in Action.objects.filter(public=True).filter(
+            actor_content_type=ctype
+        ):
+            if action.action_object and hasattr(action.action_object, "sharing"):
                 if action.action_object.sharing != Shareable.PUBLIC:
                     action.public = False
                     action.save()
diff --git a/beat/web/utils/management/commands/full_scheduling.py b/beat/web/utils/management/commands/full_scheduling.py
index 96bc69283..e93f0ff84 100644
--- a/beat/web/utils/management/commands/full_scheduling.py
+++ b/beat/web/utils/management/commands/full_scheduling.py
@@ -30,10 +30,10 @@ import logging
 import multiprocessing
 import signal
 
-from django.core.management.base import BaseCommand
-from django.core.management import call_command
-from django.conf import settings
 from django import db
+from django.conf import settings
+from django.core.management import call_command
+from django.core.management.base import BaseCommand
 
 from beat.core.utils import find_free_port
 
diff --git a/beat/web/utils/management/commands/install.py b/beat/web/utils/management/commands/install.py
index bd64ac9ea..cfbc16680 100755
--- a/beat/web/utils/management/commands/install.py
+++ b/beat/web/utils/management/commands/install.py
@@ -54,28 +54,25 @@ Examples:
   By default, paths to the root of all databases are set to match the Idiap
   Research Institute filesystem organisation.
 """
-
+import collections
+import fnmatch
 import logging
-logger = logging.getLogger(__name__)
-
-import pkg_resources
-
 import os
 import sys
-import fnmatch
-import collections
-import simplejson
 
-from django.core.management.base import BaseCommand
+import simplejson
 from django.conf import settings
+from django.core.management.base import BaseCommand
 
-import beat.core.dataformat
+import beat.core.algorithm
 import beat.core.database
+import beat.core.dataformat
+import beat.core.experiment
 import beat.core.library
-import beat.core.algorithm
 import beat.core.plotter
 import beat.core.toolchain
-import beat.core.experiment
+
+logger = logging.getLogger(__name__)
 
 
 def add_user(name, passwd, token_key):
@@ -90,16 +87,16 @@ def add_user(name, passwd, token_key):
         user = User()
         user.username = name
         user.first_name = name.capitalize()
-        user.last_name = name.capitalize() + 'son'
-        user.email = '%s@example.com' % name
+        user.last_name = name.capitalize() + "son"
+        user.email = "%s@example.com" % name
         user.is_active = True
         if passwd is not None:
             user.set_password(passwd)
             user.is_staff = True
             user.is_superuser = True
         user.save()
-        #set profile
-        user.profile.status = 'A'
+        # set profile
+        user.profile.status = "A"
         user.profile.rejection_date = None
         user.profile.supervision_key = None
         user.profile.save()
@@ -139,8 +136,10 @@ def add_group(name):
 def setup_environment(queue_config_filename, verbosity):
 
     from django.core.management import call_command
-    call_command('qsetup', verbosity=verbosity, reset=True,
-                 config=queue_config_filename)
+
+    call_command(
+        "qsetup", verbosity=verbosity, reset=True, config=queue_config_filename
+    )
 
 
 def create_sites():
@@ -152,19 +151,19 @@ def create_sites():
         obj.name = name
         obj.domain = domain
         obj.save()
-        logger.info('Saved site %s (pk=%s)' % (obj, obj.pk))
+        logger.info("Saved site %s (pk=%s)" % (obj, obj.pk))
 
-    _setup_site(1, 'Development Server', '127.0.0.1:8000')
-    _setup_site(2, 'Staging System', 'beatweb-staging')
-    _setup_site(3, 'Production System', 'www.beat-eu.org')
+    _setup_site(1, "Development Server", "127.0.0.1:8000")
+    _setup_site(2, "Staging System", "beatweb-staging")
+    _setup_site(3, "Production System", "www.beat-eu.org")
 
 
 def create_users(username, passwd):
 
     # Sets up initial users, if not already there.
-    system_user = add_user(settings.SYSTEM_ACCOUNT, None, '1')
-    plot_user = add_user(settings.PLOT_ACCOUNT, None, '2')
-    user = add_user(username, passwd, '3')
+    system_user = add_user(settings.SYSTEM_ACCOUNT, None, "1")
+    plot_user = add_user(settings.PLOT_ACCOUNT, None, "2")
+    user = add_user(username, passwd, "3")
 
     return system_user, plot_user, user
 
@@ -174,12 +173,13 @@ def list_objects(prefix, project, category, fnfilter):
 
     path = os.path.join(prefix, project, category)
 
-    if not os.path.exists(path): return []
+    if not os.path.exists(path):
+        return []
 
     retval = []
     for base, dirs, files in os.walk(path):
         for k in fnmatch.filter(files, fnfilter):
-            retval.append(os.path.join(base, k).replace(path + os.sep, ''))
+            retval.append(os.path.join(base, k).replace(path + os.sep, ""))
             retval[-1] = os.path.splitext(retval[-1])[0]
 
     return retval
@@ -203,9 +203,11 @@ def detemplatize(template, data):
 
     """
 
-    from jinja2 import Environment, DictLoader
-    env = Environment(loader=DictLoader({'object': template}))
-    return env.get_template('object').render(**data)
+    from jinja2 import DictLoader
+    from jinja2 import Environment
+
+    env = Environment(loader=DictLoader({"object": template}), autoescape=True)
+    return env.get_template("object").render(**data)
 
 
 def upload_dataformat(prefix, name, data):
@@ -228,20 +230,19 @@ def upload_dataformat(prefix, name, data):
     """
 
     storage = beat.core.dataformat.Storage(prefix, name)
-    if not storage.exists(): return False #should be ignored
+    if not storage.exists():
+        return False  # should be ignored
     declaration = detemplatize(storage.json.load(), data)
     obj = simplejson.loads(declaration)
-    description = storage.doc.load() if storage.doc.exists() else ''
+    description = storage.doc.load() if storage.doc.exists() else ""
 
     # Uploads the data format into the platform
     from ....dataformats.models import DataFormat
 
-    author = data[name.split(os.sep)[0].replace('name', '')]
+    author = data[name.split(os.sep)[0].replace("name", "")]
 
     dataformat = DataFormat.objects.filter(
-        author=author,
-        name=storage.name,
-        version=int(storage.version),
+        author=author, name=storage.name, version=int(storage.version),
     )
 
     if not dataformat:
@@ -250,7 +251,7 @@ def upload_dataformat(prefix, name, data):
             author=author,
             name=storage.name,
             version=int(storage.version),
-            short_description=obj.get('description', ''),
+            short_description=obj.get("description", ""),
             description=description,
             declaration=declaration,
         )
@@ -261,12 +262,12 @@ def upload_dataformat(prefix, name, data):
 
     else:
         dataformat = dataformat[0]
-        dataformat.short_description = obj.get('description', '')
+        dataformat.short_description = obj.get("description", "")
         dataformat.description = description
         dataformat.save()
         logger.info("Updated dataformat `%s'", dataformat)
 
-    if not data['private']:
+    if not data["private"]:
         dataformat.share()
         logger.info("Set dataformat `%s' as public", dataformat)
 
@@ -285,7 +286,7 @@ def load_database_folders(filename):
     """
 
     if os.path.exists(filename):
-        return simplejson.loads(open(filename,'rb').read())
+        return simplejson.loads(open(filename, "rb").read())
 
     return {}
 
@@ -311,30 +312,29 @@ def upload_database(prefix, name, data, new_only=False):
 
     storage = beat.core.database.Storage(prefix, name)
     if not storage.exists():
-        return False #should be ignored
+        return False  # should be ignored
     declaration = detemplatize(storage.json.load(), data)
     obj = simplejson.loads(declaration, object_pairs_hook=collections.OrderedDict)
-    description = storage.doc.load() if storage.doc.exists() else ''
-    code = storage.code.load() if storage.code and storage.code.exists() else ''
+    description = storage.doc.load() if storage.doc.exists() else ""
+    code = storage.code.load() if storage.code and storage.code.exists() else ""
 
-    if 'root_folder' in data:
-        obj['root_folder'] = data['root_folder']
+    if "root_folder" in data:
+        obj["root_folder"] = data["root_folder"]
         declaration = simplejson.dumps(obj, indent=4)
 
-    from ....databases.models import Database
     from ....common.models import Shareable
+    from ....databases.models import Database
 
-    database = Database.objects.filter(name=storage.name,
-                                       version=int(storage.version))
+    database = Database.objects.filter(name=storage.name, version=int(storage.version))
 
     if not database:
 
         (database, errors) = Database.objects.create_database(
-            name = storage.name,
-            declaration = declaration,
-            code = code,
-            short_description = obj.get('description', ''),
-            description = description,
+            name=storage.name,
+            declaration=declaration,
+            code=code,
+            short_description=obj.get("description", ""),
+            description=description,
             version=int(storage.version),
         )
 
@@ -346,10 +346,10 @@ def upload_database(prefix, name, data, new_only=False):
 
     elif new_only:
         return True
-    else: #only updates files
+    else:  # only updates files
 
         database = database[0]
-        database.short_description = obj.get('description', '')
+        database.short_description = obj.get("description", "")
         database.declaration = declaration
         database.description = description
         database.code = code
@@ -357,7 +357,7 @@ def upload_database(prefix, name, data, new_only=False):
 
         logger.info("Updated database `%s'", database)
 
-    if not data['private']:
+    if not data["private"]:
         database.sharing = Shareable.PUBLIC
         database.save()
         logger.info("Set database `%s' as public", database)
@@ -385,19 +385,18 @@ def upload_toolchain(prefix, name, data):
     """
 
     storage = beat.core.toolchain.Storage(prefix, name)
-    if not storage.exists(): return False #should be ignored
+    if not storage.exists():
+        return False  # should be ignored
     declaration = detemplatize(storage.json.load(), data)
     obj = simplejson.loads(declaration)
-    description = storage.doc.load() if storage.doc.exists() else ''
+    description = storage.doc.load() if storage.doc.exists() else ""
 
     from ....toolchains.models import Toolchain
 
-    author = data[name.split(os.sep)[0].replace('name', '')]
+    author = data[name.split(os.sep)[0].replace("name", "")]
 
     toolchain = Toolchain.objects.filter(
-        author=author,
-        name=storage.name,
-        version=int(storage.version),
+        author=author, name=storage.name, version=int(storage.version),
     )
 
     if not toolchain:
@@ -406,7 +405,7 @@ def upload_toolchain(prefix, name, data):
             author=author,
             name=storage.name,
             version=int(storage.version),
-            short_description=obj.get('description', ''),
+            short_description=obj.get("description", ""),
             description=description,
             declaration=declaration,
         )
@@ -418,16 +417,16 @@ def upload_toolchain(prefix, name, data):
         else:
             logger.info("Added toolchain `%s'", toolchain)
 
-    else: #only updates files
+    else:  # only updates files
 
         toolchain = toolchain[0]
-        toolchain.short_description = obj.get('description', '')
+        toolchain.short_description = obj.get("description", "")
         toolchain.declaration = declaration
         toolchain.description = description
         toolchain.save()
         logger.info("Updated toolchain `%s'", toolchain)
 
-    if not data['private']:
+    if not data["private"]:
         toolchain.share()
         logger.info("Set toolchain `%s' as public", toolchain)
 
@@ -454,21 +453,20 @@ def upload_library(prefix, name, data):
     """
 
     storage = beat.core.library.Storage(prefix, name)
-    if not storage.json.exists(): return False #should be ignored
+    if not storage.json.exists():
+        return False  # should be ignored
     declaration = detemplatize(storage.json.load(), data)
     obj = simplejson.loads(declaration)
-    storage.language = obj.get('language', 'python')
-    description = storage.doc.load() if storage.doc.exists() else ''
-    code = storage.code.load() if storage.code and storage.code.exists() else ''
+    storage.language = obj.get("language", "python")
+    description = storage.doc.load() if storage.doc.exists() else ""
+    code = storage.code.load() if storage.code and storage.code.exists() else ""
 
     from ....libraries.models import Library
 
-    author = data[name.split(os.sep)[0].replace('name', '')]
+    author = data[name.split(os.sep)[0].replace("name", "")]
 
     library = Library.objects.filter(
-        author=author,
-        name=storage.name,
-        version=int(storage.version),
+        author=author, name=storage.name, version=int(storage.version),
     )
 
     if not library:
@@ -476,7 +474,7 @@ def upload_library(prefix, name, data):
             author=author,
             name=storage.name,
             version=int(storage.version),
-            short_description=obj.get('description', ''),
+            short_description=obj.get("description", ""),
             description=description,
             declaration=declaration,
             code=code,
@@ -487,17 +485,17 @@ def upload_library(prefix, name, data):
         else:
             logger.info("Added library `%s'", library)
 
-    else: #only updates files
+    else:  # only updates files
 
         library = library[0]
-        library.short_description = obj.get('description', '')
+        library.short_description = obj.get("description", "")
         library.declaration = declaration
         library.description = description
         library.code = code
         library.save()
         logger.info("Updated library `%s'", library)
 
-    if not data['private']:
+    if not data["private"]:
         library.share(public=True)
         logger.info("Set library `%s' as public", library)
 
@@ -524,21 +522,20 @@ def upload_algorithm(prefix, name, data):
     """
 
     storage = beat.core.algorithm.Storage(prefix, name)
-    if not storage.json.exists(): return False #should be ignored
+    if not storage.json.exists():
+        return False  # should be ignored
     declaration = detemplatize(storage.json.load(), data)
     obj = simplejson.loads(declaration)
-    storage.language = obj.get('language', 'python')
-    description = storage.doc.load() if storage.doc.exists() else ''
-    code = storage.code.load() if storage.code and storage.code.exists() else ''
+    storage.language = obj.get("language", "python")
+    description = storage.doc.load() if storage.doc.exists() else ""
+    code = storage.code.load() if storage.code and storage.code.exists() else ""
 
     from ....algorithms.models import Algorithm
 
-    author = data[name.split(os.sep)[0].replace('name', '')]
+    author = data[name.split(os.sep)[0].replace("name", "")]
 
     algorithm = Algorithm.objects.filter(
-        author=author,
-        name=storage.name,
-        version=int(storage.version),
+        author=author, name=storage.name, version=int(storage.version),
     )
 
     if not algorithm:
@@ -546,7 +543,7 @@ def upload_algorithm(prefix, name, data):
             author=author,
             name=storage.name,
             version=int(storage.version),
-            short_description=obj.get('description', ''),
+            short_description=obj.get("description", ""),
             description=description,
             declaration=declaration,
             code=code,
@@ -557,16 +554,16 @@ def upload_algorithm(prefix, name, data):
         else:
             logger.info("Added algorithm `%s'", algorithm)
 
-    else: #only updates files
+    else:  # only updates files
 
         algorithm = algorithm[0]
-        algorithm.short_description = obj.get('description', '')
+        algorithm.short_description = obj.get("description", "")
         algorithm.declaration = declaration
         algorithm.description = description
         algorithm.save()
         logger.info("Updated algorithm `%s'", algorithm)
 
-    if not data['private']:
+    if not data["private"]:
         algorithm.share(public=True)
         logger.info("Set algorithm `%s' as public", algorithm)
 
@@ -593,15 +590,16 @@ def upload_experiment(prefix, name, data):
     """
 
     storage = beat.core.experiment.Storage(prefix, name)
-    if not storage.exists(): return False #should be ignored
+    if not storage.exists():
+        return False  # should be ignored
     declaration = detemplatize(storage.json.load(), data)
     obj = simplejson.loads(declaration)
-    storage.language = obj.get('language', 'python')
-    description = storage.doc.load() if storage.doc.exists() else ''
+    storage.language = obj.get("language", "python")
+    description = storage.doc.load() if storage.doc.exists() else ""
 
     from ....toolchains.models import Toolchain
 
-    author = data[name.split(os.sep)[0].replace('name', '')]
+    author = data[name.split(os.sep)[0].replace("name", "")]
 
     toolchain_storage = beat.core.toolchain.Storage(name, storage.toolchain)
     toolchain = Toolchain.objects.get(
@@ -613,9 +611,7 @@ def upload_experiment(prefix, name, data):
     from ....experiments.models import Experiment
 
     experiment = Experiment.objects.filter(
-        author=author,
-        toolchain=toolchain,
-        name=storage.name,
+        author=author, toolchain=toolchain, name=storage.name,
     )
 
     if not experiment:
@@ -624,7 +620,7 @@ def upload_experiment(prefix, name, data):
             toolchain=toolchain,
             name=storage.name,
             declaration=declaration,
-            short_description=obj.get('description', ''),
+            short_description=obj.get("description", ""),
             description=description,
         )
         if experiment is None:
@@ -633,16 +629,16 @@ def upload_experiment(prefix, name, data):
         else:
             logger.info("Added experiment `%s'", experiment)
 
-    else: #only updates files
+    else:  # only updates files
 
         experiment = experiment[0]
-        experiment.short_description = obj.get('description', '')
+        experiment.short_description = obj.get("description", "")
         experiment.declaration = declaration
         experiment.description = description
         experiment.save()
         logger.info("Updated experiment `%s'", experiment)
 
-    if not data['private']:
+    if not data["private"]:
         experiment.share()
         logger.info("Set experiment `%s' as public", experiment)
 
@@ -669,26 +665,33 @@ def upload_plotter(prefix, name, data):
     """
 
     storage = beat.core.plotter.Storage(prefix, name)
-    if not storage.json.exists(): return False #should be ignored
+    if not storage.json.exists():
+        return False  # should be ignored
     declaration = detemplatize(storage.json.load(), data)
     obj = simplejson.loads(declaration)
-    storage.language = obj.get('language', 'python')
-    description = storage.doc.load() if storage.doc.exists() else ''
-    code = storage.code.load() if storage.code and storage.code.exists() else ''
+    storage.language = obj.get("language", "python")
+    description = storage.doc.load() if storage.doc.exists() else ""
+    code = storage.code.load() if storage.code and storage.code.exists() else ""
 
-    from ....plotters.models import Plotter, DefaultPlotter, PlotterParameter
     from ....common.models import Shareable
+    from ....plotters.models import DefaultPlotter
+    from ....plotters.models import Plotter
+    from ....plotters.models import PlotterParameter
 
-    author = data[name.split(os.sep)[0].replace('name', '')]
+    author = data[name.split(os.sep)[0].replace("name", "")]
 
     plotter = Plotter.objects.filter(
-        author=author,
-        name=storage.name,
-        version=int(storage.version),
+        author=author, name=storage.name, version=int(storage.version),
     )
 
-    sample_data_file_location = prefix + "/plotters/" + name.split("/")[0] + "/" + \
-        name.split("/")[1] + "/sample_data.txt"
+    sample_data_file_location = (
+        prefix
+        + "/plotters/"
+        + name.split("/")[0]
+        + "/"
+        + name.split("/")[1]
+        + "/sample_data.txt"
+    )
 
     with open(sample_data_file_location) as sample_data_file:
         sample_data = simplejson.load(sample_data_file)
@@ -698,7 +701,7 @@ def upload_plotter(prefix, name, data):
             author=author,
             name=storage.name,
             version=int(storage.version),
-            short_description=obj.get('description', ''),
+            short_description=obj.get("description", ""),
             description=description,
             declaration=declaration,
             code=code,
@@ -711,10 +714,10 @@ def upload_plotter(prefix, name, data):
             plotter.save()
             logger.info("Added plotter `%s'", plotter)
 
-    else: #only updates documentation
+    else:  # only updates documentation
 
         plotter = plotter[0]
-        plotter.short_description = obj.get('description', '')
+        plotter.short_description = obj.get("description", "")
         plotter.declaration = declaration
         plotter.description = description
         plotter.code = code
@@ -722,21 +725,35 @@ def upload_plotter(prefix, name, data):
         plotter.save()
         logger.info("Updated plotter `%s'", plotter)
 
-    if not data['private']:
+    if not data["private"]:
         plotter.share(public=True)
         logger.info("Set plotter `%s' as public", plotter)
 
     # Make it the format default
-    if plotter.dataformat.author.username == author.username and \
-            plotter.dataformat.name == storage.name and \
-            plotter.dataformat.version == int(storage.version):
+    if (
+        plotter.dataformat.author.username == author.username
+        and plotter.dataformat.name == storage.name
+        and plotter.dataformat.version == int(storage.version)
+    ):
 
         # Adding some plotter parameters
-        plotterparameter_data_file_location = prefix + "/plotters/" + name.split("/")[0] + "/" + \
-            name.split("/")[1] + "/default_plotterparameter.txt"
+        plotterparameter_data_file_location = (
+            prefix
+            + "/plotters/"
+            + name.split("/")[0]
+            + "/"
+            + name.split("/")[1]
+            + "/default_plotterparameter.txt"
+        )
 
-        short_desc_file_location = prefix + "/plotters/" + name.split("/")[0] + "/" + \
-            name.split("/")[1] + "/default_plotterparameter_short_description.txt"
+        short_desc_file_location = (
+            prefix
+            + "/plotters/"
+            + name.split("/")[0]
+            + "/"
+            + name.split("/")[1]
+            + "/default_plotterparameter_short_description.txt"
+        )
 
         with open(plotterparameter_data_file_location) as plotterparameter_data_file:
             plotterparameter_data = simplejson.load(plotterparameter_data_file)
@@ -744,56 +761,83 @@ def upload_plotter(prefix, name, data):
         with open(short_desc_file_location) as short_desc_data_file:
             short_desc = short_desc_data_file.readline().split("\n")[0]
 
-        plotterparameter = PlotterParameter.objects.create(name=plotter.dataformat.name,
-                                                           author=author, plotter=plotter, data=simplejson.dumps(plotterparameter_data,
-                                                                                                                 indent=4), short_description=short_desc, sharing = Shareable.PUBLIC)
+        plotterparameter = PlotterParameter.objects.create(
+            name=plotter.dataformat.name,
+            author=author,
+            plotter=plotter,
+            data=simplejson.dumps(plotterparameter_data, indent=4),
+            short_description=short_desc,
+            sharing=Shareable.PUBLIC,
+        )
 
         plotterparameter.save()
         logger.info("Add plotterparameter `%s' ", plotterparameter)
 
-
         default = DefaultPlotter.objects.filter(dataformat=plotter.dataformat)
 
         if default:
             default.plotter = plotter
         else:
-            default = DefaultPlotter(dataformat=plotter.dataformat,
-                                     plotter=plotter, parameter=plotterparameter)
+            default = DefaultPlotter(
+                dataformat=plotter.dataformat,
+                plotter=plotter,
+                parameter=plotterparameter,
+            )
             default.save()
 
-        logger.info("Set plotter `%s' and plotterparameter `%s'  as default for `%s'", plotter, plotterparameter, plotter.dataformat)
+        logger.info(
+            "Set plotter `%s' and plotterparameter `%s'  as default for `%s'",
+            plotter,
+            plotterparameter,
+            plotter.dataformat,
+        )
 
         if plotter.dataformat.name == "isoroc":
             # Adding extra plotterparameter if not already present for plotter isoroc
-            other_plotterparameter_location = prefix + "/plotters/" + name.split("/")[0] + "/" + \
-                "other_plotterparameters"
+            other_plotterparameter_location = (
+                prefix
+                + "/plotters/"
+                + name.split("/")[0]
+                + "/"
+                + "other_plotterparameters"
+            )
 
-            the_folders = filter(lambda x:\
-                                 os.path.isdir(os.path.join(other_plotterparameter_location, x)),\
-                                 os.listdir(other_plotterparameter_location))
+            the_folders = filter(
+                lambda x: os.path.isdir(
+                    os.path.join(other_plotterparameter_location, x)
+                ),
+                os.listdir(other_plotterparameter_location),
+            )
 
             for folder_name in the_folders:
 
                 others_plotterparameter = PlotterParameter.objects.filter(
-                    author=author,
-                    name=folder_name,
-                    version=int(storage.version),
+                    author=author, name=folder_name, version=int(storage.version),
                 )
 
                 if others_plotterparameter is not None:
                     param_folder = other_plotterparameter_location + "/" + folder_name
                     data_file_location = param_folder + "/default_plotterparameter.txt"
-                    short_desc_file_location = param_folder + "/default_plotterparameter_short_description.txt"
+                    short_desc_file_location = (
+                        param_folder + "/default_plotterparameter_short_description.txt"
+                    )
 
                     with open(data_file_location) as plotterparameter_data_file:
-                        plotterparameter_data = simplejson.load(plotterparameter_data_file)
+                        plotterparameter_data = simplejson.load(
+                            plotterparameter_data_file
+                        )
 
                     with open(short_desc_file_location) as short_desc_data_file:
                         short_desc = short_desc_data_file.readline().split("\n")[0]
 
-                    plotterparameter = PlotterParameter.objects.create(name=folder_name,
-                                                                       author=author, plotter=plotter, data=simplejson.dumps(plotterparameter_data,
-                                                                                                                             indent=4), short_description=short_desc, sharing = Shareable.PUBLIC)
+                    plotterparameter = PlotterParameter.objects.create(
+                        name=folder_name,
+                        author=author,
+                        plotter=plotter,
+                        data=simplejson.dumps(plotterparameter_data, indent=4),
+                        short_description=short_desc,
+                        sharing=Shareable.PUBLIC,
+                    )
 
                     logger.info("Add plotterparameter `%s' ", folder_name)
 
@@ -828,17 +872,17 @@ def upload_dispatcher(prefix, project, type, name, data):
     base_subdir = os.path.join(prefix, project)
 
     valid_types = {
-        'dataformats': upload_dataformat,
-            'databases': upload_database,
-            'libraries': upload_library,
-            'algorithms': upload_algorithm,
-            'toolchains': upload_toolchain,
-            'experiments': upload_experiment,
-            'plotters': upload_plotter,
+        "dataformats": upload_dataformat,
+        "databases": upload_database,
+        "libraries": upload_library,
+        "algorithms": upload_algorithm,
+        "toolchains": upload_toolchain,
+        "experiments": upload_experiment,
+        "plotters": upload_plotter,
     }
 
     if type not in valid_types:
-        raise KeyError("Type must be one of `%s'" % ', '.join(valid_types.keys()))
+        raise KeyError("Type must be one of `%s'" % ", ".join(valid_types.keys()))
 
     upload_function = valid_types[type]
 
@@ -857,45 +901,46 @@ def upload_dispatcher(prefix, project, type, name, data):
 
 
 def link_database_versions():
-    '''Link object versions together'''
+    """Link object versions together"""
 
     from ....databases.models import Database
 
     for obj in Database.objects.all():
         if obj.version > 1:
-            #search for similar
+            # search for similar
             try:
-                existing = Database.objects.get(name=obj.name,
-                                                version=obj.version-1)
-                logger.info("Linking database `%s' -> `%s' (version)",
-                            obj, existing)
+                existing = Database.objects.get(name=obj.name, version=obj.version - 1)
+                logger.info("Linking database `%s' -> `%s' (version)", obj, existing)
                 obj.previous_version = existing
                 obj.save()
             except Database.DoesNotExist:
-                pass #ignores
+                pass  # ignores
 
 
 def link_contribution_versions(klass):
-    '''Link object versions together'''
-
+    """Link object versions together"""
 
     for obj in klass.objects.all():
         if obj.version > 1:
-            #search for similar
+            # search for similar
             try:
-                existing = klass.objects.get(author=obj.author, name=obj.name,
-                                             version=obj.version-1)
-                logger.info("Linking %s `%s' -> `%s' (version)",
-                            klass.__name__.lower(), obj, existing)
+                existing = klass.objects.get(
+                    author=obj.author, name=obj.name, version=obj.version - 1
+                )
+                logger.info(
+                    "Linking %s `%s' -> `%s' (version)",
+                    klass.__name__.lower(),
+                    obj,
+                    existing,
+                )
                 obj.previous_version = existing
                 obj.save()
             except klass.DoesNotExist:
-                pass #ignores
+                pass  # ignores
 
 
-def install_contributions(source_prefix, project, template_data,
-                          db_root_file=None):
-    '''Installs all contributions for a given project
+def install_contributions(source_prefix, project, template_data, db_root_file=None):
+    """Installs all contributions for a given project
 
 
     Parameters:
@@ -913,89 +958,91 @@ def install_contributions(source_prefix, project, template_data,
         database root for databases to be inserted. Database names not present
         at the project directory will be ignored.
 
-    '''
+    """
 
     # Dataformat adding requires a special trick as there are dependencies
     # between different dataformats. Our recipe: we try to upload all of them
     # one after the other. If one fails, we retry on the next loop, until all
     # formats have been uploaded.
-    dataformat_filenames_next = list_objects(source_prefix, project,
-                                             'dataformats', '*.json')
+    dataformat_filenames_next = list_objects(
+        source_prefix, project, "dataformats", "*.json"
+    )
     dataformat_filenames_cur = []
 
     while True:
-        if not dataformat_filenames_next: break
+        if not dataformat_filenames_next:
+            break
         if len(dataformat_filenames_cur) == len(dataformat_filenames_next):
             break
         dataformat_filenames_cur = dataformat_filenames_next
         dataformat_filenames_next = []
         for k in dataformat_filenames_cur:
-            if not upload_dispatcher(source_prefix, project, 'dataformats', k,
-                                     template_data):
+            if not upload_dispatcher(
+                source_prefix, project, "dataformats", k, template_data
+            ):
                 dataformat_filenames_next.append(k)
     from ....dataformats.models import DataFormat
+
     link_contribution_versions(DataFormat)
 
     # Reads database root file, if provided
     db_root = {}
-    if db_root_file: db_root.update(load_database_folders(db_root_file))
+    if db_root_file:
+        db_root.update(load_database_folders(db_root_file))
 
-    for k in list_objects(source_prefix, project, 'databases', '*.json'):
-        if k in db_root: template_data['root_folder'] = db_root[k]
-        upload_dispatcher(source_prefix, project, 'databases', k,
-                          template_data)
+    for k in list_objects(source_prefix, project, "databases", "*.json"):
+        if k in db_root:
+            template_data["root_folder"] = db_root[k]
+        upload_dispatcher(source_prefix, project, "databases", k, template_data)
     link_database_versions()
 
-    for k in list_objects(source_prefix, project, 'toolchains',
-                          '*.json'):
-        upload_dispatcher(source_prefix, project, 'toolchains', k,
-                          template_data)
+    for k in list_objects(source_prefix, project, "toolchains", "*.json"):
+        upload_dispatcher(source_prefix, project, "toolchains", k, template_data)
     from ....toolchains.models import Toolchain
+
     link_contribution_versions(Toolchain)
 
     # Libraries adding requires a special trick as there are
     # dependencies between different libraries and algorithms. Our
     # recipe: we use the same technique as for dataformats.
-    library_filenames_next = list_objects(source_prefix, project,
-                                          'libraries', '*.json')
+    library_filenames_next = list_objects(source_prefix, project, "libraries", "*.json")
     library_filenames_cur = []
 
     while True:
-        if not library_filenames_next: break
+        if not library_filenames_next:
+            break
         if len(library_filenames_cur) == len(library_filenames_next):
             break
         library_filenames_cur = library_filenames_next
         library_filenames_next = []
         for k in library_filenames_cur:
-            if not upload_dispatcher(source_prefix, project,
-                                     'libraries', k, template_data):
+            if not upload_dispatcher(
+                source_prefix, project, "libraries", k, template_data
+            ):
                 library_filenames_next.append(k)
     from ....libraries.models import Library
+
     link_contribution_versions(Library)
 
-    for k in list_objects(source_prefix, project, 'algorithms',
-                          '*.json'):
-        upload_dispatcher(source_prefix, project, 'algorithms', k,
-                          template_data)
+    for k in list_objects(source_prefix, project, "algorithms", "*.json"):
+        upload_dispatcher(source_prefix, project, "algorithms", k, template_data)
     from ....algorithms.models import Algorithm
+
     link_contribution_versions(Algorithm)
 
-    for k in list_objects(source_prefix, project, 'plotters', '*.json'):
-        upload_dispatcher(source_prefix, project, 'plotters', k,
-                          template_data)
+    for k in list_objects(source_prefix, project, "plotters", "*.json"):
+        upload_dispatcher(source_prefix, project, "plotters", k, template_data)
     from ....plotters.models import Plotter
+
     link_contribution_versions(Plotter)
 
-    for k in list_objects(source_prefix, project, 'experiments',
-                          '*.json'):
-        upload_dispatcher(source_prefix, project, 'experiments', k,
-                          template_data)
+    for k in list_objects(source_prefix, project, "experiments", "*.json"):
+        upload_dispatcher(source_prefix, project, "experiments", k, template_data)
 
 
 class Command(BaseCommand):
 
-    help = 'Uploads stock contributions into the database'
-
+    help = "Uploads stock contributions into the database"
 
     def __init__(self):
 
@@ -1003,86 +1050,128 @@ class Command(BaseCommand):
 
         self.prefix = os.path.join(
             os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0]))),
-            'src',
-              'beat.examples',
+            "src",
+            "beat.examples",
         )
 
         # gets a list of all available projects, excluding directories
-        ignore = ['system', 'LICENSE', '.git', '.gitignore', 'README.rst']
+        ignore = ["system", "LICENSE", ".git", ".gitignore", "README.rst"]
         projects = os.listdir(self.prefix)
         self.projects = []
         for candidate in projects:
             if (candidate not in ignore) or (candidate not in projects):
                 self.projects.append(candidate)
 
-
     def add_arguments(self, parser):
 
         from argparse import RawDescriptionHelpFormatter
+
         parser.epilog = __doc__
         parser.formatter_class = RawDescriptionHelpFormatter
 
-        parser.add_argument('--username', '-u', dest='username', type=str,
-                            default='user', help='Username to create, associated with ' \
-                            'user contributions [default: %(default)s]')
-
-        parser.add_argument('--password', '-P', dest='password', type=str,
-                            default='user', help='The password to set for such an user ' \
-                            '[default: %(default)s]')
-
-        parser.add_argument('--private', '-p', action='store_true',
-                            dest='private', default=False, help='Set this flag if all ' \
-                            'objects should be private to the user rather than public')
-
-        parser.add_argument('--database-root-file', '-R', type=str,
-                            dest='database_root_file', help='The JSON file containing ' \
-                            'the root directories of the databases installed ' \
-                            'on the platform. If not set or if there is no ' \
-                            'entry in this file for a given database, the root ' \
-                            'directory defined on the JSON database file is used.')
-
-        parser.add_argument('--source-prefix', '-X', type=str,
-                            dest='source_prefix', default=self.prefix,
-                            help='Set this to the root of the directory containing ' \
-                            'the project prefixes you wish to install ' \
-                            '[default: %(default)s]')
-
-        parser.add_argument('--queue-configuration', '-Q', type=str,
-                            dest='queue_configuration', help='The configuration for ' \
-                            'queues and environments to be inserted into the ' \
-                            'web server. If not passed, use the default ' \
-                            'queue-worker-environment configuration for a ' \
-                            'local development server.')
-
-        parser.add_argument('project', nargs='*', type=str,
-                            default=self.projects, help='The project data you wish to ' \
-                            'install. Currently, the default is to install ' \
-                            'data for all available projects. [default: ' \
-                            '%s]' % ', '.join(self.projects))
+        parser.add_argument(
+            "--username",
+            "-u",
+            dest="username",
+            type=str,
+            default="user",
+            help="Username to create, associated with "
+            "user contributions [default: %(default)s]",
+        )
+
+        parser.add_argument(
+            "--password",
+            "-P",
+            dest="password",
+            type=str,
+            default="user",
+            help="The password to set for such an user " "[default: %(default)s]",
+        )
+
+        parser.add_argument(
+            "--private",
+            "-p",
+            action="store_true",
+            dest="private",
+            default=False,
+            help="Set this flag if all "
+            "objects should be private to the user rather than public",
+        )
 
+        parser.add_argument(
+            "--database-root-file",
+            "-R",
+            type=str,
+            dest="database_root_file",
+            help="The JSON file containing "
+            "the root directories of the databases installed "
+            "on the platform. If not set or if there is no "
+            "entry in this file for a given database, the root "
+            "directory defined on the JSON database file is used.",
+        )
+
+        parser.add_argument(
+            "--source-prefix",
+            "-X",
+            type=str,
+            dest="source_prefix",
+            default=self.prefix,
+            help="Set this to the root of the directory containing "
+            "the project prefixes you wish to install "
+            "[default: %(default)s]",
+        )
+
+        parser.add_argument(
+            "--queue-configuration",
+            "-Q",
+            type=str,
+            dest="queue_configuration",
+            help="The configuration for "
+            "queues and environments to be inserted into the "
+            "web server. If not passed, use the default "
+            "queue-worker-environment configuration for a "
+            "local development server.",
+        )
+
+        parser.add_argument(
+            "project",
+            nargs="*",
+            type=str,
+            default=self.projects,
+            help="The project data you wish to "
+            "install. Currently, the default is to install "
+            "data for all available projects. [default: "
+            "%s]" % ", ".join(self.projects),
+        )
 
     def handle(self, *ignored, **arguments):
 
         # Setup this command's logging level
         global logger
-        arguments['verbosity'] = int(arguments['verbosity'])
-        if arguments['verbosity'] >= 1:
-            if arguments['verbosity'] == 1: logger.setLevel(logging.INFO)
-            elif arguments['verbosity'] >= 2: logger.setLevel(logging.DEBUG)
+        arguments["verbosity"] = int(arguments["verbosity"])
+        if arguments["verbosity"] >= 1:
+            if arguments["verbosity"] == 1:
+                logger.setLevel(logging.INFO)
+            elif arguments["verbosity"] >= 2:
+                logger.setLevel(logging.DEBUG)
 
         # Checks projects
-        if not arguments['project']:
-            arguments['project'] = ['test', 'advanced']
+        if not arguments["project"]:
+            arguments["project"] = ["test", "advanced"]
 
-        while 'system' in arguments['project']:
-            logger.warn("Removing `system' from the list of projects to " \
-                        "install (this is them minimal default anyway)")
-            arguments['project'].remove('system')
+        while "system" in arguments["project"]:
+            logger.warning(
+                "Removing `system' from the list of projects to "
+                "install (this is the minimal default anyway)"
+            )
+            arguments["project"].remove("system")
 
-        for k in arguments['project']:
+        for k in arguments["project"]:
             if k not in self.projects:
-                logger.error("Project `%s' is not available, choose from: %s",
-                             ', '.join("`%s'" % k for k in self.projects))
+                logger.error(
+                    "Project `%s' is not available, choose from: %s",
+                    k,
+                    ", ".join("`%s'" % p for p in self.projects),
+                )
                 sys.exit(1)
 
         # Creates the prefix directory
@@ -1092,46 +1181,48 @@ class Command(BaseCommand):
 
         # Creates the cache directory
         if not os.path.exists(settings.CACHE_ROOT):
-            logger.info("Creating cache directory `%s'...",
-                        settings.CACHE_ROOT)
+            logger.info("Creating cache directory `%s'...", settings.CACHE_ROOT)
             os.makedirs(settings.CACHE_ROOT)
 
         # Sync database
         from django.core.management import call_command
-        call_command('migrate', interactive=False, verbosity=1)
+
+        call_command("migrate", interactive=False, verbosity=1)
 
         # Setup sites: 1.Development; 2.Staging; 3.Production
         create_sites()
 
-        system_user, plot_user, user = create_users(arguments['username'],
-                                                    arguments['password'])
-
+        system_user, plot_user, user = create_users(
+            arguments["username"], arguments["password"]
+        )
 
         # Sets up initial groups
-        add_group('Default')
+        add_group("Default")
 
         # Sets up the queue and environments
-        setup_environment(arguments['queue_configuration'],
-                          arguments['verbosity'])
-        from ....backend.models import Environment, EnvironmentLanguage, Queue
+        setup_environment(arguments["queue_configuration"], arguments["verbosity"])
+        from ....backend.models import EnvironmentLanguage
         from ....code.models import Code
-        environment = EnvironmentLanguage.objects.filter(language=Code.PYTHON).first().environment
+
+        environment = (
+            EnvironmentLanguage.objects.filter(language=Code.PYTHON).first().environment
+        )
         queue = environment.queues.first()
 
         # Iterates over projects to install
-        for project in ['system'] + arguments['project']:
+        for project in ["system"] + arguments["project"]:
 
             template_data = dict(
-                system_user = system_user,
-                plot_user = plot_user,
-                user = user,
-                private = arguments['private'],
-                queue = queue.name,
-                environment = dict(name=environment.name,
-                                   version=environment.version),
+                system_user=system_user,
+                plot_user=plot_user,
+                user=user,
+                private=arguments["private"],
+                queue=queue.name,
+                environment=dict(name=environment.name, version=environment.version),
             )
 
             logger.info("Adding objects for project `%s'...", project)
 
-            install_contributions(self.prefix, project, template_data,
-                                  arguments['database_root_file'])
+            install_contributions(
+                self.prefix, project, template_data, arguments["database_root_file"]
+            )
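For reference, the reworked `install` command can also be driven programmatically through Django's management API. A minimal sketch, assuming a configured `DJANGO_SETTINGS_MODULE` and a `test` project under the source prefix; the username and password are illustrative only:

    # Invoke the command exactly as `manage.py install` would.
    import django
    from django.core.management import call_command

    django.setup()  # populate the app registry before issuing commands
    call_command(
        "install",
        "test",             # positional `project` argument
        username="alice",   # --username (hypothetical value)
        password="s3cr3t",  # --password (hypothetical value)
        private=True,       # --private
        verbosity=2,        # handle() maps >= 2 to DEBUG logging
    )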
diff --git a/beat/web/utils/management/commands/list_active_users.py b/beat/web/utils/management/commands/list_active_users.py
index 2ee07a79b..00e86fc2c 100644
--- a/beat/web/utils/management/commands/list_active_users.py
+++ b/beat/web/utils/management/commands/list_active_users.py
@@ -27,9 +27,9 @@
 ###############################################################################
 
 
-from django.core.management.base import BaseCommand
 from django.contrib.auth.models import User
 from django.contrib.sessions.models import Session
+from django.core.management.base import BaseCommand
 from django.utils import timezone
 
 
@@ -49,30 +49,40 @@ def get_current_users():
     user_id_list = []
     for session in active_sessions:
         data = session.get_decoded()
-        user_id_list.append(data.get('_auth_user_id', None))
+        user_id_list.append(data.get("_auth_user_id", None))
     # Query all logged in users based on id list
     return User.objects.filter(id__in=user_id_list)
 
 
 class Command(BaseCommand):
 
-    help = 'Get active users'
+    help = "Get active users"
 
     def add_arguments(self, parser):
-        parser.add_argument('--email', '-e', action='store_true',
-                        dest='show_email', default=False, help='Set this flag'\
-                        'to also print email addresse')
-        parser.add_argument('--only-email', '-o', action='store_true',
-                        dest='show_only_email', default=False, help='Set this'\
-                        'flag to only print email addresse')
+        parser.add_argument(
+            "--email",
+            "-e",
+            action="store_true",
+            dest="show_email",
+            default=False,
+            help="Set this flag" "to also print email addresse",
+        )
+        parser.add_argument(
+            "--only-email",
+            "-o",
+            action="store_true",
+            dest="show_only_email",
+            default=False,
+            help="Set this" "flag to only print email addresse",
+        )
 
     def handle(self, *args, **options):
         current_users = get_current_users()
         for user in current_users:
-            if options['show_only_email']:
+            if options["show_only_email"]:
                 print(user.email)
             else:
                 user_data = "{} {}".format(user.first_name, user.last_name)
-                if options['show_email']:
+                if options["show_email"]:
                     user_data += " {}".format(user.email)
-                print(user_data)
\ No newline at end of file
+                print(user_data)
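The session-walking idiom in `get_current_users()` is plain Django: decode every unexpired `Session` row and collect the user id stored under the `_auth_user_id` key. A self-contained sketch of the same technique, assuming database-backed sessions:

    from django.contrib.auth.models import User
    from django.contrib.sessions.models import Session
    from django.utils import timezone

    def active_user_emails():
        # Decode each unexpired session and pull Django's user-id key.
        sessions = Session.objects.filter(expire_date__gte=timezone.now())
        user_ids = [s.get_decoded().get("_auth_user_id") for s in sessions]
        return User.objects.filter(id__in=user_ids).values_list("email", flat=True)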
diff --git a/beat/web/utils/management/commands/restore.py b/beat/web/utils/management/commands/restore.py
index c41963a7f..9b3241861 100644
--- a/beat/web/utils/management/commands/restore.py
+++ b/beat/web/utils/management/commands/restore.py
@@ -33,23 +33,20 @@ import tarfile
 import tempfile
 
 import django.core.serializers.base
-
+from django.apps import apps
+from django.apps import registry
+from django.conf import settings
 from django.contrib.auth.models import User
 from django.core.management import call_command
-from django.core.management.base import BaseCommand, CommandError
-from django.conf import settings
-from django.apps import apps, registry
+from django.core.management.base import BaseCommand
+from django.core.management.base import CommandError
 from django.db import connection
 
-
-# Overrides deserialization to affect OneToOneFields for Users correctly
-from ....navigation.models import Agreement
 from ....accounts.models import AccountSettings
 from ....experiments.models import Experiment
-
+from ....navigation.models import Agreement
 from .backup import APPS
 
-
 logger = logging.getLogger(__name__)
 _original_save = django.core.serializers.base.DeserializedObject.save
 
@@ -99,6 +96,7 @@ class Command(BaseCommand):
 
         if connection.vendor == "sqlite":
             import sqlite3  # noqa
+
             from pkg_resources import parse_version  # noqa
 
             if parse_version(sqlite3.sqlite_version) >= parse_version("3.26"):
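The gate above works because `pkg_resources.parse_version` compares release segments numerically rather than lexicographically, which matters for SQLite's two-digit minor versions:

    from pkg_resources import parse_version

    assert parse_version("3.26.0") >= parse_version("3.26")
    assert parse_version("3.9") < parse_version("3.26")  # numeric: 9 < 26
    assert "3.9" > "3.26"  # the plain-string trap parse_version avoids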
diff --git a/beat/web/utils/management/commands/scheduler.py b/beat/web/utils/management/commands/scheduler.py
index 06beb3d1c..f52c559fa 100644
--- a/beat/web/utils/management/commands/scheduler.py
+++ b/beat/web/utils/management/commands/scheduler.py
@@ -26,26 +26,26 @@
 #                                                                             #
 ###############################################################################
 
+import json
 import logging
 import signal
-import json
 import sys
 
-from django.core.management.base import BaseCommand
 from django.conf import settings
+from django.core.management.base import BaseCommand
 
 from beat.core.bcpapi import BCP
 from beat.core.bcpapi.client import BeatComputationClient
 
-from ....backend.models import JobSplit
-from ....backend.helpers import split_new_jobs
-from ....backend.helpers import process_newly_cancelled_experiments
 from ....backend.helpers import assign_splits_to_workers
 from ....backend.helpers import get_configuration_for_split
-from ....backend.helpers import on_split_started
+from ....backend.helpers import on_split_cancelled
 from ....backend.helpers import on_split_done
 from ....backend.helpers import on_split_fail
-from ....backend.helpers import on_split_cancelled
+from ....backend.helpers import on_split_started
+from ....backend.helpers import process_newly_cancelled_experiments
+from ....backend.helpers import split_new_jobs
+from ....backend.models import JobSplit
 
 logger = logging.getLogger(__name__)
 
diff --git a/beat/web/utils/management/commands/update_installed_databases.py b/beat/web/utils/management/commands/update_installed_databases.py
index 39ad020d3..f6c4d810a 100644
--- a/beat/web/utils/management/commands/update_installed_databases.py
+++ b/beat/web/utils/management/commands/update_installed_databases.py
@@ -58,32 +58,28 @@ Examples:
   By default, paths to the root of all databases are set to match the Idiap
   Research Institute filesystem organisation.
 """
-
+import logging
 import os
 import sys
-import logging
-
-import beat.core.database
 
-from django.core.management.base import BaseCommand
 from django.conf import settings
 from django.contrib.auth.models import User
+from django.core.management.base import BaseCommand
 
+import beat.core.database
 from beat.web.databases.models import Database
 
 from .install import link_database_versions
+from .install import list_objects
 from .install import load_database_folders
 from .install import upload_database
-from .install import list_objects
-
 
 logger = logging.getLogger(__name__)
 
 
 class Command(BaseCommand):
 
-    help = 'Install the various database up to their latest versions'
-
+    help = "Install the various database up to their latest versions"
 
     def __init__(self):
 
@@ -91,78 +87,103 @@ class Command(BaseCommand):
 
         self.prefix = os.path.join(
             os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0]))),
-                            'src',
-                            'beat.examples',
+            "src",
+            "beat.examples",
         )
 
-
     def add_arguments(self, parser):
 
         from argparse import RawDescriptionHelpFormatter
+
         parser.epilog = __doc__
         parser.formatter_class = RawDescriptionHelpFormatter
 
-        parser.add_argument('--private', '-p', action='store_true',
-                            dest='private', default=False, help='Set this flag if all ' \
-                            'databases should private')
-
-        parser.add_argument('--database-root-file', '-R', type=str,
-                            dest='database_root_file', help='The JSON file containing ' \
-                            'the root directories of the databases installed ' \
-                            'on the platform. If not set or if there is no ' \
-                            'entry in this file for a given database, the root ' \
-                            'directory defined on the JSON database file is used.')
+        parser.add_argument(
+            "--private",
+            "-p",
+            action="store_true",
+            dest="private",
+            default=False,
+            help="Set this flag if all " "databases should private",
+        )
 
-        parser.add_argument('--replace', '-r', action='store_true',
-                            dest='replace', default=False, help='Set this flag if all ' \
-                            'databases should be replaced rather than just new added')
+        parser.add_argument(
+            "--database-root-file",
+            "-R",
+            type=str,
+            dest="database_root_file",
+            help="The JSON file containing "
+            "the root directories of the databases installed "
+            "on the platform. If not set or if there is no "
+            "entry in this file for a given database, the root "
+            "directory defined on the JSON database file is used.",
+        )
 
-        parser.add_argument('--dry-run', '-d', action='store_true',
-                            dest='dry_run', default=False, help='Set this flag to ' \
-                            'simulate a run.')
-        parser.add_argument('--user', '-u', type=str,
-                            dest='user', default="tutorial", help='Use given user ' \
-                            'for template rendering, currently used only by atvskeystroke')
+        parser.add_argument(
+            "--replace",
+            "-r",
+            action="store_true",
+            dest="replace",
+            default=False,
+            help="Set this flag if all "
+            "databases should be replaced rather than just new added",
+        )
 
+        parser.add_argument(
+            "--dry-run",
+            "-d",
+            action="store_true",
+            dest="dry_run",
+            default=False,
+            help="Set this flag to " "simulate a run.",
+        )
+        parser.add_argument(
+            "--user",
+            "-u",
+            type=str,
+            dest="user",
+            default="tutorial",
+            help="Use given user "
+            "for template rendering, currently used only by atvskeystroke",
+        )
 
     def handle(self, *ignored, **arguments):
         # Setup this command's logging level
         global logger
-        arguments['verbosity'] = int(arguments['verbosity'])
-        if arguments['verbosity'] >= 1:
-            if arguments['verbosity'] == 1:
+        arguments["verbosity"] = int(arguments["verbosity"])
+        if arguments["verbosity"] >= 1:
+            if arguments["verbosity"] == 1:
                 logger.setLevel(logging.INFO)
-            elif arguments['verbosity'] >= 2:
+            elif arguments["verbosity"] >= 2:
                 logger.setLevel(logging.DEBUG)
 
-        new_only = arguments['replace'] == False
-        dry_run = arguments['dry_run']
+        new_only = not arguments["replace"]
+        dry_run = arguments["dry_run"]
 
         system_user = User.objects.get(username=settings.SYSTEM_ACCOUNT)
-        user = User.objects.get(username=arguments['user'])
+        user = User.objects.get(username=arguments["user"])
 
         template_data = dict(
-            system_user=system_user,
-            user=user,
-            private=arguments['private']
+            system_user=system_user, user=user, private=arguments["private"]
         )
 
         # Reads database root file, if provided
-        db_root_file = arguments['database_root_file']
+        db_root_file = arguments["database_root_file"]
         db_root = {}
         if db_root_file:
             db_root.update(load_database_folders(db_root_file))
 
-        source_prefix = os.path.join(self.prefix, 'advanced')
-        for key in list_objects(self.prefix, 'advanced', 'databases', '*.json'):
-            template_data.pop('root_folder', None)
+        source_prefix = os.path.join(self.prefix, "advanced")
+        for key in list_objects(self.prefix, "advanced", "databases", "*.json"):
+            template_data.pop("root_folder", None)
             if key in db_root:
-                template_data['root_folder'] = db_root[key]
-            logger.info('Installing/updating: %s for %s' % (source_prefix, key))
+                template_data["root_folder"] = db_root[key]
+            logger.info("Installing/updating: %s for %s" % (source_prefix, key))
             if dry_run:
                 storage = beat.core.database.Storage(source_prefix, key)
-                database = Database.objects.filter(name=storage.name,
-                                                   version=int(storage.version))
+                database = Database.objects.filter(
+                    name=storage.name, version=int(storage.version)
+                )
 
                 if not database:
                     logger.info("Would create: %s" % key)
diff --git a/beat/web/utils/management/commands/worker.py b/beat/web/utils/management/commands/worker.py
index 4429b282e..e72935dd5 100644
--- a/beat/web/utils/management/commands/worker.py
+++ b/beat/web/utils/management/commands/worker.py
@@ -28,8 +28,8 @@
 
 import socket
 
-from django.core.management.base import BaseCommand
 from django.conf import settings
+from django.core.management.base import BaseCommand
 
 from beat.core.bcp import worker
 from beat.core.utils import setup_logging
diff --git a/beat/web/utils/management/commands/xdumpdata.py b/beat/web/utils/management/commands/xdumpdata.py
index 0dd4ef3e2..c4e5a7d42 100644
--- a/beat/web/utils/management/commands/xdumpdata.py
+++ b/beat/web/utils/management/commands/xdumpdata.py
@@ -6,73 +6,123 @@
 # indicated below.
 #
 # Django (BSD License)
-
 from collections import OrderedDict
 
 from django.apps import apps
 from django.core import serializers
-from django.core.management.base import BaseCommand, CommandError
-from django.db import DEFAULT_DB_ALIAS, router
+from django.core.management.base import BaseCommand
+from django.core.management.base import CommandError
+from django.db import DEFAULT_DB_ALIAS
+from django.db import router
 
 
 class Command(BaseCommand):
-    help = ("Output the contents of the database as a fixture of the given "
-            "format (using each model's default manager unless --all is "
-            "specified).")
+    help = (
+        "Output the contents of the database as a fixture of the given "
+        "format (using each model's default manager unless --all is "
+        "specified)."
+    )
 
     def add_arguments(self, parser):
-        parser.add_argument('args', metavar='app_label[.ModelName]', nargs='*',
-                            help='Restricts dumped data to the specified app_label or app_label.ModelName.')
-        parser.add_argument('--format', default='json', dest='format',
-                            help='Specifies the output serialization format for fixtures.')
-        parser.add_argument('--indent', default=None, dest='indent', type=int,
-                            help='Specifies the indent level to use when pretty-printing output.')
-        parser.add_argument('--database', action='store', dest='database',
-                            default=DEFAULT_DB_ALIAS,
-                            help='Nominates a specific database to dump fixtures from. '
-                            'Defaults to the "default" database.')
-        parser.add_argument('-e', '--exclude', dest='exclude', action='append', default=[],
-                            help='An app_label or app_label.ModelName to exclude '
-                            '(use multiple --exclude to exclude multiple apps/models).')
-        parser.add_argument('--natural-foreign', action='store_true', dest='use_natural_foreign_keys', default=False,
-                            help='Use natural foreign keys if they are available.')
-        parser.add_argument('--natural-primary', action='store_true', dest='use_natural_primary_keys', default=False,
-                            help='Use natural primary keys if they are available.')
-        parser.add_argument('-a', '--all', action='store_true', dest='use_base_manager', default=False,
-                            help="Use Django's base manager to dump all models stored in the database, "
-                            "including those that would otherwise be filtered or modified by a custom manager.")
-        parser.add_argument('--pks', dest='primary_keys',
-                            help="Only dump objects with given primary keys. "
-                            "Accepts a comma separated list of keys. "
-                            "This option will only work when you specify one model.")
-        parser.add_argument('-o', '--output', default=None, dest='output',
-                            help='Specifies file to which the output is written.')
+        parser.add_argument(
+            "args",
+            metavar="app_label[.ModelName]",
+            nargs="*",
+            help="Restricts dumped data to the specified app_label or app_label.ModelName.",
+        )
+        parser.add_argument(
+            "--format",
+            default="json",
+            dest="format",
+            help="Specifies the output serialization format for fixtures.",
+        )
+        parser.add_argument(
+            "--indent",
+            default=None,
+            dest="indent",
+            type=int,
+            help="Specifies the indent level to use when pretty-printing output.",
+        )
+        parser.add_argument(
+            "--database",
+            action="store",
+            dest="database",
+            default=DEFAULT_DB_ALIAS,
+            help="Nominates a specific database to dump fixtures from. "
+            'Defaults to the "default" database.',
+        )
+        parser.add_argument(
+            "-e",
+            "--exclude",
+            dest="exclude",
+            action="append",
+            default=[],
+            help="An app_label or app_label.ModelName to exclude "
+            "(use multiple --exclude to exclude multiple apps/models).",
+        )
+        parser.add_argument(
+            "--natural-foreign",
+            action="store_true",
+            dest="use_natural_foreign_keys",
+            default=False,
+            help="Use natural foreign keys if they are available.",
+        )
+        parser.add_argument(
+            "--natural-primary",
+            action="store_true",
+            dest="use_natural_primary_keys",
+            default=False,
+            help="Use natural primary keys if they are available.",
+        )
+        parser.add_argument(
+            "-a",
+            "--all",
+            action="store_true",
+            dest="use_base_manager",
+            default=False,
+            help="Use Django's base manager to dump all models stored in the database, "
+            "including those that would otherwise be filtered or modified by a custom manager.",
+        )
+        parser.add_argument(
+            "--pks",
+            dest="primary_keys",
+            help="Only dump objects with given primary keys. "
+            "Accepts a comma separated list of keys. "
+            "This option will only work when you specify one model.",
+        )
+        parser.add_argument(
+            "-o",
+            "--output",
+            default=None,
+            dest="output",
+            help="Specifies file to which the output is written.",
+        )
 
     def handle(self, *app_labels, **options):
-        format = options.get('format')
-        indent = options.get('indent')
-        using = options.get('database')
-        excludes = options.get('exclude')
-        output = options.get('output')
-        show_traceback = options.get('traceback')
-        use_natural_foreign_keys = options.get('use_natural_foreign_keys')
-        use_natural_primary_keys = options.get('use_natural_primary_keys')
-        use_base_manager = options.get('use_base_manager')
-        pks = options.get('primary_keys')
+        format = options.get("format")
+        indent = options.get("indent")
+        using = options.get("database")
+        excludes = options.get("exclude")
+        output = options.get("output")
+        show_traceback = options.get("traceback")
+        use_natural_foreign_keys = options.get("use_natural_foreign_keys")
+        use_natural_primary_keys = options.get("use_natural_primary_keys")
+        use_base_manager = options.get("use_base_manager")
+        pks = options.get("primary_keys")
 
         if pks:
-            primary_keys = pks.split(',')
+            primary_keys = pks.split(",")
         else:
             primary_keys = []
 
         excluded_apps = set()
         excluded_models = set()
         for exclude in excludes:
-            if '.' in exclude:
+            if "." in exclude:
                 try:
                     model = apps.get_model(exclude)
                 except LookupError:
-                    raise CommandError('Unknown model in excludes: %s' % exclude)
+                    raise CommandError("Unknown model in excludes: %s" % exclude)
                 excluded_models.add(model)
             else:
                 try:
@@ -84,16 +134,19 @@ class Command(BaseCommand):
         if len(app_labels) == 0:
             if primary_keys:
                 raise CommandError("You can only use --pks option with one model")
-            app_list = OrderedDict((app_config, None)
-                                   for app_config in apps.get_app_configs()
-                                   if app_config.models_module is not None and app_config not in excluded_apps)
+            app_list = OrderedDict(
+                (app_config, None)
+                for app_config in apps.get_app_configs()
+                if app_config.models_module is not None
+                and app_config not in excluded_apps
+            )
         else:
             if len(app_labels) > 1 and primary_keys:
                 raise CommandError("You can only use --pks option with one model")
             app_list = OrderedDict()
             for label in app_labels:
                 try:
-                    app_label, model_label = label.split('.')
+                    app_label, model_label = label.split(".")
                     try:
                         app_config = apps.get_app_config(app_label)
                     except LookupError as e:
@@ -103,7 +156,9 @@ class Command(BaseCommand):
                     try:
                         model = app_config.get_model(model_label)
                     except LookupError:
-                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
+                        raise CommandError(
+                            "Unknown model: %s.%s" % (app_label, model_label)
+                        )
 
                     app_list_value = app_list.setdefault(app_config, [])
 
@@ -115,7 +170,9 @@ class Command(BaseCommand):
                             app_list_value.append(model)
                 except ValueError:
                     if primary_keys:
-                        raise CommandError("You can only use --pks option with one model")
+                        raise CommandError(
+                            "You can only use --pks option with one model"
+                        )
                     # This is just an app - no model qualifier
                     app_label = label
                     try:
@@ -172,16 +229,21 @@ class Command(BaseCommand):
             progress_output = None
             object_count = 0
             # If dumpdata is outputting to stdout, there is no way to display progress
-            if (output and self.stdout.isatty() and options['verbosity'] > 0):
+            if output and self.stdout.isatty() and options["verbosity"] > 0:
                 progress_output = self.stdout
                 object_count = sum(get_objects(count_only=True))
-            stream = open(output, 'w') if output else None
+            stream = open(output, "w") if output else None
             try:
-                serializers.serialize(format, get_objects(), indent=indent,
-                                      use_natural_foreign_keys=use_natural_foreign_keys,
-                                      use_natural_primary_keys=use_natural_primary_keys,
-                                      stream=stream or self.stdout, progress_output=progress_output,
-                                      object_count=object_count)
+                serializers.serialize(
+                    format,
+                    get_objects(),
+                    indent=indent,
+                    use_natural_foreign_keys=use_natural_foreign_keys,
+                    use_natural_primary_keys=use_natural_primary_keys,
+                    stream=stream or self.stdout,
+                    progress_output=progress_output,
+                    object_count=object_count,
+                )
             finally:
                 if stream:
                     stream.close()
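Like the stock `dumpdata` it extends, `xdumpdata` is convenient to drive via `call_command`. A minimal sketch, assuming an installed `experiments` app; the app label and output file are illustrative:

    from django.core.management import call_command

    # Equivalent to:
    #   manage.py xdumpdata experiments --natural-foreign --indent 2 \
    #       -o experiments.json
    call_command(
        "xdumpdata",
        "experiments",
        format="json",
        indent=2,
        use_natural_foreign_keys=True,
        output="experiments.json",
    )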
-- 
GitLab