diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000000000000000000000000000000000000..994815d8870e9822617c4578efdce0e121988c60
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,4 @@
+[flake8]
+max-line-length = 88
+select = B,C,E,F,W,T4,B9,B950
+ignore = E501, W503, E203
diff --git a/.isort.cfg b/.isort.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..8d7af1de97c8aea738c0f7c503d1461ad413db4f
--- /dev/null
+++ b/.isort.cfg
@@ -0,0 +1,4 @@
+[settings]
+line_length=88
+order_by_type=true
+lines_between_types=1
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..20644394d3e0e2bee843554de12184bd36592a70
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,46 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+  - repo: https://github.com/timothycrosley/isort
+    rev: 4.3.21-2
+    hooks:
+    - id: isort
+      args: [-sl]
+  - repo: https://github.com/psf/black
+    rev: stable
+    hooks:
+      - id: black
+        exclude: bob/devtools/templates/setup.py
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v2.0.0
+    hooks:
+      - id: check-ast
+        exclude: bob/devtools/templates/setup.py
+      - id: check-case-conflict
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: debug-statements
+        exclude: bob/devtools/templates/setup.py
+      - id: check-added-large-files
+      - id: flake8
+        exclude: bob/devtools/templates/setup.py
+      - id: check-yaml
+        exclude: .*/meta\.yaml
+  - repo: local
+    hooks:
+      - id: sphinx-build
+        name: sphinx build
+        entry: python -m sphinx.cmd.build
+        args: [-a, -E, -W, doc, sphinx]
+        language: system
+        files: ^doc/
+        types: [file]
+        pass_filenames: false
+      - id: sphinx-doctest
+        name: sphinx doctest
+        entry: python -m sphinx.cmd.build
+        args: [-a, -E, -b, doctest, doc, sphinx]
+        language: system
+        files: ^doc/
+        types: [file]
+        pass_filenames: false
diff --git a/MANIFEST.in b/MANIFEST.in
index 524752bcba06cc067470898e874ae1815f358d4b..955321ac2f87b8bdf3c0accc13ede41b8638b7ca 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,4 @@
 include LICENSE README.rst buildout.cfg version.txt
 recursive-include doc conf.py *.rst *.sh *.png *.ico *.crt
 recursive-include bob/devtools/data *.md *.yaml *.pem matplotlibrc
-recursive-include bob/devtools/templates conf.py *.rst *.png *.ico LICENSE COPYING MANIFEST.in .gitlab-ci.yml .gitignore *.cfg *.txt *.py *.yaml
+recursive-include bob/devtools/templates conf.py *.rst *.png *.ico LICENSE COPYING MANIFEST.in .gitlab-ci.yml .gitignore *.cfg *.txt *.py *.yaml .flake8
diff --git a/bob/devtools/bootstrap.py b/bob/devtools/bootstrap.py
index cab4ed38d4e5bb3aa115d500c266b138be63add2..b3e202d1bd45db9c16ce361f2970d691e96336ed 100644
--- a/bob/devtools/bootstrap.py
+++ b/bob/devtools/bootstrap.py
@@ -4,6 +4,14 @@
 
 """Bootstraps a new miniconda installation and prepares it for development."""
 
+import glob
+import logging
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import time
 
 _BASE_CONDARC = """\
 add_pip_as_python_dependency: false #!final
@@ -34,16 +42,6 @@ _INTERVALS = (
 """Time intervals that make up human readable time slots"""
 
 
-import os
-import sys
-import glob
-import time
-import shutil
-import platform
-import subprocess
-
-import logging
-
 logger = logging.getLogger(__name__)
 
 
@@ -171,7 +169,6 @@ def merge_conda_cache(cache, prefix, name):
 
     # merge urls.txt files
     logger.info("Merging urls.txt files from cache...")
-    urls = []
     cached_pkgs_urls_txt = os.path.join(cached_pkgs_dir, "urls.txt")
 
     if not os.path.exists(cached_pkgs_urls_txt):
diff --git a/bob/devtools/build.py b/bob/devtools/build.py
index ff54a4e6493843e0939ef78cd31987b1f2c65f4f..9c6f0a538301f99fcb03ad4ec56ff17f15841c95 100644
--- a/bob/devtools/build.py
+++ b/bob/devtools/build.py
@@ -4,22 +4,21 @@
 """Tools for self-building and other utilities."""
 
 
-import os
-import re
-import sys
+import distutils.version
 import glob
 import json
-import shutil
+import logging
+import os
 import platform
+import re
 import subprocess
+import sys
 
-import logging
+import conda_build.api
+import yaml
 
 logger = logging.getLogger(__name__)
 
-import yaml
-import distutils.version
-
 
 def remove_conda_loggers():
     """Cleans-up conda API logger handlers to avoid logging repetition"""
@@ -31,8 +30,6 @@ def remove_conda_loggers():
         logger.debug("Removed conda logger handler at %s", handler)
 
 
-import conda_build.api
-
 remove_conda_loggers()
 
 
@@ -102,9 +99,7 @@ def next_build_number(channel_url, basename):
     remove_conda_loggers()
 
     # get the channel index
-    channel_urls = calculate_channel_urls(
-        [channel_url], prepend=False, use_local=False
-    )
+    channel_urls = calculate_channel_urls([channel_url], prepend=False, use_local=False)
     logger.debug("Downloading channel index from %s", channel_urls)
     index = fetch_index(channel_urls=channel_urls)
 
@@ -115,8 +110,7 @@ def next_build_number(channel_url, basename):
         name, version, build = basename[:-6].rsplit("-", 2)
     else:
         raise RuntimeError(
-            "Package name %s does not end in either "
-            ".tar.bz2 or .conda" % (basename,)
+            "Package name %s does not end in either " ".tar.bz2 or .conda" % (basename,)
         )
 
     # remove the build number as we're looking for the next value
@@ -152,11 +146,7 @@ def next_build_number(channel_url, basename):
         ):  # match!
             url = index[dist].url
             logger.debug(
-                "Found match at %s for %s-%s-%s",
-                url,
-                name,
-                version,
-                build_variant,
+                "Found match at %s for %s-%s-%s", url, name, version, build_variant,
             )
             build_number = max(build_number, dist.build_number + 1)
             urls[index[dist].timestamp] = url.replace(channel_url, "")
@@ -205,9 +195,7 @@ def make_conda_config(config, python, append_file, condarc_options):
         #    appropriate platform-specific subdir (e.g. win-64)
         if os.path.isdir(url):
             if not os.path.isabs(url):
-                url = os.path.normpath(
-                    os.path.abspath(os.path.join(os.getcwd(), url))
-                )
+                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
             url = url_path(url)
         retval.channel_urls.append(url)
 
@@ -261,8 +249,7 @@ def exists_on_channel(channel_url, basename):
             name, version, build = name[:-8].rsplit("-", 2)
         else:
             raise RuntimeError(
-                "Package name %s does not end in either "
-                ".tar.bz2 or .conda" % (name,)
+                "Package name %s does not end in either " ".tar.bz2 or .conda" % (name,)
             )
 
         # remove the build number as we're looking for the next value
@@ -289,7 +276,7 @@ def exists_on_channel(channel_url, basename):
 
 
 def remove_pins(deps):
-    return [l.split()[0] for l in deps]
+    return [ll.split()[0] for ll in deps]
 
 
 def parse_dependencies(recipe_dir, config):
@@ -436,8 +423,7 @@ def get_docserver_setup(public, stable, server, intranet, group):
     if (not public) and (not intranet):
         raise RuntimeError(
             "You cannot request for private channels and set"
-            " intranet=False (server=%s) - these are conflicting options"
-            % server
+            " intranet=False (server=%s) - these are conflicting options" % server
         )
 
     entries = []
@@ -461,9 +447,7 @@ def get_docserver_setup(public, stable, server, intranet, group):
                 server + prefix + "/docs/" + group + "/%(name)s/stable/",
             ]
         else:
-            entries += [
-                server + prefix + "/docs/" + group + "/%(name)s/master/"
-            ]
+            entries += [server + prefix + "/docs/" + group + "/%(name)s/master/"]
 
     return "|".join(entries)
 
@@ -500,8 +484,7 @@ def check_version(workdir, envtag):
                 '"version.txt" indicates version is a '
                 'pre-release (v%s) - but environment provided tag "%s", '
                 "which indicates this is a **stable** build. "
-                "Have you created the tag using ``bdt release``?"
-                % (version, envtag)
+                "Have you created the tag using ``bdt release``?" % (version, envtag)
             )
     else:  # it is a stable build
         if envtag is None:
@@ -554,20 +537,11 @@ def git_clean_build(runner, verbose):
     if not verbose:
         flags += "q"
 
-    runner(
-        ["git", "clean", flags]
-        + ["--exclude=%s" % k for k in exclude_from_cleanup]
-    )
+    runner(["git", "clean", flags] + ["--exclude=%s" % k for k in exclude_from_cleanup])
 
 
 def base_build(
-    bootstrap,
-    server,
-    intranet,
-    group,
-    recipe_dir,
-    conda_build_config,
-    condarc_options,
+    bootstrap, server, intranet, group, recipe_dir, conda_build_config, condarc_options,
 ):
     """Builds a non-beat/non-bob software dependence that doesn't exist on
     defaults.
@@ -614,18 +588,14 @@ def base_build(
         "\n  - ".join(condarc_options["channels"]),
     )
     logger.info("Merging conda configuration files...")
-    conda_config = make_conda_config(
-        conda_build_config, None, None, condarc_options
-    )
+    conda_config = make_conda_config(conda_build_config, None, None, condarc_options)
 
     metadata = get_rendered_metadata(recipe_dir, conda_config)
     arch = conda_arch()
 
     # checks we should actually build this recipe
     if should_skip_build(metadata):
-        logger.warn(
-            'Skipping UNSUPPORTED build of "%s" on %s', recipe_dir, arch
-        )
+        logger.warn('Skipping UNSUPPORTED build of "%s" on %s', recipe_dir, arch)
         return
 
     paths = get_output_path(metadata, conda_config)
@@ -643,8 +613,8 @@ def base_build(
     if any(urls):
         raise RuntimeError(
             "One or more packages for recipe at '%s' already exist (%s). "
-            "Change the package build number to trigger a build." % \
-            (recipe_dir, ", ".join(urls)),
+            "Change the package build number to trigger a build."
+            % (recipe_dir, ", ".join(urls)),
         )
 
     # if you get to this point, just builds the package(s)
@@ -656,9 +626,7 @@ if __name__ == "__main__":
 
     import argparse
 
-    parser = argparse.ArgumentParser(
-        description="Builds bob.devtools on the CI"
-    )
+    parser = argparse.ArgumentParser(description="Builds bob.devtools on the CI")
     parser.add_argument(
         "-g",
         "--group",
@@ -677,8 +645,7 @@ if __name__ == "__main__":
         default=os.environ.get(
             "CONDA_ROOT", os.path.realpath(os.path.join(os.curdir, "miniconda"))
         ),
-        help="The location where we should install miniconda "
-        "[default: %(default)s]",
+        help="The location where we should install miniconda " "[default: %(default)s]",
     )
     parser.add_argument(
         "-V",
@@ -756,8 +723,7 @@ if __name__ == "__main__":
     bootstrap.set_environment("BOB_PACKAGE_VERSION", version)
 
     # create the build configuration
-    conda_build_config = os.path.join(args.work_dir, "conda",
-            "conda_build_config.yaml")
+    conda_build_config = os.path.join(args.work_dir, "conda", "conda_build_config.yaml")
     recipe_append = os.path.join(args.work_dir, "data", "recipe_append.yaml")
 
     condarc = os.path.join(args.conda_root, "condarc")
@@ -821,8 +787,7 @@ if __name__ == "__main__":
             "typically means this build is running on a shared builder and "
             "the file ~/.conda/environments.txt is polluted with other "
             "environment paths.  To fix, empty that file and set its mode "
-            "to read-only for all."
-            % (path, os.path.join(args.conda_root, "conda-bld"))
+            "to read-only for all." % (path, os.path.join(args.conda_root, "conda-bld"))
         )
 
     # retrieve the current build number(s) for this build
diff --git a/bob/devtools/changelog.py b/bob/devtools/changelog.py
index 355ba2b84a27674bc05b3289b6f26c0b903f05ce..9ec044eb049c06e936319b039b682c32e9aa4654 100644
--- a/bob/devtools/changelog.py
+++ b/bob/devtools/changelog.py
@@ -3,11 +3,11 @@
 
 """Utilities for retrieving, parsing and auto-generating changelogs."""
 
-import io
 import datetime
+import io
 
-import pytz
 import dateutil.parser
+import pytz
 
 from .log import get_logger
 
@@ -25,18 +25,14 @@ def parse_date(d):
 def _sort_commits(commits, reverse):
     """Sorts gitlab commit objects using their ``committed_date`` attribute."""
 
-    return sorted(
-        commits, key=lambda x: parse_date(x.committed_date), reverse=reverse
-    )
+    return sorted(commits, key=lambda x: parse_date(x.committed_date), reverse=reverse)
 
 
 def _sort_tags(tags, reverse):
     """Sorts gitlab tag objects using their ``committed_date`` attribute."""
 
     return sorted(
-        tags,
-        key=lambda x: parse_date(x.commit["committed_date"]),
-        reverse=reverse,
+        tags, key=lambda x: parse_date(x.commit["committed_date"]), reverse=reverse,
     )
 
 
@@ -153,9 +149,7 @@ def _write_one_tag(f, pkg_name, tag):
         if line.startswith("* ") or line.startswith("- "):
             line = line[2:]
 
-        line = line.replace("!", pkg_name + "!").replace(
-            pkg_name + pkg_name, pkg_name
-        )
+        line = line.replace("!", pkg_name + "!").replace(pkg_name + pkg_name, pkg_name)
         line = line.replace("#", pkg_name + "#")
         if not line:
             continue
@@ -208,9 +202,7 @@ def _write_mergerequests_range(f, pkg_name, mrs):
         title = title.replace(" !", " " + pkg_name + "!")
         title = title.replace(" #", " " + pkg_name + "#")
         if mr.description is not None:
-            description = (
-                mr.description.strip().replace("\r", "").replace("\n", "  ")
-            )
+            description = mr.description.strip().replace("\r", "").replace("\n", "  ")
             description = description.replace(" !", " " + pkg_name + "!")
             description = description.replace(" #", " " + pkg_name + "#")
         else:
@@ -261,15 +253,13 @@ def get_changes_since(gitpkg, since):
     mrs = list(
         reversed(
             gitpkg.mergerequests.list(
-                state="merged",
-                updated_after=since,
-                order_by="updated_at",
-                all=True,
+                state="merged", updated_after=since, order_by="updated_at", all=True,
             )
         )
     )
     return mrs, tags, commits
 
+
 def write_tags_with_commits(f, gitpkg, since, mode):
     """Writes all tags and commits of a given package to the output file.
 
@@ -310,9 +300,7 @@ def write_tags_with_commits(f, gitpkg, since, mode):
             # the attribute 'merged_at' is not available in GitLab API as of 27
             # June 2018
             mrs4tag = [
-                k
-                for k in mrs
-                if (start_date < parse_date(k.updated_at) <= end_date)
+                k for k in mrs if (start_date < parse_date(k.updated_at) <= end_date)
             ]
             _write_mergerequests_range(
                 f, gitpkg.attributes["path_with_namespace"], mrs4tag
@@ -329,9 +317,7 @@ def write_tags_with_commits(f, gitpkg, since, mode):
             # write leftover merge requests
             # the attribute 'merged_at' is not available in GitLab API as of 27
             # June 2018
-            leftover_mrs = [
-                k for k in mrs if parse_date(k.updated_at) > start_date
-            ]
+            leftover_mrs = [k for k in mrs if parse_date(k.updated_at) > start_date]
             _write_mergerequests_range(
                 f, gitpkg.attributes["path_with_namespace"], leftover_mrs
             )
diff --git a/bob/devtools/ci.py b/bob/devtools/ci.py
index c18779b8512c98bc7889eaf21ba1ec481611707d..4a15fb443c63991b28821f099e5a1cb59d17e94e 100644
--- a/bob/devtools/ci.py
+++ b/bob/devtools/ci.py
@@ -4,11 +4,13 @@
 """Tools to help CI-based builds and artifact deployment."""
 
 
-import git
 import distutils.version
 
-from .log import get_logger, echo_info
+import git
+
 from .build import load_order_file
+from .log import echo_info
+from .log import get_logger
 
 logger = get_logger(__name__)
 
@@ -58,15 +60,11 @@ def is_stable(package, refname, tag, repodir):
 
     if tag is not None:
         logger.info('Project %s tag is "%s"', package, tag)
-        parsed_tag = distutils.version.LooseVersion(
-            tag[1:]
-        ).version  # remove 'v'
+        parsed_tag = distutils.version.LooseVersion(tag[1:]).version  # remove 'v'
         is_prerelease = any([isinstance(k, str) for k in parsed_tag])
 
         if is_prerelease:
-            logger.warn(
-                "Pre-release detected - not publishing to stable channels"
-            )
+            logger.warn("Pre-release detected - not publishing to stable channels")
             return False
 
         if is_master(refname, tag, repodir):
@@ -163,9 +161,7 @@ def select_build_file(basename, paths, branch):
         specific_basename = "%s-%s" % (basename, branch)
         for path in paths:
             path = os.path.realpath(path)
-            candidate = os.path.join(
-                path, "%s%s" % (specific_basename, extension)
-            )
+            candidate = os.path.join(path, "%s%s" % (specific_basename, extension))
             if os.path.exists(candidate):
                 return candidate
 
@@ -247,29 +243,34 @@ def cleanup(dry_run, username, password, includes):
 
         # go through all possible variants:
         archs = [
-                'linux-64',
-                'linux-32',
-                'linux-armv6l',
-                'linux-armv7l',
-                'linux-ppc64le',
-                'osx-64',
-                'osx-32',
-                'win-64',
-                'win-32',
-                'noarch',
-                ]
+            "linux-64",
+            "linux-32",
+            "linux-armv6l",
+            "linux-armv7l",
+            "linux-ppc64le",
+            "osx-64",
+            "osx-32",
+            "win-64",
+            "win-32",
+            "noarch",
+        ]
 
         path = server_info["conda"]
 
         for arch in archs:
 
-            arch_path = '/'.join((path, arch))
+            arch_path = "/".join((path, arch))
 
             if not (davclient.check(arch_path) and davclient.is_dir(arch_path)):
                 # it is normal if the directory does not exist
                 continue
 
             server_path = davclient.get_url(arch_path)
-            echo_info('Cleaning beta packages from %s' % server_path)
-            remove_old_beta_packages(client=davclient, path=arch_path,
-                    dry_run=dry_run, pyver=True, includes=includes)
+            echo_info("Cleaning beta packages from %s" % server_path)
+            remove_old_beta_packages(
+                client=davclient,
+                path=arch_path,
+                dry_run=dry_run,
+                pyver=True,
+                includes=includes,
+            )
diff --git a/bob/devtools/constants.py b/bob/devtools/constants.py
index 32b0194820ea5adbc1914ab4afc4726d8d6a3d06..0c5655500d5a44c6f649ae0c4048226073224477 100644
--- a/bob/devtools/constants.py
+++ b/bob/devtools/constants.py
@@ -4,10 +4,10 @@
 """Constants used for building and more."""
 
 import os
+
 import pkg_resources
 
 from . import bootstrap
-
 from .log import get_logger
 
 logger = get_logger(__name__)
@@ -112,9 +112,7 @@ CACERT_URL = "https://curl.haxx.se/ca/cacert.pem"
 """Location of the most up-to-date CA certificate bundle"""
 
 
-CACERT = pkg_resources.resource_filename(
-    __name__, os.path.join("data", "cacert.pem")
-)
+CACERT = pkg_resources.resource_filename(__name__, os.path.join("data", "cacert.pem"))
 """We keep a copy of the CA certificates we trust here
 
    To update this file use: ``curl --remote-name --time-cond cacert.pem https://curl.haxx.se/ca/cacert.pem``
diff --git a/bob/devtools/dav.py b/bob/devtools/dav.py
index 61a8059014153a79036ec8324f075e3e95f36df5..3503258741718f9507c257f6be8e54392ad38c8e 100644
--- a/bob/devtools/dav.py
+++ b/bob/devtools/dav.py
@@ -1,15 +1,18 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+import configparser
 import os
 import re
-import configparser
-import dateutil.parser
 
 from distutils.version import StrictVersion
 
-from .log import get_logger, echo_warning, echo_info, echo_normal
+import dateutil.parser
+
 from .deploy import _setup_webdav_client
+from .log import echo_normal
+from .log import echo_warning
+from .log import get_logger
 
 logger = get_logger(__name__)
 
@@ -141,14 +144,14 @@ def remove_old_beta_packages(client, path, dry_run, pyver=True, includes=None):
             if result is not None:
                 name += "/" + result.string[:4]
 
-        target = '/'.join((path, f))
+        target = "/".join((path, f))
         info = client.info(target)
 
         betas.setdefault(name, []).append(
             (
                 StrictVersion(version),
                 int(build),  # build number
-                dateutil.parser.parse(info['modified']).timestamp(),
+                dateutil.parser.parse(info["modified"]).timestamp(),
                 target,
             )
         )
diff --git a/bob/devtools/deploy.py b/bob/devtools/deploy.py
index 9c7782902346e8f2914d71b88321a949ddd3af4d..14fef895c5a86ab81dd1be02d92a48c2dd02a076 100644
--- a/bob/devtools/deploy.py
+++ b/bob/devtools/deploy.py
@@ -6,7 +6,8 @@
 
 import os
 
-from .constants import WEBDAV_PATHS, SERVER
+from .constants import SERVER
+from .constants import WEBDAV_PATHS
 from .log import get_logger
 
 logger = get_logger(__name__)
@@ -57,9 +58,7 @@ def deploy_conda_package(
     """
 
     server_info = WEBDAV_PATHS[stable][public]
-    davclient = _setup_webdav_client(
-        SERVER, server_info["root"], username, password
-    )
+    davclient = _setup_webdav_client(SERVER, server_info["root"], username, password)
 
     basename = os.path.basename(package)
     arch = arch or os.path.basename(os.path.dirname(package))
@@ -75,30 +74,17 @@ def deploy_conda_package(
             )
 
         else:
-            logger.info(
-                "[dav] rm -f %s%s%s", SERVER, server_info["root"], remote_path
-            )
+            logger.info("[dav] rm -f %s%s%s", SERVER, server_info["root"], remote_path)
             if not dry_run:
                 davclient.clean(remote_path)
 
-    logger.info(
-        "[dav] %s -> %s%s%s", package, SERVER, server_info["root"], remote_path
-    )
+    logger.info("[dav] %s -> %s%s%s", package, SERVER, server_info["root"], remote_path)
     if not dry_run:
         davclient.upload(local_path=package, remote_path=remote_path)
 
 
 def deploy_documentation(
-    path,
-    package,
-    stable,
-    latest,
-    public,
-    branch,
-    tag,
-    username,
-    password,
-    dry_run,
+    path, package, stable, latest, public, branch, tag, username, password, dry_run,
 ):
     """Deploys sphinx documentation to the appropriate webdav locations.
 
@@ -133,9 +119,7 @@ def deploy_documentation(
         )
 
     server_info = WEBDAV_PATHS[stable][public]
-    davclient = _setup_webdav_client(
-        SERVER, server_info["root"], username, password
-    )
+    davclient = _setup_webdav_client(SERVER, server_info["root"], username, password)
 
     remote_path_prefix = "%s/%s" % (server_info["docs"], package)
 
diff --git a/bob/devtools/graph.py b/bob/devtools/graph.py
index a6b7296e627f2f3003973e516e84aedbaa7f3ea5..d3aa2fde5bed8a4f7498a8e613a201a66d056351 100644
--- a/bob/devtools/graph.py
+++ b/bob/devtools/graph.py
@@ -3,22 +3,21 @@
 
 """Utilities for calculating package dependencies and drawing graphs"""
 
+import glob
 import os
 import re
-import glob
-import fnmatch
-import tempfile
 import tarfile
+import tempfile
+
 from io import BytesIO
 
 from .bootstrap import set_environment
-from .build import (
-    next_build_number,
-    get_rendered_metadata,
-    get_parsed_recipe,
-    get_output_path,
-)
-from .log import get_logger, echo_info
+from .build import get_output_path
+from .build import get_parsed_recipe
+from .build import get_rendered_metadata
+from .build import next_build_number
+from .log import echo_info
+from .log import get_logger
 
 logger = get_logger(__name__)
 
@@ -28,7 +27,7 @@ def compute_adjencence_matrix(
     package,
     conda_config,
     main_channel,
-    recurse_regexp="^(bob|beat|batl|gridtk)(\.)?(?!-).*$",
+    recurse_regexp=r"^(bob|beat|batl|gridtk)(\.)?(?!-).*$",
     current={},
     ref="master",
     deptypes=[],
@@ -127,9 +126,7 @@ def compute_adjencence_matrix(
         path = get_output_path(metadata, conda_config)[0]
 
         # gets the next build number
-        build_number, _ = next_build_number(
-            main_channel, os.path.basename(path)
-        )
+        build_number, _ = next_build_number(main_channel, os.path.basename(path))
 
         # at this point, all elements are parsed, I know the package version,
         # build number and all dependencies
@@ -163,8 +160,8 @@ def compute_adjencence_matrix(
         # if dependencies match a target set of globs
         recurse_compiled = re.compile(recurse_regexp)
 
-        def _re_filter(l):
-            return [k for k in l if recurse_compiled.match(k)]
+        def _re_filter(ll):
+            return [k for k in ll if recurse_compiled.match(k)]
 
         all_recurse = set()
         all_recurse |= set([z.split()[0] for z in _re_filter(host)])
@@ -194,9 +191,7 @@ def compute_adjencence_matrix(
 
         # do not recurse for packages we already know
         all_recurse -= set(current.keys())
-        logger.info(
-            "Recursing over the following packages: %s", ", ".join(all_recurse)
-        )
+        logger.info("Recursing over the following packages: %s", ", ".join(all_recurse))
 
         for dep in all_recurse:
             dep_adjmtx = compute_adjencence_matrix(
@@ -264,17 +259,10 @@ def generate_graph(adjacence_matrix, deptypes, whitelist):
     for package, values in adjacence_matrix.items():
         if not whitelist_compiled.match(values["name"]):
             logger.debug(
-                "Skipping main package %s (did not match whitelist)",
-                values["name"],
+                "Skipping main package %s (did not match whitelist)", values["name"],
             )
             continue
-        name = (
-            values["name"]
-            + "\n"
-            + values["version"]
-            + "\n"
-            + values["build_string"]
-        )
+        name = values["name"] + "\n" + values["version"] + "\n" + values["build_string"]
         nodes[values["name"]] = graph.node(
             values["name"], name, shape="box", color="blue"
         )
@@ -294,9 +282,7 @@ def generate_graph(adjacence_matrix, deptypes, whitelist):
 
         for ref, parts in deps.items():
             if not whitelist_compiled.match(ref):
-                logger.debug(
-                    "Skipping dependence %s (did not match whitelist)", ref
-                )
+                logger.debug("Skipping dependence %s (did not match whitelist)", ref)
                 continue
 
             if not any([k == ref for k in nodes.keys()]):
diff --git a/bob/devtools/log.py b/bob/devtools/log.py
index 894f74ee685f906d75d0101f565ca0dc3508d934..b43a2a709fb0e8501d32f1f022e17e5c05f46480 100644
--- a/bob/devtools/log.py
+++ b/bob/devtools/log.py
@@ -3,13 +3,13 @@
 
 """Logging utilities."""
 
-import sys
 import logging
+import os
+import sys
 
 import click
 import termcolor
 
-
 # get the default root logger of Bob
 _logger = logging.getLogger("bob")
 
@@ -127,9 +127,7 @@ def echo_warning(text):
 
 
 # helper functions to instantiate and set-up logging
-def setup(
-    logger_name, format="%(levelname)s:%(name)s@%(asctime)s: %(message)s"
-):
+def setup(logger_name, format="%(levelname)s:%(name)s@%(asctime)s: %(message)s"):
     """This function returns a logger object that is set up to perform logging
     using Bob loggers.
 
diff --git a/bob/devtools/mirror.py b/bob/devtools/mirror.py
index a1055c1223118578a2008a04797d579045b80995..9c13a54b78338ee1a890e1c753f2abe9c5fbcfc7 100644
--- a/bob/devtools/mirror.py
+++ b/bob/devtools/mirror.py
@@ -2,26 +2,26 @@
 # vim: set fileencoding=utf-8 :
 
 
-'''Mirroring functionality for conda channels
+"""Mirroring functionality for conda channels
 
 Some constructs are bluntly copied from
 https://github.com/valassis-digital-media/conda-mirror
-'''
+"""
 
-import os
 import bz2
+import fnmatch
+import hashlib
 import json
-import time
+import os
 import random
-import hashlib
-import fnmatch
 import tempfile
+import time
 
 import requests
 
 from .log import get_logger
-logger = get_logger(__name__)
 
+logger = get_logger(__name__)
 
 
 def _download(url, target_directory):
@@ -44,12 +44,12 @@ def _download(url, target_directory):
     chunk_size = 1024  # 1KB chunks
     logger.info("Download %s -> %s", url, target_directory)
     # create a temporary file
-    target_filename = url.split('/')[-1]
+    target_filename = url.split("/")[-1]
     download_filename = os.path.join(target_directory, target_filename)
-    with open(download_filename, 'w+b') as tf:
+    with open(download_filename, "w+b") as tf:
         ret = requests.get(url, stream=True)
-        size = ret.headers.get('Content-length', '??')
-        logger.debug('Saving to %s (%s bytes)', download_filename, size)
+        size = ret.headers.get("Content-length", "??")
+        logger.debug("Saving to %s (%s bytes)", download_filename, size)
         for data in ret.iter_content(chunk_size):
             tf.write(data)
         file_size = os.path.getsize(download_filename)
@@ -70,8 +70,7 @@ def _list_conda_packages(local_dir):
         List of conda packages in `local_dir`
     """
     contents = os.listdir(local_dir)
-    return fnmatch.filter(contents, "*.conda") + \
-            fnmatch.filter(contents, "*.tar.bz2")
+    return fnmatch.filter(contents, "*.conda") + fnmatch.filter(contents, "*.tar.bz2")
 
 
 def get_json(channel, platform, name):
@@ -93,13 +92,13 @@ def get_json(channel, platform, name):
         contents of repodata.json
     """
 
-    url = channel + '/' + platform + '/' + name
-    logger.debug('[checking] %s...', url)
+    url = channel + "/" + platform + "/" + name
+    logger.debug("[checking] %s...", url)
     r = requests.get(url, allow_redirects=True, stream=True)
-    size = r.headers.get('Content-length', '??')
-    logger.info('[download] %s (%s bytes)...', url, size)
+    size = r.headers.get("Content-length", "??")
+    logger.info("[download] %s (%s bytes)...", url, size)
 
-    if name.endswith('.bz2'):
+    if name.endswith(".bz2"):
         # just in case transport encoding was applied
         r.raw.decode_content = True
         data = bz2.decompress(r.raw.read())
@@ -117,10 +116,11 @@ def get_local_contents(path, arch):
         return set()
 
     # path exists, lists currently available packages
-    logger.info('Listing package contents of %s...', path_arch)
+    logger.info("Listing package contents of %s...", path_arch)
     contents = os.listdir(path_arch)
-    return set(fnmatch.filter(contents, '*.tar.bz2') +
-            fnmatch.filter(contents, '*.conda'))
+    return set(
+        fnmatch.filter(contents, "*.tar.bz2") + fnmatch.filter(contents, "*.conda")
+    )
 
 
 def load_glob_list(path):
@@ -154,11 +154,11 @@ def whitelist_filter(packages, globs):
 def _sha256sum(filename):
     """Calculates and returns the sha-256 sum given a file name"""
 
-    h  = hashlib.sha256()
-    b  = bytearray(128*1024)
+    h = hashlib.sha256()
+    b = bytearray(128 * 1024)
     mv = memoryview(b)
-    with open(filename, 'rb', buffering=0) as f:
-        for n in iter(lambda : f.readinto(mv), 0):
+    with open(filename, "rb", buffering=0) as f:
+        for n in iter(lambda: f.readinto(mv), 0):
             h.update(mv[:n])
     return h.hexdigest()
 
@@ -166,11 +166,11 @@ def _sha256sum(filename):
 def _md5sum(filename):
     """Calculates and returns the md5 sum given a file name"""
 
-    h  = hashlib.md5()
-    b  = bytearray(128*1024)
+    h = hashlib.md5()
+    b = bytearray(128 * 1024)
     mv = memoryview(b)
-    with open(filename, 'rb', buffering=0) as f:
-        for n in iter(lambda : f.readinto(mv), 0):
+    with open(filename, "rb", buffering=0) as f:
+        for n in iter(lambda: f.readinto(mv), 0):
             h.update(mv[:n])
     return h.hexdigest()
 
@@ -208,53 +208,76 @@ def download_packages(packages, repodata, channel_url, dest_dir, arch, dry_run):
         total = len(packages)
         for k, p in enumerate(packages):
 
-            k+=1 #adjust to produce correct order on printouts
+            k += 1  # adjust to produce correct order on printouts
 
             # checksum to verify
-            if p.endswith('.tar.bz2'):
-                expected_hash = repodata['packages'][p].get('sha256',
-                        repodata['packages'][p]['md5'])
+            if p.endswith(".tar.bz2"):
+                expected_hash = repodata["packages"][p].get(
+                    "sha256", repodata["packages"][p]["md5"]
+                )
             else:
-                expected_hash = repodata['packages.conda'][p].get('sha256',
-                        repodata['packages.conda'][p]['md5'])
+                expected_hash = repodata["packages.conda"][p].get(
+                    "sha256", repodata["packages.conda"][p]["md5"]
+                )
 
             # download package to file in our temporary directory
-            url = channel_url + '/' + arch + '/' + p
+            url = channel_url + "/" + arch + "/" + p
             temp_dest = os.path.join(download_dir, p)
-            logger.info('[download: %d/%d] %s -> %s', k, total, url, temp_dest)
+            logger.info("[download: %d/%d] %s -> %s", k, total, url, temp_dest)
 
             package_retries = 10
             while package_retries:
 
                 if not dry_run:
-                    logger.debug('[checking: %d/%d] %s', k, total, url)
+                    logger.debug("[checking: %d/%d] %s", k, total, url)
                     r = requests.get(url, stream=True, allow_redirects=True)
-                    size = r.headers.get('Content-length', '??')
-                    logger.info('[download: %d/%d] %s -> %s (%s bytes)', k,
-                            total, url, temp_dest, size)
-                    open(temp_dest, 'wb').write(r.raw.read())
+                    size = r.headers.get("Content-length", "??")
+                    logger.info(
+                        "[download: %d/%d] %s -> %s (%s bytes)",
+                        k,
+                        total,
+                        url,
+                        temp_dest,
+                        size,
+                    )
+                    open(temp_dest, "wb").write(r.raw.read())
 
                 # verify that checksum matches
-                if len(expected_hash) == 32:  #md5
-                    logger.info('[verify: %d/%d] md5(%s) == %s?', k, total,
-                            temp_dest, expected_hash)
-                else:  #sha256
-                    logger.info('[verify: %d/%d] sha256(%s) == %s?', k, total,
-                            temp_dest, expected_hash)
+                if len(expected_hash) == 32:  # md5
+                    logger.info(
+                        "[verify: %d/%d] md5(%s) == %s?",
+                        k,
+                        total,
+                        temp_dest,
+                        expected_hash,
+                    )
+                else:  # sha256
+                    logger.info(
+                        "[verify: %d/%d] sha256(%s) == %s?",
+                        k,
+                        total,
+                        temp_dest,
+                        expected_hash,
+                    )
 
                 if not dry_run:
-                    if len(expected_hash) == 32:  #md5
+                    if len(expected_hash) == 32:  # md5
                         actual_hash = _md5sum(temp_dest)
-                    else:  #sha256
+                    else:  # sha256
                         actual_hash = _sha256sum(temp_dest)
 
                     if actual_hash != expected_hash:
-                        wait_time = random.randint(10,61)
-                        logger.warning('Checksum of locally downloaded ' \
-                                'version of %s does not match ' \
-                                '(actual:%r != %r:expected) - retrying ' \
-                                'after %d seconds', url, actual_hash,
-                                    expected_hash, wait_time)
+                        wait_time = random.randint(10, 61)
+                        logger.warning(
+                            "Checksum of locally downloaded "
+                            "version of %s does not match "
+                            "(actual:%r != %r:expected) - retrying "
+                            "after %d seconds",
+                            url,
+                            actual_hash,
+                            expected_hash,
+                            wait_time,
+                        )
                         os.unlink(temp_dest)
                         time.sleep(wait_time)
                         package_retries -= 1
@@ -263,20 +286,20 @@ def download_packages(packages, repodata, channel_url, dest_dir, arch, dry_run):
                         break
 
             # final check, before we continue
-            assert actual_hash == expected_hash, 'Checksum of locally ' \
-                    'downloaded version of %s does not match ' \
-                    '(actual:%r != %r:expected)' % (url, actual_hash,
-                            expected_hash)
+            assert actual_hash == expected_hash, (
+                "Checksum of locally "
+                "downloaded version of %s does not match "
+                "(actual:%r != %r:expected)" % (url, actual_hash, expected_hash)
+            )
 
             # move
             local_dest = os.path.join(dest_dir, arch, p)
-            logger.info('[move: %d/%d] %s -> %s', k, total, temp_dest,
-                    local_dest)
+            logger.info("[move: %d/%d] %s -> %s", k, total, temp_dest, local_dest)
 
             # check local directory is available before moving
             dirname = os.path.dirname(local_dest)
             if not os.path.exists(dirname):
-                logger.info('[mkdir] %s', dirname)
+                logger.info("[mkdir] %s", dirname)
                 if not dry_run:
                     os.makedirs(dirname)
 
@@ -289,9 +312,9 @@ def remove_packages(packages, dest_dir, arch, dry_run):
 
     total = len(packages)
     for k, p in enumerate(packages):
-        k+=1 #adjust to produce correct order on printouts
+        k += 1  # adjust to produce correct order on printouts
         path = os.path.join(dest_dir, arch, p)
-        logger.info('[remove: %d/%d] %s', k, total, path)
+        logger.info("[remove: %d/%d] %s", k, total, path)
         if not dry_run:
             os.unlink(path)
 
@@ -300,9 +323,10 @@ def _cleanup_json(data, packages):
     """Cleans-up the contents of conda JSON looking at existing packages"""
 
     # only keys to clean-up here, othere keys remain unchanged
-    for key in ('packages', 'packages.conda'):
-        if key not in data: continue
-        data[key] = dict((k,v) for k,v in data[key].items() if k in packages)
+    for key in ("packages", "packages.conda"):
+        if key not in data:
+            continue
+        data[key] = dict((k, v) for k, v in data[key].items() if k in packages)
 
     return data
 
@@ -312,7 +336,7 @@ def _save_json(data, dest_dir, arch, name, dry_run):
 
     destfile = os.path.join(dest_dir, arch, name)
     if not dry_run:
-        with open(destfile, 'w') as outfile:
+        with open(destfile, "w") as outfile:
             json.dump(data, outfile, ensure_ascii=True, indent=2)
     return destfile
 
@@ -367,30 +391,46 @@ def checksum_packages(repodata, dest_dir, arch, packages):
         path_to_package = os.path.join(dest_dir, arch, p)
 
         # checksum to verify
-        if p.endswith('.tar.bz2'):
-            expected_hash = repodata['packages'][p].get('sha256',
-                    repodata['packages'][p]['md5'])
+        if p.endswith(".tar.bz2"):
+            expected_hash = repodata["packages"][p].get(
+                "sha256", repodata["packages"][p]["md5"]
+            )
         else:
-            expected_hash = repodata['packages.conda'][p].get('sha256',
-                    repodata['packages.conda'][p]['md5'])
+            expected_hash = repodata["packages.conda"][p].get(
+                "sha256", repodata["packages.conda"][p]["md5"]
+            )
 
         # verify that checksum matches
-        if len(expected_hash) == 32:  #md5
-            logger.debug('[verify: %d/%d] md5(%s) == %s?', k, total,
-                    path_to_package, expected_hash)
-        else:  #sha256
-            logger.debug('[verify: %d/%d] sha256(%s) == %s?', k, total,
-                    path_to_package, expected_hash)
-
-        if len(expected_hash) == 32:  #md5
+        if len(expected_hash) == 32:  # md5
+            logger.debug(
+                "[verify: %d/%d] md5(%s) == %s?",
+                k,
+                total,
+                path_to_package,
+                expected_hash,
+            )
+        else:  # sha256
+            logger.debug(
+                "[verify: %d/%d] sha256(%s) == %s?",
+                k,
+                total,
+                path_to_package,
+                expected_hash,
+            )
+
+        if len(expected_hash) == 32:  # md5
             actual_hash = _md5sum(path_to_package)
-        else:  #sha256
+        else:  # sha256
             actual_hash = _sha256sum(path_to_package)
 
         if actual_hash != expected_hash:
-            logger.warning('Checksum of %s does not match remote ' \
-                    'repository description (actual:%r != %r:expected)',
-                    path_to_package, actual_hash, expected_hash)
+            logger.warning(
+                "Checksum of %s does not match remote "
+                "repository description (actual:%r != %r:expected)",
+                path_to_package,
+                actual_hash,
+                expected_hash,
+            )
             issues.add(p)
 
     return issues
diff --git a/bob/devtools/pipelines.py b/bob/devtools/pipelines.py
index 29ecf8b51f3b7de0efaefad0bcb9fbcc3fb47017..a39f80eb032985f8a887ec487b8e00eef137ba97 100644
--- a/bob/devtools/pipelines.py
+++ b/bob/devtools/pipelines.py
@@ -3,10 +3,12 @@
 
 """Pipeline utilities"""
 
-from tabulate import tabulate
 import re
+
 from datetime import datetime
 
+from tabulate import tabulate
+
 
 def process_log(log):
     """
@@ -16,26 +18,25 @@ def process_log(log):
     current_package = None
     logs = dict()
     dates = []
-    for l in log:
+    for ll in log:
 
         # Check which package are we
-        if len(re.findall("Building bob/[a-z]*", l)) > 0:
+        if len(re.findall("Building bob/[a-z]*", ll)) > 0:
             logs[current_package] = dates
             dates = []
 
-            pattern = re.findall("Building bob/[a-z]*", l)[0]
-            current_package = l[9:-1]
+            current_package = ll[9:-1]
             continue
 
         # Checking the date
         date = re.findall(
-            "[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2} [0-9]{2,2}:[0-9]{2,2}:[0-9]{2,2}", l
+            "[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2} [0-9]{2,2}:[0-9]{2,2}:[0-9]{2,2}", ll
         )
         if len(date) > 0:
             # logs[date[0]]=current_package
             dates.append(date[0])
 
-    ## Last log
+    # Last log
     if len(dates) > 0:
         logs[current_package] = dates
 
diff --git a/bob/devtools/release.py b/bob/devtools/release.py
index 9682ca966b85e5fa630acd6971440e06b4933a0e..0565a8a88b33be8548113e62997895a048a777a6 100644
--- a/bob/devtools/release.py
+++ b/bob/devtools/release.py
@@ -5,16 +5,17 @@
 
 import os
 import re
-import time
 import shutil
+import time
+
+from distutils.version import StrictVersion
+
 import gitlab
 
 from .log import get_logger
 
 logger = get_logger(__name__)
 
-from distutils.version import StrictVersion
-
 
 def download_path(package, path, output=None, ref="master"):
     """Downloads paths from gitlab, with an optional recurse.
@@ -101,22 +102,14 @@ def _update_readme(readme, version):
     for line in readme.splitlines():
         if BRANCH_RE.search(line) is not None:
             if "gitlab" in line:  # gitlab links
-                replacement = (
-                    "/v%s" % version if version is not None else "/master"
-                )
+                replacement = "/v%s" % version if version is not None else "/master"
                 line = BRANCH_RE.sub(replacement, line)
-            if ("software/bob" in line) or (
-                "software/beat" in line
-            ):  # our doc server
+            if ("software/bob" in line) or ("software/beat" in line):  # our doc server
                 if "master" not in line:  # don't replace 'latest' pointer
-                    replacement = (
-                        "/v%s" % version if version is not None else "/stable"
-                    )
+                    replacement = "/v%s" % version if version is not None else "/stable"
                     line = BRANCH_RE.sub(replacement, line)
         if DOC_IMAGE.search(line) is not None:
-            replacement = (
-                "-v%s-" % version if version is not None else "-stable-"
-            )
+            replacement = "-v%s-" % version if version is not None else "-stable-"
             line = DOC_IMAGE.sub(replacement, line)
         new_readme.append(line)
     return "\n".join(new_readme) + "\n"
@@ -195,8 +188,7 @@ def get_parsed_tag(gitpkg, tag):
             raise ValueError(
                 "The latest tag name {0} in package {1} has "
                 "unknown format".format(
-                    "v" + latest_tag_name,
-                    gitpkg.attributes["path_with_namespace"],
+                    "v" + latest_tag_name, gitpkg.attributes["path_with_namespace"],
                 )
             )
 
@@ -243,9 +235,7 @@ def update_tag_comments(gitpkg, tag_name, tag_comments_list, dry_run=False):
     logger.info(tag_name)
     tag = gitpkg.tags.get(tag_name)
     tag_comments = "\n".join(tag_comments_list)
-    logger.info(
-        "Found tag %s, updating its comments with:\n%s", tag.name, tag_comments
-    )
+    logger.info("Found tag %s, updating its comments with:\n%s", tag.name, tag_comments)
     if not dry_run:
         tag.set_release_description(tag_comments)
     return tag
@@ -322,12 +312,8 @@ def update_files_with_mr(
                 logger.info("Merging !%d immediately - CI was skipped", mr.iid)
                 mr.merge()
             else:
-                logger.info(
-                    "Auto-merging !%d only if pipeline succeeds", mr.iid
-                )
-                time.sleep(
-                    0.5
-                )  # to avoid the MR to be merged automatically - bug?
+                logger.info("Auto-merging !%d only if pipeline succeeds", mr.iid)
+                time.sleep(0.5)  # to avoid the MR being merged automatically - bug?
                 mr.merge(merge_when_pipeline_succeeds=True)
 
 
@@ -458,9 +444,7 @@ def wait_for_pipeline_to_finish(gitpkg, pipeline_id, dry_run=False):
         raise ValueError(
             "Pipeline {0} of project {1} exited with "
             'undesired status "{2}". Release is not possible.'.format(
-                pipeline_id,
-                gitpkg.attributes["path_with_namespace"],
-                pipeline.status,
+                pipeline_id, gitpkg.attributes["path_with_namespace"], pipeline.status,
             )
         )
 
@@ -598,35 +582,3 @@ def parse_and_process_package_changelog(gl, gitpkg, package_changelog, dry_run):
 
     # return the last tag and comments for release
     return cur_tag, cur_tag_comments
-
-
-def release_bob(changelog_file):
-    """Process the changelog and releases the ``bob`` metapackage."""
-
-    logger.info(
-        'Read the section "Releasing the Bob meta package" '
-        "on the documentation"
-    )
-
-    # get the list of bob's dependencies.
-    # Get their latest tags (since bob's last release) and the tag's changelog
-    saw_a_new_package = True
-    latest_tag = None
-    latest_pkg = None
-    for line in changelog_file:
-        # if saw_a_new_package:
-        if line.startswith("*"):
-            pkg = line[2:].strip()
-            saw_a_new_package = True
-            logger.info("%s == %s", latest_pkg, latest_tag)
-            latest_pkg = pkg
-            latest_tag = None
-            continue
-        if line.startswith("  *"):
-            latest_tag = line.split()[1][1:]
-        saw_a_new_package = False
-    logger.info("%s == %s", latest_pkg, latest_tag)
-    readme = open("../../bob/README.rst").read()
-    readme = _update_readme(readme, bob_version)
-    open("../../bob/README.rst", "wt").write(readme)
-    open("../../bob/version.txt", "wt").write(bob_version)
diff --git a/bob/devtools/scripts/__init__.py b/bob/devtools/scripts/__init__.py
index 8b137891791fe96927ad78e64b0aad7bded08bdc..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644
--- a/bob/devtools/scripts/__init__.py
+++ b/bob/devtools/scripts/__init__.py
@@ -1 +0,0 @@
-
diff --git a/bob/devtools/scripts/badges.py b/bob/devtools/scripts/badges.py
index a70a2e23a6b696be72d7f5984d0a3d1c0f12394a..2d467a239bf792d48625a7b3bc4d03a6af91c54a 100644
--- a/bob/devtools/scripts/badges.py
+++ b/bob/devtools/scripts/badges.py
@@ -1,14 +1,15 @@
 #!/usr/bin/env python
 
-import os
 
 import click
 import gitlab
 
+from ..log import echo_warning
+from ..log import get_logger
+from ..log import verbosity_option
+from ..release import get_gitlab_instance
+from ..release import update_files_at_master
 from . import bdt
-from ..release import get_gitlab_instance, update_files_at_master
-
-from ..log import verbosity_option, get_logger, echo_normal, echo_warning
 
 logger = get_logger(__name__)
 
@@ -75,10 +76,10 @@ def _update_readme(content, info):
 
     new_badges_text = []
     for badge in README_BADGES:
-        data = dict((k, v.format(**info)) for (k,v) in badge.items())
+        data = dict((k, v.format(**info)) for (k, v) in badge.items())
         new_badges_text.append(".. image:: {image_url}".format(**data))
         new_badges_text.append("   :target: {link_url}".format(**data))
-    new_badges_text = '\n'.join(new_badges_text) + '\n'
+    new_badges_text = "\n".join(new_badges_text) + "\n"
     # matches only 3 or more occurences of ..image::/:target: occurences
     expression = r"(\.\.\s*image.+\n\s+:target:\s*.+\b\n){3,}"
     return re.sub(expression, new_badges_text, content)
@@ -138,19 +139,19 @@ def badges(package, dry_run):
                 badge.id,
                 badge.link_url,
             )
-            if not dry_run: badge.delete()
+            if not dry_run:
+                badge.delete()
 
         # creates all stock badges, preserve positions
         info = dict(zip(("group", "name"), package.split("/", 1)))
         for position, badge in enumerate(PROJECT_BADGES):
-            data = dict([(k,v.format(**info)) for (k,v) in badge.items()])
+            data = dict([(k, v.format(**info)) for (k, v) in badge.items()])
             data["position"] = position
             logger.info(
-                "Creating badge '%s' => '%s'",
-                data["name"],
-                data["link_url"],
+                "Creating badge '%s' => '%s'", data["name"], data["link_url"],
             )
-            if not dry_run: use_package.badges.create(data)
+            if not dry_run:
+                use_package.badges.create(data)
 
         # download and edit README to setup badges
         readme_file = use_package.files.get(file_path="README.rst", ref="master")
@@ -158,10 +159,16 @@ def badges(package, dry_run):
         readme_content = _update_readme(readme_content, info)
         # commit and push changes
         logger.info("Changing README.rst badges...")
-        update_files_at_master(use_package, {"README.rst": readme_content},
-            "Updated badges section [ci skip]", dry_run)
+        update_files_at_master(
+            use_package,
+            {"README.rst": readme_content},
+            "Updated badges section [ci skip]",
+            dry_run,
+        )
         logger.info("All done.")
 
-    except gitlab.GitlabGetError as e:
-        logger.warn("Gitlab access error - package %s does not exist?", package)
+    except gitlab.GitlabGetError:
+        logger.warn(
+            "Gitlab access error - package %s does not exist?", package, exc_info=True
+        )
         echo_warning("%s: unknown" % (package,))
diff --git a/bob/devtools/scripts/bdt.py b/bob/devtools/scripts/bdt.py
index 2943070b772ccd90e761cf95c512496b7fe342f1..979d34442c1b82d3cef28bd322ad94869c5be27e 100644
--- a/bob/devtools/scripts/bdt.py
+++ b/bob/devtools/scripts/bdt.py
@@ -4,9 +4,10 @@
 """Main entry point for bdt."""
 
 import os
-import pkg_resources
 
 import click
+import pkg_resources
+
 from click_plugins import with_plugins
 
 from ..log import setup
@@ -59,8 +60,7 @@ if "LC_ALL" not in os.environ:
 
 @with_plugins(pkg_resources.iter_entry_points("bdt.cli"))
 @click.group(
-    cls=AliasedGroup,
-    context_settings=dict(help_option_names=["-?", "-h", "--help"]),
+    cls=AliasedGroup, context_settings=dict(help_option_names=["-?", "-h", "--help"]),
 )
 def main():
     """Bob Development Tools - see available commands below"""
diff --git a/bob/devtools/scripts/build.py b/bob/devtools/scripts/build.py
index 4601c46b7650e0f0f1320ff51ad276d4d4017b76..be6518fd489b0eaebff4a40cd09963102043f888 100644
--- a/bob/devtools/scripts/build.py
+++ b/bob/devtools/scripts/build.py
@@ -4,37 +4,33 @@
 import os
 import sys
 
-import yaml
 import click
-import pkg_resources
 import conda_build.api
+import yaml
 
+from ..bootstrap import get_channels
+from ..bootstrap import set_environment
+from ..build import conda_arch
+from ..build import get_docserver_setup
+from ..build import get_env_directory
+from ..build import get_output_path
+from ..build import get_parsed_recipe
+from ..build import get_rendered_metadata
+from ..build import make_conda_config
+from ..build import next_build_number
+from ..build import remove_conda_loggers
+from ..build import should_skip_build
+from ..constants import BASE_CONDARC
+from ..constants import CONDA_BUILD_CONFIG
+from ..constants import CONDA_RECIPE_APPEND
+from ..constants import MATPLOTLIB_RCDIR
+from ..constants import SERVER
+from ..log import get_logger
+from ..log import verbosity_option
 from . import bdt
-from ..build import (
-    next_build_number,
-    conda_arch,
-    should_skip_build,
-    get_rendered_metadata,
-    get_parsed_recipe,
-    make_conda_config,
-    get_docserver_setup,
-    get_env_directory,
-    get_output_path,
-    remove_conda_loggers,
-)
 
 remove_conda_loggers()
 
-from ..constants import (
-    CONDA_BUILD_CONFIG,
-    CONDA_RECIPE_APPEND,
-    SERVER,
-    MATPLOTLIB_RCDIR,
-    BASE_CONDARC,
-)
-from ..bootstrap import set_environment, get_channels
-
-from ..log import verbosity_option, get_logger, echo_info
 
 logger = get_logger(__name__)
 
@@ -74,9 +70,7 @@ Examples:
     help="Version of python to build the environment for",
 )
 @click.option(
-    "-r",
-    "--condarc",
-    help="Use custom conda configuration file instead of our own",
+    "-r", "--condarc", help="Use custom conda configuration file instead of our own",
 )
 @click.option(
     "-m",
@@ -87,10 +81,7 @@ Examples:
     help="overwrites the path leading to " "variant configuration file to use",
 )
 @click.option(
-    "-n",
-    "--no-test",
-    is_flag=True,
-    help="Do not test the package, only builds it",
+    "-n", "--no-test", is_flag=True, help="Do not test the package, only builds it",
 )
 @click.option(
     "-a",
@@ -190,8 +181,6 @@ def build(
         group,
     )
 
-    project_dir = os.path.dirname(recipe_dir[0])
-
     if condarc is not None:
         logger.info("Loading CONDARC file from %s...", condarc)
         with open(condarc, "rb") as f:
@@ -201,11 +190,7 @@ def build(
         condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
 
     channels = get_channels(
-        public=(not private),
-        stable=stable,
-        server=server,
-        intranet=ci,
-        group=group,
+        public=(not private), stable=stable, server=server, intranet=ci, group=group,
     )
 
     if "channels" not in condarc_options:
@@ -222,9 +207,7 @@ def build(
     prefix = get_env_directory(os.environ["CONDA_EXE"], "base")
     condarc_options["croot"] = os.path.join(prefix, "conda-bld")
 
-    conda_config = make_conda_config(
-        config, python, append_file, condarc_options
-    )
+    conda_config = make_conda_config(config, python, append_file, condarc_options)
 
     set_environment("MATPLOTLIBRC", MATPLOTLIB_RCDIR)
 
@@ -232,11 +215,7 @@ def build(
     # and derived documentation building via Sphinx)
     set_environment("DOCSERVER", server)
     doc_urls = get_docserver_setup(
-        public=(not private),
-        stable=stable,
-        server=server,
-        intranet=ci,
-        group=group,
+        public=(not private), stable=stable, server=server, intranet=ci, group=group,
     )
     set_environment("BOB_DOCUMENTATION_SERVER", doc_urls)
 
@@ -260,9 +239,7 @@ def build(
 
         # checks if we should actually build this recipe
         if should_skip_build(metadata):
-            logger.info(
-                "Skipping UNSUPPORTED build of %s for %s", recipe_dir, arch
-            )
+            logger.info("Skipping UNSUPPORTED build of %s for %s", recipe_dir, arch)
             continue
 
         rendered_recipe = get_parsed_recipe(metadata)
@@ -284,9 +261,7 @@ def build(
             # set $BOB_BUILD_NUMBER and force conda_build to reparse recipe to
             # get it right
             set_environment("BOB_BUILD_NUMBER", str(build_number))
-            paths = conda_build.api.build(
-                d, config=conda_config, notest=no_test
-            )
+            paths = conda_build.api.build(d, config=conda_config, notest=no_test)
             # if you get to this point, the package was successfully rebuilt
             # set environment to signal caller we may dispose of it
             os.environ["BDT_BUILD"] = ":".join(paths)
diff --git a/bob/devtools/scripts/caupdate.py b/bob/devtools/scripts/caupdate.py
index 9330c81bfb6809029b4f61e1b13c22f4adb474f5..e274d5254fff429f39000a4778911bee73fe9685 100644
--- a/bob/devtools/scripts/caupdate.py
+++ b/bob/devtools/scripts/caupdate.py
@@ -1,13 +1,11 @@
 #!/usr/bin/env python
 
-import os
-
 import click
 
+from ..log import get_logger
+from ..log import verbosity_option
 from . import bdt
 
-from ..log import verbosity_option, get_logger
-
 logger = get_logger(__name__)
 
 
diff --git a/bob/devtools/scripts/changelog.py b/bob/devtools/scripts/changelog.py
index 730b73db69af089426d9eaa3077e9c3f0069a66e..538caa00a9a38fee17a3234afac9bc3b2f8b2921 100644
--- a/bob/devtools/scripts/changelog.py
+++ b/bob/devtools/scripts/changelog.py
@@ -2,8 +2,9 @@
 
 import click
 
+from ..log import get_logger
+from ..log import verbosity_option
 from . import bdt
-from ..log import verbosity_option, get_logger
 
 logger = get_logger(__name__)
 
diff --git a/bob/devtools/scripts/ci.py b/bob/devtools/scripts/ci.py
index 1e4ffd52fddc9805214333c36105fb1b40576b0c..13234b8c8e9d5a395408bcede96c32db9d7decc5 100644
--- a/bob/devtools/scripts/ci.py
+++ b/bob/devtools/scripts/ci.py
@@ -1,29 +1,31 @@
 #!/usr/bin/env python
 
-import os
-import re
 import glob
+import os
 import shutil
 
-import yaml
 import click
 import pkg_resources
+import yaml
+
 from click_plugins import with_plugins
 
+from ..build import comment_cleanup
+from ..build import load_order_file
+from ..ci import cleanup
+from ..ci import read_packages
+from ..ci import select_conda_build_config
+from ..ci import select_conda_recipe_append
+from ..ci import select_user_condarc
+from ..ci import uniq
+from ..constants import BASE_CONDARC
+from ..constants import SERVER
+from ..deploy import deploy_conda_package
+from ..deploy import deploy_documentation
+from ..log import echo_normal
+from ..log import get_logger
+from ..log import verbosity_option
 from . import bdt
-from ..constants import SERVER, WEBDAV_PATHS, BASE_CONDARC
-from ..deploy import deploy_conda_package, deploy_documentation
-from ..build import comment_cleanup, load_order_file
-from ..ci import (
-    read_packages,
-    uniq,
-    select_conda_build_config,
-    select_conda_recipe_append,
-    select_user_condarc,
-    cleanup,
-)
-
-from ..log import verbosity_option, get_logger, echo_normal
 
 logger = get_logger(__name__)
 
@@ -237,9 +239,7 @@ def readme(package):
         failed = check([k])
 
         if failed:
-            raise RuntimeError(
-                "twine check (a.k.a. readme check) %s: FAILED" % k
-            )
+            raise RuntimeError("twine check (a.k.a. readme check) %s: FAILED" % k)
         else:
             logger.info("twine check (a.k.a. readme check) %s: OK", k)
 
@@ -378,9 +378,7 @@ def base_build(order, group, dry_run):
         condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
 
     # dump packages at conda_root
-    condarc_options["croot"] = os.path.join(
-        os.environ["CONDA_ROOT"], "conda-bld"
-    )
+    condarc_options["croot"] = os.path.join(os.environ["CONDA_ROOT"], "conda-bld")
 
     recipes = load_order_file(order)
 
@@ -389,17 +387,14 @@ def base_build(order, group, dry_run):
 
     for k, recipe in enumerate(recipes):
         echo_normal("\n" + (80 * "="))
-        echo_normal(
-            'Building "%s" (%d/%d)' % (recipe, k + 1, len(recipes))
-        )
+        echo_normal('Building "%s" (%d/%d)' % (recipe, k + 1, len(recipes)))
         echo_normal((80 * "=") + "\n")
         if not os.path.exists(os.path.join(recipe, "meta.yaml")):
             logger.info('Ignoring directory "%s" - no meta.yaml found' % recipe)
             continue
 
         variants_file = select_conda_build_config(
-            paths=[recipe, os.curdir],
-            branch=os.environ.get("CI_COMMIT_REF_NAME"),
+            paths=[recipe, os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME"),
         )
         logger.info("Conda build configuration file: %s", variants_file)
 
@@ -451,37 +446,30 @@ def test(ctx, dry_run):
     recipe_dir = os.path.join(os.path.realpath(os.curdir), "conda")
 
     condarc = select_user_condarc(
-        paths=[recipe_dir, os.curdir],
-        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+        paths=[recipe_dir, os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME"),
     )
     if condarc is not None:
         logger.info("Condarc configuration file: %s", condarc)
 
     variants_file = select_conda_build_config(
-        paths=[recipe_dir, os.curdir],
-        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+        paths=[recipe_dir, os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME"),
     )
     logger.info("Conda build configuration file: %s", variants_file)
 
     append_file = select_conda_recipe_append(
-        paths=[recipe_dir, os.curdir],
-        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+        paths=[recipe_dir, os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME"),
     )
     logger.info("Conda build recipe-append file: %s", append_file)
 
     from .test import test
 
     base_path = os.path.join(
-        os.environ["CONDA_ROOT"],
-        "conda-bld",
-        "*",
-        os.environ["CI_PROJECT_NAME"],
+        os.environ["CONDA_ROOT"], "conda-bld", "*", os.environ["CI_PROJECT_NAME"],
     )
 
     ctx.invoke(
         test,
-        package=glob.glob(base_path + "*.conda")
-        + glob.glob(base_path + "*.tar.bz2"),
+        package=glob.glob(base_path + "*.conda") + glob.glob(base_path + "*.tar.bz2"),
         condarc=condarc,
         config=variants_file,
         append_file=append_file,
@@ -536,21 +524,18 @@ def build(ctx, dry_run, recipe_dir):
 
     # Use custom variants and append files if available on recipe-dir
     condarc = select_user_condarc(
-        paths=[recipe_dir, os.curdir],
-        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+        paths=[recipe_dir, os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME"),
     )
     if condarc is not None:
         logger.info("Condarc configuration file: %s", condarc)
 
     variants_file = select_conda_build_config(
-        paths=[recipe_dir, os.curdir],
-        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+        paths=[recipe_dir, os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME"),
     )
     logger.info("Conda build configuration file: %s", variants_file)
 
     append_file = select_conda_recipe_append(
-        paths=[recipe_dir, os.curdir],
-        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+        paths=[recipe_dir, os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME"),
     )
     logger.info("Conda build recipe-append file: %s", append_file)
 
@@ -661,16 +646,12 @@ def nightlies(ctx, order, dry_run):
     for n, (package, branch) in enumerate(packages):
 
         echo_normal("\n" + (80 * "="))
-        echo_normal(
-            "Building %s@%s (%d/%d)" % (package, branch, n + 1, len(packages))
-        )
+        echo_normal("Building %s@%s (%d/%d)" % (package, branch, n + 1, len(packages)))
         echo_normal((80 * "=") + "\n")
 
         group, name = package.split("/", 1)
 
-        clone_to = os.path.join(
-            os.environ["CI_PROJECT_DIR"], "src", group, name
-        )
+        clone_to = os.path.join(os.environ["CI_PROJECT_DIR"], "src", group, name)
         dirname = os.path.dirname(clone_to)
         if not os.path.exists(dirname):
             os.makedirs(dirname)
@@ -685,30 +666,25 @@ def nightlies(ctx, order, dry_run):
         )
 
         # determine package visibility
-        private = (
-            urlopen("https://gitlab.idiap.ch/%s" % package).getcode() != 200
-        )
+        private = urlopen("https://gitlab.idiap.ch/%s" % package).getcode() != 200
         stable = "STABLE" in os.environ
 
         # Use custom variants and append files if available on recipe-dir
         recipe_dir = os.path.join(clone_to, "conda")
 
         condarc = select_user_condarc(
-            paths=[recipe_dir, os.curdir],
-            branch=os.environ.get("CI_COMMIT_REF_NAME"),
+            paths=[recipe_dir, os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME"),
         )
         if condarc is not None:
             logger.info("Condarc configuration file: %s", condarc)
 
         variants_file = select_conda_build_config(
-            paths=[recipe_dir, os.curdir],
-            branch=os.environ.get("CI_COMMIT_REF_NAME"),
+            paths=[recipe_dir, os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME"),
         )
         logger.info("Conda build configuration file: %s", variants_file)
 
         append_file = select_conda_recipe_append(
-            paths=[recipe_dir, os.curdir],
-            branch=os.environ.get("CI_COMMIT_REF_NAME"),
+            paths=[recipe_dir, os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME"),
         )
         logger.info("Conda build recipe-append file: %s", append_file)
 
@@ -754,8 +730,7 @@ def nightlies(ctx, order, dry_run):
         local_docs = os.path.join(os.environ["CI_PROJECT_DIR"], "sphinx")
         if os.path.exists(local_docs):
             logger.debug(
-                "Sphinx output was generated during test/rebuild "
-                "of %s - Erasing...",
+                "Sphinx output was generated during test/rebuild " "of %s - Erasing...",
                 package,
             )
             shutil.rmtree(local_docs)
@@ -857,8 +832,7 @@ def docs(ctx, requirement, dry_run):
                     clone_to,
                 )
                 git.Repo.clone_from(
-                    "https://gitlab-ci-token:%s@gitlab.idiap.ch/%s"
-                    % (token, package),
+                    "https://gitlab-ci-token:%s@gitlab.idiap.ch/%s" % (token, package),
                     clone_to,
                     branch=branch,
                     depth=1,
@@ -884,9 +858,7 @@ def docs(ctx, requirement, dry_run):
                 with open(requirements_path) as f:
                     extra_intersphinx += comment_cleanup(f.readlines())
 
-            nitpick_path = os.path.join(
-                clone_to, "doc", "nitpick-exceptions.txt"
-            )
+            nitpick_path = os.path.join(clone_to, "doc", "nitpick-exceptions.txt")
             if os.path.exists(nitpick_path):
                 with open(nitpick_path) as f:
                     nitpick += comment_cleanup(f.readlines())
@@ -954,7 +926,7 @@ def clean_betas(dry_run):
     """
 
     is_master = os.environ["CI_COMMIT_REF_NAME"] == "master"
-    if not is_master and dry_run == False:
+    if not is_master and dry_run is False:
         logger.warn("Forcing dry-run mode - not in master branch")
         logger.warn("... considering this is **not** a periodic run!")
         dry_run = True
diff --git a/bob/devtools/scripts/commitfile.py b/bob/devtools/scripts/commitfile.py
index 678bd7ea843374db736f9bd94a10d9a16bec958a..79bc23eccf68410eddf0a532b1a1610dbce27328 100644
--- a/bob/devtools/scripts/commitfile.py
+++ b/bob/devtools/scripts/commitfile.py
@@ -4,14 +4,12 @@ import os
 
 import click
 
+from ..log import get_logger
+from ..log import verbosity_option
+from ..release import get_gitlab_instance
+from ..release import update_files_at_master
+from ..release import update_files_with_mr
 from . import bdt
-from ..release import (
-    get_gitlab_instance,
-    update_files_with_mr,
-    update_files_at_master,
-)
-
-from ..log import verbosity_option, get_logger
 
 logger = get_logger(__name__)
 
@@ -39,13 +37,9 @@ Examples:
 """
 )
 @click.argument("package")
-@click.argument(
-    "file", type=click.Path(file_okay=True, dir_okay=False, exists=True)
-)
+@click.argument("file", type=click.Path(file_okay=True, dir_okay=False, exists=True))
 @click.option("-m", "--message", help="Message to set for this commit")
-@click.option(
-    "-p", "--path", help="Which path to replace on the remote package"
-)
+@click.option("-p", "--path", help="Which path to replace on the remote package")
 @click.option(
     "-b",
     "--branch",
diff --git a/bob/devtools/scripts/create.py b/bob/devtools/scripts/create.py
index b1eb017beb0af29abf9e4e941512c33bbc7a329f..33d851dbf58099712238c70f0b5a2077145df707 100644
--- a/bob/devtools/scripts/create.py
+++ b/bob/devtools/scripts/create.py
@@ -4,21 +4,21 @@
 import os
 import sys
 
-import pkg_resources
 import click
 import yaml
 
-from . import bdt
-from ..build import parse_dependencies, conda_create, make_conda_config
-from ..constants import (
-    BASE_CONDARC,
-    CONDA_BUILD_CONFIG,
-    CONDA_RECIPE_APPEND,
-    SERVER,
-)
 from ..bootstrap import set_environment
-
-from ..log import verbosity_option, get_logger, echo_normal
+from ..build import conda_create
+from ..build import make_conda_config
+from ..build import parse_dependencies
+from ..constants import BASE_CONDARC
+from ..constants import CONDA_BUILD_CONFIG
+from ..constants import CONDA_RECIPE_APPEND
+from ..constants import SERVER
+from ..log import echo_normal
+from ..log import get_logger
+from ..log import verbosity_option
+from . import bdt
 
 logger = get_logger(__name__)
 
@@ -70,8 +70,7 @@ Examples:
     "--python",
     default=("%d.%d" % sys.version_info[:2]),
     show_default=True,
-    help="Version of python to build the "
-    "environment for [default: %(default)s]",
+    help="Version of python to build the " "environment for [default: %(default)s]",
 )
 @click.option(
     "-o",
@@ -82,9 +81,7 @@ Examples:
     show_default=True,
 )
 @click.option(
-    "-r",
-    "--condarc",
-    help="Use custom conda configuration file instead of our own",
+    "-r", "--condarc", help="Use custom conda configuration file instead of our own",
 )
 @click.option(
     "-l",
@@ -214,28 +211,24 @@ def create(
 
     if "channels" not in condarc_options:
         from ..bootstrap import get_channels
+
         channels = get_channels(
             public=(not private),
             stable=stable,
             server=server,
             intranet=private,
-            group=group
+            group=group,
         )
         condarc_options["channels"] = channels + ["defaults"]
 
     logger.info(
-            "Using the following channels during environment creation:" \
-                    "\n  - %s",
-            "\n  - ".join(condarc_options["channels"]),
-            )
-
-    conda_config = make_conda_config(
-        config, python, append_file, condarc_options
+        "Using the following channels during environment creation:" "\n  - %s",
+        "\n  - ".join(condarc_options["channels"]),
     )
+
+    conda_config = make_conda_config(config, python, append_file, condarc_options)
     deps = parse_dependencies(recipe_dir, conda_config)
     # when creating a local development environment, remove the always_yes option
     del condarc_options["always_yes"]
-    status = conda_create(
-        conda, name, overwrite, condarc_options, deps, dry_run, use_local
-    )
+    conda_create(conda, name, overwrite, condarc_options, deps, dry_run, use_local)
     echo_normal('Execute on your shell: "conda activate %s"' % name)
diff --git a/bob/devtools/scripts/dav.py b/bob/devtools/scripts/dav.py
index 0642af521c27fe4d44ae521942c61fe31c5dc508..69d0d92bb769445926a98db933eb0ef5590efed3 100644
--- a/bob/devtools/scripts/dav.py
+++ b/bob/devtools/scripts/dav.py
@@ -2,18 +2,21 @@
 # -*- coding: utf-8 -*-
 
 import os
-import sys
 
 import click
 import pkg_resources
+
 from click_plugins import with_plugins
 
+from ..dav import remove_old_beta_packages
+from ..dav import setup_webdav_client
+from ..log import echo_info
+from ..log import echo_normal
+from ..log import echo_warning
+from ..log import get_logger
+from ..log import verbosity_option
 from . import bdt
 
-from ..dav import setup_webdav_client, remove_old_beta_packages
-from ..log import verbosity_option, get_logger, echo_normal, echo_info, \
-    echo_warning
-
 logger = get_logger(__name__)
 
 
@@ -61,9 +64,7 @@ Examples:
     help="If set, print details about each listed file",
 )
 @click.argument(
-    "path",
-    default="/",
-    required=False,
+    "path", default="/", required=False,
 )
 @verbosity_option()
 @bdt.raise_on_error
@@ -71,15 +72,16 @@ def list(private, long_format, path):
     """List the contents of a given WebDAV directory.
     """
 
-    if not path.startswith('/'): path = '/' + path
+    if not path.startswith("/"):
+        path = "/" + path
     cl = setup_webdav_client(private)
     contents = cl.list(path)
     remote_path = cl.get_url(path)
-    echo_info('ls %s' % (remote_path,))
+    echo_info("ls %s" % (remote_path,))
     for k in contents:
         if long_format:
-            info = cl.info('/'.join((path, k)))
-            echo_normal('%-20s  %-10s  %s' % (info['created'], info['size'], k))
+            info = cl.info("/".join((path, k)))
+            echo_normal("%-20s  %-10s  %s" % (info["created"], info["size"], k))
         else:
             echo_normal(k)
 
@@ -101,8 +103,7 @@ Examples:
     help="If set, use the 'private' area instead of the public one",
 )
 @click.argument(
-    "path",
-    required=True,
+    "path", required=True,
 )
 @verbosity_option()
 @bdt.raise_on_error
@@ -112,18 +113,19 @@ def makedirs(private, path):
     Gracefully exists if the directory is already there.
     """
 
-    if not path.startswith('/'): path = '/' + path
+    if not path.startswith("/"):
+        path = "/" + path
     cl = setup_webdav_client(private)
     remote_path = cl.get_url(path)
 
     if cl.check(path):
-        echo_warning('directory %s already exists' % (remote_path,))
+        echo_warning("directory %s already exists" % (remote_path,))
 
-    rpath = ''
-    for k in path.split('/'):
-        rpath = '/'.join((rpath, k)) if rpath else k
+    rpath = ""
+    for k in path.split("/"):
+        rpath = "/".join((rpath, k)) if rpath else k
         if not cl.check(rpath):
-            echo_info('mkdir %s' % (rpath,))
+            echo_info("mkdir %s" % (rpath,))
             cl.mkdir(rpath)
 
 
@@ -161,8 +163,7 @@ Examples:
     help="If this flag is set, then execute the removal",
 )
 @click.argument(
-    "path",
-    required=True,
+    "path", required=True,
 )
 @verbosity_option()
 @bdt.raise_on_error
@@ -176,15 +177,16 @@ def rmtree(private, execute, path):
         echo_warning("!!!! DRY RUN MODE !!!!")
         echo_warning("Nothing is being executed on server.  Use -x to execute.")
 
-    if not path.startswith('/'): path = '/' + path
+    if not path.startswith("/"):
+        path = "/" + path
     cl = setup_webdav_client(private)
     remote_path = cl.get_url(path)
 
     if not cl.check(path):
-        echo_warning('resource %s does not exist' % (remote_path,))
+        echo_warning("resource %s does not exist" % (remote_path,))
         return
 
-    echo_info('rm -rf %s' % (remote_path,))
+    echo_info("rm -rf %s" % (remote_path,))
     if execute:
         cl.clean(path)
 
@@ -223,8 +225,7 @@ Examples:
     nargs=-1,
 )
 @click.argument(
-    "remote",
-    required=True,
+    "remote", required=True,
 )
 @verbosity_option()
 @bdt.raise_on_error
@@ -246,29 +247,29 @@ def upload(private, execute, local, remote):
         echo_warning("!!!! DRY RUN MODE !!!!")
         echo_warning("Nothing is being executed on server.  Use -x to execute.")
 
-    if not remote.startswith('/'): remote = '/' + remote
+    if not remote.startswith("/"):
+        remote = "/" + remote
     cl = setup_webdav_client(private)
 
     if not cl.check(remote):
-      echo_warning('base remote directory for upload %s does not exist' %
-          (remote,))
-      return 1
+        echo_warning("base remote directory for upload %s does not exist" % (remote,))
+        return 1
 
     for k in local:
         actual_remote = remote + os.path.basename(k)
         remote_path = cl.get_url(actual_remote)
 
         if cl.check(actual_remote):
-            echo_warning('resource %s already exists' % (remote_path,))
-            echo_warning('remove it first before uploading a new copy')
+            echo_warning("resource %s already exists" % (remote_path,))
+            echo_warning("remove it first before uploading a new copy")
             continue
 
         if os.path.isdir(k):
-            echo_info('cp -r %s %s' % (k, remote_path))
+            echo_info("cp -r %s %s" % (k, remote_path))
             if execute:
                 cl.upload_directory(local_path=k, remote_path=actual_remote)
         else:
-            echo_info('cp %s %s' % (k, remote_path))
+            echo_info("cp %s %s" % (k, remote_path))
             if execute:
                 cl.upload_file(local_path=k, remote_path=actual_remote)
 
@@ -305,8 +306,7 @@ Examples:
     help="If this flag is set, then execute the removal",
 )
 @click.argument(
-    "path",
-    required=True,
+    "path", required=True,
 )
 @verbosity_option()
 @bdt.raise_on_error
@@ -320,37 +320,39 @@ def clean_betas(private, execute, path):
         echo_warning("!!!! DRY RUN MODE !!!!")
         echo_warning("Nothing is being executed on server.  Use -x to execute.")
 
-    if not path.startswith('/'): path = '/' + path
+    if not path.startswith("/"):
+        path = "/" + path
     cl = setup_webdav_client(private)
     remote_path = cl.get_url(path)
 
     if not cl.is_dir(path):
-        echo_warning('Path %s is not a directory - ignoring...', remote_path)
+        echo_warning("Path %s is not a directory - ignoring...", remote_path)
         return
 
     # go through all possible variants:
     archs = [
-            'linux-64',
-            'linux-32',
-            'linux-armv6l',
-            'linux-armv7l',
-            'linux-ppc64le',
-            'osx-64',
-            'osx-32',
-            'win-64',
-            'win-32',
-            'noarch',
-            ]
+        "linux-64",
+        "linux-32",
+        "linux-armv6l",
+        "linux-armv7l",
+        "linux-ppc64le",
+        "osx-64",
+        "osx-32",
+        "win-64",
+        "win-32",
+        "noarch",
+    ]
 
     for arch in archs:
 
-        arch_path = '/'.join((path, arch))
+        arch_path = "/".join((path, arch))
 
         if not (cl.check(arch_path) and cl.is_dir(arch_path)):
             # it is normal if the directory does not exist
             continue
 
         server_path = cl.get_url(arch_path)
-        echo_info('Cleaning beta packages from %s' % server_path)
-        remove_old_beta_packages(client=cl, path=arch_path,
-                dry_run=(not execute), pyver=True)
+        echo_info("Cleaning beta packages from %s" % server_path)
+        remove_old_beta_packages(
+            client=cl, path=arch_path, dry_run=(not execute), pyver=True
+        )
diff --git a/bob/devtools/scripts/dumpsphinx.py b/bob/devtools/scripts/dumpsphinx.py
index 1882802fe7ffe24fed3e91fe4a5581e2f84b0c88..047a06fcfa383fc276878a853b16e439fd3247fc 100644
--- a/bob/devtools/scripts/dumpsphinx.py
+++ b/bob/devtools/scripts/dumpsphinx.py
@@ -2,13 +2,13 @@
 # -*- coding: utf-8 -*-
 
 
-from sphinx.ext import intersphinx
-
 import click
 
-from . import bdt
+from sphinx.ext import intersphinx
 
-from ..log import verbosity_option, get_logger
+from ..log import get_logger
+from ..log import verbosity_option
+from . import bdt
 
 logger = get_logger(__name__)
 
diff --git a/bob/devtools/scripts/getpath.py b/bob/devtools/scripts/getpath.py
index a7aede79bc5f547a5ede77be8faf8a6067c6ef60..5595c047ace20168d110e64439405b362ba2f72a 100644
--- a/bob/devtools/scripts/getpath.py
+++ b/bob/devtools/scripts/getpath.py
@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 
-import os
-
 import click
 
+from ..log import get_logger
+from ..log import verbosity_option
+from ..release import download_path
+from ..release import get_gitlab_instance
 from . import bdt
-from ..release import get_gitlab_instance, download_path
 from .common_options import ref_option
-from ..log import verbosity_option, get_logger
 
 logger = get_logger(__name__)
 
diff --git a/bob/devtools/scripts/gitlab.py b/bob/devtools/scripts/gitlab.py
index 7e66d36e2317788d7be9606f44c2b7beb6c8097c..c2e971db649c7177ec460227eccc29861f7f4bbb 100644
--- a/bob/devtools/scripts/gitlab.py
+++ b/bob/devtools/scripts/gitlab.py
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
+import click
 import pkg_resources
 
-import click
 from click_plugins import with_plugins
 
 from . import bdt
diff --git a/bob/devtools/scripts/graph.py b/bob/devtools/scripts/graph.py
index d3edfe03c43443b7c26148728a2d3a29fd00915a..641b3bd784023fab02c5b2911a339569e404ffb0 100644
--- a/bob/devtools/scripts/graph.py
+++ b/bob/devtools/scripts/graph.py
@@ -3,24 +3,24 @@
 
 import sys
 
-import yaml
 import click
-from click_plugins import with_plugins
+import yaml
 
-from . import bdt
-from ..constants import (
-    CONDA_BUILD_CONFIG,
-    CONDA_RECIPE_APPEND,
-    SERVER,
-    MATPLOTLIB_RCDIR,
-    BASE_CONDARC,
-)
+from ..bootstrap import get_channels
+from ..bootstrap import set_environment
 from ..build import make_conda_config
-from ..bootstrap import set_environment, get_channels
+from ..constants import BASE_CONDARC
+from ..constants import CONDA_BUILD_CONFIG
+from ..constants import CONDA_RECIPE_APPEND
+from ..constants import MATPLOTLIB_RCDIR
+from ..constants import SERVER
+from ..graph import compute_adjencence_matrix
+from ..graph import generate_graph
+from ..log import get_logger
+from ..log import verbosity_option
 from ..release import get_gitlab_instance
-from ..graph import compute_adjencence_matrix, generate_graph
+from . import bdt
 
-from ..log import verbosity_option, get_logger, echo_info
 logger = get_logger(__name__)
 
 
@@ -41,7 +41,7 @@ Examples:
      defined by a regular expression
 
 \b
-     $ bdt gitlab graph beat/beat.editor --deptypes=run --deptypes=test --whitelist='^beat\.(editor|cmdline).*$'
+     $ bdt gitlab graph beat/beat.editor --deptypes=run --deptypes=test --whitelist='^beat\\.(editor|cmdline).*$'
 
 """
 )
@@ -54,9 +54,7 @@ Examples:
     help="Version of python to build the environment for",
 )
 @click.option(
-    "-r",
-    "--condarc",
-    help="Use custom conda configuration file instead of our own",
+    "-r", "--condarc", help="Use custom conda configuration file instead of our own",
 )
 @click.option(
     "-m",
@@ -105,11 +103,7 @@ Examples:
     help="Use this flag to indicate the graph will be running on the CI",
 )
 @click.option(
-    "-n",
-    "--name",
-    show_default=True,
-    default="graph",
-    help="set the graph name",
+    "-n", "--name", show_default=True, default="graph", help="set the graph name",
 )
 @click.option(
     "-f",
@@ -122,12 +116,13 @@ Examples:
     "-w",
     "--whitelist",
     show_default=True,
-    default="^(bob|beat|batl|gridtk)(\.)?(?!-).*$",
+    default="^(bob|beat|batl|gridtk)(\\.)?(?!-).*$",
     help="package regular expression to preserve in the graph, "
     "use .* for keeping all packages, including non-maintained ones.  The "
     "current expression accepts most of our packages, excluding "
     "bob/beat-devel.  This flag only affects the graph generation - we still "
-    "recurse over all packages to calculate dependencies.")
+    "recurse over all packages to calculate dependencies.",
+)
 @click.option(
     "-d",
     "--deptypes",
@@ -136,11 +131,25 @@ Examples:
     multiple=True,
     help="types of dependencies to consider.  Pass multiple times to include "
     "more types.  Valid types are 'host', 'build', 'run' and 'test'.  An "
-    "empty set considers all dependencies to the graph")
+    "empty set considers all dependencies to the graph",
+)
 @verbosity_option()
 @bdt.raise_on_error
-def graph(package, python, condarc, config, append_file, server, private,
-        stable, ci, name, format, whitelist, deptypes):
+def graph(
+    package,
+    python,
+    condarc,
+    config,
+    append_file,
+    server,
+    private,
+    stable,
+    ci,
+    name,
+    format,
+    whitelist,
+    deptypes,
+):
     """
     Computes the dependency graph of a gitlab package (via its conda recipe)
     and outputs an dot file that can be used by graphviz to draw a direct
@@ -152,7 +161,7 @@ def graph(package, python, condarc, config, append_file, server, private,
     if "/" not in package:
         raise RuntimeError('PACKAGE should be specified as "group/name"')
 
-    package_group, package_name = package.split('/', 1)
+    package_group, package_name = package.split("/", 1)
 
     gl = get_gitlab_instance()
 
@@ -181,9 +190,7 @@ def graph(package, python, condarc, config, append_file, server, private,
         "\n  - ".join(condarc_options["channels"]),
     )
 
-    conda_config = make_conda_config(
-        config, python, append_file, condarc_options
-    )
+    conda_config = make_conda_config(config, python, append_file, condarc_options)
 
     set_environment("MATPLOTLIBRC", MATPLOTLIB_RCDIR)
 
@@ -195,9 +202,9 @@ def graph(package, python, condarc, config, append_file, server, private,
     # avoids conda-build complaints
     set_environment("NOSE_EVAL_ATTR", "")
 
-    adj_matrix = compute_adjencence_matrix(gl, package, conda_config,
-            channels[0], deptypes=deptypes)
+    adj_matrix = compute_adjencence_matrix(
+        gl, package, conda_config, channels[0], deptypes=deptypes
+    )
 
     graph = generate_graph(adj_matrix, deptypes=deptypes, whitelist=whitelist)
     graph.render(name, format=format, cleanup=True)
-
diff --git a/bob/devtools/scripts/jobs.py b/bob/devtools/scripts/jobs.py
index 05352f30a70e5dc7740c1fdfa198e5c54819d085..e18dbb1d171560d95c3172c17b01e2f371f315d2 100644
--- a/bob/devtools/scripts/jobs.py
+++ b/bob/devtools/scripts/jobs.py
@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 
-import os
-
 import click
 
-from . import bdt
+from ..log import echo_info
+from ..log import echo_normal
+from ..log import get_logger
+from ..log import verbosity_option
 from ..release import get_gitlab_instance
-
-from ..log import verbosity_option, get_logger, echo_normal, echo_info
+from . import bdt
 
 logger = get_logger(__name__)
 
@@ -44,7 +44,6 @@ def jobs(name, status):
 
     gl = get_gitlab_instance()
     gl.auth()
-    user_id = gl.user.attributes["id"]
 
     names = name or [
         "linux-desktop-shell",
@@ -57,25 +56,17 @@ def jobs(name, status):
 
     # search for the runner(s) to affect
     runners = [
-        k
-        for k in gl.runners.list(all=True)
-        if k.attributes["description"] in names
+        k for k in gl.runners.list(all=True) if k.attributes["description"] in names
     ]
 
     if not runners:
-        raise RuntimeError(
-            "Cannot find runner with description = %s" % "|".join(names)
-        )
+        raise RuntimeError("Cannot find runner with description = %s" % "|".join(names))
 
     for runner in runners:
         jobs = runner.jobs.list(all=True, status=status)
         echo_normal(
             "Runner %s (id=%d) -- %d running"
-            % (
-                runner.attributes["description"],
-                runner.attributes["id"],
-                len(jobs),
-            )
+            % (runner.attributes["description"], runner.attributes["id"], len(jobs),)
         )
         for k in jobs:
             echo_info(
diff --git a/bob/devtools/scripts/lasttag.py b/bob/devtools/scripts/lasttag.py
index 356f3ee766606b73727ce137ab49cc1d4fc9de84..053885c9f6c38103052965c7da0747f1eab90eea 100644
--- a/bob/devtools/scripts/lasttag.py
+++ b/bob/devtools/scripts/lasttag.py
@@ -1,15 +1,16 @@
 #!/usr/bin/env python
 
-import os
-
 import click
 import gitlab
 
-from . import bdt
-from ..changelog import get_last_tag, parse_date
+from ..changelog import get_last_tag
+from ..changelog import parse_date
+from ..log import echo_normal
+from ..log import echo_warning
+from ..log import get_logger
+from ..log import verbosity_option
 from ..release import get_gitlab_instance
-
-from ..log import verbosity_option, get_logger, echo_normal, echo_warning
+from . import bdt
 
 logger = get_logger(__name__)
 
@@ -52,9 +53,10 @@ def lasttag(package):
         tag = get_last_tag(use_package)
         date = parse_date(tag.commit["committed_date"])
         echo_normal(
-            "%s: %s (%s)"
-            % (package, tag.name, date.strftime("%Y-%m-%d %H:%M:%S"))
+            "%s: %s (%s)" % (package, tag.name, date.strftime("%Y-%m-%d %H:%M:%S"))
+        )
+    except gitlab.GitlabGetError:
+        logger.warn(
+            "Gitlab access error - package %s does not exist?", package, exc_info=True
         )
-    except gitlab.GitlabGetError as e:
-        logger.warn("Gitlab access error - package %s does not exist?", package)
         echo_warning("%s: unknown" % (package,))
diff --git a/bob/devtools/scripts/local.py b/bob/devtools/scripts/local.py
index a008757e12b7e8935f21d534476b5e4ec2271570..b8565e9683c9c47040392a4078cdc229e7cf6fb0 100644
--- a/bob/devtools/scripts/local.py
+++ b/bob/devtools/scripts/local.py
@@ -3,17 +3,17 @@
 import os
 import sys
 
-import gitlab
-
 import click
+import gitlab
 import pkg_resources
+
 from click_plugins import with_plugins
 
+from ..log import get_logger
+from ..log import verbosity_option
 from . import bdt
 from . import ci
 
-from ..log import verbosity_option, get_logger
-
 logger = get_logger(__name__)
 
 
@@ -22,9 +22,7 @@ def set_up_environment_variables(
 ):
     """This function sets up the proper environment variables when user wants
     to run the commands usually run on ci locally."""
-    os.environ["CI_JOB_TOKEN"] = gitlab.Gitlab.from_config(
-        "idiap"
-    ).private_token
+    os.environ["CI_JOB_TOKEN"] = gitlab.Gitlab.from_config("idiap").private_token
     os.environ["CI_PROJECT_DIR"] = project_dir
     os.environ["CI_PROJECT_NAMESPACE"] = name_space
     os.environ["CI_PROJECT_VISIBILITY"] = project_visibility
diff --git a/bob/devtools/scripts/mirror.py b/bob/devtools/scripts/mirror.py
index 1cfb86c2cf9d2f83ac1147ed81ee70578d494f97..bd08b58293141d763aadec2998fb8306940ed2d5 100644
--- a/bob/devtools/scripts/mirror.py
+++ b/bob/devtools/scripts/mirror.py
@@ -3,24 +3,25 @@
 
 
 import os
-import click
 import tempfile
 
+import click
 import conda_build.api
 
+from ..log import echo_info
+from ..log import echo_warning
+from ..log import get_logger
+from ..log import verbosity_option
+from ..mirror import blacklist_filter
+from ..mirror import checksum_packages
+from ..mirror import copy_and_clean_patch
+from ..mirror import download_packages
+from ..mirror import get_json
+from ..mirror import get_local_contents
+from ..mirror import load_glob_list
+from ..mirror import remove_packages
+from ..mirror import whitelist_filter
 from . import bdt
-from ..mirror import (
-        get_json,
-        get_local_contents,
-        load_glob_list,
-        blacklist_filter,
-        whitelist_filter,
-        download_packages,
-        remove_packages,
-        copy_and_clean_patch,
-        checksum_packages,
-        )
-from ..log import verbosity_option, get_logger, echo_info, echo_warning
 
 logger = get_logger(__name__)
 
@@ -37,31 +38,38 @@ Examples:
     """
 )
 @click.argument(
-    "channel-url",
-    required=True,
+    "channel-url", required=True,
 )
 @click.argument(
     "dest-dir",
-    type=click.Path(exists=False, dir_okay=True, file_okay=False,
-        writable=True, readable=True, resolve_path=True),
+    type=click.Path(
+        exists=False,
+        dir_okay=True,
+        file_okay=False,
+        writable=True,
+        readable=True,
+        resolve_path=True,
+    ),
     required=True,
 )
 @click.option(
     "-b",
     "--blacklist",
-    type=click.Path(exists=True, dir_okay=False, file_okay=True,
-        readable=True, resolve_path=True),
-    help="A file containing a list of globs to exclude from local " \
-            "mirroring, one per line",
+    type=click.Path(
+        exists=True, dir_okay=False, file_okay=True, readable=True, resolve_path=True
+    ),
+    help="A file containing a list of globs to exclude from local "
+    "mirroring, one per line",
 )
 @click.option(
     "-w",
     "--whitelist",
-    type=click.Path(exists=True, dir_okay=False, file_okay=True,
-        readable=True, resolve_path=True),
-    help="A file containing a list of globs to include at local " \
-            "mirroring, one per line.  This is considered *after* " \
-            "the blacklisting.  It is here just for testing purposes",
+    type=click.Path(
+        exists=True, dir_okay=False, file_okay=True, readable=True, resolve_path=True
+    ),
+    help="A file containing a list of globs to include at local "
+    "mirroring, one per line.  This is considered *after* "
+    "the blacklisting.  It is here just for testing purposes",
 )
 @click.option(
     "-m",
@@ -80,8 +88,14 @@ Examples:
 @click.option(
     "-t",
     "--tmpdir",
-    type=click.Path(exists=True, dir_okay=True, file_okay=False,
-        readable=True, writable=True, resolve_path=True),
+    type=click.Path(
+        exists=True,
+        dir_okay=True,
+        file_okay=False,
+        readable=True,
+        writable=True,
+        resolve_path=True,
+    ),
     help="A directory where to store temporary files",
 )
 @click.option(
@@ -104,16 +118,16 @@ Examples:
 @verbosity_option()
 @bdt.raise_on_error
 def mirror(
-        channel_url,
-        dest_dir,
-        blacklist,
-        whitelist,
-        check_md5,
-        dry_run,
-        tmpdir,
-        patch,
-        checksum,
-        ):
+    channel_url,
+    dest_dir,
+    blacklist,
+    whitelist,
+    check_md5,
+    dry_run,
+    tmpdir,
+    patch,
+    checksum,
+):
     """Mirrors a conda channel to a particular local destination
 
     This command is capable of completely mirroring a valid conda channel,
@@ -125,37 +139,41 @@ def mirror(
 
     # creates a self destructing temporary directory that will act as temporary
     # directory for the rest of this program
-    tmpdir2 = tempfile.TemporaryDirectory(prefix='bdt-mirror-tmp', dir=tmpdir)
+    tmpdir2 = tempfile.TemporaryDirectory(prefix="bdt-mirror-tmp", dir=tmpdir)
     tempfile.tempdir = tmpdir2.name
-    os.environ['TMPDIR'] = tmpdir2.name
-    logger.info('Setting $TMPDIR and `tempfile.tempdir` to %s', tmpdir2.name)
+    os.environ["TMPDIR"] = tmpdir2.name
+    logger.info("Setting $TMPDIR and `tempfile.tempdir` to %s", tmpdir2.name)
 
     # if we are in a dry-run mode, let's let it be known
     if dry_run:
         logger.warn("!!!! DRY RUN MODE !!!!")
         logger.warn("Nothing will be really mirrored")
 
-    DEFAULT_SUBDIRS = ['noarch', 'linux-64', 'osx-64']
+    DEFAULT_SUBDIRS = ["noarch", "linux-64", "osx-64"]
 
-    noarch = os.path.join(dest_dir, 'noarch')
-    if not os.path.exists(noarch):  #first time
+    noarch = os.path.join(dest_dir, "noarch")
+    if not os.path.exists(noarch):  # first time
         # calls conda index to create basic infrastructure
         logger.info("Creating conda channel at %s...", dest_dir)
         if not dry_run:
-            conda_build.api.update_index([dest_dir], subdir=DEFAULT_SUBDIRS,
-                    progress=False, verbose=False)
+            conda_build.api.update_index(
+                [dest_dir], subdir=DEFAULT_SUBDIRS, progress=False, verbose=False
+            )
 
     for arch in DEFAULT_SUBDIRS:
 
-        remote_repodata = get_json(channel_url, arch,
-                'repodata_from_packages.json.bz2')
-        logger.info('%d packages available in remote index',
-                len(remote_repodata.get('packages', {})))
+        remote_repodata = get_json(channel_url, arch, "repodata_from_packages.json.bz2")
+        logger.info(
+            "%d packages available in remote index",
+            len(remote_repodata.get("packages", {})),
+        )
         local_packages = get_local_contents(dest_dir, arch)
-        logger.info('%d packages available in local mirror', len(local_packages))
+        logger.info("%d packages available in local mirror", len(local_packages))
 
-        remote_packages = set(list(remote_repodata.get('packages', {}).keys()) +
-                list(remote_repodata.get('packages.conda', {}).keys()))
+        remote_packages = set(
+            list(remote_repodata.get("packages", {}).keys())
+            + list(remote_repodata.get("packages.conda", {}).keys())
+        )
 
         if blacklist is not None and os.path.exists(blacklist):
             globs_to_remove = set(load_glob_list(blacklist))
@@ -164,8 +182,9 @@ def mirror(
 
         # in the remote packages, subset those that need to be downloaded
         # according to our own interest
-        to_download = blacklist_filter(remote_packages - local_packages,
-                globs_to_remove)
+        to_download = blacklist_filter(
+            remote_packages - local_packages, globs_to_remove
+        )
 
         if whitelist is not None and os.path.exists(whitelist):
             globs_to_consider = set(load_glob_list(whitelist))
@@ -182,46 +201,62 @@ def mirror(
         if checksum:
             # double-check if, among packages I should keep, everything looks
             # already with respect to expected checksums from the remote repo
-            issues = checksum_packages(remote_repodata, dest_dir, arch,
-                    to_keep)
+            issues = checksum_packages(remote_repodata, dest_dir, arch, to_keep)
             if issues:
-                echo_warning("Detected %d packages with checksum issues - " \
-                        "re-downloading after erasing..." % len(issues))
+                echo_warning(
+                    "Detected %d packages with checksum issues - "
+                    "re-downloading after erasing..." % len(issues)
+                )
             else:
                 echo_info("All local package checksums match expected values")
             remove_packages(issues, dest_dir, arch, dry_run)
             to_download |= issues
 
         if to_download:
-            download_packages(to_download, remote_repodata, channel_url,
-                    dest_dir, arch, dry_run)
+            download_packages(
+                to_download, remote_repodata, channel_url, dest_dir, arch, dry_run
+            )
         else:
-            echo_info("Mirror at %s/%s is up-to-date w.r.t. %s/%s. " \
-                    "No packages to download." % (dest_dir, arch, channel_url,
-                        arch))
+            echo_info(
+                "Mirror at %s/%s is up-to-date w.r.t. %s/%s. "
+                "No packages to download." % (dest_dir, arch, channel_url, arch)
+            )
 
         if to_delete_locally:
-            echo_warning("%d packages will be removed at %s/%s" % \
-                    (len(to_delete_locally), dest_dir, arch))
+            echo_warning(
+                "%d packages will be removed at %s/%s"
+                % (len(to_delete_locally), dest_dir, arch)
+            )
             remove_packages(to_delete_locally, dest_dir, arch, dry_run)
         else:
-            echo_info("Mirror at %s/%s is up-to-date w.r.t. blacklist. " \
-                    "No packages to be removed." % (dest_dir, arch))
+            echo_info(
+                "Mirror at %s/%s is up-to-date w.r.t. blacklist. "
+                "No packages to be removed." % (dest_dir, arch)
+            )
 
         if patch:
             # download/cleanup patch instructions, otherwise conda installs may
             # go crazy.  Do this before the indexing, that will use that file
             # to do its magic.
-            patch_file = 'patch_instructions.json'
-            name = copy_and_clean_patch(channel_url, dest_dir, arch,
-                    patch_file, dry_run)
-            echo_info("Cleaned copy of %s/%s/%s installed at %s" %
-                    (channel_url, arch, patch_file, name))
+            patch_file = "patch_instructions.json"
+            name = copy_and_clean_patch(
+                channel_url, dest_dir, arch, patch_file, dry_run
+            )
+            echo_info(
+                "Cleaned copy of %s/%s/%s installed at %s"
+                % (channel_url, arch, patch_file, name)
+            )
 
     # re-indexes the channel to produce a conda-compatible setup
     echo_info("Re-indexing %s..." % dest_dir)
     if not dry_run:
         from conda_build.index import MAX_THREADS_DEFAULT
-        conda_build.api.update_index([dest_dir], check_md5=check_md5,
-                progress=True, verbose=False, subdir=DEFAULT_SUBDIRS,
-                threads=MAX_THREADS_DEFAULT)
+
+        conda_build.api.update_index(
+            [dest_dir],
+            check_md5=check_md5,
+            progress=True,
+            verbose=False,
+            subdir=DEFAULT_SUBDIRS,
+            threads=MAX_THREADS_DEFAULT,
+        )
diff --git a/bob/devtools/scripts/new.py b/bob/devtools/scripts/new.py
index 2fca5cb0279ffd6e865fd8160516ba34a0150ced..b484d3da7fd4842462f428d052e75ac97e806f14 100644
--- a/bob/devtools/scripts/new.py
+++ b/bob/devtools/scripts/new.py
@@ -1,17 +1,17 @@
 #!/usr/bin/env python
 
+import datetime
 import os
 import shutil
-import datetime
 
 import click
 import jinja2
 import pkg_resources
 
+from ..log import get_logger
+from ..log import verbosity_option
 from . import bdt
 
-from ..log import verbosity_option, get_logger
-
 logger = get_logger(__name__)
 
 
@@ -112,11 +112,7 @@ def new(package, author, email, title, license, output_dir):
     #  Title
     # =======
     rst_title = (
-        ("=" * (2 + len(title)))
-        + "\n "
-        + title
-        + "\n"
-        + ("=" * (2 + len(title)))
+        ("=" * (2 + len(title))) + "\n " + title + "\n" + ("=" * (2 + len(title)))
     )
 
     # the jinja context defines the substitutions to be performed
@@ -137,9 +133,7 @@ def new(package, author, email, title, license, output_dir):
     # copy the whole template structure and de-templatize the needed files
     if output_dir is None:
         output_dir = os.path.join(os.path.realpath(os.curdir), name)
-    logger.info(
-        "Creating structure for %s at directory %s", package, output_dir
-    )
+    logger.info("Creating structure for %s at directory %s", package, output_dir)
 
     if os.path.exists(output_dir):
         raise IOError(
@@ -158,16 +152,19 @@ def new(package, author, email, title, license, output_dir):
 
     # other standard files
     simple = [
-        "requirements.txt",
-        "buildout.cfg",
-        "MANIFEST.in",
-        "setup.py",
+        ".flake8",
         ".gitignore",
-        "doc/index.rst",
+        ".gitlab-ci.yml",
+        ".isort.cfg",
+        ".pre-commit-config.yaml",
+        "buildout.cfg",
         "doc/conf.py",
+        "doc/index.rst",
         "doc/links.rst",
-        ".gitlab-ci.yml",
+        "MANIFEST.in",
         "README.rst",
+        "requirements.txt",
+        "setup.py",
         "version.txt",
     ]
     for k in simple:
@@ -184,9 +181,7 @@ def new(package, author, email, title, license, output_dir):
         __name__, os.path.join("..", "templates")
     )
     logger.info("Creating base %s python module", group)
-    shutil.copytree(
-        os.path.join(template_dir, "pkg"), os.path.join(output_dir, group)
-    )
+    shutil.copytree(os.path.join(template_dir, "pkg"), os.path.join(output_dir, group))
 
     # copies specific images to the right spot
     copy_file(os.path.join("doc", "img", "%s-favicon.ico" % group), output_dir)
@@ -205,6 +200,4 @@ def new(package, author, email, title, license, output_dir):
         comment_start_string="(#",
         comment_end_string="#)",
     )
-    render_template(
-        conda_env, os.path.join("conda", "meta.yaml"), context, output_dir
-    )
+    render_template(conda_env, os.path.join("conda", "meta.yaml"), context, output_dir)
diff --git a/bob/devtools/scripts/pipelines.py b/bob/devtools/scripts/pipelines.py
index 7d21bc068ba3377df469835fd861be981e89adf3..e362a4cec9cd8a70067c90875f8596e4afa78d41 100644
--- a/bob/devtools/scripts/pipelines.py
+++ b/bob/devtools/scripts/pipelines.py
@@ -1,18 +1,21 @@
 #!/usr/bin/env python
 
-import os
+import urllib
 
 import click
 import gitlab
-import urllib
 
-from . import bdt
-from ..release import get_gitlab_instance
+from tabulate import tabulate
 
-from ..log import verbosity_option, get_logger, echo_normal, echo_warning
+from ..log import echo_warning
+from ..log import get_logger
+from ..log import verbosity_option
 from ..pipelines import process_log
+from ..release import get_gitlab_instance
+from . import bdt
+
 logger = get_logger(__name__)
-from tabulate import tabulate
+
 
 @click.command(
     epilog="""
@@ -30,7 +33,7 @@ Examples:
 )
 @click.argument("package")
 @click.argument("pipeline")
-@click.option('--job-id', default=None, help='A job id from a pipeline')
+@click.option("--job-id", default=None, help="A job id from a pipeline")
 @verbosity_option()
 @bdt.raise_on_error
 def process_pipelines(package, pipeline, job_id):
@@ -46,24 +49,31 @@ def process_pipelines(package, pipeline, job_id):
         project = gl.projects.get(package)
         pipeline = project.pipelines.get(pipeline)
 
-        jobs = [j for j in pipeline.jobs.list()]        
+        jobs = [j for j in pipeline.jobs.list()]
         if job_id is not None:
-            jobs = [j for j in jobs if int(j.attributes["id"])==int(job_id)]
+            jobs = [j for j in jobs if int(j.attributes["id"]) == int(job_id)]
 
+        if len(jobs) == 0:
+            print(
+                "Job %s not found in the pipeline %s. Use `bdt gitlab get-pipelines` to search for valid job ids."
+                % (job_id, pipeline.attributes["id"])
+            )
 
-        if(len(jobs) == 0 ):
-            print("Job %s not found in the pipeline %s. Use `bdt gitlab get-pipelines` to search " % (job_id, pipeline.attributes["id"]))
-                         
         # Reading log
         try:
             for j in jobs:
-                print("Pipeline %s, Job %s" % (pipeline.attributes["id"], int(j.attributes["id"])))
-                web_url = j.attributes["web_url"] + "/raw"                
+                print(
+                    "Pipeline %s, Job %s"
+                    % (pipeline.attributes["id"], int(j.attributes["id"]))
+                )
+                web_url = j.attributes["web_url"] + "/raw"
                 log = str(urllib.request.urlopen(web_url).read()).split("\\n")
                 process_log(log)
-        except urllib.error.HTTPError as e:
+        except urllib.error.HTTPError:
             logger.warn(
-                "Gitlab access error - Log %s can't be found" % web_url, package
+                "Gitlab access error - Log %s can't be found (package: %s)"
+                % (web_url, package),
+                exc_info=True,
             )
             echo_warning("%s: unknown" % (package,))
 
@@ -74,8 +84,10 @@ def process_pipelines(package, pipeline, job_id):
         )
 
         pass
-    except gitlab.GitlabGetError as e:
-        logger.warn("Gitlab access error - package %s does not exist?", package)
+    except gitlab.GitlabGetError:
+        logger.warn(
+            "Gitlab access error - package %s does not exist?", package, exc_info=True
+        )
         echo_warning("%s: unknown" % (package,))
 
 
@@ -125,6 +137,8 @@ def get_pipelines(package):
         print("Jobs from project %s" % package)
         print(tabulate(description))
 
-    except gitlab.GitlabGetError as e:
-        logger.warn("Gitlab access error - package %s does not exist?", package)
+    except gitlab.GitlabGetError:
+        logger.warn(
+            "Gitlab access error - package %s does not exist?", package, exc_info=True
+        )
         echo_warning("%s: unknown" % (package,))
diff --git a/bob/devtools/scripts/rebuild.py b/bob/devtools/scripts/rebuild.py
index 8b5a33710b04795ddbe99c8f4ec59e493ce7c169..d42c581a947c2e9f19b7581f546908adb12c2314 100644
--- a/bob/devtools/scripts/rebuild.py
+++ b/bob/devtools/scripts/rebuild.py
@@ -5,33 +5,29 @@ import os
 import sys
 import urllib.request
 
-import yaml
 import click
-import pkg_resources
 import conda_build.api
+import yaml
 
+from ..bootstrap import get_channels
+from ..bootstrap import set_environment
+from ..build import conda_arch
+from ..build import get_docserver_setup
+from ..build import get_env_directory
+from ..build import get_output_path
+from ..build import get_parsed_recipe
+from ..build import get_rendered_metadata
+from ..build import make_conda_config
+from ..build import next_build_number
+from ..build import should_skip_build
+from ..constants import BASE_CONDARC
+from ..constants import CONDA_BUILD_CONFIG
+from ..constants import CONDA_RECIPE_APPEND
+from ..constants import MATPLOTLIB_RCDIR
+from ..constants import SERVER
+from ..log import get_logger
+from ..log import verbosity_option
 from . import bdt
-from ..build import (
-    next_build_number,
-    conda_arch,
-    should_skip_build,
-    get_rendered_metadata,
-    get_parsed_recipe,
-    make_conda_config,
-    get_docserver_setup,
-    get_env_directory,
-    get_output_path,
-)
-from ..constants import (
-    CONDA_BUILD_CONFIG,
-    CONDA_RECIPE_APPEND,
-    SERVER,
-    MATPLOTLIB_RCDIR,
-    BASE_CONDARC,
-)
-from ..bootstrap import set_environment, get_channels
-
-from ..log import verbosity_option, get_logger, echo_normal
 
 logger = get_logger(__name__)
 
@@ -65,9 +61,7 @@ Examples:
     help="Version of python to build the environment for",
 )
 @click.option(
-    "-r",
-    "--condarc",
-    help="Use custom conda configuration file instead of our own",
+    "-r", "--condarc", help="Use custom conda configuration file instead of our own",
 )
 @click.option(
     "-m",
@@ -180,11 +174,7 @@ def rebuild(
 
     # get potential channel upload and other auxiliary channels
     channels = get_channels(
-        public=(not private),
-        stable=stable,
-        server=server,
-        intranet=ci,
-        group=group,
+        public=(not private), stable=stable, server=server, intranet=ci, group=group,
     )
 
     if condarc is not None:
@@ -209,9 +199,7 @@ def rebuild(
     prefix = get_env_directory(os.environ["CONDA_EXE"], "base")
     condarc_options["croot"] = os.path.join(prefix, "conda-bld")
 
-    conda_config = make_conda_config(
-        config, python, append_file, condarc_options
-    )
+    conda_config = make_conda_config(config, python, append_file, condarc_options)
 
     set_environment("MATPLOTLIBRC", MATPLOTLIB_RCDIR)
 
@@ -219,11 +207,7 @@ def rebuild(
     # and derived documentation building via Sphinx)
     set_environment("DOCSERVER", server)
     doc_urls = get_docserver_setup(
-        public=(not private),
-        stable=stable,
-        server=server,
-        intranet=ci,
-        group=group,
+        public=(not private), stable=stable, server=server, intranet=ci, group=group,
     )
     set_environment("BOB_DOCUMENTATION_SERVER", doc_urls)
 
@@ -247,24 +231,18 @@ def rebuild(
 
         # checks if we should actually build this recipe
         if should_skip_build(metadata):
-            logger.info(
-                "Skipping UNSUPPORTED build of %s for %s", recipe_dir, arch
-            )
+            logger.info("Skipping UNSUPPORTED build of %s for %s", recipe_dir, arch)
             continue
 
         rendered_recipe = get_parsed_recipe(metadata)
         path = get_output_path(metadata, conda_config)[0]
 
         # Get the latest build number
-        build_number, existing = next_build_number(
-            channels[0], os.path.basename(path)
-        )
+        build_number, existing = next_build_number(channels[0], os.path.basename(path))
 
         should_build = True
 
-        if (
-            existing
-        ):  # other builds exist, get the latest and see if it still works
+        if existing:  # other builds exist, get the latest and see if it still works
 
             destpath = os.path.join(
                 condarc_options["croot"], arch, os.path.basename(existing[0])
@@ -285,7 +263,6 @@ def rebuild(
                 should_build = not result
             except Exception as error:
                 logger.exception(error)
-            except:
                 logger.error(
                     "conda_build.api.test() threw an unknown exception - "
                     "looks like bad programming, but not on our side this time..."
@@ -311,9 +288,7 @@ def rebuild(
                 # set $BOB_BUILD_NUMBER and force conda_build to reparse recipe to get it
                 # right
                 set_environment("BOB_BUILD_NUMBER", str(build_number))
-                paths = conda_build.api.build(
-                    d, config=conda_config, notest=False
-                )
+                paths = conda_build.api.build(d, config=conda_config, notest=False)
                 # if you get to this point, the package was successfully rebuilt
                 # set environment to signal caller we may dispose of it
                 os.environ["BDT_BUILD"] = ":".join(paths)
diff --git a/bob/devtools/scripts/release.py b/bob/devtools/scripts/release.py
index 11b932c34626dae0578db109a2398f90e503d2f0..57f8725c628c0f4f59e09ced643ef0fd262ed450 100644
--- a/bob/devtools/scripts/release.py
+++ b/bob/devtools/scripts/release.py
@@ -2,16 +2,15 @@
 # -*- coding: utf-8 -*-
 
 
-import os
-
 import click
 
-from . import bdt
-from ..release import release_bob, parse_and_process_package_changelog
-from ..release import release_package, wait_for_pipeline_to_finish
+from ..log import get_logger
+from ..log import verbosity_option
 from ..release import get_gitlab_instance
-
-from ..log import verbosity_option, get_logger
+from ..release import parse_and_process_package_changelog
+from ..release import release_package
+from ..release import wait_for_pipeline_to_finish
+from . import bdt
 
 logger = get_logger(__name__)
 
diff --git a/bob/devtools/scripts/runners.py b/bob/devtools/scripts/runners.py
index 3ca015ba65bcba156f51abca8a3cf2fc8c1e7cca..59de342ece863a3aff5fb5dabe713880f087bf54 100644
--- a/bob/devtools/scripts/runners.py
+++ b/bob/devtools/scripts/runners.py
@@ -1,13 +1,11 @@
 #!/usr/bin/env python
 
-import os
-
 import click
 
-from . import bdt
+from ..log import get_logger
+from ..log import verbosity_option
 from ..release import get_gitlab_instance
-
-from ..log import verbosity_option, get_logger
+from . import bdt
 
 logger = get_logger(__name__)
 
@@ -51,7 +49,6 @@ def runners(target, cmd, name, dry_run):
 
     gl = get_gitlab_instance()
     gl.auth()
-    user_id = gl.user.attributes["id"]
 
     if "/" in target:  # it is a specific project
         packages = [gl.projects.get(target)]
@@ -68,26 +65,20 @@ def runners(target, cmd, name, dry_run):
             "Found gitlab group %s (id=%d)", group.attributes["path"], group.id
         )
         logger.warn(
-            "Retrieving all projects (with details) from group "
-            "%s (id=%d)...",
+            "Retrieving all projects (with details) from group " "%s (id=%d)...",
             group.attributes["path"],
             group.id,
         )
         packages = [
-            gl.projects.get(k.id)
-            for k in group.projects.list(all=True, simple=True)
+            gl.projects.get(k.id) for k in group.projects.list(all=True, simple=True)
         ]
         logger.info(
-            "Found %d projects under group %s",
-            len(packages),
-            group.attributes["path"],
+            "Found %d projects under group %s", len(packages), group.attributes["path"],
         )
 
     # search for the runner to affect
     the_runner = [
-        k
-        for k in gl.runners.list(all=True)
-        if k.attributes["description"] == name
+        k for k in gl.runners.list(all=True) if k.attributes["description"] == name
     ]
     if not the_runner:
         raise RuntimeError("Cannot find runner with description = %s", name)
@@ -100,21 +91,19 @@ def runners(target, cmd, name, dry_run):
 
     for k in packages:
         logger.info(
-            "Processing project %s (id=%d)",
-            k.attributes["path_with_namespace"],
-            k.id,
+            "Processing project %s (id=%d)", k.attributes["path_with_namespace"], k.id,
         )
 
         if cmd == "enable":
 
             # checks if runner is not enabled first
             enabled = False
-            for l in k.runners.list(all=True):
-                if l.id == the_runner.id:  # it is there already
+            for ll in k.runners.list(all=True):
+                if ll.id == the_runner.id:  # it is there already
                     logger.warn(
                         "Runner %s (id=%d) is already enabled for project %s",
-                        l.attributes["description"],
-                        l.id,
+                        ll.attributes["description"],
+                        ll.id,
                         k.attributes["path_with_namespace"],
                     )
                     enabled = True
@@ -134,12 +123,12 @@ def runners(target, cmd, name, dry_run):
 
             # checks if runner is not already disabled first
             disabled = True
-            for l in k.runners.list(all=True):
-                if l.id == the_runner.id:  # it is there already
+            for ll in k.runners.list(all=True):
+                if ll.id == the_runner.id:  # it is there already
                     logger.debug(
                         "Runner %s (id=%d) is enabled for project %s",
-                        l.attributes["description"],
-                        l.id,
+                        ll.attributes["description"],
+                        ll.id,
                         k.attributes["path_with_namespace"],
                     )
                     disabled = False
diff --git a/bob/devtools/scripts/test.py b/bob/devtools/scripts/test.py
index be73018cafdd5d649acc987e3d8b0d79789a0c6c..24f5ff8bbb66988c76d34a914b080991fc557086 100644
--- a/bob/devtools/scripts/test.py
+++ b/bob/devtools/scripts/test.py
@@ -2,33 +2,28 @@
 # -*- coding: utf-8 -*-
 
 import os
-import sys
 
-import yaml
 import click
-import pkg_resources
 import conda_build.api
+import yaml
 
+from ..bootstrap import set_environment
+from ..build import conda_arch
+from ..build import get_docserver_setup
+from ..build import get_env_directory
+from ..build import make_conda_config
+from ..build import remove_conda_loggers
+from ..constants import BASE_CONDARC
+from ..constants import CONDA_BUILD_CONFIG
+from ..constants import CONDA_RECIPE_APPEND
+from ..constants import MATPLOTLIB_RCDIR
+from ..constants import SERVER
+from ..log import get_logger
+from ..log import verbosity_option
 from . import bdt
-from ..build import (
-    conda_arch,
-    make_conda_config,
-    get_docserver_setup,
-    get_env_directory,
-    remove_conda_loggers,
-)
-remove_conda_loggers()
 
-from ..constants import (
-    CONDA_BUILD_CONFIG,
-    CONDA_RECIPE_APPEND,
-    SERVER,
-    MATPLOTLIB_RCDIR,
-    BASE_CONDARC,
-)
-from ..bootstrap import set_environment
+remove_conda_loggers()
 
-from ..log import verbosity_option, get_logger
 
 logger = get_logger(__name__)
 
@@ -57,9 +52,7 @@ Examples:
     nargs=-1,
 )
 @click.option(
-    "-r",
-    "--condarc",
-    help="Use custom conda configuration file instead of our own",
+    "-r", "--condarc", help="Use custom conda configuration file instead of our own",
 )
 @click.option(
     "-m",
@@ -173,12 +166,9 @@ def test(
 
     if "channels" not in condarc_options:
         from ..bootstrap import get_channels
+
         channels = get_channels(
-            public=(not private),
-            stable=stable,
-            server=server,
-            intranet=ci,
-            group=group
+            public=(not private), stable=stable, server=server, intranet=ci, group=group
         )
         condarc_options["channels"] = channels + ["defaults"]
 
@@ -199,11 +189,7 @@ def test(
     # and derived documentation building via Sphinx)
     set_environment("DOCSERVER", server)
     doc_urls = get_docserver_setup(
-        public=(not private),
-        stable=stable,
-        server=server,
-        intranet=ci,
-        group=group,
+        public=(not private), stable=stable, server=server, intranet=ci, group=group,
     )
     set_environment("BOB_DOCUMENTATION_SERVER", doc_urls)
 
diff --git a/bob/devtools/scripts/update_bob.py b/bob/devtools/scripts/update_bob.py
index 20f9a072e57bb69b89cd2530065c121d911cf01c..889381dcb07ad8e4156f7dfa14e1489687a9792a 100644
--- a/bob/devtools/scripts/update_bob.py
+++ b/bob/devtools/scripts/update_bob.py
@@ -3,10 +3,9 @@
 
 import click
 
+from ..log import get_logger
+from ..log import verbosity_option
 from . import bdt
-from .common_options import ref_option
-
-from ..log import verbosity_option, get_logger
 
 logger = get_logger(__name__)
 
@@ -71,7 +70,6 @@ def update_bob(stable):
         ]
         public_packages = [f"{pkg} =={tag}" for pkg, tag in zip(public_packages, tags)]
 
-
     # modify conda/meta.yaml and requirements.txt in bob/bob
     logger.info("Updating conda/meta.yaml")
     start_tag = "# LIST OF BOB PACKAGES - START"
diff --git a/bob/devtools/scripts/visibility.py b/bob/devtools/scripts/visibility.py
index 1ff96dbd4b52230efd2d27f1c0b85ba4139406fe..86eea473705cebaafb77ff9426c0baa29fb75889 100644
--- a/bob/devtools/scripts/visibility.py
+++ b/bob/devtools/scripts/visibility.py
@@ -1,15 +1,16 @@
 #!/usr/bin/env python
 
 import os
-import sys
 
 import click
 import gitlab
 
-from . import bdt
+from ..log import echo_normal
+from ..log import echo_warning
+from ..log import get_logger
+from ..log import verbosity_option
 from ..release import get_gitlab_instance
-
-from ..log import verbosity_option, get_logger, echo_normal, echo_warning
+from . import bdt
 
 logger = get_logger(__name__)
 
@@ -83,11 +84,12 @@ def visibility(target, group):
                 use_package.id,
             )
             echo_normal(
-                "%s: %s"
-                % (package, use_package.attributes["visibility"].lower())
+                "%s: %s" % (package, use_package.attributes["visibility"].lower())
             )
-        except gitlab.GitlabGetError as e:
+        except gitlab.GitlabGetError:
             logger.warn(
-                "Gitlab access error - package %s does not exist?", package
+                "Gitlab access error - package %s does not exist?",
+                package,
+                exc_info=True,
             )
             echo_warning("%s: unknown" % (package,))
diff --git a/bob/devtools/templates/.flake8 b/bob/devtools/templates/.flake8
new file mode 100644
index 0000000000000000000000000000000000000000..994815d8870e9822617c4578efdce0e121988c60
--- /dev/null
+++ b/bob/devtools/templates/.flake8
@@ -0,0 +1,4 @@
+[flake8]
+max-line-length = 88
+select = B,C,E,F,W,T4,B9,B950
+ignore = E501, W503, E203
diff --git a/bob/devtools/templates/.isort.cfg b/bob/devtools/templates/.isort.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..8d7af1de97c8aea738c0f7c503d1461ad413db4f
--- /dev/null
+++ b/bob/devtools/templates/.isort.cfg
@@ -0,0 +1,4 @@
+[settings]
+line_length=88
+order_by_type=true
+lines_between_types=1
diff --git a/bob/devtools/templates/.pre-commit-config.yaml b/bob/devtools/templates/.pre-commit-config.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..08ae74117cd54402e260369ed4cbf761e09cc283
--- /dev/null
+++ b/bob/devtools/templates/.pre-commit-config.yaml
@@ -0,0 +1,40 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+  - repo: https://github.com/timothycrosley/isort
+    rev: 4.3.21-2
+    hooks:
+    - id: isort
+      args: [-sl]
+  - repo: https://github.com/psf/black
+    rev: stable
+    hooks:
+      - id: black
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v2.0.0
+    hooks:
+      - id: check-ast
+      - id: check-case-conflict
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: debug-statements
+      - id: check-added-large-files
+      - id: flake8
+  - repo: local
+    hooks:
+      - id: sphinx-build
+        name: sphinx build
+        entry: python -m sphinx.cmd.build
+        args: [-a, -E, -W, doc, sphinx]
+        language: system
+        files: ^doc/
+        types: [file]
+        pass_filenames: false
+      - id: sphinx-doctest
+        name: sphinx doctest
+        entry: python -m sphinx.cmd.build
+        args: [-a, -E, -b, doctest, doc, sphinx]
+        language: system
+        files: ^doc/
+        types: [file]
+        pass_filenames: false
diff --git a/bob/devtools/templates/doc/conf.py b/bob/devtools/templates/doc/conf.py
index a528ccaf93d9ff78c9b3a706617b1c75f1f63bf6..6ee3d7a8fa5886ea6d7686999ac9b3bcec40d5bb 100644
--- a/bob/devtools/templates/doc/conf.py
+++ b/bob/devtools/templates/doc/conf.py
@@ -2,7 +2,14 @@
 # -*- coding: utf-8 -*-
 
 import os
+import time
+
 import pkg_resources
+import sphinx_rtd_theme
+
+# For inter-documentation mapping:
+from bob.extension.utils import link_documentation
+from bob.extension.utils import load_requirements
 
 # -- General configuration -----------------------------------------------------
 
@@ -23,7 +30,7 @@ extensions = [
     "sphinx.ext.napoleon",
     "sphinx.ext.viewcode",
     "sphinx.ext.mathjax",
-    #'matplotlib.sphinxext.plot_directive'
+    # 'matplotlib.sphinxext.plot_directive',
 ]
 
 # Be picky about warnings
@@ -69,7 +76,6 @@ master_doc = "index"
 
 # General information about the project.
 project = "{{ name }}"
-import time
 
 copyright = "%s, Idiap Research Institute" % time.strftime("%Y")
 
@@ -126,9 +132,6 @@ owner = ["Idiap Research Institute"]
 
 # -- Options for HTML output ---------------------------------------------------
 
-# The theme to use for HTML and HTML Help pages.  See the documentation for
-# a list of builtin themes.
-import sphinx_rtd_theme
 
 html_theme = "sphinx_rtd_theme"
 
@@ -223,14 +226,10 @@ autodoc_member_order = "bysource"
 autodoc_default_flags = ["members", "undoc-members", "show-inheritance"]
 
 
-# For inter-documentation mapping:
-from bob.extension.utils import link_documentation, load_requirements
-
 sphinx_requirements = "extra-intersphinx.txt"
 if os.path.exists(sphinx_requirements):
     intersphinx_mapping = link_documentation(
-        additional_packages=["python", "numpy"]
-        + load_requirements(sphinx_requirements)
+        additional_packages=["python", "numpy"] + load_requirements(sphinx_requirements)
     )
 else:
     intersphinx_mapping = link_documentation()
diff --git a/bob/devtools/templates/requirements.txt b/bob/devtools/templates/requirements.txt
index 089f1abe678a25e3bdd9f06da16152d3b03da86f..bc1d8aa1142ef478a7dbad4b424a7153d0b61b3f 100644
--- a/bob/devtools/templates/requirements.txt
+++ b/bob/devtools/templates/requirements.txt
@@ -1,3 +1,3 @@
 setuptools
 numpy
-bob.extension
\ No newline at end of file
+bob.extension
diff --git a/bob/devtools/templates/setup.py b/bob/devtools/templates/setup.py
index 423e74d76c8e195175b8c9cb64112ecf3f6ad11a..ec0748cb8340c43a220f76136597c95ea08d8d3d 100644
--- a/bob/devtools/templates/setup.py
+++ b/bob/devtools/templates/setup.py
@@ -11,7 +11,9 @@ install_requires=load_requirements('requirements.txt')
 {% else %}from setuptools import setup, dist
 dist.Distribution(dict(setup_requires=['bob.extension']))
 
-from bob.extension.utils import load_requirements, find_packages
+from bob.extension.utils import find_packages
+from bob.extension.utils import load_requirements
+
 install_requires = load_requirements()
 {% endif %}
 
diff --git a/bob/devtools/webdav3/client.py b/bob/devtools/webdav3/client.py
index f42d0274e203205b218d25b8d585e8cc44cb924b..51fc57a4ee6abebd3471d87e9c2bb16cf242b100 100644
--- a/bob/devtools/webdav3/client.py
+++ b/bob/devtools/webdav3/client.py
@@ -4,17 +4,27 @@ import functools
 import os
 import shutil
 import threading
+
 from io import BytesIO
 from re import sub
 
 import lxml.etree as etree
 import requests
 
-from .connection import *
-from .exceptions import *
-from .urn import Urn
-
 from ..log import get_logger
+from .connection import ProxySettings
+from .connection import WebDAVSettings
+from .exceptions import ConnectionException
+from .exceptions import LocalResourceNotFound
+from .exceptions import MethodNotSupported
+from .exceptions import NoConnection
+from .exceptions import NotEnoughSpace
+from .exceptions import OptionNotValid
+from .exceptions import RemoteParentNotFound
+from .exceptions import RemoteResourceNotFound
+from .exceptions import ResourceTooBig
+from .exceptions import ResponseErrorCode
+from .urn import Urn
 
 logger = get_logger(__name__)
 
@@ -57,9 +67,7 @@ def get_options(option_type, from_options):
     _options = dict()
 
     for key in option_type.keys:
-        key_with_prefix = "{prefix}{key}".format(
-            prefix=option_type.prefix, key=key
-        )
+        key_with_prefix = "{prefix}{key}".format(prefix=option_type.prefix, key=key)
         if key not in from_options and key_with_prefix not in from_options:
             _options[key] = ""
         elif key in from_options:
@@ -243,12 +251,8 @@ class Client(object):
              `proxy_login`: login name for proxy server.
              `proxy_password`: password for proxy server.
         """
-        webdav_options = get_options(
-            option_type=WebDAVSettings, from_options=options
-        )
-        proxy_options = get_options(
-            option_type=ProxySettings, from_options=options
-        )
+        webdav_options = get_options(option_type=WebDAVSettings, from_options=options)
+        proxy_options = get_options(option_type=ProxySettings, from_options=options)
 
         self.webdav = WebDAVSettings(webdav_options)
         self.proxy = ProxySettings(proxy_options)
@@ -275,9 +279,7 @@ class Client(object):
             if not self.check(directory_urn.path()):
                 raise RemoteResourceNotFound(directory_urn.path())
 
-        response = self.execute_request(
-            action="list", path=directory_urn.quote()
-        )
+        response = self.execute_request(action="list", path=directory_urn.quote())
         urns = WebDavXmlUtils.parse_get_list_response(response.content)
 
         path = Urn.normalize_path(self.get_full_path(directory_urn))
@@ -332,9 +334,7 @@ class Client(object):
         if not self.check(directory_urn.parent()):
             raise RemoteParentNotFound(directory_urn.path())
 
-        response = self.execute_request(
-            action="mkdir", path=directory_urn.quote()
-        )
+        response = self.execute_request(action="mkdir", path=directory_urn.quote())
         return response.status_code in (200, 201)
 
     @wrap_connection_error
@@ -366,15 +366,11 @@ class Client(object):
         urn = Urn(remote_path)
         if self.is_dir(urn.path()):
             self.download_directory(
-                local_path=local_path,
-                remote_path=remote_path,
-                progress=progress,
+                local_path=local_path, remote_path=remote_path, progress=progress,
             )
         else:
             self.download_file(
-                local_path=local_path,
-                remote_path=remote_path,
-                progress=progress,
+                local_path=local_path, remote_path=remote_path, progress=progress,
             )
 
     def download_directory(self, remote_path, local_path, progress=None):
@@ -401,9 +397,7 @@ class Client(object):
             )
             _local_path = os.path.join(local_path, resource_name)
             self.download(
-                local_path=_local_path,
-                remote_path=_remote_path,
-                progress=progress,
+                local_path=_local_path, remote_path=_remote_path, progress=progress,
             )
 
     @wrap_connection_error
@@ -449,9 +443,12 @@ class Client(object):
         :param local_path: the path to save resource locally.
         :param callback: the callback which will be invoked when downloading is complete.
         """
-        target = lambda: self.download_sync(
-            local_path=local_path, remote_path=remote_path, callback=callback
-        )
+
+        def target():
+            return self.download_sync(
+                local_path=local_path, remote_path=remote_path, callback=callback
+            )
+
         threading.Thread(target=target).start()
 
     @wrap_connection_error
@@ -484,9 +481,7 @@ class Client(object):
         """
         if os.path.isdir(local_path):
             self.upload_directory(
-                local_path=local_path,
-                remote_path=remote_path,
-                progress=progress,
+                local_path=local_path, remote_path=remote_path, progress=progress,
             )
         else:
             self.upload_file(local_path=local_path, remote_path=remote_path)
@@ -521,9 +516,7 @@ class Client(object):
             )
             _local_path = os.path.join(local_path, resource_name)
             self.upload(
-                local_path=_local_path,
-                remote_path=_remote_path,
-                progress=progress,
+                local_path=_local_path, remote_path=_remote_path, progress=progress,
             )
 
     @wrap_connection_error
@@ -556,9 +549,7 @@ class Client(object):
                     path=local_path, size=file_size, max_size=self.large_size
                 )
 
-            self.execute_request(
-                action="upload", path=urn.quote(), data=local_file
-            )
+            self.execute_request(action="upload", path=urn.quote(), data=local_file)
 
     def upload_sync(self, remote_path, local_path, callback=None):
         """Uploads resource to remote path on WebDAV server synchronously. In
@@ -583,9 +574,12 @@ class Client(object):
         :param local_path: the path to local resource for uploading.
         :param callback: the callback which will be invoked when downloading is complete.
         """
-        target = lambda: self.upload_sync(
-            local_path=local_path, remote_path=remote_path, callback=callback
-        )
+
+        def target():
+            return self.upload_sync(
+                local_path=local_path, remote_path=remote_path, callback=callback
+            )
+
         threading.Thread(target=target).start()
 
     @wrap_connection_error
@@ -609,9 +603,7 @@ class Client(object):
             path=self.get_full_path(urn_to)
         )
         self.execute_request(
-            action="copy",
-            path=urn_from.quote(),
-            headers_ext=[header_destination],
+            action="copy", path=urn_from.quote(), headers_ext=[header_destination],
         )
 
     @wrap_connection_error
@@ -635,9 +627,7 @@ class Client(object):
         header_destination = "Destination: {path}".format(
             path=self.get_full_path(urn_to)
         )
-        header_overwrite = "Overwrite: {flag}".format(
-            flag="T" if overwrite else "F"
-        )
+        header_overwrite = "Overwrite: {flag}".format(flag="T" if overwrite else "F")
         self.execute_request(
             action="move",
             path=urn_from.quote(),
@@ -791,9 +781,7 @@ class Client(object):
             if os.path.isdir(local_path):
                 if not self.check(remote_path=remote_path):
                     self.mkdir(remote_path=remote_path)
-                self.push(
-                    remote_directory=remote_path, local_directory=local_path
-                )
+                self.push(remote_directory=remote_path, local_directory=local_path)
             else:
                 if local_resource_name in remote_resource_names:
                     continue
@@ -829,24 +817,16 @@ class Client(object):
             if self.is_dir(remote_urn.path()):
                 if not os.path.exists(local_path):
                     os.mkdir(local_path)
-                self.pull(
-                    remote_directory=remote_path, local_directory=local_path
-                )
+                self.pull(remote_directory=remote_path, local_directory=local_path)
             else:
                 if remote_resource_name in local_resource_names:
                     continue
-                self.download_file(
-                    remote_path=remote_path, local_path=local_path
-                )
+                self.download_file(remote_path=remote_path, local_path=local_path)
 
     def sync(self, remote_directory, local_directory):
 
-        self.pull(
-            remote_directory=remote_directory, local_directory=local_directory
-        )
-        self.push(
-            remote_directory=remote_directory, local_directory=local_directory
-        )
+        self.pull(remote_directory=remote_directory, local_directory=local_directory)
+        self.push(remote_directory=remote_directory, local_directory=local_directory)
 
 
 class Resource(object):
@@ -880,9 +860,7 @@ class Resource(object):
 
     def copy(self, remote_path):
         urn = Urn(remote_path)
-        self.client.copy(
-            remote_path_from=self.urn.path(), remote_path_to=remote_path
-        )
+        self.client.copy(remote_path_from=self.urn.path(), remote_path_to=remote_path)
         return Resource(self.client, urn)
 
     def info(self, params=None):
@@ -908,9 +886,7 @@ class Resource(object):
 
     def read_async(self, local_path, callback=None):
         return self.client.upload_async(
-            local_path=local_path,
-            remote_path=self.urn.path(),
-            callback=callback,
+            local_path=local_path, remote_path=self.urn.path(), callback=callback,
         )
 
     def write_to(self, buff):
@@ -923,9 +899,7 @@ class Resource(object):
 
     def write_async(self, local_path, callback=None):
         return self.client.download_async(
-            local_path=local_path,
-            remote_path=self.urn.path(),
-            callback=callback,
+            local_path=local_path, remote_path=self.urn.path(), callback=callback,
         )
 
     def publish(self):
@@ -936,9 +910,7 @@ class Resource(object):
 
     @property
     def property(self, option):
-        return self.client.get_property(
-            remote_path=self.urn.path(), option=option
-        )
+        return self.client.get_property(remote_path=self.urn.path(), option=option)
 
     @property.setter
     def property(self, option, value):
diff --git a/bob/devtools/webdav3/connection.py b/bob/devtools/webdav3/connection.py
index 989fb04b1355e0ceae02823bb99167f1b8e1dbac..da54e7d8ffe8b916654072b83cb2d055b84ed15e 100644
--- a/bob/devtools/webdav3/connection.py
+++ b/bob/devtools/webdav3/connection.py
@@ -1,6 +1,6 @@
 from os.path import exists
 
-from .exceptions import *
+from .exceptions import OptionNotValid
 from .urn import Urn
 
 
@@ -60,24 +60,16 @@ class WebDAVSettings(ConnectionSettings):
     def is_valid(self):
 
         if not self.hostname:
-            raise OptionNotValid(
-                name="hostname", value=self.hostname, ns=self.ns
-            )
+            raise OptionNotValid(name="hostname", value=self.hostname, ns=self.ns)
 
         if self.cert_path and not exists(self.cert_path):
-            raise OptionNotValid(
-                name="cert_path", value=self.cert_path, ns=self.ns
-            )
+            raise OptionNotValid(name="cert_path", value=self.cert_path, ns=self.ns)
 
         if self.key_path and not exists(self.key_path):
-            raise OptionNotValid(
-                name="key_path", value=self.key_path, ns=self.ns
-            )
+            raise OptionNotValid(name="key_path", value=self.key_path, ns=self.ns)
 
         if self.key_path and not self.cert_path:
-            raise OptionNotValid(
-                name="cert_path", value=self.cert_path, ns=self.ns
-            )
+            raise OptionNotValid(name="cert_path", value=self.cert_path, ns=self.ns)
 
         if self.password and not self.login:
             raise OptionNotValid(name="login", value=self.login, ns=self.ns)
@@ -111,6 +103,4 @@ class ProxySettings(ConnectionSettings):
 
         if self.login or self.password:
             if not self.hostname:
-                raise OptionNotValid(
-                    name="hostname", value=self.hostname, ns=self.ns
-                )
+                raise OptionNotValid(name="hostname", value=self.hostname, ns=self.ns)
diff --git a/bob/devtools/webdav3/urn.py b/bob/devtools/webdav3/urn.py
index 97c1ed68260f52c8d9f774bee5192a68f0b6bc50..d822bdb08c266ab1a17807abe9f06c6d689ecfa9 100644
--- a/bob/devtools/webdav3/urn.py
+++ b/bob/devtools/webdav3/urn.py
@@ -14,19 +14,15 @@ class Urn(object):
     def __init__(self, path, directory=False):
 
         self._path = quote(path)
-        expressions = "/\.+/", "/+"
+        expressions = r"/\.+/", "/+"
         for expression in expressions:
             self._path = sub(expression, Urn.separate, self._path)
 
         if not self._path.startswith(Urn.separate):
-            self._path = "{begin}{end}".format(
-                begin=Urn.separate, end=self._path
-            )
+            self._path = "{begin}{end}".format(begin=Urn.separate, end=self._path)
 
         if directory and not self._path.endswith(Urn.separate):
-            self._path = "{begin}{end}".format(
-                begin=self._path, end=Urn.separate
-            )
+            self._path = "{begin}{end}".format(begin=self._path, end=Urn.separate)
 
     def __str__(self):
         return self.path()
@@ -40,11 +36,7 @@ class Urn(object):
     def filename(self):
 
         path_split = self._path.split(Urn.separate)
-        name = (
-            path_split[-2] + Urn.separate
-            if path_split[-1] == ""
-            else path_split[-1]
-        )
+        name = path_split[-2] + Urn.separate if path_split[-1] == "" else path_split[-1]
         return unquote(name)
 
     def parent(self):
@@ -71,11 +63,7 @@ class Urn(object):
     @staticmethod
     def normalize_path(path):
         result = sub("/{2,}", "/", path)
-        return (
-            result
-            if len(result) < 1 or result[-1] != Urn.separate
-            else result[:-1]
-        )
+        return result if len(result) < 1 or result[-1] != Urn.separate else result[:-1]
 
     @staticmethod
     def compare_path(path_a, href):
diff --git a/doc/conf.py b/doc/conf.py
index c75b503e0d946005c04529012ca704c0a847aa63..f30bb3d2de2a32a505af985f2d297bdfa5a151d3 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -2,31 +2,31 @@
 # vim: set fileencoding=utf-8 :
 
 import os
-import sys
-import glob
-import pkg_resources
+import time
 
+import pkg_resources
+import sphinx_rtd_theme
 
 # -- General configuration -----------------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = '1.3'
+needs_sphinx = "1.3"
 
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
 extensions = [
-    'sphinx.ext.todo',
-    'sphinx.ext.coverage',
-    'sphinx.ext.ifconfig',
-    'sphinx.ext.autodoc',
-    'sphinx.ext.autosummary',
-    'sphinx.ext.doctest',
-    'sphinx.ext.graphviz',
-    'sphinx.ext.intersphinx',
-    'sphinx.ext.napoleon',
-    'sphinx.ext.viewcode',
-    'sphinx.ext.mathjax',
-    ]
+    "sphinx.ext.todo",
+    "sphinx.ext.coverage",
+    "sphinx.ext.ifconfig",
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.doctest",
+    "sphinx.ext.graphviz",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.viewcode",
+    "sphinx.ext.mathjax",
+]
 
 # Be picky about warnings
 nitpicky = False
@@ -35,13 +35,13 @@ nitpicky = False
 nitpick_ignore = []
 
 # Allows the user to override warnings from a separate file
-if os.path.exists('nitpick-exceptions.txt'):
-    for line in open('nitpick-exceptions.txt'):
+if os.path.exists("nitpick-exceptions.txt"):
+    for line in open("nitpick-exceptions.txt"):
         if line.strip() == "" or line.startswith("#"):
             continue
         dtype, target = line.split(None, 1)
         target = target.strip()
-        try: # python 2.x
+        try:  # python 2.x
             target = unicode(target)
         except NameError:
             pass
@@ -57,25 +57,25 @@ autosummary_generate = True
 numfig = True
 
 # If we are on OSX, the 'dvipng' path maybe different
-dvipng_osx = '/opt/local/libexec/texlive/binaries/dvipng'
-if os.path.exists(dvipng_osx): pngmath_dvipng = dvipng_osx
+dvipng_osx = "/opt/local/libexec/texlive/binaries/dvipng"
+if os.path.exists(dvipng_osx):
+    pngmath_dvipng = dvipng_osx
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
 
 # The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # General information about the project.
-project = u'bob.devtools'
-import time
-copyright = u'%s, Idiap Research Institute' % time.strftime('%Y')
+project = u"bob.devtools"
+copyright = u"%s, Idiap Research Institute" % time.strftime("%Y")
 
 # Grab the setup entry
 distribution = pkg_resources.require(project)[0]
@@ -91,123 +91,120 @@ release = distribution.version
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
-#language = None
+# language = None
 
 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-#today = ''
+# today = ''
 # Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ['links.rst']
+exclude_patterns = ["links.rst"]
 
 # The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
+# default_role = None
 
 # If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True
 
 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True
 
 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-#show_authors = False
+# show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []
 
 # Some variables which are useful for generated material
-project_variable = project.replace('.', '_')
-short_description = u'Tools for development and CI integration of Bob packages'
-owner = [u'Idiap Research Institute']
+project_variable = project.replace(".", "_")
+short_description = u"Tools for development and CI integration of Bob packages"
+owner = [u"Idiap Research Institute"]
 
 
 # -- Options for HTML output ---------------------------------------------------
 
-# The theme to use for HTML and HTML Help pages.  See the documentation for
-# a list of builtin themes.
-import sphinx_rtd_theme
-html_theme = 'sphinx_rtd_theme'
+html_theme = "sphinx_rtd_theme"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further.  For a list of options available for each theme, see the
 # documentation.
-#html_theme_options = {}
+# html_theme_options = {}
 
 # Add any paths that contain custom themes here, relative to this directory.
 html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
 
 # The name for this set of Sphinx documents.  If None, it defaults to
 # "<project> v<release> documentation".
-#html_title = None
+# html_title = None
 
 # A shorter title for the navigation bar.  Default is the same as html_title.
-#html_short_title = project_variable
+# html_short_title = project_variable
 
 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-html_logo = 'img/logo.png'
+html_logo = "img/logo.png"
 
 # The name of an image file (within the static path) to use as favicon of the
 # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-html_favicon = 'img/favicon.ico'
+html_favicon = "img/favicon.ico"
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-#html_static_path = ['_static']
+# html_static_path = ['_static']
 
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'
 
 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True
 
 # Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}
 
 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-#html_additional_pages = {}
+# html_additional_pages = {}
 
 # If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True
 
 # If false, no index is generated.
-#html_use_index = True
+# html_use_index = True
 
 # If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False
 
 # If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True
 
 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True
 
 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True
 
 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it.  The value of this option must be the
 # base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''
 
 # This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = project_variable + u'_doc'
+htmlhelp_basename = project_variable + u"_doc"
 
 
 # -- Post configuration --------------------------------------------------------
@@ -217,17 +214,19 @@ rst_epilog = """
 .. |project| replace:: Bob
 .. |version| replace:: %s
 .. |current-year| date:: %%Y
-""" % (version,)
+""" % (
+    version,
+)
 
 # Default processing flags for sphinx
-autoclass_content = 'class'
-autodoc_member_order = 'bysource'
+autoclass_content = "class"
+autodoc_member_order = "bysource"
 autodoc_default_flags = [
-  'members',
-  'undoc-members',
-  'show-inheritance',
-  ]
+    "members",
+    "undoc-members",
+    "show-inheritance",
+]
 
 # Adds simplejson, pyzmq links
-#intersphinx_mapping['http://simplejson.readthedocs.io/en/stable/'] = None
-#intersphinx_mapping['http://pyzmq.readthedocs.io/en/stable/'] = None
+# intersphinx_mapping['http://simplejson.readthedocs.io/en/stable/'] = None
+# intersphinx_mapping['http://pyzmq.readthedocs.io/en/stable/'] = None
diff --git a/doc/development.rst b/doc/development.rst
index b131739aa590a51f0aee4bf95415fd649ea0ac1b..d2c8f4c3184114df4e5fe1c36347c1790a5cd32e 100644
--- a/doc/development.rst
+++ b/doc/development.rst
@@ -6,13 +6,13 @@
 
 Very often, developers are confronted with the need to
 clone package repositories locally and develop installation/build and runtime code.
-It is recommended to create isolated environments to develop new projects using conda_ and zc.buildout_. 
-Tools implemented in `bob.devtools` helps automate this process for |project| packages. In the following we talk about how to checkout and build one or several packages from their git_ source and build proper isolated environments to develop them. Then we will describe how to create a new bob package from scratch and develop existing bob packages along side it. 
+It is recommended to create isolated environments to develop new projects using conda_ and zc.buildout_.
+Tools implemented in ``bob.devtools`` help automate this process for |project| packages. In the following we talk about how to check out and build one or several packages from their git_ source and build proper isolated environments to develop them. Then we will describe how to create a new bob package from scratch and develop existing bob packages alongside it.
 
 TLDR
 ====
 
-Suppose you want to checkout the package ``bob.blitz`` from source and start developing it locally. We will use the tools implemented in ``bob.devtools`` to create a proper developing environment to build and develop ``bob.blitz``. We assume you have ``bob.devtools`` installed on a conda environment named ``bdt`` (Refer to :ref:`bob.devtools.install` for detailed information.) 
+Suppose you want to check out the package ``bob.blitz`` from source and start developing it locally. We will use the tools implemented in ``bob.devtools`` to create a proper development environment to build and develop ``bob.blitz``. We assume you have ``bob.devtools`` installed in a conda environment named ``bdt`` (Refer to :ref:`bob.devtools.install` for detailed information.)
 
 * Checkout the source of the package from git:
 
@@ -66,13 +66,22 @@ for example:
    $ ./bin/nosetests -sv
 
 .. note::
-    
+
     Sometimes when you are calling a function not interactively it is not acting normally. In that case import ``pkg_resources`` before importing your package. It is a known issue and we are working on it.
 
     .. code-block:: sh
 
         $ ./bin/python -c "import pkg_resources; import bob.blitz; print(bob.blitz)"
 
+* Some packages may come with a pre-commit_ config file (``.pre-commit-config.yaml``).
+  Make sure to install pre-commit if the config file exists:
+
+.. code-block:: sh
+
+   $ # check if the configuration file exists:
+   $ ls .pre-commit-config.yaml
+   $ pip install pre-commit
+   $ pre-commit install
 
 .. bob.devtools.local_development:
 
@@ -95,7 +104,7 @@ in |project|'s gitlab_ instance. In the following we assume you want to install
 Create an isolated conda environment
 ------------------------------------
 
-Now that we have the package checked out we need an isolated environment with proper configuration to develop the package. ``bob.devtools`` provides a tool that automatically creates such environment. 
+Now that we have the package checked out we need an isolated environment with proper configuration to develop the package. ``bob.devtools`` provides a tool that automatically creates such environment.
 Before proceeding, you need to make sure that you already have a conda_ environment with ``bob.devtools`` installed in it (Refer to :ref:`bob.devtools.install` for more information). let's assume that you have a conda environment named ``bdt`` with installed ``bob.devtools``.
 
 .. code-block:: sh
@@ -142,7 +151,7 @@ Running buildout
 ----------------
 
 The last step is to create a hooked-up environment so you can quickly test
-local changes to your package w/o necessarily creating a conda-package. 
+local changes to your package w/o necessarily creating a conda-package.
 zc.buildout_ takes care of it by modifying the load paths of scripts to find the correct
 version of your package sources from the local checkout. It by default uses a file named `buildout.cfg`, in the package directory. For our example package it looks like:
 
@@ -192,7 +201,7 @@ or build the documentation:
 
 .. note::
 
+    `buildout` by default uses the file `buildout.cfg`, but you can specify another file by using the -c option. In fact, for developing packages, especially if they need to be developed along with other packages, another file, namely `develop.cfg`, is used as follows:
+    `buildout` by default uses the file `buildout.cfg` but you can specify another file by using -c option. In fact for developing packages especially if they need to be developed along with other packages, another file, namely `develop.cfg` is used like following:
 
     .. code-block:: sh
 
@@ -225,7 +234,7 @@ And you can install new packages using conda:
 
        $ cd <package>
        $ conda activate bdt
-       $ bdt local build   
+       $ bdt local build
 
 
 One important advantage of using conda_ and zc.buildout_ is that it does
@@ -270,8 +279,8 @@ It so happens that you want to develop several packages against each other for y
 Now you can run `buildout` as usual. The ``bob.extension`` will be checked out on `src` folder on the root of your project.
 
 .. note::
-  
-  The flag `debug = true` is usually used when in development mode. 
+
+  The flag `debug = true` is usually used when in development mode.
 
 
 .. _bob.devtools.create_package:
@@ -282,11 +291,11 @@ Local development of a new package
 In this section we explain how to create a new bob package from scratch and start developing it. Once again ``bob.devtools`` is here to help you. You need to activate your conda environment with ``bob.devtools`` installed in it.
 
 .. code-block:: sh
-    
+
     $ conda activate bdt
     $ bdt new -vv bob/bob.project.awesome author_name author_email
 
-This command will create a new bob package named "bob.project.awesome" that includes the correct anatomy of a package. For more information about the functionality of each file check :ref:`bob.devtools.anatomy`.  
+This command will create a new bob package named "bob.project.awesome" that includes the correct anatomy of a package. For more information about the functionality of each file check :ref:`bob.devtools.anatomy`.
 
 In the root of your project there is a file `buildout.cfg` used by `buildout` to build your package locally. It should look like:
 
@@ -308,7 +317,7 @@ In the root of your project there is a file `buildout.cfg` used by `buildout` to
 Now you have all the necessary tools available and you can make a development environment using `bdt create` command, run `buildout` in it and start developing your package.
 
 .. code-block:: sh
-    
+
     $ cd bob.project.awesome
     $ conda activate bdt
     $ bdt create --stable -vv awesome-project  #here we used the stable channels to make the conda environment.
@@ -319,7 +328,7 @@ Now you have all the necessary tools available and you can make a development en
 Developing existing bob packages along with your new package
 ------------------------------------------------------------
 
-Let's assume you need to develop two packages, ``bob.extension`` and ``bob.blitz``, as part of developing your new ``bob.project.awesome`` package. 
+Let's assume you need to develop two packages, ``bob.extension`` and ``bob.blitz``, as part of developing your new ``bob.project.awesome`` package.
 
 You need to add these packages to the ``buildout.cfg`` file in the newly created folder.
 
@@ -358,18 +367,18 @@ You need to add these packages to the ``buildout.cfg`` file in the newly created
 When you build your new package the dependent packages (in this example ``bob.extension`` and ``bob.blitz``) will be checked out on folder `src` in the root of your project.
 
 As usual, first create an isolated conda environment using `bdt create` command. Some of bob packages need dependencies that might not be installed on your environment. You can find these dependencies by checking `conda/meta.yaml` of each package. Install the required packages and then run buildout as usual. For our example you need to do the following:
- 
+
 .. code-block:: sh
-    
+
     $ conda install gcc_linux-64 gxx_linux-64 libblitz
     $ buildout
 
 .. note::
 
-    Sometimes you may need some of bob packages available in your local `bin` directory without necessarily developing them. 
+    Sometimes you may need some of bob packages available in your local `bin` directory without necessarily developing them.
 
     If you knew beforehand what are those packages, you can add them to "requirements/host" section of the `conda/meta.yaml` file and then create a conda environment using `bdt create`. Like this, those packages will be installed automatically. Otherwise, if you already have your conda environment, install them using `conda install` command.
-    
+
     When done, add those packages to the `eggs` section in your `buildout.cfg` file and then run `buildout`.
 
 
diff --git a/doc/links.rst b/doc/links.rst
index b933ddf12a519e0c2539c240f1747f227c894ae6..f498e729b8fb76e4147ce44f9e64aaec7b54b742 100644
--- a/doc/links.rst
+++ b/doc/links.rst
@@ -20,3 +20,4 @@
 .. _mr.developer: http://pypi.python.org/pypi/mr.developer/
 .. _conda channel: https://www.idiap.ch/software/bob/conda/
 .. _webdav configuration: https://gitlab.idiap.ch/bob/private/wikis/how-to-upload-resources
+.. _pre-commit: https://pre-commit.com/
diff --git a/doc/linux.rst b/doc/linux.rst
index 6cfb281c3c28517ffd38ecbfcbfae6608a41f85b..598100d0133d43d0ae3013421bcbf8edff516b7c 100644
--- a/doc/linux.rst
+++ b/doc/linux.rst
@@ -37,7 +37,7 @@ change ``/etc/default/grub`` to contain the line
 ``update-grub`` after such change.
 
 To install docker at Idiap, you also need to follow the security guidelines
-from Cédric at https://secure.idiap.ch/intranet/system/software/docker.  If you
+from the Idiap system administrators.  If you
 do not follow such guidelines, the machine will not be acessible from outside
 via the login gateway, as the default docker installation conflicts with
 Idiap's internal setup.  You may also find other network connectivity issues.
diff --git a/doc/macos-ci-install/datetime.sh b/doc/macos-ci-install/datetime.sh
index 03ca91db809b7c0b3e37182333a6181a41cd3c14..969517fadb9f1c5ec1a6a23f3011a94f5e87df07 100755
--- a/doc/macos-ci-install/datetime.sh
+++ b/doc/macos-ci-install/datetime.sh
@@ -10,4 +10,3 @@ set -eox pipefail
 systemsetup -setusingnetworktime on
 systemsetup -settimezone Europe/Zurich
 systemsetup -setnetworktimeserver time.euro.apple.com.
-
diff --git a/doc/templates.rst b/doc/templates.rst
index 8574dc47b68edaca807e973461551abeaf6feb2d..5ad5da2f726fc2eb034de5068b617ad0b45d1a92 100644
--- a/doc/templates.rst
+++ b/doc/templates.rst
@@ -134,7 +134,7 @@ recipe for that package.
 
 .. note::
 
-    For more detailed instructions on how to distribute your packages at Idiap, please see the 
+    For more detailed instructions on how to distribute your packages at Idiap, please see the
     guidelines on `Publishing Reproducible Papers at Idiap <https://gitlab.idiap.ch/bob/bob/wikis/Publishing-Reproducible-Papers-at-Idiap>`_.
 
 
@@ -143,7 +143,7 @@ recipe for that package.
 buildout.cfg in more details
 ============================
 This section briefly explains the different entries in ``buildout.cfg`` file. For better understanding of buildout refer to its
-`documentation <http://www.buildout.org>`_ 
+`documentation <http://www.buildout.org>`_
 
 
 To be able to develop a package, we first need to build and install it locally.
@@ -164,7 +164,7 @@ you do in the source. zc.buildout_ allows you to exactly do that.
 
 zc.buildout_ provides a ``buildout`` command. ``buildout`` takes as input a
 "recipe" that explains how to build a local working environment. The recipe, by
-default, is stored in a file called ``buildout.cfg``. 
+default, is stored in a file called ``buildout.cfg``.
 
 
 .. important::
diff --git a/setup.py b/setup.py
index f4cb86eb71c40be0478188dc21502ddd2930b724..f22e6f62aecfc1b93cafa991570d2864c70a7532 100644
--- a/setup.py
+++ b/setup.py
@@ -1,116 +1,107 @@
 #!/usr/bin/env python
 
-from setuptools import setup, find_packages
+from setuptools import find_packages
+from setuptools import setup
 
 # Define package version
 version = open("version.txt").read().rstrip()
 
 requires = [
-    'setuptools',
-    'click>=7',
-    'click-plugins',
-    'certifi',
-    'requests',
-    'gitpython',
-    'python-gitlab',
-    'sphinx',
-    'pyyaml>=5.1',
-    'twine',
-    'lxml',
-    'jinja2',
-    'termcolor',
-    'psutil',
-    ]
+    "setuptools",
+    "click>=7",
+    "click-plugins",
+    "certifi",
+    "requests",
+    "gitpython",
+    "python-gitlab",
+    "sphinx",
+    "pyyaml>=5.1",
+    "twine",
+    "lxml",
+    "jinja2",
+    "termcolor",
+    "psutil",
+]
 
 setup(
     name="bob.devtools",
     version=version,
     description="Tools for development and CI integration of Bob/BEAT packages",
-    url='http://gitlab.idiap.ch/bob/bob.devtools',
+    url="http://gitlab.idiap.ch/bob/bob.devtools",
     license="BSD",
-    author='Bob/BEAT Developers',
-    author_email='bob-devel@googlegroups.com,beat-devel@googlegroups.com',
-    long_description=open('README.rst').read(),
-
+    author="Bob/BEAT Developers",
+    author_email="bob-devel@googlegroups.com,beat-devel@googlegroups.com",
+    long_description=open("README.rst").read(),
     packages=find_packages(),
     include_package_data=True,
     zip_safe=False,
-
     # when updating these dependencies, update the README too
     install_requires=requires,
-
     entry_points={
-        'console_scripts': [
-            'bdt = bob.devtools.scripts.bdt:main',
+        "console_scripts": ["bdt = bob.devtools.scripts.bdt:main"],
+        "bdt.cli": [
+            "new = bob.devtools.scripts.new:new",
+            "dumpsphinx = bob.devtools.scripts.dumpsphinx:dumpsphinx",
+            "create = bob.devtools.scripts.create:create",
+            "build = bob.devtools.scripts.build:build",
+            "mirror = bob.devtools.scripts.mirror:mirror",
+            "rebuild = bob.devtools.scripts.rebuild:rebuild",
+            "test = bob.devtools.scripts.test:test",
+            "caupdate = bob.devtools.scripts.caupdate:caupdate",
+            "ci = bob.devtools.scripts.ci:ci",
+            "dav = bob.devtools.scripts.dav:dav",
+            "local = bob.devtools.scripts.local:local",
+            "gitlab = bob.devtools.scripts.gitlab:gitlab",
+        ],
+        "bdt.gitlab.cli": [
+            "badges = bob.devtools.scripts.badges:badges",
+            "commitfile = bob.devtools.scripts.commitfile:commitfile",
+            "release = bob.devtools.scripts.release:release",
+            "changelog = bob.devtools.scripts.changelog:changelog",
+            "lasttag = bob.devtools.scripts.lasttag:lasttag",
+            "runners = bob.devtools.scripts.runners:runners",
+            "jobs = bob.devtools.scripts.jobs:jobs",
+            "visibility = bob.devtools.scripts.visibility:visibility",
+            "getpath = bob.devtools.scripts.getpath:getpath",
+            "process-pipelines = bob.devtools.scripts.pipelines:process_pipelines",
+            "get-pipelines = bob.devtools.scripts.pipelines:get_pipelines",
+            "graph = bob.devtools.scripts.graph:graph",
+            "update-bob = bob.devtools.scripts.update_bob:update_bob",
+        ],
+        "bdt.ci.cli": [
+            "base-build = bob.devtools.scripts.ci:base_build",
+            "build = bob.devtools.scripts.ci:build",
+            "test = bob.devtools.scripts.ci:test",
+            "clean = bob.devtools.scripts.ci:clean",
+            "base-deploy = bob.devtools.scripts.ci:base_deploy",
+            "deploy = bob.devtools.scripts.ci:deploy",
+            "readme = bob.devtools.scripts.ci:readme",
+            "pypi = bob.devtools.scripts.ci:pypi",
+            "nightlies = bob.devtools.scripts.ci:nightlies",
+            "docs = bob.devtools.scripts.ci:docs",
+            "clean-betas = bob.devtools.scripts.ci:clean_betas",
+        ],
+        "bdt.local.cli": [
+            "docs = bob.devtools.scripts.local:docs",
+            "build = bob.devtools.scripts.local:build",
+            "base-build = bob.devtools.scripts.local:base_build",
+        ],
+        "bdt.dav.cli": [
+            "list = bob.devtools.scripts.dav:list",
+            "makedirs = bob.devtools.scripts.dav:makedirs",
+            "rmtree = bob.devtools.scripts.dav:rmtree",
+            "upload = bob.devtools.scripts.dav:upload",
+            "clean-betas = bob.devtools.scripts.dav:clean_betas",
         ],
-        'bdt.cli': [
-          'new = bob.devtools.scripts.new:new',
-          'dumpsphinx = bob.devtools.scripts.dumpsphinx:dumpsphinx',
-          'create = bob.devtools.scripts.create:create',
-          'build = bob.devtools.scripts.build:build',
-          'mirror = bob.devtools.scripts.mirror:mirror',
-          'rebuild = bob.devtools.scripts.rebuild:rebuild',
-          'test = bob.devtools.scripts.test:test',
-          'caupdate = bob.devtools.scripts.caupdate:caupdate',
-          'ci = bob.devtools.scripts.ci:ci',
-          'dav = bob.devtools.scripts.dav:dav',
-          'local = bob.devtools.scripts.local:local',
-          'gitlab = bob.devtools.scripts.gitlab:gitlab',
-          ],
-
-        'bdt.gitlab.cli': [
-          'badges = bob.devtools.scripts.badges:badges',
-          'commitfile = bob.devtools.scripts.commitfile:commitfile',
-          'release = bob.devtools.scripts.release:release',
-          'changelog = bob.devtools.scripts.changelog:changelog',
-          'lasttag = bob.devtools.scripts.lasttag:lasttag',
-          'runners = bob.devtools.scripts.runners:runners',
-          'jobs = bob.devtools.scripts.jobs:jobs',
-          'visibility = bob.devtools.scripts.visibility:visibility',
-          'getpath = bob.devtools.scripts.getpath:getpath',
-          'process-pipelines = bob.devtools.scripts.pipelines:process_pipelines',
-          'get-pipelines = bob.devtools.scripts.pipelines:get_pipelines',
-          'graph = bob.devtools.scripts.graph:graph',
-          'update-bob = bob.devtools.scripts.update_bob:update_bob',
-          ],
-
-        'bdt.ci.cli': [
-          'base-build = bob.devtools.scripts.ci:base_build',
-          'build = bob.devtools.scripts.ci:build',
-          'test = bob.devtools.scripts.ci:test',
-          'clean = bob.devtools.scripts.ci:clean',
-          'base-deploy = bob.devtools.scripts.ci:base_deploy',
-          'deploy = bob.devtools.scripts.ci:deploy',
-          'readme = bob.devtools.scripts.ci:readme',
-          'pypi = bob.devtools.scripts.ci:pypi',
-          'nightlies = bob.devtools.scripts.ci:nightlies',
-          'docs = bob.devtools.scripts.ci:docs',
-          'clean-betas = bob.devtools.scripts.ci:clean_betas',
-          ],
-
-        'bdt.local.cli': [
-          'docs = bob.devtools.scripts.local:docs',
-          'build = bob.devtools.scripts.local:build',
-          'base-build = bob.devtools.scripts.local:base_build',
-          ],
-
-        'bdt.dav.cli': [
-          'list = bob.devtools.scripts.dav:list',
-          'makedirs = bob.devtools.scripts.dav:makedirs',
-          'rmtree = bob.devtools.scripts.dav:rmtree',
-          'upload = bob.devtools.scripts.dav:upload',
-          'clean-betas = bob.devtools.scripts.dav:clean_betas',
-          ],
-
     },
     classifiers=[
-        'Framework :: Bob',
-        'Development Status :: 4 - Beta',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: BSD License',
-        'Natural Language :: English',
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 3',
-        'Topic :: Software Development :: Build Tools',
+        "Framework :: Bob",
+        "Development Status :: 4 - Beta",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: BSD License",
+        "Natural Language :: English",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3",
+        "Topic :: Software Development :: Build Tools",
     ],
 )
diff --git a/version.txt b/version.txt
index e26bbe641ec0c484704e3e0aabb237de6fbfb2a0..6b7e67bf562bf2a719e2ff5ef6bf69d65a1ae15d 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-3.1.5b0
\ No newline at end of file
+3.1.5b0