From f9ede9b80ebef92b2296b8113bad21558d8c72a8 Mon Sep 17 00:00:00 2001
From: Andre Anjos <andre.dos.anjos@gmail.com>
Date: Thu, 5 Aug 2021 14:32:11 +0200
Subject: [PATCH] [pre-commit] re-setup for 80 columns; isort needs an explicit
 settings path pointing to pyproject.toml

---
 .pre-commit-config.yaml                       |  8 +--
 bob/devtools/bootstrap.py                     | 29 ++++++---
 bob/devtools/build.py                         | 59 ++++++++++++-----
 bob/devtools/changelog.py                     | 20 ++++--
 bob/devtools/ci.py                            | 16 +++--
 bob/devtools/constants.py                     |  8 ++-
 bob/devtools/dav.py                           |  4 +-
 bob/devtools/deploy.py                        | 16 +++--
 bob/devtools/graph.py                         | 20 ++++--
 bob/devtools/log.py                           |  4 +-
 bob/devtools/mirror.py                        | 11 +++-
 bob/devtools/pipelines.py                     |  3 +-
 bob/devtools/release.py                       | 32 +++++++---
 bob/devtools/scripts/badges.py                |  8 ++-
 bob/devtools/scripts/build.py                 | 16 +++--
 bob/devtools/scripts/changelog.py             | 12 +++-
 bob/devtools/scripts/commitfile.py            | 14 +++-
 bob/devtools/scripts/create.py                | 18 ++++--
 bob/devtools/scripts/dav.py                   | 18 +++++-
 bob/devtools/scripts/graph.py                 |  4 +-
 bob/devtools/scripts/jobs.py                  |  8 ++-
 bob/devtools/scripts/lasttag.py               |  7 +-
 bob/devtools/scripts/local.py                 |  8 ++-
 bob/devtools/scripts/mirror.py                |  8 ++-
 bob/devtools/scripts/pipelines.py             | 11 +++-
 bob/devtools/scripts/rebuild.py               | 16 +++--
 bob/devtools/scripts/release.py               |  8 ++-
 bob/devtools/scripts/runners.py               |  8 ++-
 bob/devtools/scripts/settings.py              |  6 +-
 bob/devtools/scripts/update_bob.py            | 25 ++++++--
 bob/devtools/scripts/visibility.py            |  3 +-
 .../templates/.pre-commit-config.yaml         |  1 +
 bob/devtools/templates/doc/conf.py            |  3 +-
 bob/devtools/webdav3/client.py                | 64 ++++++++++++++-----
 bob/devtools/webdav3/connection.py            | 20 ++++--
 bob/devtools/webdav3/urn.py                   | 20 ++++--
 deps/repodata-patches/gen_patch_json.py       | 14 +++-
 deps/repodata-patches/show_diff.py            | 28 ++++++--
 38 files changed, 433 insertions(+), 145 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1741c9ff..2357f8b3 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,11 +5,7 @@ repos:
     rev: 5.8.0
     hooks:
       - id: isort
-        args: [
-          --profile, "black",
-          --order-by-type,
-          --lines-between-types, "1"
-        ]
+        args: [--settings-path, "pyproject.toml"]
   - repo: https://github.com/psf/black
     rev: 21.7b0
     hooks:
@@ -19,7 +15,7 @@ repos:
     rev: 3.9.2
     hooks:
       - id: flake8
-        exclude: bob/devtools/templates/setup\.py
+        exclude: bob/devtools/templates/setup.py
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.0.1
     hooks:
diff --git a/bob/devtools/bootstrap.py b/bob/devtools/bootstrap.py
index a09f5db6..c53168a8 100644
--- a/bob/devtools/bootstrap.py
+++ b/bob/devtools/bootstrap.py
@@ -124,7 +124,8 @@ def run_cmdline(cmd, env=None, **kwargs):
 
     if p.wait() != 0:
         raise RuntimeError(
-            "command `%s' exited with error state (%d)" % (" ".join(cmd), p.returncode)
+            "command `%s' exited with error state (%d)"
+            % (" ".join(cmd), p.returncode)
         )
 
     total = time.time() - start
@@ -162,7 +163,9 @@ def merge_conda_cache(cache, prefix, name):
     cached_packages = glob.glob(os.path.join(cached_pkgs_dir, "*.tar.bz2"))
     cached_packages.extend(glob.glob(os.path.join(cached_pkgs_dir, "*.conda")))
 
-    cached_packages = [k for k in cached_packages if not k.startswith(name + "-")]
+    cached_packages = [
+        k for k in cached_packages if not k.startswith(name + "-")
+    ]
     logger.info("Merging %d cached conda packages", len(cached_packages))
     for k in cached_packages:
         dst = os.path.join(pkgs_dir, os.path.basename(k))
@@ -179,7 +182,9 @@ def merge_conda_cache(cache, prefix, name):
             data = sorted(list(data))
     else:
         # use both cached and actual conda package caches
-        with open(pkgs_urls_txt, "rb") as f1, open(cached_pkgs_urls_txt, "rb") as f2:
+        with open(pkgs_urls_txt, "rb") as f1, open(
+            cached_pkgs_urls_txt, "rb"
+        ) as f2:
             data = set(f1.readlines() + f2.readlines())
             data = sorted(list(data))
 
@@ -251,7 +256,9 @@ def ensure_miniconda_sh():
     )
 
     dst = "miniconda.sh"
-    logger.info("(download) http://%s:%d%s -> %s...", server[0], server[1], path, dst)
+    logger.info(
+        "(download) http://%s:%d%s -> %s...", server[0], server[1], path, dst
+    )
     with open(dst, "wb") as f:
         f.write(r1.read())
 
@@ -283,7 +290,9 @@ def install_miniconda(prefix, name):
         shutil.rmtree(cached)
 
 
-def get_channels(public, stable, server, intranet, group, add_dependent_channels=False):
+def get_channels(
+    public, stable, server, intranet, group, add_dependent_channels=False
+):
     """Returns the relevant conda channels to consider if building project.
 
     The subset of channels to be returned depends on the visibility and
@@ -321,7 +330,8 @@ def get_channels(public, stable, server, intranet, group, add_dependent_channels
     if (not public) and (not intranet):
         raise RuntimeError(
             "You cannot request for private channels and set"
-            " intranet=False (server=%s) - these are conflicting options" % server
+            " intranet=False (server=%s) - these are conflicting options"
+            % server
         )
 
     channels = []
@@ -430,7 +440,8 @@ if __name__ == "__main__":
         default=os.environ.get(
             "CONDA_ROOT", os.path.realpath(os.path.join(os.curdir, "miniconda"))
         ),
-        help="The location where we should install miniconda " "[default: %(default)s]",
+        help="The location where we should install miniconda "
+        "[default: %(default)s]",
     )
     parser.add_argument(
         "-t",
@@ -557,7 +568,9 @@ if __name__ == "__main__":
             add_dependent_channels=True,
         )
 
-        channels = ["--override-channels"] + ["--channel=%s" % k for k in channels]
+        channels = ["--override-channels"] + [
+            "--channel=%s" % k for k in channels
+        ]
         conda_cmd = "install" if args.envname in ("base", "root") else "create"
         cmd = (
             [conda_bin, conda_cmd, "--yes"]
diff --git a/bob/devtools/build.py b/bob/devtools/build.py
index 52fd2d79..afd014d4 100644
--- a/bob/devtools/build.py
+++ b/bob/devtools/build.py
@@ -102,7 +102,9 @@ def next_build_number(channel_url, basename):
     from conda.exports import fetch_index
 
     # get the channel index
-    channel_urls = calculate_channel_urls([channel_url], prepend=False, use_local=False)
+    channel_urls = calculate_channel_urls(
+        [channel_url], prepend=False, use_local=False
+    )
     logger.debug("Downloading channel index from %s", channel_urls)
     index = fetch_index(channel_urls=channel_urls)
 
@@ -113,7 +115,8 @@ def next_build_number(channel_url, basename):
         name, version, build = basename[:-6].rsplit("-", 2)
     else:
         raise RuntimeError(
-            "Package name %s does not end in either " ".tar.bz2 or .conda" % (basename,)
+            "Package name %s does not end in either "
+            ".tar.bz2 or .conda" % (basename,)
         )
 
     # remove the build number as we're looking for the next value
@@ -201,7 +204,9 @@ def make_conda_config(config, python, append_file, condarc_options):
         #    appropriate platform-specific subdir (e.g. win-64)
         if os.path.isdir(url):
             if not os.path.isabs(url):
-                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
+                url = os.path.normpath(
+                    os.path.abspath(os.path.join(os.getcwd(), url))
+                )
             with root_logger_protection():
                 url = url_path(url)
         retval.channel_urls.append(url)
@@ -258,7 +263,8 @@ def exists_on_channel(channel_url, basename):
             name, version, build = name[:-8].rsplit("-", 2)
         else:
             raise RuntimeError(
-                "Package name %s does not end in either " ".tar.bz2 or .conda" % (name,)
+                "Package name %s does not end in either "
+                ".tar.bz2 or .conda" % (name,)
             )
 
         # remove the build number as we're looking for the next value
@@ -317,7 +323,9 @@ def parse_dependencies(recipe_dir, config):
     recipe = get_parsed_recipe(metadata)
     requirements = []
     for section in ("build", "host"):
-        requirements += remove_pins(recipe.get("requirements", {}).get(section, []))
+        requirements += remove_pins(
+            recipe.get("requirements", {}).get(section, [])
+        )
     # we don't remove pins for the rest of the recipe
     requirements += recipe.get("requirements", {}).get("run", [])
     requirements += recipe.get("test", {}).get("requires", [])
@@ -470,7 +478,8 @@ def get_docserver_setup(public, stable, server, intranet, group):
     if (not public) and (not intranet):
         raise RuntimeError(
             "You cannot request for private channels and set"
-            " intranet=False (server=%s) - these are conflicting options" % server
+            " intranet=False (server=%s) - these are conflicting options"
+            % server
         )
 
     entries = []
@@ -494,7 +503,9 @@ def get_docserver_setup(public, stable, server, intranet, group):
                 server + prefix + "/docs/" + group + "/%(name)s/stable/",
             ]
         else:
-            entries += [server + prefix + "/docs/" + group + "/%(name)s/master/"]
+            entries += [
+                server + prefix + "/docs/" + group + "/%(name)s/master/"
+            ]
 
     return "|".join(entries)
 
@@ -531,7 +542,8 @@ def check_version(workdir, envtag):
                 '"version.txt" indicates version is a '
                 'pre-release (v%s) - but environment provided tag "%s", '
                 "which indicates this is a **stable** build. "
-                "Have you created the tag using ``bdt release``?" % (version, envtag)
+                "Have you created the tag using ``bdt release``?"
+                % (version, envtag)
             )
     else:  # it is a stable build
         if envtag is None:
@@ -584,7 +596,10 @@ def git_clean_build(runner, verbose):
     if not verbose:
         flags += "q"
 
-    runner(["git", "clean", flags] + ["--exclude=%s" % k for k in exclude_from_cleanup])
+    runner(
+        ["git", "clean", flags]
+        + ["--exclude=%s" % k for k in exclude_from_cleanup]
+    )
 
 
 def base_build(
@@ -646,18 +661,24 @@ def base_build(
         "\n  - ".join(condarc_options["channels"]),
     )
     logger.info("Merging conda configuration files...")
-    conda_config = make_conda_config(conda_build_config, None, None, condarc_options)
+    conda_config = make_conda_config(
+        conda_build_config, None, None, condarc_options
+    )
 
     metadata = get_rendered_metadata(recipe_dir, conda_config)
     arch = conda_arch()
 
     # checks we should actually build this recipe
     if should_skip_build(metadata):
-        logger.warn('Skipping UNSUPPORTED build of "%s" on %s', recipe_dir, arch)
+        logger.warn(
+            'Skipping UNSUPPORTED build of "%s" on %s', recipe_dir, arch
+        )
         return
 
     paths = get_output_path(metadata, conda_config)
-    urls = [exists_on_channel(upload_channel, os.path.basename(k)) for k in paths]
+    urls = [
+        exists_on_channel(upload_channel, os.path.basename(k)) for k in paths
+    ]
 
     if all(urls):
         logger.info(
@@ -686,7 +707,9 @@ if __name__ == "__main__":
 
     import argparse
 
-    parser = argparse.ArgumentParser(description="Builds bob.devtools on the CI")
+    parser = argparse.ArgumentParser(
+        description="Builds bob.devtools on the CI"
+    )
     parser.add_argument(
         "-g",
         "--group",
@@ -705,7 +728,8 @@ if __name__ == "__main__":
         default=os.environ.get(
             "CONDA_ROOT", os.path.realpath(os.path.join(os.curdir, "miniconda"))
         ),
-        help="The location where we should install miniconda " "[default: %(default)s]",
+        help="The location where we should install miniconda "
+        "[default: %(default)s]",
     )
     parser.add_argument(
         "-V",
@@ -785,7 +809,9 @@ if __name__ == "__main__":
     bootstrap.set_environment("BOB_PACKAGE_VERSION", version)
 
     # create the build configuration
-    conda_build_config = os.path.join(args.work_dir, "conda", "conda_build_config.yaml")
+    conda_build_config = os.path.join(
+        args.work_dir, "conda", "conda_build_config.yaml"
+    )
     recipe_append = os.path.join(args.work_dir, "data", "recipe_append.yaml")
 
     condarc = os.path.join(args.conda_root, "condarc")
@@ -849,7 +875,8 @@ if __name__ == "__main__":
             "typically means this build is running on a shared builder and "
             "the file ~/.conda/environments.txt is polluted with other "
             "environment paths.  To fix, empty that file and set its mode "
-            "to read-only for all." % (path, os.path.join(args.conda_root, "conda-bld"))
+            "to read-only for all."
+            % (path, os.path.join(args.conda_root, "conda-bld"))
         )
 
     # retrieve the current build number(s) for this build
diff --git a/bob/devtools/changelog.py b/bob/devtools/changelog.py
index abaa3967..a7f7420a 100644
--- a/bob/devtools/changelog.py
+++ b/bob/devtools/changelog.py
@@ -25,7 +25,9 @@ def parse_date(d):
 def _sort_commits(commits, reverse):
     """Sorts gitlab commit objects using their ``committed_date`` attribute."""
 
-    return sorted(commits, key=lambda x: parse_date(x.committed_date), reverse=reverse)
+    return sorted(
+        commits, key=lambda x: parse_date(x.committed_date), reverse=reverse
+    )
 
 
 def _sort_tags(tags, reverse):
@@ -151,7 +153,9 @@ def _write_one_tag(f, pkg_name, tag):
         if line.startswith("* ") or line.startswith("- "):
             line = line[2:]
 
-        line = line.replace("!", pkg_name + "!").replace(pkg_name + pkg_name, pkg_name)
+        line = line.replace("!", pkg_name + "!").replace(
+            pkg_name + pkg_name, pkg_name
+        )
         line = line.replace("#", pkg_name + "#")
         if not line:
             continue
@@ -204,7 +208,9 @@ def _write_mergerequests_range(f, pkg_name, mrs):
         title = title.replace(" !", " " + pkg_name + "!")
         title = title.replace(" #", " " + pkg_name + "#")
         if mr.description is not None:
-            description = mr.description.strip().replace("\r", "").replace("\n", "  ")
+            description = (
+                mr.description.strip().replace("\r", "").replace("\n", "  ")
+            )
             description = description.replace(" !", " " + pkg_name + "!")
             description = description.replace(" #", " " + pkg_name + "#")
         else:
@@ -305,7 +311,9 @@ def write_tags_with_commits(f, gitpkg, since, mode):
             # the attribute 'merged_at' is not available in GitLab API as of 27
             # June 2018
             mrs4tag = [
-                k for k in mrs if (start_date < parse_date(k.updated_at) <= end_date)
+                k
+                for k in mrs
+                if (start_date < parse_date(k.updated_at) <= end_date)
             ]
             _write_mergerequests_range(
                 f, gitpkg.attributes["path_with_namespace"], mrs4tag
@@ -322,7 +330,9 @@ def write_tags_with_commits(f, gitpkg, since, mode):
             # write leftover merge requests
             # the attribute 'merged_at' is not available in GitLab API as of 27
             # June 2018
-            leftover_mrs = [k for k in mrs if parse_date(k.updated_at) > start_date]
+            leftover_mrs = [
+                k for k in mrs if parse_date(k.updated_at) > start_date
+            ]
             _write_mergerequests_range(
                 f, gitpkg.attributes["path_with_namespace"], leftover_mrs
             )
diff --git a/bob/devtools/ci.py b/bob/devtools/ci.py
index 08a5155d..20577e80 100644
--- a/bob/devtools/ci.py
+++ b/bob/devtools/ci.py
@@ -68,7 +68,9 @@ def is_private(baseurl, package):
 
     private = True
     try:
-        r = urlopen(baseurl + "/" + package + "/info/refs?service=git-upload-pack")
+        r = urlopen(
+            baseurl + "/" + package + "/info/refs?service=git-upload-pack"
+        )
         private = r.getcode() != 200
     except HTTPError as e:
         private = e.getcode() == 401
@@ -96,11 +98,15 @@ def is_stable(package, refname, tag, repodir):
 
     if tag is not None:
         logger.info('Project %s tag is "%s"', package, tag)
-        parsed_tag = distutils.version.LooseVersion(tag[1:]).version  # remove 'v'
+        parsed_tag = distutils.version.LooseVersion(
+            tag[1:]
+        ).version  # remove 'v'
         is_prerelease = any([isinstance(k, str) for k in parsed_tag])
 
         if is_prerelease:
-            logger.warn("Pre-release detected - not publishing to stable channels")
+            logger.warn(
+                "Pre-release detected - not publishing to stable channels"
+            )
             return False
 
         if is_master(refname, tag, repodir):
@@ -174,7 +180,9 @@ def select_build_file(basename, paths, branch):
         specific_basename = "%s-%s" % (basename, branch)
         for path in paths:
             path = os.path.realpath(path)
-            candidate = os.path.join(path, "%s%s" % (specific_basename, extension))
+            candidate = os.path.join(
+                path, "%s%s" % (specific_basename, extension)
+            )
             if os.path.exists(candidate):
                 return candidate
 
diff --git a/bob/devtools/constants.py b/bob/devtools/constants.py
index 3270336a..dca90640 100644
--- a/bob/devtools/constants.py
+++ b/bob/devtools/constants.py
@@ -112,7 +112,9 @@ CACERT_URL = "https://curl.haxx.se/ca/cacert.pem"
 """Location of the most up-to-date CA certificate bundle"""
 
 
-CACERT = pkg_resources.resource_filename(__name__, os.path.join("data", "cacert.pem"))
+CACERT = pkg_resources.resource_filename(
+    __name__, os.path.join("data", "cacert.pem")
+)
 """We keep a copy of the CA certificates we trust here
 
    To update this file use: ``curl --remote-name --time-cond cacert.pem https://curl.haxx.se/ca/cacert.pem``
@@ -127,6 +129,8 @@ MATPLOTLIB_RCDIR = pkg_resources.resource_filename(__name__, "data")
 It is required for certain builds that use matplotlib functionality.
 """
 
-BOBRC_PATH = pkg_resources.resource_filename(__name__, os.path.join("data", "bobrc"))
+BOBRC_PATH = pkg_resources.resource_filename(
+    __name__, os.path.join("data", "bobrc")
+)
 """The path to custom Bob configuration file to be used during the CI
 """
diff --git a/bob/devtools/dav.py b/bob/devtools/dav.py
index 9cf67b92..bf874111 100644
--- a/bob/devtools/dav.py
+++ b/bob/devtools/dav.py
@@ -69,7 +69,9 @@ def augment_path_with_hash(path):
     """
     path = pathlib.Path(path)
     if not path.is_file():
-        raise ValueError(f"Can only augment path to files with a hash. Got: {path}")
+        raise ValueError(
+            f"Can only augment path to files with a hash. Got: {path}"
+        )
     file_hash = compute_sha256(path)[:8]
     suffix = "".join(path.suffixes)
     base_name = str(path.name)[: -len(suffix) or None]
diff --git a/bob/devtools/deploy.py b/bob/devtools/deploy.py
index ff278e3d..634fbb59 100644
--- a/bob/devtools/deploy.py
+++ b/bob/devtools/deploy.py
@@ -57,7 +57,9 @@ def deploy_conda_package(
     """
 
     server_info = WEBDAV_PATHS[stable][public]
-    davclient = _setup_webdav_client(SERVER, server_info["root"], username, password)
+    davclient = _setup_webdav_client(
+        SERVER, server_info["root"], username, password
+    )
 
     basename = os.path.basename(package)
     arch = arch or os.path.basename(os.path.dirname(package))
@@ -73,11 +75,15 @@ def deploy_conda_package(
             )
 
         else:
-            logger.info("[dav] rm -f %s%s%s", SERVER, server_info["root"], remote_path)
+            logger.info(
+                "[dav] rm -f %s%s%s", SERVER, server_info["root"], remote_path
+            )
             if not dry_run:
                 davclient.clean(remote_path)
 
-    logger.info("[dav] %s -> %s%s%s", package, SERVER, server_info["root"], remote_path)
+    logger.info(
+        "[dav] %s -> %s%s%s", package, SERVER, server_info["root"], remote_path
+    )
     if not dry_run:
         davclient.upload(local_path=package, remote_path=remote_path)
 
@@ -127,7 +133,9 @@ def deploy_documentation(
         )
 
     server_info = WEBDAV_PATHS[stable][public]
-    davclient = _setup_webdav_client(SERVER, server_info["root"], username, password)
+    davclient = _setup_webdav_client(
+        SERVER, server_info["root"], username, password
+    )
 
     remote_path_prefix = "%s/%s" % (server_info["docs"], package)
 
diff --git a/bob/devtools/graph.py b/bob/devtools/graph.py
index ac0a0e68..b2514df8 100644
--- a/bob/devtools/graph.py
+++ b/bob/devtools/graph.py
@@ -127,7 +127,9 @@ def compute_adjencence_matrix(
         path = get_output_path(metadata, conda_config)[0]
 
         # gets the next build number
-        build_number, _ = next_build_number(main_channel, os.path.basename(path))
+        build_number, _ = next_build_number(
+            main_channel, os.path.basename(path)
+        )
 
         # at this point, all elements are parsed, I know the package version,
         # build number and all dependencies
@@ -192,7 +194,9 @@ def compute_adjencence_matrix(
 
         # do not recurse for packages we already know
         all_recurse -= set(current.keys())
-        logger.info("Recursing over the following packages: %s", ", ".join(all_recurse))
+        logger.info(
+            "Recursing over the following packages: %s", ", ".join(all_recurse)
+        )
 
         for dep in all_recurse:
             dep_adjmtx = compute_adjencence_matrix(
@@ -264,7 +268,13 @@ def generate_graph(adjacence_matrix, deptypes, whitelist):
                 values["name"],
             )
             continue
-        name = values["name"] + "\n" + values["version"] + "\n" + values["build_string"]
+        name = (
+            values["name"]
+            + "\n"
+            + values["version"]
+            + "\n"
+            + values["build_string"]
+        )
         nodes[values["name"]] = graph.node(
             values["name"], name, shape="box", color="blue"
         )
@@ -284,7 +294,9 @@ def generate_graph(adjacence_matrix, deptypes, whitelist):
 
         for ref, parts in deps.items():
             if not whitelist_compiled.match(ref):
-                logger.debug("Skipping dependence %s (did not match whitelist)", ref)
+                logger.debug(
+                    "Skipping dependence %s (did not match whitelist)", ref
+                )
                 continue
 
             if not any([k == ref for k in nodes.keys()]):
diff --git a/bob/devtools/log.py b/bob/devtools/log.py
index b43a2a70..d5805967 100644
--- a/bob/devtools/log.py
+++ b/bob/devtools/log.py
@@ -127,7 +127,9 @@ def echo_warning(text):
 
 
 # helper functions to instantiate and set-up logging
-def setup(logger_name, format="%(levelname)s:%(name)s@%(asctime)s: %(message)s"):
+def setup(
+    logger_name, format="%(levelname)s:%(name)s@%(asctime)s: %(message)s"
+):
     """This function returns a logger object that is set up to perform logging
     using Bob loggers.
 
diff --git a/bob/devtools/mirror.py b/bob/devtools/mirror.py
index 1d05fab9..da41e4f2 100644
--- a/bob/devtools/mirror.py
+++ b/bob/devtools/mirror.py
@@ -70,7 +70,9 @@ def _list_conda_packages(local_dir):
         List of conda packages in `local_dir`
     """
     contents = os.listdir(local_dir)
-    return fnmatch.filter(contents, "*.conda") + fnmatch.filter(contents, "*.tar.bz2")
+    return fnmatch.filter(contents, "*.conda") + fnmatch.filter(
+        contents, "*.tar.bz2"
+    )
 
 
 def get_json(channel, platform, name):
@@ -126,7 +128,8 @@ def get_local_contents(path, arch):
     logger.info("Listing package contents of %s...", path_arch)
     contents = os.listdir(path_arch)
     return set(
-        fnmatch.filter(contents, "*.tar.bz2") + fnmatch.filter(contents, "*.conda")
+        fnmatch.filter(contents, "*.tar.bz2")
+        + fnmatch.filter(contents, "*.conda")
     )
 
 
@@ -301,7 +304,9 @@ def download_packages(packages, repodata, channel_url, dest_dir, arch, dry_run):
 
             # move
             local_dest = os.path.join(dest_dir, arch, p)
-            logger.info("[move: %d/%d] %s -> %s", k, total, temp_dest, local_dest)
+            logger.info(
+                "[move: %d/%d] %s -> %s", k, total, temp_dest, local_dest
+            )
 
             # check local directory is available before moving
             dirname = os.path.dirname(local_dest)
diff --git a/bob/devtools/pipelines.py b/bob/devtools/pipelines.py
index a39f80eb..230b5c46 100644
--- a/bob/devtools/pipelines.py
+++ b/bob/devtools/pipelines.py
@@ -30,7 +30,8 @@ def process_log(log):
 
         # Checking the date
         date = re.findall(
-            "[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2} [0-9]{2,2}:[0-9]{2,2}:[0-9]{2,2}", ll
+            "[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2} [0-9]{2,2}:[0-9]{2,2}:[0-9]{2,2}",
+            ll,
         )
         if len(date) > 0:
             # logs[date[0]]=current_package
diff --git a/bob/devtools/release.py b/bob/devtools/release.py
index be01e6d6..46b1784d 100644
--- a/bob/devtools/release.py
+++ b/bob/devtools/release.py
@@ -96,20 +96,30 @@ def _update_readme(readme, version):
     """
 
     # replace the badge in the readme's text with the given version
-    DOC_IMAGE = re.compile(r"\-(available|master|latest|(v\d+\.\d+\.\d+([abc]\d+)?))\-")
+    DOC_IMAGE = re.compile(
+        r"\-(available|master|latest|(v\d+\.\d+\.\d+([abc]\d+)?))\-"
+    )
     BRANCH_RE = re.compile(r"/(stable|master|(v\d+\.\d+\.\d+([abc]\d+)?))")
 
     new_readme = []
     for line in readme.splitlines():
         if BRANCH_RE.search(line) is not None:
             if "gitlab" in line:  # gitlab links
-                replacement = "/v%s" % version if version is not None else "/master"
+                replacement = (
+                    "/v%s" % version if version is not None else "/master"
+                )
                 line = BRANCH_RE.sub(replacement, line)
-            if ("software/bob" in line) or ("software/beat" in line):  # our doc server
-                replacement = "/v%s" % version if version is not None else "/master"
+            if ("software/bob" in line) or (
+                "software/beat" in line
+            ):  # our doc server
+                replacement = (
+                    "/v%s" % version if version is not None else "/master"
+                )
                 line = BRANCH_RE.sub(replacement, line)
         if DOC_IMAGE.search(line) is not None:
-            replacement = "-v%s-" % version if version is not None else "-latest-"
+            replacement = (
+                "-v%s-" % version if version is not None else "-latest-"
+            )
             line = DOC_IMAGE.sub(replacement, line)
         new_readme.append(line)
     return "\n".join(new_readme) + "\n"
@@ -242,7 +252,9 @@ def update_tag_comments(gitpkg, tag_name, tag_comments_list, dry_run=False):
     logger.info(tag_name)
     tag = gitpkg.tags.get(tag_name)
     tag_comments = "\n".join(tag_comments_list)
-    logger.info("Found tag %s, updating its comments with:\n%s", tag.name, tag_comments)
+    logger.info(
+        "Found tag %s, updating its comments with:\n%s", tag.name, tag_comments
+    )
     if not dry_run:
         tag.set_release_description(tag_comments)
     return tag
@@ -319,8 +331,12 @@ def update_files_with_mr(
                 logger.info("Merging !%d immediately - CI was skipped", mr.iid)
                 mr.merge()
             else:
-                logger.info("Auto-merging !%d only if pipeline succeeds", mr.iid)
-                time.sleep(0.5)  # to avoid the MR to be merged automatically - bug?
+                logger.info(
+                    "Auto-merging !%d only if pipeline succeeds", mr.iid
+                )
+                time.sleep(
+                    0.5
+                )  # to avoid the MR to be merged automatically - bug?
                 mr.merge(merge_when_pipeline_succeeds=True)
 
 
diff --git a/bob/devtools/scripts/badges.py b/bob/devtools/scripts/badges.py
index a5dda6f3..68e110b9 100644
--- a/bob/devtools/scripts/badges.py
+++ b/bob/devtools/scripts/badges.py
@@ -172,7 +172,9 @@ def badges(package, update_readme, dry_run, server):
 
         # download and edit README to setup badges
         if update_readme:
-            readme_file = use_package.files.get(file_path="README.rst", ref="master")
+            readme_file = use_package.files.get(
+                file_path="README.rst", ref="master"
+            )
             readme_content = readme_file.decode().decode()
             readme_content = _update_readme(readme_content, info)
             # commit and push changes
@@ -187,6 +189,8 @@ def badges(package, update_readme, dry_run, server):
 
     except gitlab.GitlabGetError:
         logger.warn(
-            "Gitlab access error - package %s does not exist?", package, exc_info=True
+            "Gitlab access error - package %s does not exist?",
+            package,
+            exc_info=True,
         )
         echo_warning("%s: unknown" % (package,))
diff --git a/bob/devtools/scripts/build.py b/bob/devtools/scripts/build.py
index a251f6d7..2a8f5b5e 100644
--- a/bob/devtools/scripts/build.py
+++ b/bob/devtools/scripts/build.py
@@ -218,7 +218,9 @@ def build(
     prefix = get_env_directory(os.environ["CONDA_EXE"], "base")
     condarc_options["croot"] = os.path.join(prefix, "conda-bld")
 
-    conda_config = make_conda_config(config, python, append_file, condarc_options)
+    conda_config = make_conda_config(
+        config, python, append_file, condarc_options
+    )
 
     set_environment("MATPLOTLIBRC", MATPLOTLIB_RCDIR)
     set_environment("BOBRC", BOBRC_PATH)
@@ -256,7 +258,9 @@ def build(
 
         # checks if we should actually build this recipe
         if should_skip_build(metadata):
-            logger.info("Skipping UNSUPPORTED build of %s for %s", recipe_dir, arch)
+            logger.info(
+                "Skipping UNSUPPORTED build of %s for %s", recipe_dir, arch
+            )
             continue
 
         rendered_recipe = get_parsed_recipe(metadata)
@@ -267,7 +271,9 @@ def build(
         path = get_output_path(metadata, conda_config)[0]
 
         # gets the next build number
-        build_number, _ = next_build_number(upload_channel, os.path.basename(path))
+        build_number, _ = next_build_number(
+            upload_channel, os.path.basename(path)
+        )
 
         logger.info(
             "Building %s-%s-py%s (build: %d) for %s",
@@ -283,7 +289,9 @@ def build(
             # get it right
             set_environment("BOB_BUILD_NUMBER", str(build_number))
             with root_logger_protection():
-                paths = conda_build.api.build(d, config=conda_config, notest=no_test)
+                paths = conda_build.api.build(
+                    d, config=conda_config, notest=no_test
+                )
             # if you get to this point, the package was successfully rebuilt
             # set environment to signal caller we may dispose of it
             os.environ["BDT_BUILD"] = ":".join(paths)
diff --git a/bob/devtools/scripts/changelog.py b/bob/devtools/scripts/changelog.py
index 193d2299..13be66da 100644
--- a/bob/devtools/scripts/changelog.py
+++ b/bob/devtools/scripts/changelog.py
@@ -46,7 +46,9 @@ Examples:
 @click.argument("target")
 @click.argument(
     "changelog",
-    type=click.Path(exists=False, dir_okay=False, file_okay=True, writable=True),
+    type=click.Path(
+        exists=False, dir_okay=False, file_okay=True, writable=True
+    ),
     required=False,
 )
 @click.option(
@@ -121,7 +123,9 @@ def changelog(target, changelog, group, mode, since):
                 if k.strip() and not k.strip().startswith("#")
             ]
     else:
-        logger.info("Assuming %s is a package name (file does not exist)...", target)
+        logger.info(
+            "Assuming %s is a package name (file does not exist)...", target
+        )
         packages = [target]
 
     # if the user passed a date, convert it
@@ -194,5 +198,7 @@ def changelog(target, changelog, group, mode, since):
             changelog_file = open(changelog, "at")
 
         # write_tags(f, use_package, last_release_date)
-        write_tags_with_commits(changelog_file, use_package, last_release_date, mode)
+        write_tags_with_commits(
+            changelog_file, use_package, last_release_date, mode
+        )
         changelog_file.flush()
diff --git a/bob/devtools/scripts/commitfile.py b/bob/devtools/scripts/commitfile.py
index 3100455c..a5f75dd4 100644
--- a/bob/devtools/scripts/commitfile.py
+++ b/bob/devtools/scripts/commitfile.py
@@ -5,7 +5,11 @@ import os
 import click
 
 from ..log import get_logger, verbosity_option
-from ..release import get_gitlab_instance, update_files_at_master, update_files_with_mr
+from ..release import (
+    get_gitlab_instance,
+    update_files_at_master,
+    update_files_with_mr,
+)
 from . import bdt
 
 logger = get_logger(__name__)
@@ -34,9 +38,13 @@ Examples:
 """
 )
 @click.argument("package")
-@click.argument("file", type=click.Path(file_okay=True, dir_okay=False, exists=True))
+@click.argument(
+    "file", type=click.Path(file_okay=True, dir_okay=False, exists=True)
+)
 @click.option("-m", "--message", help="Message to set for this commit")
-@click.option("-p", "--path", help="Which path to replace on the remote package")
+@click.option(
+    "-p", "--path", help="Which path to replace on the remote package"
+)
 @click.option(
     "-b",
     "--branch",
diff --git a/bob/devtools/scripts/create.py b/bob/devtools/scripts/create.py
index c3ec1ce9..52d2bfb7 100644
--- a/bob/devtools/scripts/create.py
+++ b/bob/devtools/scripts/create.py
@@ -11,7 +11,12 @@ import yaml
 from ..bootstrap import run_cmdline, set_environment
 from ..build import conda_create, make_conda_config, parse_dependencies, uniq
 from ..config import read_config
-from ..constants import BASE_CONDARC, CONDA_BUILD_CONFIG, CONDA_RECIPE_APPEND, SERVER
+from ..constants import (
+    BASE_CONDARC,
+    CONDA_BUILD_CONFIG,
+    CONDA_RECIPE_APPEND,
+    SERVER,
+)
 from ..log import echo_normal, get_logger, verbosity_option
 from . import bdt
 
@@ -80,7 +85,8 @@ Examples:
     "--python",
     default=("%d.%d" % sys.version_info[:2]),
     show_default=True,
-    help="Version of python to build the " "environment for [default: %(default)s]",
+    help="Version of python to build the "
+    "environment for [default: %(default)s]",
 )
 @click.option(
     "-o",
@@ -250,7 +256,9 @@ def create(
         "\n  - ".join(condarc_options["channels"]),
     )
 
-    conda_config = make_conda_config(config, python, append_file, condarc_options)
+    conda_config = make_conda_config(
+        config, python, append_file, condarc_options
+    )
     with warnings.catch_warnings():
         warnings.simplefilter("ignore")
         # conda parsing will raise a warning about splitting build/test phases
@@ -259,7 +267,9 @@ def create(
 
     # when creating a local development environment, remove the always_yes option
     del condarc_options["always_yes"]
-    conda_create(conda, name, overwrite, condarc_options, deps, dry_run, use_local)
+    conda_create(
+        conda, name, overwrite, condarc_options, deps, dry_run, use_local
+    )
 
     # part 2: pip-install everything listed in pip-extras
     # mix-in stuff from ~/.bdtrc and command-line
diff --git a/bob/devtools/scripts/dav.py b/bob/devtools/scripts/dav.py
index 8992f60d..98bf3d44 100644
--- a/bob/devtools/scripts/dav.py
+++ b/bob/devtools/scripts/dav.py
@@ -8,8 +8,18 @@ import pkg_resources
 
 from click_plugins import with_plugins
 
-from ..dav import augment_path_with_hash, remove_old_beta_packages, setup_webdav_client
-from ..log import echo_info, echo_normal, echo_warning, get_logger, verbosity_option
+from ..dav import (
+    augment_path_with_hash,
+    remove_old_beta_packages,
+    setup_webdav_client,
+)
+from ..log import (
+    echo_info,
+    echo_normal,
+    echo_warning,
+    get_logger,
+    verbosity_option,
+)
 from . import bdt
 
 logger = get_logger(__name__)
@@ -259,7 +269,9 @@ def upload(private, execute, checksum, local, remote):
     cl = setup_webdav_client(private)
 
     if not cl.check(remote):
-        echo_warning("base remote directory for upload %s does not exist" % (remote,))
+        echo_warning(
+            "base remote directory for upload %s does not exist" % (remote,)
+        )
         return 1
 
     for k in local:
diff --git a/bob/devtools/scripts/graph.py b/bob/devtools/scripts/graph.py
index b66caaed..c1f15b76 100644
--- a/bob/devtools/scripts/graph.py
+++ b/bob/devtools/scripts/graph.py
@@ -197,7 +197,9 @@ def graph(
         "\n  - ".join(condarc_options["channels"]),
     )
 
-    conda_config = make_conda_config(config, python, append_file, condarc_options)
+    conda_config = make_conda_config(
+        config, python, append_file, condarc_options
+    )
 
     set_environment("MATPLOTLIBRC", MATPLOTLIB_RCDIR)
     set_environment("BOBRC", BOBRC_PATH)
diff --git a/bob/devtools/scripts/jobs.py b/bob/devtools/scripts/jobs.py
index 2718f7da..b6c4c870 100644
--- a/bob/devtools/scripts/jobs.py
+++ b/bob/devtools/scripts/jobs.py
@@ -52,11 +52,15 @@ def jobs(name, status):
 
     # search for the runner(s) to affect
     runners = [
-        k for k in gl.runners.list(all=True) if k.attributes["description"] in names
+        k
+        for k in gl.runners.list(all=True)
+        if k.attributes["description"] in names
     ]
 
     if not runners:
-        raise RuntimeError("Cannot find runner with description = %s" % "|".join(names))
+        raise RuntimeError(
+            "Cannot find runner with description = %s" % "|".join(names)
+        )
 
     for runner in runners:
         jobs = runner.jobs.list(all=True, status=status)
diff --git a/bob/devtools/scripts/lasttag.py b/bob/devtools/scripts/lasttag.py
index 9e9bfd54..ab16cfcd 100644
--- a/bob/devtools/scripts/lasttag.py
+++ b/bob/devtools/scripts/lasttag.py
@@ -49,10 +49,13 @@ def lasttag(package):
         tag = get_last_tag(use_package)
         date = parse_date(tag.commit["committed_date"])
         echo_normal(
-            "%s: %s (%s)" % (package, tag.name, date.strftime("%Y-%m-%d %H:%M:%S"))
+            "%s: %s (%s)"
+            % (package, tag.name, date.strftime("%Y-%m-%d %H:%M:%S"))
         )
     except gitlab.GitlabGetError:
         logger.warn(
-            "Gitlab access error - package %s does not exist?", package, exc_info=True
+            "Gitlab access error - package %s does not exist?",
+            package,
+            exc_info=True,
         )
         echo_warning("%s: unknown" % (package,))
diff --git a/bob/devtools/scripts/local.py b/bob/devtools/scripts/local.py
index 50f49d50..eccb31b2 100644
--- a/bob/devtools/scripts/local.py
+++ b/bob/devtools/scripts/local.py
@@ -108,7 +108,9 @@ def docs(ctx, requirement, dry_run, python, group):
       \b
     """
     set_up_environment_variables(
-        python=python, name_space=group, project_dir=os.path.dirname(requirement)
+        python=python,
+        name_space=group,
+        project_dir=os.path.dirname(requirement),
     )
 
     ctx.invoke(ci.docs, requirement=requirement, dry_run=dry_run)
@@ -158,7 +160,9 @@ Examples:
 def build(ctx, dry_run, recipe_dir, python, group):
     """Run the CI build step locally."""
     set_up_environment_variables(
-        python=python, name_space=group, project_dir=os.path.join(recipe_dir, "..")
+        python=python,
+        name_space=group,
+        project_dir=os.path.join(recipe_dir, ".."),
     )
 
     ctx.invoke(ci.build, dry_run=dry_run, recipe_dir=recipe_dir)
diff --git a/bob/devtools/scripts/mirror.py b/bob/devtools/scripts/mirror.py
index 73542b87..c887dc01 100644
--- a/bob/devtools/scripts/mirror.py
+++ b/bob/devtools/scripts/mirror.py
@@ -205,9 +205,13 @@ def mirror(
         remote_package_info.update(remote_repodata.get("packages", {}))
         remote_package_info.update(remote_repodata.get("packages.conda", {}))
 
-        logger.info("%d packages available in remote index", len(remote_package_info))
+        logger.info(
+            "%d packages available in remote index", len(remote_package_info)
+        )
         local_packages = get_local_contents(dest_dir, arch)
-        logger.info("%d packages available in local mirror", len(local_packages))
+        logger.info(
+            "%d packages available in local mirror", len(local_packages)
+        )
 
         # by default, download everything
         remote_packages = set(remote_package_info.keys())
diff --git a/bob/devtools/scripts/pipelines.py b/bob/devtools/scripts/pipelines.py
index ac7bc411..87d27bd7 100644
--- a/bob/devtools/scripts/pipelines.py
+++ b/bob/devtools/scripts/pipelines.py
@@ -53,7 +53,8 @@ def process_pipelines(package, pipeline, job_id):
 
         if len(jobs) == 0:
             print(
-                "Job %s not found in the pipeline %s. Use `bdt gitlab get-pipelines` to search "
+                "Job %s not found in the pipeline %s. "
+                "Use `bdt gitlab get-pipelines` to search "
                 % (job_id, pipeline.attributes["id"])
             )
 
@@ -84,7 +85,9 @@ def process_pipelines(package, pipeline, job_id):
         pass
     except gitlab.GitlabGetError:
         logger.warn(
-            "Gitlab access error - package %s does not exist?", package, exc_info=True
+            "Gitlab access error - package %s does not exist?",
+            package,
+            exc_info=True,
         )
         echo_warning("%s: unknown" % (package,))
 
@@ -137,6 +140,8 @@ def get_pipelines(package):
 
     except gitlab.GitlabGetError:
         logger.warn(
-            "Gitlab access error - package %s does not exist?", package, exc_info=True
+            "Gitlab access error - package %s does not exist?",
+            package,
+            exc_info=True,
         )
         echo_warning("%s: unknown" % (package,))
diff --git a/bob/devtools/scripts/rebuild.py b/bob/devtools/scripts/rebuild.py
index f183f7a4..0521cfac 100644
--- a/bob/devtools/scripts/rebuild.py
+++ b/bob/devtools/scripts/rebuild.py
@@ -211,7 +211,9 @@ def rebuild(
     prefix = get_env_directory(os.environ["CONDA_EXE"], "base")
     condarc_options["croot"] = os.path.join(prefix, "conda-bld")
 
-    conda_config = make_conda_config(config, python, append_file, condarc_options)
+    conda_config = make_conda_config(
+        config, python, append_file, condarc_options
+    )
 
     set_environment("MATPLOTLIBRC", MATPLOTLIB_RCDIR)
     set_environment("BOBRC", BOBRC_PATH)
@@ -249,7 +251,9 @@ def rebuild(
 
         # checks if we should actually build this recipe
         if should_skip_build(metadata):
-            logger.info("Skipping UNSUPPORTED build of %s for %s", recipe_dir, arch)
+            logger.info(
+                "Skipping UNSUPPORTED build of %s for %s", recipe_dir, arch
+            )
             continue
 
         rendered_recipe = get_parsed_recipe(metadata)
@@ -263,7 +267,9 @@ def rebuild(
 
         should_build = True
 
-        if existing:  # other builds exist, get the latest and see if it still works
+        if (
+            existing
+        ):  # other builds exist, get the latest and see if it still works
 
             destpath = os.path.join(
                 condarc_options["croot"], arch, os.path.basename(existing[0])
@@ -310,7 +316,9 @@ def rebuild(
                 # set $BOB_BUILD_NUMBER and force conda_build to reparse recipe to get it
                 # right
                 set_environment("BOB_BUILD_NUMBER", str(build_number))
-                paths = conda_build.api.build(d, config=conda_config, notest=False)
+                paths = conda_build.api.build(
+                    d, config=conda_config, notest=False
+                )
                 # if you get to this point, the package was successfully rebuilt
                 # set environment to signal caller we may dispose of it
                 os.environ["BDT_BUILD"] = ":".join(paths)
diff --git a/bob/devtools/scripts/release.py b/bob/devtools/scripts/release.py
index eeaceeab..a6c53480 100644
--- a/bob/devtools/scripts/release.py
+++ b/bob/devtools/scripts/release.py
@@ -157,7 +157,9 @@ def release(changelog, group, package, resume, dry_run):
     if package:
         # get the index where the package first appears in the list
         start_idx = [
-            i for i, line in enumerate(changelogs) if line[1:].strip() == package
+            i
+            for i, line in enumerate(changelogs)
+            if line[1:].strip() == package
         ]
 
         if not start_idx:
@@ -194,7 +196,9 @@ def release(changelog, group, package, resume, dry_run):
 
         # release the package with the found tag and its comments
         if use_package:
-            pipeline_id = release_package(use_package, tag, tag_comments, dry_run)
+            pipeline_id = release_package(
+                use_package, tag, tag_comments, dry_run
+            )
             # now, wait for the pipeline to finish, before we can release the
             # next package
             wait_for_pipeline_to_finish(use_package, pipeline_id, dry_run)
diff --git a/bob/devtools/scripts/runners.py b/bob/devtools/scripts/runners.py
index 637f27bc..b8a208e9 100644
--- a/bob/devtools/scripts/runners.py
+++ b/bob/devtools/scripts/runners.py
@@ -15,7 +15,9 @@ def _get_runner_from_description(gl, descr):
 
     # search for the runner to affect
     the_runner = [
-        k for k in gl.runners.list(all=True) if k.attributes["description"] == descr
+        k
+        for k in gl.runners.list(all=True)
+        if k.attributes["description"] == descr
     ]
     if not the_runner:
         raise RuntimeError("Cannot find runner with description = %s", descr)
@@ -76,7 +78,9 @@ def _get_projects_from_runner(gl, runner):
     packages = []
     for k, proj in enumerate(the_runner.projects):
         packages.append(_get_project(gl, proj["id"]))
-        logger.debug("Got data from project %d/%d", k + 1, len(the_runner.projects))
+        logger.debug(
+            "Got data from project %d/%d", k + 1, len(the_runner.projects)
+        )
     return packages
 
 
diff --git a/bob/devtools/scripts/settings.py b/bob/devtools/scripts/settings.py
index 80a61b6f..4ddb3c49 100644
--- a/bob/devtools/scripts/settings.py
+++ b/bob/devtools/scripts/settings.py
@@ -8,7 +8,11 @@ import click
 from ..log import echo_info, echo_normal, get_logger, verbosity_option
 from ..release import get_gitlab_instance
 from . import bdt
-from .runners import _get_project, _get_projects_from_file, _get_projects_from_group
+from .runners import (
+    _get_project,
+    _get_projects_from_file,
+    _get_projects_from_group,
+)
 
 logger = get_logger(__name__)
 
diff --git a/bob/devtools/scripts/update_bob.py b/bob/devtools/scripts/update_bob.py
index 7c4dc1e6..7902a815 100644
--- a/bob/devtools/scripts/update_bob.py
+++ b/bob/devtools/scripts/update_bob.py
@@ -17,7 +17,8 @@ Examples:
 """
 )
 @click.option(
-    "--stable/--beta", help="To use the stable versions in the list and pin packages."
+    "--stable/--beta",
+    help="To use the stable versions in the list and pin packages.",
 )
 @verbosity_option()
 @bdt.raise_on_error
@@ -26,7 +27,11 @@ def update_bob(stable):
     import tempfile
 
     from ..ci import read_packages
-    from ..release import download_path, get_gitlab_instance, get_latest_tag_name
+    from ..release import (
+        download_path,
+        get_gitlab_instance,
+        get_latest_tag_name,
+    )
 
     gl = get_gitlab_instance()
 
@@ -53,11 +58,14 @@ def update_bob(stable):
         else:
             private_packages.append(package.replace("bob/", ""))
 
-        logger.debug("%s is %s", package, "public" if is_public else "not public")
+        logger.debug(
+            "%s is %s", package, "public" if is_public else "not public"
+        )
 
     logger.info("Found %d public packages", len(public_packages))
     logger.info(
-        "The following packages were not public:\n%s", "\n".join(private_packages)
+        "The following packages were not public:\n%s",
+        "\n".join(private_packages),
     )
 
     # if requires stable versions, add latest tag versions to the names
@@ -67,7 +75,9 @@ def update_bob(stable):
             get_latest_tag_name(gl.projects.get(f"bob/{pkg}"))
             for pkg in public_packages
         ]
-        public_packages = [f"{pkg} =={tag}" for pkg, tag in zip(public_packages, tags)]
+        public_packages = [
+            f"{pkg} =={tag}" for pkg, tag in zip(public_packages, tags)
+        ]
 
     # modify conda/meta.yaml and requirements.txt in bob/bob
     logger.info("Updating conda/meta.yaml")
@@ -80,7 +90,10 @@ def update_bob(stable):
         i2 = lines.find(end_tag)
 
         lines = (
-            lines[:i1] + "\n    - ".join([""] + public_packages) + "\n    " + lines[i2:]
+            lines[:i1]
+            + "\n    - ".join([""] + public_packages)
+            + "\n    "
+            + lines[i2:]
         )
 
     with open("conda/meta.yaml", "w") as f:
diff --git a/bob/devtools/scripts/visibility.py b/bob/devtools/scripts/visibility.py
index 7137f9c0..54edfc44 100644
--- a/bob/devtools/scripts/visibility.py
+++ b/bob/devtools/scripts/visibility.py
@@ -81,7 +81,8 @@ def visibility(target, group):
                 use_package.id,
             )
             echo_normal(
-                "%s: %s" % (package, use_package.attributes["visibility"].lower())
+                "%s: %s"
+                % (package, use_package.attributes["visibility"].lower())
             )
         except gitlab.GitlabGetError:
             logger.warn(
diff --git a/bob/devtools/templates/.pre-commit-config.yaml b/bob/devtools/templates/.pre-commit-config.yaml
index c201ab09..1515754e 100644
--- a/bob/devtools/templates/.pre-commit-config.yaml
+++ b/bob/devtools/templates/.pre-commit-config.yaml
@@ -5,6 +5,7 @@ repos:
     rev: 5.9.3
     hooks:
       - id: isort
+        args: [--settings-path, "pyproject.toml"]
   - repo: https://github.com/psf/black
     rev: 21.7b0
     hooks:
diff --git a/bob/devtools/templates/doc/conf.py b/bob/devtools/templates/doc/conf.py
index 07912148..235fee7c 100644
--- a/bob/devtools/templates/doc/conf.py
+++ b/bob/devtools/templates/doc/conf.py
@@ -232,7 +232,8 @@ autodoc_default_options = {
 sphinx_requirements = "extra-intersphinx.txt"
 if os.path.exists(sphinx_requirements):
     intersphinx_mapping = link_documentation(
-        additional_packages=["python", "numpy"] + load_requirements(sphinx_requirements)
+        additional_packages=["python", "numpy"]
+        + load_requirements(sphinx_requirements)
     )
 else:
     intersphinx_mapping = link_documentation()
diff --git a/bob/devtools/webdav3/client.py b/bob/devtools/webdav3/client.py
index 056a8b5f..1b571aa4 100644
--- a/bob/devtools/webdav3/client.py
+++ b/bob/devtools/webdav3/client.py
@@ -69,7 +69,9 @@ def get_options(option_type, from_options):
     _options = dict()
 
     for key in option_type.keys:
-        key_with_prefix = "{prefix}{key}".format(prefix=option_type.prefix, key=key)
+        key_with_prefix = "{prefix}{key}".format(
+            prefix=option_type.prefix, key=key
+        )
         if key not in from_options and key_with_prefix not in from_options:
             _options[key] = ""
         elif key in from_options:
@@ -253,8 +255,12 @@ class Client(object):
              `proxy_login`: login name for proxy server.
              `proxy_password`: password for proxy server.
         """
-        webdav_options = get_options(option_type=WebDAVSettings, from_options=options)
-        proxy_options = get_options(option_type=ProxySettings, from_options=options)
+        webdav_options = get_options(
+            option_type=WebDAVSettings, from_options=options
+        )
+        proxy_options = get_options(
+            option_type=ProxySettings, from_options=options
+        )
 
         self.webdav = WebDAVSettings(webdav_options)
         self.proxy = ProxySettings(proxy_options)
@@ -281,7 +287,9 @@ class Client(object):
             if not self.check(directory_urn.path()):
                 raise RemoteResourceNotFound(directory_urn.path())
 
-        response = self.execute_request(action="list", path=directory_urn.quote())
+        response = self.execute_request(
+            action="list", path=directory_urn.quote()
+        )
         urns = WebDavXmlUtils.parse_get_list_response(response.content)
 
         path = Urn.normalize_path(self.get_full_path(directory_urn))
@@ -336,7 +344,9 @@ class Client(object):
         if not self.check(directory_urn.parent()):
             raise RemoteParentNotFound(directory_urn.path())
 
-        response = self.execute_request(action="mkdir", path=directory_urn.quote())
+        response = self.execute_request(
+            action="mkdir", path=directory_urn.quote()
+        )
         return response.status_code in (200, 201)
 
     @wrap_connection_error
@@ -454,7 +464,9 @@ class Client(object):
 
         def target():
             return self.download_sync(
-                local_path=local_path, remote_path=remote_path, callback=callback
+                local_path=local_path,
+                remote_path=remote_path,
+                callback=callback,
             )
 
         threading.Thread(target=target).start()
@@ -561,7 +573,9 @@ class Client(object):
                     path=local_path, size=file_size, max_size=self.large_size
                 )
 
-            self.execute_request(action="upload", path=urn.quote(), data=local_file)
+            self.execute_request(
+                action="upload", path=urn.quote(), data=local_file
+            )
 
     def upload_sync(self, remote_path, local_path, callback=None):
         """Uploads resource to remote path on WebDAV server synchronously. In
@@ -589,7 +603,9 @@ class Client(object):
 
         def target():
             return self.upload_sync(
-                local_path=local_path, remote_path=remote_path, callback=callback
+                local_path=local_path,
+                remote_path=remote_path,
+                callback=callback,
             )
 
         threading.Thread(target=target).start()
@@ -641,7 +657,9 @@ class Client(object):
         header_destination = "Destination: {path}".format(
             path=self.get_full_path(urn_to)
         )
-        header_overwrite = "Overwrite: {flag}".format(flag="T" if overwrite else "F")
+        header_overwrite = "Overwrite: {flag}".format(
+            flag="T" if overwrite else "F"
+        )
         self.execute_request(
             action="move",
             path=urn_from.quote(),
@@ -795,7 +813,9 @@ class Client(object):
             if os.path.isdir(local_path):
                 if not self.check(remote_path=remote_path):
                     self.mkdir(remote_path=remote_path)
-                self.push(remote_directory=remote_path, local_directory=local_path)
+                self.push(
+                    remote_directory=remote_path, local_directory=local_path
+                )
             else:
                 if local_resource_name in remote_resource_names:
                     continue
@@ -831,16 +851,24 @@ class Client(object):
             if self.is_dir(remote_urn.path()):
                 if not os.path.exists(local_path):
                     os.mkdir(local_path)
-                self.pull(remote_directory=remote_path, local_directory=local_path)
+                self.pull(
+                    remote_directory=remote_path, local_directory=local_path
+                )
             else:
                 if remote_resource_name in local_resource_names:
                     continue
-                self.download_file(remote_path=remote_path, local_path=local_path)
+                self.download_file(
+                    remote_path=remote_path, local_path=local_path
+                )
 
     def sync(self, remote_directory, local_directory):
 
-        self.pull(remote_directory=remote_directory, local_directory=local_directory)
-        self.push(remote_directory=remote_directory, local_directory=local_directory)
+        self.pull(
+            remote_directory=remote_directory, local_directory=local_directory
+        )
+        self.push(
+            remote_directory=remote_directory, local_directory=local_directory
+        )
 
 
 class Resource(object):
@@ -874,7 +902,9 @@ class Resource(object):
 
     def copy(self, remote_path):
         urn = Urn(remote_path)
-        self.client.copy(remote_path_from=self.urn.path(), remote_path_to=remote_path)
+        self.client.copy(
+            remote_path_from=self.urn.path(), remote_path_to=remote_path
+        )
         return Resource(self.client, urn)
 
     def info(self, params=None):
@@ -928,7 +958,9 @@ class Resource(object):
 
     @property
     def property(self, option):
-        return self.client.get_property(remote_path=self.urn.path(), option=option)
+        return self.client.get_property(
+            remote_path=self.urn.path(), option=option
+        )
 
     @property.setter
     def property(self, option, value):
diff --git a/bob/devtools/webdav3/connection.py b/bob/devtools/webdav3/connection.py
index da54e7d8..1825cd48 100644
--- a/bob/devtools/webdav3/connection.py
+++ b/bob/devtools/webdav3/connection.py
@@ -60,16 +60,24 @@ class WebDAVSettings(ConnectionSettings):
     def is_valid(self):
 
         if not self.hostname:
-            raise OptionNotValid(name="hostname", value=self.hostname, ns=self.ns)
+            raise OptionNotValid(
+                name="hostname", value=self.hostname, ns=self.ns
+            )
 
         if self.cert_path and not exists(self.cert_path):
-            raise OptionNotValid(name="cert_path", value=self.cert_path, ns=self.ns)
+            raise OptionNotValid(
+                name="cert_path", value=self.cert_path, ns=self.ns
+            )
 
         if self.key_path and not exists(self.key_path):
-            raise OptionNotValid(name="key_path", value=self.key_path, ns=self.ns)
+            raise OptionNotValid(
+                name="key_path", value=self.key_path, ns=self.ns
+            )
 
         if self.key_path and not self.cert_path:
-            raise OptionNotValid(name="cert_path", value=self.cert_path, ns=self.ns)
+            raise OptionNotValid(
+                name="cert_path", value=self.cert_path, ns=self.ns
+            )
 
         if self.password and not self.login:
             raise OptionNotValid(name="login", value=self.login, ns=self.ns)
@@ -103,4 +111,6 @@ class ProxySettings(ConnectionSettings):
 
         if self.login or self.password:
             if not self.hostname:
-                raise OptionNotValid(name="hostname", value=self.hostname, ns=self.ns)
+                raise OptionNotValid(
+                    name="hostname", value=self.hostname, ns=self.ns
+                )
diff --git a/bob/devtools/webdav3/urn.py b/bob/devtools/webdav3/urn.py
index 4dafe771..0bdf2709 100644
--- a/bob/devtools/webdav3/urn.py
+++ b/bob/devtools/webdav3/urn.py
@@ -19,10 +19,14 @@ class Urn(object):
             self._path = sub(expression, Urn.separate, self._path)
 
         if not self._path.startswith(Urn.separate):
-            self._path = "{begin}{end}".format(begin=Urn.separate, end=self._path)
+            self._path = "{begin}{end}".format(
+                begin=Urn.separate, end=self._path
+            )
 
         if directory and not self._path.endswith(Urn.separate):
-            self._path = "{begin}{end}".format(begin=self._path, end=Urn.separate)
+            self._path = "{begin}{end}".format(
+                begin=self._path, end=Urn.separate
+            )
 
     def __str__(self):
         return self.path()
@@ -36,7 +40,11 @@ class Urn(object):
     def filename(self):
 
         path_split = self._path.split(Urn.separate)
-        name = path_split[-2] + Urn.separate if path_split[-1] == "" else path_split[-1]
+        name = (
+            path_split[-2] + Urn.separate
+            if path_split[-1] == ""
+            else path_split[-1]
+        )
         return unquote(name)
 
     def parent(self):
@@ -63,7 +71,11 @@ class Urn(object):
     @staticmethod
     def normalize_path(path):
         result = sub("/{2,}", "/", path)
-        return result if len(result) < 1 or result[-1] != Urn.separate else result[:-1]
+        return (
+            result
+            if len(result) < 1 or result[-1] != Urn.separate
+            else result[:-1]
+        )
 
     @staticmethod
     def compare_path(path_a, href):
diff --git a/deps/repodata-patches/gen_patch_json.py b/deps/repodata-patches/gen_patch_json.py
index 6bbdc396..4722be4f 100644
--- a/deps/repodata-patches/gen_patch_json.py
+++ b/deps/repodata-patches/gen_patch_json.py
@@ -128,7 +128,9 @@ def gen_new_index_and_patch_instructions(repodata):
     instructions = {}
     for i, packages_key in enumerate(["packages", "packages.conda"]):
         new_index = _gen_new_index(repodata, packages_key)
-        inst = _gen_patch_instructions(repodata[packages_key], new_index, packages_key)
+        inst = _gen_patch_instructions(
+            repodata[packages_key], new_index, packages_key
+        )
         _add_removals(inst, REMOVALS[repodata["info"]["subdir"]])
         if i == 0:
             instructions.update(inst)
@@ -149,7 +151,9 @@ def main():
     repodatas = {}
     subdirs = SUBDIRS
     for subdir in tqdm.tqdm(subdirs, desc="Downloading repodata"):
-        repodata_url = "/".join((BASE_URL, subdir, "repodata_from_packages.json"))
+        repodata_url = "/".join(
+            (BASE_URL, subdir, "repodata_from_packages.json")
+        )
         response = requests.get(repodata_url)
         response.raise_for_status()
         repodatas[subdir] = response.json()
@@ -169,7 +173,11 @@ def main():
         patch_instructions_path = join(prefix_subdir, "patch_instructions.json")
         with open(patch_instructions_path, "w") as fh:
             json.dump(
-                instructions, fh, indent=2, sort_keys=True, separators=(",", ": ")
+                instructions,
+                fh,
+                indent=2,
+                sort_keys=True,
+                separators=(",", ": "),
             )
 
 
diff --git a/deps/repodata-patches/show_diff.py b/deps/repodata-patches/show_diff.py
index fe6ad42e..8c95ffaf 100755
--- a/deps/repodata-patches/show_diff.py
+++ b/deps/repodata-patches/show_diff.py
@@ -7,10 +7,15 @@ import os
 import urllib
 
 from conda_build.index import _apply_instructions
-from gen_patch_json import BASE_URL, SUBDIRS, gen_new_index_and_patch_instructions
+from gen_patch_json import (
+    BASE_URL,
+    SUBDIRS,
+    gen_new_index_and_patch_instructions,
+)
 
 CACHE_DIR = os.environ.get(
-    "CACHE_DIR", os.path.join(os.path.dirname(os.path.abspath(__file__)), "cache")
+    "CACHE_DIR",
+    os.path.join(os.path.dirname(os.path.abspath(__file__)), "cache"),
 )
 
 
@@ -26,8 +31,14 @@ def show_record_diffs(subdir, ref_repodata, new_repodata):
             print(f"{subdir}::{name}")
             ref_lines = json.dumps(ref_pkg, indent=2).splitlines()
             new_lines = json.dumps(new_pkg, indent=2).splitlines()
-            for ln in difflib.unified_diff(ref_lines, new_lines, n=0, lineterm=""):
-                if ln.startswith("+++") or ln.startswith("---") or ln.startswith("@@"):
+            for ln in difflib.unified_diff(
+                ref_lines, new_lines, n=0, lineterm=""
+            ):
+                if (
+                    ln.startswith("+++")
+                    or ln.startswith("---")
+                    or ln.startswith("@@")
+                ):
                     continue
                 print(ln)
 
@@ -58,7 +69,10 @@ if __name__ == "__main__":
         description="show repodata changes from the current gen_patch_json"
     )
     parser.add_argument(
-        "--subdirs", nargs="*", default=None, help="subdir(s) show, default is all"
+        "--subdirs",
+        nargs="*",
+        default=None,
+        help="subdir(s) show, default is all",
     )
     parser.add_argument(
         "--use-cache",
@@ -76,7 +90,9 @@ if __name__ == "__main__":
         subdir_dir = os.path.join(CACHE_DIR, subdir)
         if not os.path.exists(subdir_dir):
             os.makedirs(subdir_dir)
-        raw_repodata_path = os.path.join(subdir_dir, "repodata_from_packages.json.bz2")
+        raw_repodata_path = os.path.join(
+            subdir_dir, "repodata_from_packages.json.bz2"
+        )
         ref_repodata_path = os.path.join(subdir_dir, "repodata.json.bz2")
         if not args.use_cache:
             download_subdir(subdir, raw_repodata_path, ref_repodata_path)
-- 
GitLab