diff --git a/bob/__init__.py b/bob/__init__.py
index 2ab1e28b150f0549def9963e9e87de3fdd6b2579..edbb4090fca046b19d22d3982711084621bff3be 100644
--- a/bob/__init__.py
+++ b/bob/__init__.py
@@ -1,3 +1,4 @@
 # see https://docs.python.org/3/library/pkgutil.html
 from pkgutil import extend_path
+
 __path__ = extend_path(__path__, __name__)
diff --git a/bob/devtools/bootstrap.py b/bob/devtools/bootstrap.py
index 1f9f23cabab04d66bd12d05278c3cf4ef6682c52..135d72eefc11d9481c8da22bf389f1fd003e0a09 100644
--- a/bob/devtools/bootstrap.py
+++ b/bob/devtools/bootstrap.py
@@ -2,10 +2,10 @@
 # -*- coding: utf-8 -*-
 
 
-'''Bootstraps a new miniconda installation and prepares it for development'''
+"""Bootstraps a new miniconda installation and prepares it for development."""
 
 
-_BASE_CONDARC = '''\
+_BASE_CONDARC = """\
 default_channels: #!final
   - https://repo.anaconda.com/pkgs/main
   - https://repo.anaconda.com/pkgs/free
@@ -20,18 +20,18 @@ remote_max_retries: 50 #!final
 remote_read_timeout_secs: 180.0 #!final
 channels:
   - defaults
-'''
+"""
 
-_SERVER = 'http://www.idiap.ch'
+_SERVER = "http://www.idiap.ch"
 
 _INTERVALS = (
-    ('weeks', 604800),  # 60 * 60 * 24 * 7
-    ('days', 86400),    # 60 * 60 * 24
-    ('hours', 3600),    # 60 * 60
-    ('minutes', 60),
-    ('seconds', 1),
-    )
-'''Time intervals that make up human readable time slots'''
+    ("weeks", 604800),  # 60 * 60 * 24 * 7
+    ("days", 86400),  # 60 * 60 * 24
+    ("hours", 3600),  # 60 * 60
+    ("minutes", 60),
+    ("seconds", 1),
+)
+"""Time intervals that make up human readable time slots"""
 
 
 import os
@@ -43,18 +43,19 @@ import platform
 import subprocess
 
 import logging
+
 logger = logging.getLogger(__name__)
 
 
 def set_environment(name, value, env=os.environ):
-    '''Function to setup the environment variable and print debug message
+    """Function to setup the environment variable and print debug message.
 
     Args:
 
       name: The name of the environment variable to set
       value: The value to set the environment variable to
       env: Optional environment (dictionary) where to set the variable at
-    '''
+    """
 
     env[name] = value
     logger.info('environ["%s"] = %s', name, value)
@@ -62,382 +63,447 @@ def set_environment(name, value, env=os.environ):
 
 
 def human_time(seconds, granularity=2):
-  '''Returns a human readable time string like "1 day, 2 hours"'''
-
-  result = []
+    """Returns a human readable time string like "1 day, 2 hours"."""
+
+    result = []
+
+    for name, count in _INTERVALS:
+        value = seconds // count
+        if value:
+            seconds -= value * count
+            if value == 1:
+                name = name.rstrip("s")
+            result.append("{} {}".format(int(value), name))
+        else:
+            # Add a blank if we're in the middle of other values
+            if len(result) > 0:
+                result.append(None)
+
+    if not result:
+        if seconds < 1.0:
+            return "%.2f seconds" % seconds
+        else:
+            if seconds == 1:
+                return "1 second"
+            else:
+                return "%d seconds" % seconds
+
+    return ", ".join([x for x in result[:granularity] if x is not None])
 
-  for name, count in _INTERVALS:
-    value = seconds // count
-    if value:
-      seconds -= value * count
-      if value == 1:
-        name = name.rstrip('s')
-      result.append("{} {}".format(int(value), name))
-    else:
-      # Add a blank if we're in the middle of other values
-      if len(result) > 0:
-        result.append(None)
-
-  if not result:
-    if seconds < 1.0:
-      return '%.2f seconds' % seconds
-    else:
-      if seconds == 1:
-        return '1 second'
-      else:
-        return '%d seconds' % seconds
 
-  return ', '.join([x for x in result[:granularity] if x is not None])
+def run_cmdline(cmd, env=None):
+    """Runs a command on a environment, logs output and reports status.
 
+    Parameters:
 
-def run_cmdline(cmd, env=None):
-  '''Runs a command on a environment, logs output and reports status
+      cmd (list): The command to run, with parameters as separate list entries
 
+      env (dict, Optional): Environment to use for running the program. If not
+        set, use :py:obj:`os.environ`.
+    """
 
-  Parameters:
+    if env is None:
+        env = os.environ
 
-    cmd (list): The command to run, with parameters separated on a list
+    logger.info("(system) %s" % " ".join(cmd))
 
-    env (dict, Optional): Environment to use for running the program on. If not
-      set, use :py:obj:`os.environ`.
+    start = time.time()
 
-  '''
+    p = subprocess.Popen(
+        cmd,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        env=env,
+        bufsize=1,
+        universal_newlines=True,
+    )
 
-  if env is None: env = os.environ
+    for line in iter(p.stdout.readline, ""):
+        sys.stdout.write(line)
+        sys.stdout.flush()
 
-  logger.info('(system) %s' % ' '.join(cmd))
+    if p.wait() != 0:
+        raise RuntimeError(
+            "command `%s' exited with error state (%d)"
+            % (" ".join(cmd), p.returncode)
+        )
 
-  start = time.time()
+    total = time.time() - start
 
-  p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
-      env=env, bufsize=1, universal_newlines=True)
+    logger.info("command took %s" % human_time(total))
 
-  for line in iter(p.stdout.readline, ''):
-    sys.stdout.write(line)
-    sys.stdout.flush()
 
-  if p.wait() != 0:
-    raise RuntimeError("command `%s' exited with error state (%d)" % \
-        (' '.join(cmd), p.returncode))
+def touch(path):
+    """Python-implementation of the "touch" command-line application."""
 
-  total = time.time() - start
+    with open(path, "a"):
+        os.utime(path, None)
 
-  logger.info('command took %s' % human_time(total))
 
+def merge_conda_cache(cache, prefix, name):
+    """Merges conda pkg caches and conda-bld folders.
 
+    Args:
 
-def touch(path):
-  '''Python-implementation of the "touch" command-line application'''
+      cache: The cached directory (from previous builds)
+      prefix: The current prefix (root of conda installation)
+      name: The name of the current package
+    """
+
+    pkgs_dir = os.path.join(prefix, "pkgs")
+    pkgs_urls_txt = os.path.join(pkgs_dir, "urls.txt")
+    if not os.path.exists(pkgs_dir):
+        logger.info("mkdir -p %s", pkgs_dir)
+        os.makedirs(pkgs_dir)
+        logger.info("touch %s", pkgs_urls_txt)
+        touch(pkgs_urls_txt)
+
+    # move packages on cache/pkgs to pkgs_dir
+    cached_pkgs_dir = os.path.join(cache, "pkgs")
+    cached_packages = glob.glob(os.path.join(cached_pkgs_dir, "*.tar.bz2"))
+    # note: glob() returns full paths, so compare against the basename
+    cached_packages = [
+        k for k in cached_packages
+        if not os.path.basename(k).startswith(name + "-")
+    ]
+    logger.info("Merging %d cached conda packages", len(cached_packages))
+    for k in cached_packages:
+        dst = os.path.join(pkgs_dir, os.path.basename(k))
+        logger.debug("(move) %s -> %s", k, dst)
+        os.rename(k, dst)
+
+    # merge urls.txt files
+    logger.info("Merging urls.txt files from cache...")
+    urls = []
+    cached_pkgs_urls_txt = os.path.join(cached_pkgs_dir, "urls.txt")
+
+    if not os.path.exists(cached_pkgs_urls_txt):
+        with open(pkgs_urls_txt, "rb") as f1:
+            data = set(f1.readlines())
+            data = sorted(list(data))
+    else:
+        # use both cached and actual conda package caches
+        with open(pkgs_urls_txt, "rb") as f1, open(
+            cached_pkgs_urls_txt, "rb"
+        ) as f2:
+            data = set(f1.readlines() + f2.readlines())
+            data = sorted(list(data))
 
-  with open(path, 'a'):
-    os.utime(path, None)
+    with open(pkgs_urls_txt, "wb") as f:
+        f.writelines(data)
 
+    pkgs_urls = os.path.join(pkgs_dir, "urls")
+    touch(pkgs_urls)
 
-def merge_conda_cache(cache, prefix, name):
-  '''Merges conda pkg caches and conda-bld folders
-
-  Args:
-
-    cache: The cached directory (from previous builds)
-    prefix: The current prefix (root of conda installation)
-    name: The name of the current package
-  '''
-
-  pkgs_dir = os.path.join(prefix, 'pkgs')
-  pkgs_urls_txt = os.path.join(pkgs_dir, 'urls.txt')
-  if not os.path.exists(pkgs_dir):
-    logger.info('mkdir -p %s', pkgs_dir)
-    os.makedirs(pkgs_dir)
-    logger.info('touch %s', pkgs_urls_txt)
-    touch(pkgs_urls_txt)
-
-  # move packages on cache/pkgs to pkgs_dir
-  cached_pkgs_dir = os.path.join(cache, 'pkgs')
-  cached_packages = glob.glob(os.path.join(cached_pkgs_dir, '*.tar.bz2'))
-  cached_packages = [k for k in cached_packages if not \
-      k.startswith(name + '-')]
-  logger.info('Merging %d cached conda packages', len(cached_packages))
-  for k in cached_packages:
-    dst = os.path.join(pkgs_dir, os.path.basename(k))
-    logger.debug('(move) %s -> %s', k, dst)
-    os.rename(k, dst)
-
-  # merge urls.txt files
-  logger.info('Merging urls.txt files from cache...')
-  urls = []
-  cached_pkgs_urls_txt = os.path.join(cached_pkgs_dir, 'urls.txt')
-
-  if not os.path.exists(cached_pkgs_urls_txt):
-    with open(pkgs_urls_txt, 'rb') as f1:
-      data = set(f1.readlines())
-      data = sorted(list(data))
-  else:
-    # use both cached and actual conda package caches
-    with open(pkgs_urls_txt, 'rb') as f1, \
-        open(cached_pkgs_urls_txt, 'rb') as f2:
-      data = set(f1.readlines() + f2.readlines())
-      data = sorted(list(data))
-
-  with open(pkgs_urls_txt, 'wb') as f:
-    f.writelines(data)
-
-  pkgs_urls = os.path.join(pkgs_dir, 'urls')
-  touch(pkgs_urls)
-
-  # move conda-bld build results
-  cached_conda_bld = os.path.join(cache, 'conda-bld')
-  if os.path.exists(cached_conda_bld):
-    dst = os.path.join(prefix, 'conda-bld')
-    logger.info('(move) %s -> %s', cached_conda_bld, dst)
-    os.rename(cached_conda_bld, dst)
+    # move conda-bld build results
+    cached_conda_bld = os.path.join(cache, "conda-bld")
+    if os.path.exists(cached_conda_bld):
+        dst = os.path.join(prefix, "conda-bld")
+        logger.info("(move) %s -> %s", cached_conda_bld, dst)
+        os.rename(cached_conda_bld, dst)
 
 
 def get_miniconda_sh():
-  '''Retrieves the miniconda3 installer for the current system'''
+    """Retrieves the miniconda3 installer for the current system."""
 
-  import http.client
+    import http.client
 
-  server = 'repo.continuum.io'  #https
-  path = '/miniconda/Miniconda3-4.6.14-%s-x86_64.sh'
-  if platform.system() == 'Darwin':
-    path = path % 'MacOSX'
-  else:
-    path = path % 'Linux'
+    server = "repo.continuum.io"  # https
+    path = "/miniconda/Miniconda3-4.6.14-%s-x86_64.sh"
+    if platform.system() == "Darwin":
+        path = path % "MacOSX"
+    else:
+        path = path % "Linux"
 
-  logger.info('Connecting to https://%s...', server)
-  conn = http.client.HTTPSConnection(server)
-  conn.request("GET", path)
-  r1 = conn.getresponse()
+    logger.info("Connecting to https://%s...", server)
+    conn = http.client.HTTPSConnection(server)
+    conn.request("GET", path)
+    r1 = conn.getresponse()
 
-  assert r1.status == 200, 'Request for https://%s%s - returned status %d ' \
-      '(%s)' % (server, path, r1.status, r1.reason)
+    assert r1.status == 200, (
+        "Request for https://%s%s - returned status %d "
+        "(%s)" % (server, path, r1.status, r1.reason)
+    )
 
-  dst = 'miniconda.sh'
-  logger.info('(download) https://%s%s -> %s...', server, path, dst)
-  with open(dst, 'wb') as f:
-    f.write(r1.read())
+    dst = "miniconda.sh"
+    logger.info("(download) https://%s%s -> %s...", server, path, dst)
+    with open(dst, "wb") as f:
+        f.write(r1.read())
 
 
 def install_miniconda(prefix, name):
-  '''Creates a new miniconda installation
-
-  Args:
+    """Creates a new miniconda installation.
 
-    prefix: The path leading to the (new) root of the miniconda installation
-    name: The name of this package
+    Args:
 
-  '''
+      prefix: The path leading to the (new) root of the miniconda installation
+      name: The name of this package
+    """
 
-  logger.info("Installing miniconda in %s...", prefix)
+    logger.info("Installing miniconda in %s...", prefix)
 
-  if not os.path.exists('miniconda.sh'):  #re-downloads installer
-    get_miniconda_sh()
-  else:
-    logger.info("Re-using cached miniconda3 installer")
+    if not os.path.exists("miniconda.sh"):  # re-downloads installer
+        get_miniconda_sh()
+    else:
+        logger.info("Re-using cached miniconda3 installer")
 
-  cached = None
-  if os.path.exists(prefix):  #this is the previous cache, move it
-    cached = prefix + '.cached'
-    if os.path.exists(cached):
-      logger.info('(rmtree) %s', cached)
-      shutil.rmtree(cached)
-    logger.info('(move) %s -> %s', prefix, cached)
-    os.rename(prefix, cached)
+    cached = None
+    if os.path.exists(prefix):  # this is the previous cache, move it
+        cached = prefix + ".cached"
+        if os.path.exists(cached):
+            logger.info("(rmtree) %s", cached)
+            shutil.rmtree(cached)
+        logger.info("(move) %s -> %s", prefix, cached)
+        os.rename(prefix, cached)
 
-  run_cmdline(['bash', 'miniconda.sh', '-b', '-p', prefix])
-  if cached is not None:
-    merge_conda_cache(cached, prefix, name)
-    shutil.rmtree(cached)
+    run_cmdline(["bash", "miniconda.sh", "-b", "-p", prefix])
+    if cached is not None:
+        merge_conda_cache(cached, prefix, name)
+        shutil.rmtree(cached)
 
 
 def get_channels(public, stable, server, intranet, group):
-  '''Returns the relevant conda channels to consider if building project
-
-  The subset of channels to be returned depends on the visibility and stability
-  of the package being built.  Here are the rules:
+    """Returns the relevant conda channels to consider if building project.
 
-  * public and stable: only returns the public stable channel(s)
-  * public and not stable: returns both public stable and beta channels
-  * not public and stable: returns both public and private stable channels
-  * not public and not stable: returns all channels
+    The subset of channels to be returned depends on the visibility and stability
+    of the package being built.  Here are the rules:
 
-  Beta channels have priority over stable channels, if returned.  Private
-  channels have priority over public channles, if turned.
+    * public and stable: only returns the public stable channel(s)
+    * public and not stable: returns both public stable and beta channels
+    * not public and stable: returns both public and private stable channels
+    * not public and not stable: returns all channels
 
+    Beta channels have priority over stable channels, if returned.  Private
+    channels have priority over public channels, if returned.
 
-  Args:
 
-    public: Boolean indicating if we're supposed to include only public
-      channels
-    stable: Boolean indicating if we're supposed to include only stable
-      channels
-    server: The base address of the server containing our conda channels
-    intranet: Boolean indicating if we should add "private"/"public" prefixes
-      on the conda paths
-    group: The group of packages (gitlab namespace) the package we're compiling
-      is part of.  Values should match URL namespaces currently available on
-      our internal webserver.  Currently, only "bob" or "beat" will work.
+    Args:
 
+      public: Boolean indicating if we're supposed to include only public
+        channels
+      stable: Boolean indicating if we're supposed to include only stable
+        channels
+      server: The base address of the server containing our conda channels
+      intranet: Boolean indicating if we should add "private"/"public" prefixes
+        on the conda paths
+      group: The group of packages (gitlab namespace) the package we're compiling
+        is part of.  Values should match URL namespaces currently available on
+        our internal webserver.  Currently, only "bob" or "beat" will work.
 
-  Returns: a list of channels that need to be considered.
 
-  '''
+    Returns: a list of channels that need to be considered.
+    """
 
-  if (not public) and (not intranet):
-    raise RuntimeError('You cannot request for private channels and set' \
-        ' intranet=False (server=%s) - these are conflicting options' % server)
+    if (not public) and (not intranet):
+        raise RuntimeError(
+            "You cannot request for private channels and set"
+            " intranet=False (server=%s) - these are conflicting options"
+            % server
+        )
 
-  channels = []
+    channels = []
 
-  if not public:
-    prefix = '/private'
-    if not stable:  #allowed private channels
-      channels += [server + prefix + '/conda/label/beta']  #allowed betas
-    channels += [server + prefix + '/conda']
+    if not public:
+        prefix = "/private"
+        if not stable:  # allowed private channels
+            channels += [server + prefix + "/conda/label/beta"]  # allowed betas
+        channels += [server + prefix + "/conda"]
 
-  # do not use '/public' versions here
-  prefix = '/software/' + group
-  if not stable:
-    channels += [server + prefix + '/conda/label/beta']  #allowed betas
-  channels += [server + prefix + '/conda']
+    # do not use '/public' versions here
+    prefix = "/software/" + group
+    if not stable:
+        channels += [server + prefix + "/conda/label/beta"]  # allowed betas
+    channels += [server + prefix + "/conda"]
 
-  return channels
+    return channels
 
 
 def setup_logger(logger, level):
-  '''Sets-up the logging for this command at level ``INFO``'''
-
-  warn_err = logging.StreamHandler(sys.stderr)
-  warn_err.setLevel(logging.WARNING)
-  logger.addHandler(warn_err)
-
-  # debug and info messages are written to sys.stdout
-
-  class _InfoFilter:
-    def filter(self, record):
-      return record.levelno <= logging.INFO
-
-  debug_info = logging.StreamHandler(sys.stdout)
-  debug_info.setLevel(logging.DEBUG)
-  debug_info.addFilter(_InfoFilter())
-  logger.addHandler(debug_info)
-
-  formatter = logging.Formatter('%(levelname)s@%(asctime)s: %(message)s')
-
-  for handler in logger.handlers:
-    handler.setFormatter(formatter)
-
-  if level not in range(0, 4):
-    raise ValueError(
-        "The verbosity level %d does not exist. Please reduce the number of "
-        "'--verbose' parameters in your command line" % level)
-  # set up the verbosity level of the logging system
-  log_level = {
-      0: logging.ERROR,
-      1: logging.WARNING,
-      2: logging.INFO,
-      3: logging.DEBUG
-  }[level]
-
-  logger.setLevel(log_level)
-
-
-if __name__ == '__main__':
-
-  import argparse
-
-  parser = argparse.ArgumentParser(description='Bootstraps a new miniconda ' \
-      'installation and prepares it for development')
-  parser.add_argument('command', choices=['build', 'local', 'channel'],
-      help='How to prepare the current environment. Use: ``build``, to ' \
-          'build bob.devtools, ``local``, to bootstrap deploy or pypi ' \
-          'stages for bob.devtools builds, ``channel`` channel to bootstrap ' \
-          'CI environment for beta/stable builds')
-  parser.add_argument('envname', nargs='?', default='bdt',
-      help='The name of the conda environment that will host bdt ' \
-          '[default: %(default)s]')
-  parser.add_argument('-n', '--name',
-      default=os.environ.get('CI_PROJECT_NAME', 'bob.devtools'),
-      help='The name of the project being built [default: %(default)s]')
-  parser.add_argument('-c', '--conda-root',
-      default=os.environ.get('CONDA_ROOT',
-        os.path.realpath(os.path.join(os.curdir, 'miniconda'))),
-      help='The location where we should install miniconda ' \
-          '[default: %(default)s]')
-  parser.add_argument('-t', '--tag',
-      default=os.environ.get('CI_COMMIT_TAG', None),
-      help='If building a tag, pass it with this flag [default: %(default)s]')
-  parser.add_argument('--verbose', '-v', action='count', default=0,
-      help='Increases the verbosity level.  We always prints error and ' \
-          'critical messages. Use a single ``-v`` to enable warnings, ' \
-          'two ``-vv`` to enable information messages and three ``-vvv`` ' \
-          'to enable debug messages [default: %(default)s]')
-
-  args = parser.parse_args()
-
-  setup_logger(logger, args.verbose)
-
-  install_miniconda(args.conda_root, args.name)
-  conda_bin = os.path.join(args.conda_root, 'bin', 'conda')
-
-  # creates the condarc file
-  condarc = os.path.join(args.conda_root, 'condarc')
-  logger.info('(create) %s', condarc)
-  with open(condarc, 'wt') as f:
-    f.write(_BASE_CONDARC)
-
-  conda_version = '4'
-  conda_build_version = '3.16'
-  conda_verify_version = '3'
-
-  conda_verbosity = []
-  #if args.verbose >= 2:
-  #  conda_verbosity = ['-v']
-  if args.verbose >= 3:
-    conda_verbosity = ['-vv']
-
-  if args.command == 'build':
-
-    # simple - just use the defaults channels when self building
-    run_cmdline([conda_bin, 'install'] + conda_verbosity + ['-n', 'base',
-      'python',
-      'conda=%s' % conda_version,
-      'conda-build=%s' % conda_build_version,
-      'conda-verify=%s' % conda_verify_version,
-      'twine',  #required for checking readme of python (zip) distro
-      ])
-
-  elif args.command == 'local':
-
-    # index the locally built packages
-    run_cmdline([conda_bin, 'install'] + conda_verbosity + ['-n', 'base',
-      'python',
-      'conda=%s' % conda_version,
-      'conda-build=%s' % conda_build_version,
-      'conda-verify=%s' % conda_verify_version,
-      'twine',  #required for checking readme of python (zip) distro
-      ])
-    conda_bld_path = os.path.join(args.conda_root, 'conda-bld')
-    run_cmdline([conda_bin, 'index', conda_bld_path])
-    channels = get_channels(public=True, stable=True, server=_SERVER,
-        intranet=True, group='bob') + ['defaults']
-    channels = ['--override-channels'] + \
-        ['--channel=' + conda_bld_path] + \
-        ['--channel=%s' % k for k in channels]
-    conda_cmd = 'install' if args.envname in ('base', 'root') else 'create'
-    run_cmdline([conda_bin, conda_cmd] + conda_verbosity + channels + \
-        ['-n', args.envname, 'bob.devtools'])
-
-  elif args.command == 'channel':
-
-    # installs from channel
-    channels = get_channels(public=True,
-        stable=(args.tag is not None),
-        server=_SERVER, intranet=True, group='bob') + ['defaults']
-    channels = ['--override-channels'] + ['--channel=%s' % k for k in channels]
-    conda_cmd = 'install' if args.envname in ('base', 'root') else 'create'
-    run_cmdline([conda_bin, conda_cmd] + conda_verbosity + channels + \
-        ['-n', args.envname, 'bob.devtools'])
-
-  # print conda information for debugging purposes
-  run_cmdline([conda_bin, 'info'] + conda_verbosity)
+    """Sets-up the logging for this command at level ``INFO``"""
+
+    warn_err = logging.StreamHandler(sys.stderr)
+    warn_err.setLevel(logging.WARNING)
+    logger.addHandler(warn_err)
+
+    # debug and info messages are written to sys.stdout
+
+    class _InfoFilter:
+        def filter(self, record):
+            return record.levelno <= logging.INFO
+
+    debug_info = logging.StreamHandler(sys.stdout)
+    debug_info.setLevel(logging.DEBUG)
+    debug_info.addFilter(_InfoFilter())
+    logger.addHandler(debug_info)
+
+    formatter = logging.Formatter("%(levelname)s@%(asctime)s: %(message)s")
+
+    for handler in logger.handlers:
+        handler.setFormatter(formatter)
+
+    if level not in range(0, 4):
+        raise ValueError(
+            "The verbosity level %d does not exist. Please reduce the number of "
+            "'--verbose' parameters in your command line" % level
+        )
+    # set up the verbosity level of the logging system
+    log_level = {
+        0: logging.ERROR,
+        1: logging.WARNING,
+        2: logging.INFO,
+        3: logging.DEBUG,
+    }[level]
+
+    logger.setLevel(log_level)
+
+
+if __name__ == "__main__":
+
+    import argparse
+
+    parser = argparse.ArgumentParser(
+        description="Bootstraps a new miniconda "
+        "installation and prepares it for development"
+    )
+    parser.add_argument(
+        "command",
+        choices=["build", "local", "channel"],
+        help="How to prepare the current environment. Use: ``build``, to "
+        "build bob.devtools, ``local``, to bootstrap deploy or pypi "
+        "stages for bob.devtools builds, ``channel`` channel to bootstrap "
+        "CI environment for beta/stable builds",
+    )
+    parser.add_argument(
+        "envname",
+        nargs="?",
+        default="bdt",
+        help="The name of the conda environment that will host bdt "
+        "[default: %(default)s]",
+    )
+    parser.add_argument(
+        "-n",
+        "--name",
+        default=os.environ.get("CI_PROJECT_NAME", "bob.devtools"),
+        help="The name of the project being built [default: %(default)s]",
+    )
+    parser.add_argument(
+        "-c",
+        "--conda-root",
+        default=os.environ.get(
+            "CONDA_ROOT", os.path.realpath(os.path.join(os.curdir, "miniconda"))
+        ),
+        help="The location where we should install miniconda "
+        "[default: %(default)s]",
+    )
+    parser.add_argument(
+        "-t",
+        "--tag",
+        default=os.environ.get("CI_COMMIT_TAG", None),
+        help="If building a tag, pass it with this flag [default: %(default)s]",
+    )
+    parser.add_argument(
+        "--verbose",
+        "-v",
+        action="count",
+        default=0,
+        help="Increases the verbosity level.  We always prints error and "
+        "critical messages. Use a single ``-v`` to enable warnings, "
+        "two ``-vv`` to enable information messages and three ``-vvv`` "
+        "to enable debug messages [default: %(default)s]",
+    )
+
+    args = parser.parse_args()
+
+    setup_logger(logger, args.verbose)
+
+    install_miniconda(args.conda_root, args.name)
+    conda_bin = os.path.join(args.conda_root, "bin", "conda")
+
+    # creates the condarc file
+    condarc = os.path.join(args.conda_root, "condarc")
+    logger.info("(create) %s", condarc)
+    with open(condarc, "wt") as f:
+        f.write(_BASE_CONDARC)
+
+    conda_version = "4"
+    conda_build_version = "3.16"
+    conda_verify_version = "3"
+
+    conda_verbosity = []
+    # if args.verbose >= 2:
+    #  conda_verbosity = ['-v']
+    if args.verbose >= 3:
+        conda_verbosity = ["-vv"]
+
+    if args.command == "build":
+
+        # simple - just use the defaults channels when self building
+        run_cmdline(
+            [conda_bin, "install"]
+            + conda_verbosity
+            + [
+                "-n",
+                "base",
+                "python",
+                "conda=%s" % conda_version,
+                "conda-build=%s" % conda_build_version,
+                "conda-verify=%s" % conda_verify_version,
+                "twine",  # required for checking readme of python (zip) distro
+            ]
+        )
+
+    elif args.command == "local":
+
+        # index the locally built packages
+        run_cmdline(
+            [conda_bin, "install"]
+            + conda_verbosity
+            + [
+                "-n",
+                "base",
+                "python",
+                "conda=%s" % conda_version,
+                "conda-build=%s" % conda_build_version,
+                "conda-verify=%s" % conda_verify_version,
+                "twine",  # required for checking readme of python (zip) distro
+            ]
+        )
+        conda_bld_path = os.path.join(args.conda_root, "conda-bld")
+        run_cmdline([conda_bin, "index", conda_bld_path])
+        channels = get_channels(
+            public=True, stable=True, server=_SERVER, intranet=True, group="bob"
+        ) + ["defaults"]
+        channels = (
+            ["--override-channels"]
+            + ["--channel=" + conda_bld_path]
+            + ["--channel=%s" % k for k in channels]
+        )
+        conda_cmd = "install" if args.envname in ("base", "root") else "create"
+        run_cmdline(
+            [conda_bin, conda_cmd]
+            + conda_verbosity
+            + channels
+            + ["-n", args.envname, "bob.devtools"]
+        )
+
+    elif args.command == "channel":
+
+        # installs from channel
+        channels = get_channels(
+            public=True,
+            stable=(args.tag is not None),
+            server=_SERVER,
+            intranet=True,
+            group="bob",
+        ) + ["defaults"]
+        channels = ["--override-channels"] + [
+            "--channel=%s" % k for k in channels
+        ]
+        conda_cmd = "install" if args.envname in ("base", "root") else "create"
+        run_cmdline(
+            [conda_bin, conda_cmd]
+            + conda_verbosity
+            + channels
+            + ["-n", args.envname, "bob.devtools"]
+        )
+
+    # print conda information for debugging purposes
+    run_cmdline([conda_bin, "info"] + conda_verbosity)
diff --git a/bob/devtools/build.py b/bob/devtools/build.py
index 3802741a5fc1a1129fba749e864381017a261037..1ad84e52868c1259681b57adf68d3a5163a7a412 100644
--- a/bob/devtools/build.py
+++ b/bob/devtools/build.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-'''Tools for self-building and other utilities'''
+"""Tools for self-building and other utilities."""
 
 
 import os
@@ -14,6 +14,7 @@ import platform
 import subprocess
 
 import logging
+
 logger = logging.getLogger(__name__)
 
 import yaml
@@ -21,682 +22,819 @@ import distutils.version
 import conda_build.api
 
 
+def comment_cleanup(lines):
+    """Cleans-up comments and empty lines from textual data read from files."""
+
+    no_comments = [k.partition("#")[0].strip() for k in lines]
+    return [k for k in no_comments if k]
+
+
+def load_order_file(path):
+    """Loads an order.txt style file, removes empty lines and comments."""
+
+    with open(path, "rt") as f:
+        return comment_cleanup(f.readlines())
+
+
 def conda_arch():
-  """Returns the current OS name and architecture as recognized by conda"""
+    """Returns the current OS name and architecture as recognized by conda."""
 
-  r = 'unknown'
-  if platform.system().lower() == 'linux':
-    r = 'linux'
-  elif platform.system().lower() == 'darwin':
-    r = 'osx'
-  else:
-    raise RuntimeError('Unsupported system "%s"' % platform.system())
+    r = "unknown"
+    if platform.system().lower() == "linux":
+        r = "linux"
+    elif platform.system().lower() == "darwin":
+        r = "osx"
+    else:
+        raise RuntimeError('Unsupported system "%s"' % platform.system())
 
-  if platform.machine().lower() == 'x86_64':
-    r += '-64'
-  else:
-    raise RuntimeError('Unsupported machine type "%s"' % platform.machine())
+    if platform.machine().lower() == "x86_64":
+        r += "-64"
+    else:
+        raise RuntimeError('Unsupported machine type "%s"' % platform.machine())
 
-  return r
+    return r
 
 
 def should_skip_build(metadata_tuples):
-  """Takes the output of render_recipe as input and evaluates if this
-  recipe's build should be skipped.
-  """
+    """Takes the output of render_recipe as input and evaluates if this
+    recipe's build should be skipped."""
 
-  return all(m[0].skip() for m in metadata_tuples)
+    return all(m[0].skip() for m in metadata_tuples)
 
 
 def next_build_number(channel_url, basename):
-  """Calculates the next build number of a package given the channel
-
-  This function returns the next build number (integer) for a package given its
-  resulting tarball base filename (can be obtained with
-  :py:func:`get_output_path`).
-
-
-  Args:
-
-    channel_url: The URL where to look for packages clashes (normally a beta
-      channel)
-    basename: The tarball basename to check on the channel
-
-  Returns: The next build number with the current configuration.  Zero (0) is
-  returned if no match is found.  Also returns the URLs of the packages it
-  finds with matches on the name, version and python-version, ordered by
-  (reversed) build-number.
-
-  """
-
-  from conda.exports import get_index
-
-  # get the channel index
-  logger.debug('Downloading channel index from %s', channel_url)
-  index = get_index(channel_urls=[channel_url], prepend=False)
-
-  # remove .tar.bz2 from name, then split from the end twice, on '-'
-  name, version, build = basename[:-8].rsplit('-', 2)
-
-  # remove the build number as we're looking for the next value
-  # examples to be coped with:
-  # vlfeat-0.9.20-0 -> '0'
-  # vlfeat-0.9.21-h18fa195_0 -> 'h18fa195_0'
-  # tqdm-4.11.1-py36_0 -> 'py36_0'
-  # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27haf68d3b_0'
-  # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36haf68d3b_0'
-  build_variant = build.rsplit('_', 1)[0]
-  # vlfeat-0.9.20-0 -> '0'
-  # vlfeat-0.9.21-h18fa195_0 -> 'h18fa195'
-  # tqdm-4.11.1-py36_0 -> 'py36'
-  # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27haf68d3b'
-  # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36haf68d3b'
-  build_variant = build_variant.split('h', 1)[0]
-  # vlfeat-0.9.20-0 -> '0'
-  # vlfeat-0.9.21-h18fa195_0 -> ''
-  # tqdm-4.11.1-py36_0 -> 'py36'
-  # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27'
-  # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36'
-  if re.match('^[0-9]+$', build_variant) is not None: build_variant = ''
-
-  # search if package with the same characteristics
-  urls = {}
-  build_number = 0
-  for dist in index:
-    if dist.name == name and dist.version == version and \
-        dist.build_string.startswith(build_variant):  #match!
-      url = index[dist].url
-      logger.debug("Found match at %s for %s-%s-%s", url,
-          name, version, build_variant)
-      build_number = max(build_number, dist.build_number + 1)
-      urls[index[dist].timestamp] = url.replace(channel_url, '')
-
-  sorted_urls = [urls[k] for k in reversed(list(urls.keys()))]
-
-  return build_number, sorted_urls
+    """Calculates the next build number of a package given the channel.
+
+    This function returns the next build number (integer) for a package given its
+    resulting tarball base filename (can be obtained with
+    :py:func:`get_output_path`).
+
+
+    Args:
+
+      channel_url: The URL where to look for package clashes (normally a beta
+        channel)
+      basename: The tarball basename to check on the channel
+
+    Returns: The next build number with the current configuration.  Zero (0) is
+    returned if no match is found.  Also returns the URLs of the packages it
+    finds with matches on the name, version and python-version, ordered by
+    (reversed) build-number.
+    """
+
+    from conda.exports import get_index
+
+    # get the channel index
+    logger.debug("Downloading channel index from %s", channel_url)
+    index = get_index(channel_urls=[channel_url], prepend=False)
+
+    # remove .tar.bz2 from name, then split from the end twice, on '-'
+    name, version, build = basename[:-8].rsplit("-", 2)
+
+    # remove the build number as we're looking for the next value
+    # examples to be coped with:
+    # vlfeat-0.9.20-0 -> '0'
+    # vlfeat-0.9.21-h18fa195_0 -> 'h18fa195_0'
+    # tqdm-4.11.1-py36_0 -> 'py36_0'
+    # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27haf68d3b_0'
+    # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36haf68d3b_0'
+    build_variant = build.rsplit("_", 1)[0]
+    # vlfeat-0.9.20-0 -> '0'
+    # vlfeat-0.9.21-h18fa195_0 -> 'h18fa195'
+    # tqdm-4.11.1-py36_0 -> 'py36'
+    # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27haf68d3b'
+    # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36haf68d3b'
+    build_variant = build_variant.split("h", 1)[0]
+    # vlfeat-0.9.20-0 -> '0'
+    # vlfeat-0.9.21-h18fa195_0 -> ''
+    # tqdm-4.11.1-py36_0 -> 'py36'
+    # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27'
+    # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36'
+    if re.match("^[0-9]+$", build_variant) is not None:
+        build_variant = ""
+
+    # search if package with the same characteristics
+    urls = {}
+    build_number = 0
+    for dist in index:
+        if (
+            dist.name == name
+            and dist.version == version
+            and dist.build_string.startswith(build_variant)
+        ):  # match!
+            url = index[dist].url
+            logger.debug(
+                "Found match at %s for %s-%s-%s",
+                url,
+                name,
+                version,
+                build_variant,
+            )
+            build_number = max(build_number, dist.build_number + 1)
+            urls[index[dist].timestamp] = url.replace(channel_url, "")
+
+    sorted_urls = [urls[k] for k in reversed(list(urls.keys()))]
+
+    return build_number, sorted_urls
 
 
 def make_conda_config(config, python, append_file, condarc_options):
-  '''Creates a conda configuration for a build merging various sources
+    """Creates a conda configuration for a build merging various sources.
 
-  This function will use the conda-build API to construct a configuration by
-  merging different sources of information.
+    This function will use the conda-build API to construct a configuration by
+    merging different sources of information.
 
-  Args:
+    Args:
 
-    config: Path leading to the ``conda_build_config.yaml`` to use
-    python: The version of python to use for the build as ``x.y`` (e.g.
-      ``3.6``)
-    append_file: Path leading to the ``recipe_append.yaml`` file to use
-    condarc_options: A dictionary (typically read from a condarc YAML file)
-      that contains build and channel options
+      config: Path leading to the ``conda_build_config.yaml`` to use
+      python: The version of python to use for the build as ``x.y`` (e.g.
+        ``3.6``)
+      append_file: Path leading to the ``recipe_append.yaml`` file to use
+      condarc_options: A dictionary (typically read from a condarc YAML file)
+        that contains build and channel options
 
-  Returns: A dictionary containing the merged configuration, as produced by
-  conda-build API's ``get_or_merge_config()`` function.
-  '''
+    Returns: A dictionary containing the merged configuration, as produced by
+    conda-build API's ``get_or_merge_config()`` function.
+    """
 
-  from conda_build.conda_interface import url_path
+    from conda_build.conda_interface import url_path
 
-  retval = conda_build.api.get_or_merge_config(None,
-      variant_config_files=config, python=python,
-      append_sections_file=append_file, **condarc_options)
+    retval = conda_build.api.get_or_merge_config(
+        None,
+        variant_config_files=config,
+        python=python,
+        append_sections_file=append_file,
+        **condarc_options,
+    )
 
-  retval.channel_urls = []
+    retval.channel_urls = []
 
-  for url in condarc_options['channels']:
-    # allow people to specify relative or absolute paths to local channels
-    #    These channels still must follow conda rules - they must have the
-    #    appropriate platform-specific subdir (e.g. win-64)
-    if os.path.isdir(url):
-      if not os.path.isabs(url):
-        url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
-      url = url_path(url)
-    retval.channel_urls.append(url)
+    for url in condarc_options["channels"]:
+        # allow people to specify relative or absolute paths to local channels
+        #    These channels still must follow conda rules - they must have the
+        #    appropriate platform-specific subdir (e.g. win-64)
+        if os.path.isdir(url):
+            if not os.path.isabs(url):
+                url = os.path.normpath(
+                    os.path.abspath(os.path.join(os.getcwd(), url))
+                )
+            url = url_path(url)
+        retval.channel_urls.append(url)
 
-  return retval
+    return retval
 
 
 def get_output_path(metadata, config):
-  '''Renders the recipe and returns the name of the output file'''
+    """Renders the recipe and returns the name of the output file."""
 
-  return conda_build.api.get_output_file_paths(metadata, config=config)[0]
+    return conda_build.api.get_output_file_paths(metadata, config=config)[0]
 
 
 def get_rendered_metadata(recipe_dir, config):
-  '''Renders the recipe and returns the interpreted YAML file'''
+    """Renders the recipe and returns the interpreted YAML file."""
 
-  return conda_build.api.render(recipe_dir, config=config)
+    return conda_build.api.render(recipe_dir, config=config)
 
 
 def get_parsed_recipe(metadata):
-  '''Renders the recipe and returns the interpreted YAML file'''
+    """Renders the recipe and returns the interpreted YAML file."""
 
-  output = conda_build.api.output_yaml(metadata[0][0])
-  return yaml.load(output, Loader=yaml.FullLoader)
+    output = conda_build.api.output_yaml(metadata[0][0])
+    return yaml.load(output, Loader=yaml.FullLoader)
 
 
 def exists_on_channel(channel_url, basename):
-  """Checks on the given channel if a package with the specs exist
-
-  This procedure always ignores the package hash code, if one is set
-
-  Args:
+    """Checks on the given channel if a package with the specs exist.
 
-    channel_url: The URL where to look for packages clashes (normally a beta
-      channel)
-    basename: The basename of the tarball to search for
+    This procedure always ignores the package hash code, if one is set
 
-  Returns: A complete package url, if the package already exists in the channel
-  or ``None`` otherwise.
+    Args:
 
-  """
+      channel_url: The URL where to look for package clashes (normally a beta
+        channel)
+      basename: The basename of the tarball to search for
 
-  build_number, urls = next_build_number(channel_url, basename)
+    Returns: A complete package url, if the package already exists in the channel
+    or ``None`` otherwise.
+    """
 
-  def _get_build_number(name):
+    build_number, urls = next_build_number(channel_url, basename)
 
-    # remove .tar.bz2 from name, then split from the end twice, on '-'
-    name, version, build = name[:-8].rsplit('-', 2)
+    def _get_build_number(name):
 
-    # remove the build number as we're looking for the next value
-    # examples to be coped with:
-    # vlfeat-0.9.20-0 -> '0'
-    # vlfeat-0.9.21-h18fa195_0 -> 'h18fa195_0'
-    # tqdm-4.11.1-py36_0 -> 'py36_0'
-    # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27haf68d3b_0'
-    # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36haf68d3b_0'
-    s = build.rsplit('_', 1)
-    return s[1] if len(s) == 2 else s[0]
+        # remove .tar.bz2 from name, then split from the end twice, on '-'
+        name, version, build = name[:-8].rsplit("-", 2)
 
+        # remove the build number as we're looking for the next value
+        # examples to be coped with:
+        # vlfeat-0.9.20-0 -> '0'
+        # vlfeat-0.9.21-h18fa195_0 -> 'h18fa195_0'
+        # tqdm-4.11.1-py36_0 -> 'py36_0'
+        # websocket-client-0.47.0-py27haf68d3b_0 -> 'py27haf68d3b_0'
+        # websocket-client-0.47.0-py36haf68d3b_0 -> 'py36haf68d3b_0'
+        s = build.rsplit("_", 1)
+        return s[1] if len(s) == 2 else s[0]
 
-  self_build_number = _get_build_number(basename)
-  other_build_numbers = [_get_build_number(os.path.basename(k)) for k in urls]
+    self_build_number = _get_build_number(basename)
+    other_build_numbers = [_get_build_number(os.path.basename(k)) for k in urls]
 
-  if self_build_number in other_build_numbers:
-    return ''.join((channel_url,
-        urls[other_build_numbers.index(self_build_number)]))
+    if self_build_number in other_build_numbers:
+        return "".join(
+            (channel_url, urls[other_build_numbers.index(self_build_number)])
+        )
 
 
 def remove_pins(deps):
-  return [l.split()[0] for l in deps]
+    return [dep.split()[0] for dep in deps]
 
 
 def parse_dependencies(recipe_dir, config):
 
-  metadata = get_rendered_metadata(recipe_dir, config)
-  recipe = get_parsed_recipe(metadata)
-  return remove_pins(recipe['requirements'].get('build', [])) + \
-      remove_pins(recipe['requirements'].get('host', [])) + \
-      recipe['requirements'].get('run', []) + \
-      recipe.get('test', {}).get('requires', []) + \
-      ['bob.buildout', 'mr.developer', 'ipdb']
-      # by last, packages required for local dev
+    metadata = get_rendered_metadata(recipe_dir, config)
+    recipe = get_parsed_recipe(metadata)
+    return (
+        remove_pins(recipe["requirements"].get("build", []))
+        + remove_pins(recipe["requirements"].get("host", []))
+        + recipe["requirements"].get("run", [])
+        + recipe.get("test", {}).get("requires", [])
+        + ["bob.buildout", "mr.developer", "ipdb"]
+    )
+    # the last entry above lists packages required for local development
 
 
 def get_env_directory(conda, name):
-  '''Get the directory of a particular conda environment or fail silently'''
+    """Get the directory of a particular conda environment or fail silently."""
 
-  cmd = [conda, 'env', 'list', '--json']
-  output = subprocess.check_output(cmd)
-  data = json.loads(output)
-  paths = data.get('envs', [])
+    cmd = [conda, "env", "list", "--json"]
+    output = subprocess.check_output(cmd)
+    data = json.loads(output)
+    paths = data.get("envs", [])
 
-  if not paths:
-    # real error condition, reports it at least, but no exception raising...
-    logger.error('No environments in conda (%s) installation?', conda)
-    return None
+    if not paths:
+        # real error condition: report it, but do not raise an exception
+        logger.error("No environments in conda (%s) installation?", conda)
+        return None
 
-  if name in ('base', 'root'):
-    return paths[0]  #first environment is base
+    if name in ("base", "root"):
+        return paths[0]  # first environment is base
 
-  # else, must search for the path ending in ``/name``
-  retval = [k for k in paths if k.endswith(os.sep + name)]
-  if retval:
-    return retval[0]
+    # else, must search for the path ending in ``/name``
+    retval = [k for k in paths if k.endswith(os.sep + name)]
+    if retval:
+        return retval[0]
 
-  # if no environment with said name is found, return ``None``
-  return None
+    # if no environment with said name is found, return ``None``
+    return None
 
 
 def conda_create(conda, name, overwrite, condarc, packages, dry_run, use_local):
-  '''Creates a new conda environment following package specifications
-
-  This command can create a new conda environment following the list of input
-  packages.  It will overwrite an existing environment if indicated.
-
-  Args:
-    conda: path to the main conda executable of the installation
-    name: the name of the environment to create or overwrite
-    overwrite: if set to ```True``, overwrite potentially existing environments
-      with the same name
-    condarc: a dictionary of options for conda, including channel urls
-    packages: the package list specification
-    dry_run: if set, then don't execute anything, just print stuff
-    use_local: include the local conda-bld directory as a possible installation
-      channel (useful for testing multiple interdependent recipes that are
-      built locally)
-  '''
-
-  from .bootstrap import run_cmdline
-
-  specs = []
-  for k in packages:
-    k = ' '.join(k.split()[:2])  # remove eventual build string
-    if any(elem in k for elem in '><|'):
-      specs.append(k.replace(' ', ''))
-    else:
-      specs.append(k.replace(' ', '='))
-
-  # if the current environment exists, delete it first
-  envdir = get_env_directory(conda, name)
-  if envdir is not None:
-    if overwrite:
-      cmd = [conda, 'env', 'remove', '--yes', '--name', name]
-      logger.debug('$ ' + ' '.join(cmd))
-      if not dry_run:
-        run_cmdline(cmd)
-    else:
-      raise RuntimeError('environment `%s\' exists in `%s\' - use '
-                         '--overwrite to overwrite' % (name, envdir))
-
-  cmdline_channels = ['--channel=%s' % k for k in condarc['channels']]
-  cmd = [conda, 'create', '--yes', '--name', name, '--override-channels'] + \
-      cmdline_channels
-  if dry_run:
-    cmd.append('--dry-run')
-  if use_local:
-     cmd.append('--use-local')
-  cmd.extend(sorted(specs))
-  run_cmdline(cmd)
-
-  # creates a .condarc file to sediment the just created environment
-  if not dry_run:
-    # get envdir again - it may just be created!
+    """Creates a new conda environment following package specifications.
+
+    This command can create a new conda environment following the list of input
+    packages.  It will overwrite an existing environment if indicated.
+
+    Args:
+      conda: path to the main conda executable of the installation
+      name: the name of the environment to create or overwrite
+      overwrite: if set to ``True``, overwrite potentially existing environments
+        with the same name
+      condarc: a dictionary of options for conda, including channel urls
+      packages: the package list specification
+      dry_run: if set, then don't execute anything, just print stuff
+      use_local: include the local conda-bld directory as a possible installation
+        channel (useful for testing multiple interdependent recipes that are
+        built locally)
+    """
+
+    from .bootstrap import run_cmdline
+
+    specs = []
+    for k in packages:
+        k = " ".join(k.split()[:2])  # remove eventual build string
+        if any(elem in k for elem in "><|"):
+            specs.append(k.replace(" ", ""))
+        else:
+            specs.append(k.replace(" ", "="))
+
+    # if the current environment exists, delete it first
     envdir = get_env_directory(conda, name)
-    destrc = os.path.join(envdir, 'condarc')
-    logger.info('Creating %s...', destrc)
-    with open(destrc, 'w') as f:
-      yaml.dump(condarc, f, indent=2)
+    if envdir is not None:
+        if overwrite:
+            cmd = [conda, "env", "remove", "--yes", "--name", name]
+            logger.debug("$ " + " ".join(cmd))
+            if not dry_run:
+                run_cmdline(cmd)
+        else:
+            raise RuntimeError(
+                "environment `%s' exists in `%s' - use "
+                "--overwrite to overwrite" % (name, envdir)
+            )
+
+    cmdline_channels = ["--channel=%s" % k for k in condarc["channels"]]
+    cmd = [
+        conda,
+        "create",
+        "--yes",
+        "--name",
+        name,
+        "--override-channels",
+    ] + cmdline_channels
+    if dry_run:
+        cmd.append("--dry-run")
+    if use_local:
+        cmd.append("--use-local")
+    cmd.extend(sorted(specs))
+    run_cmdline(cmd)
+
+    # creates a .condarc file to pin down the just-created environment
+    if not dry_run:
+        # get envdir again - it may have just been created!
+        envdir = get_env_directory(conda, name)
+        destrc = os.path.join(envdir, "condarc")
+        logger.info("Creating %s...", destrc)
+        with open(destrc, "w") as f:
+            yaml.dump(condarc, f, indent=2)
 
 
 def get_docserver_setup(public, stable, server, intranet, group):
-  '''Returns a setup for BOB_DOCUMENTATION_SERVER
+    """Returns a setup for BOB_DOCUMENTATION_SERVER.
 
-  What is available to build the documentation depends on the setup of
-  ``public`` and ``stable``:
+    What is available to build the documentation depends on the setup of
+    ``public`` and ``stable``:
 
-  * public and stable: only returns the public stable channel(s)
-  * public and not stable: returns both public stable and beta channels
-  * not public and stable: returns both public and private stable channels
-  * not public and not stable: returns all channels
+    * public and stable: only returns the public stable channel(s)
+    * public and not stable: returns both public stable and beta channels
+    * not public and stable: returns both public and private stable channels
+    * not public and not stable: returns all channels
 
-  Beta channels have priority over stable channels, if returned.  Private
-  channels have priority over public channles, if turned.
+    Beta channels have priority over stable channels, if returned.  Private
+    channels have priority over public channels, if returned.
 
 
-  Args:
+    Args:
 
-    public: Boolean indicating if we're supposed to include only public
-      channels
-    stable: Boolean indicating if we're supposed to include only stable
-      channels
-    server: The base address of the server containing our conda channels
-    intranet: Boolean indicating if we should add "private"/"public" prefixes
-      on the returned paths
-    group: The group of packages (gitlab namespace) the package we're compiling
-      is part of.  Values should match URL namespaces currently available on
-      our internal webserver.  Currently, only "bob" or "beat" will work.
+      public: Boolean indicating if we're supposed to include only public
+        channels
+      stable: Boolean indicating if we're supposed to include only stable
+        channels
+      server: The base address of the server containing our conda channels
+      intranet: Boolean indicating if we should add "private"/"public" prefixes
+        on the returned paths
+      group: The group of packages (gitlab namespace) the package we're compiling
+        is part of.  Values should match URL namespaces currently available on
+        our internal webserver.  Currently, only "bob" or "beat" will work.
 
 
-  Returns: a string to be used by bob.extension to find dependent
-  documentation projects.
+    Returns: a string to be used by bob.extension to find dependent
+    documentation projects.
+    """
 
-  '''
+    if (not public) and (not intranet):
+        raise RuntimeError(
+            "You cannot request for private channels and set"
+            " intranet=False (server=%s) - these are conflicting options"
+            % server
+        )
 
-  if (not public) and (not intranet):
-    raise RuntimeError('You cannot request for private channels and set' \
-        ' intranet=False (server=%s) - these are conflicting options' % server)
-
-  entries = []
-
-  # public documentation: always can access
-  prefix = '/software/%s' % group
-  if stable:
-    entries += [
-        server + prefix + '/docs/' + group + '/%(name)s/%(version)s/',
-        server + prefix + '/docs/' + group + '/%(name)s/stable/',
-        ]
-  else:
-    entries += [
-        server + prefix + '/docs/' + group + '/%(name)s/master/',
-        ]
+    entries = []
 
-  if not public:
-    # add private channels, (notice they are not accessible outside idiap)
-    prefix = '/private'
+    # public documentation: always accessible
+    prefix = "/software/%s" % group
     if stable:
-      entries += [
-          server + prefix + '/docs/' + group + '/%(name)s/%(version)s/',
-          server + prefix + '/docs/' + group + '/%(name)s/stable/',
-          ]
+        entries += [
+            server + prefix + "/docs/" + group + "/%(name)s/%(version)s/",
+            server + prefix + "/docs/" + group + "/%(name)s/stable/",
+        ]
     else:
-      entries += [
-          server + prefix + '/docs/' + group + '/%(name)s/master/',
-          ]
-
-  return '|'.join(entries)
-
-
-def check_version(workdir, envtag):
-  '''Checks if the version being built and the value reported match
-
-  This method will read the contents of the file ``version.txt`` and compare it
-  to the potentially set ``envtag`` (may be ``None``).  If the value of
-  ``envtag`` is different than ``None``, ensure it matches the value in
-  ``version.txt`` or raises an exception.
-
+        entries += [server + prefix + "/docs/" + group + "/%(name)s/master/"]
 
-  Args:
+    if not public:
+        # add private channels (notice they are not accessible outside Idiap)
+        prefix = "/private"
+        if stable:
+            entries += [
+                server + prefix + "/docs/" + group + "/%(name)s/%(version)s/",
+                server + prefix + "/docs/" + group + "/%(name)s/stable/",
+            ]
+        else:
+            entries += [
+                server + prefix + "/docs/" + group + "/%(name)s/master/"
+            ]
 
-    workdir: The work directory where the repo of the package being built was
-      checked-out
-    envtag: (optional) tag provided by the environment
+    return "|".join(entries)
 
 
-  Returns: A tuple with the version of the package that we're currently
-  building and a boolean flag indicating if the version number represents a
-  pre-release or a stable release.
-  '''
-
-  version = open(os.path.join(workdir, "version.txt"), 'rt').read().rstrip()
-
-  # if we're building a stable release, ensure a tag is set
-  parsed_version = distutils.version.LooseVersion(version).version
-  is_prerelease = any([isinstance(k, str) for k in parsed_version])
-  if is_prerelease:
-    if envtag is not None:
-      raise EnvironmentError('"version.txt" indicates version is a ' \
-          'pre-release (v%s) - but environment provided tag "%s", ' \
-          'which indicates this is a **stable** build. ' \
-          'Have you created the tag using ``bdt release``?' % (version,
-          envtag))
-  else:  #it is a stable build
-    if envtag is None:
-      raise EnvironmentError('"version.txt" indicates version is a ' \
-          'stable build (v%s) - but there is **NO** tag environment ' \
-          'variable defined, which indicates this is **not** ' \
-          'a tagged build. Use ``bdt release`` to create stable releases' % \
-          (version,))
-    if envtag[1:] != version:
-      raise EnvironmentError('"version.txt" and the value of ' \
-          'the provided tag do **NOT** agree - the former ' \
-          'reports version %s, the latter, %s' % (version, envtag[1:]))
-
-  return version, is_prerelease
+def check_version(workdir, envtag):
+    """Checks if the version being built and the value reported match.
+
+    This method reads the contents of the file ``version.txt`` and compares
+    it to the potentially set ``envtag`` (which may be ``None``).  If
+    ``envtag`` is not ``None``, it must match the value in ``version.txt``,
+    otherwise an exception is raised.
+
+
+    Args:
+
+      workdir: The work directory where the repo of the package being built was
+        checked-out
+      envtag: (optional) tag provided by the environment
+
+
+    Returns: A tuple with the version of the package that we're currently
+    building and a boolean flag indicating if the version number represents a
+    pre-release or a stable release.
+    """
+
+    with open(os.path.join(workdir, "version.txt"), "rt") as f:
+        version = f.read().rstrip()
+
+    # if we're building a stable release, ensure a tag is set
+    parsed_version = distutils.version.LooseVersion(version).version
+    is_prerelease = any([isinstance(k, str) for k in parsed_version])
+    if is_prerelease:
+        if envtag is not None:
+            raise EnvironmentError(
+                '"version.txt" indicates version is a '
+                'pre-release (v%s) - but environment provided tag "%s", '
+                "which indicates this is a **stable** build. "
+                "Have you created the tag using ``bdt release``?"
+                % (version, envtag)
+            )
+    else:  # it is a stable build
+        if envtag is None:
+            raise EnvironmentError(
+                '"version.txt" indicates version is a '
+                "stable build (v%s) - but there is **NO** tag environment "
+                "variable defined, which indicates this is **not** "
+                "a tagged build. Use ``bdt release`` to create stable releases"
+                % (version,)
+            )
+        if envtag[1:] != version:
+            raise EnvironmentError(
+                '"version.txt" and the value of '
+                "the provided tag do **NOT** agree - the former "
+                "reports version %s, the latter, %s" % (version, envtag[1:])
+            )
+
+    return version, is_prerelease
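
A hedged sketch of the pre-release test above: ``LooseVersion`` splits the
version string into numeric and string components, and any string component
(such as the "b" of a beta) marks a pre-release.

    import distutils.version

    for candidate in ("2.0.0", "2.0.0b3"):
        parsed = distutils.version.LooseVersion(candidate).version
        prerelease = any(isinstance(k, str) for k in parsed)
        # "2.0.0"   -> [2, 0, 0]         -> stable
        # "2.0.0b3" -> [2, 0, 0, "b", 3] -> pre-release
        print(candidate, parsed, "pre-release" if prerelease else "stable")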
 
 
 def git_clean_build(runner, verbose):
-  '''Runs git-clean to clean-up build products
-
-  Args:
-
-    runner: A pointer to the ``run_cmdline()`` function
-    verbose: A boolean flag indicating if the git command should report erased
-      files or not
-
-  '''
-
-  # glob wild card entries we'd like to keep
-  exclude_from_cleanup = [
-      "miniconda.sh",   #the installer, cached
-      "miniconda/pkgs/urls.txt",  #download index, cached
-      "sphinx",  #build artifact -- documentation
-      ]
-
-  # cache
-  exclude_from_cleanup += ["miniconda/pkgs/"]
-
-  # artifacts
-  exclude_from_cleanup += ["miniconda/conda-bld/"]
-  exclude_from_cleanup += glob.glob("dist/*.zip")
-
-  logger.debug('Excluding the following paths from git-clean:\n  - %s',
-      '  - '.join(exclude_from_cleanup))
-
-  # decide on verbosity
-  flags = '-ffdx'
-  if not verbose: flags += 'q'
-
-  runner(['git', 'clean', flags] + \
-      ['--exclude=%s' % k for k in exclude_from_cleanup])
-
-
-def base_build(bootstrap, server, intranet, group, recipe_dir,
-    conda_build_config, python_version, condarc_options):
-  '''Builds a non-beat/non-bob software dependence that doesn't exist on defaults
-
-  This function will build a software dependence that is required for our
-  software stack, but does not (yet) exist on the defaults channels.  It first
-  check if the build should run for the current architecture, checks if the
-  package is not already built on our public channel and, if that is true, then
-  proceeds with the build of the dependence.
-
-
-  Args:
-
-    bootstrap: Module that should be pre-loaded so this function can be used
-      in a pre-bdt build
-    server: The base address of the server containing our conda channels
-    intranet: Boolean indicating if we should add "private"/"public" prefixes
-      on the returned paths
-    group: The group of packages (gitlab namespace) the package we're compiling
-      is part of.  Values should match URL namespaces currently available on
-      our internal webserver.  Currently, only "bob" or "beat" will work.
-    recipe_dir: The directory containing the recipe's ``meta.yaml`` file
-    conda_build_config: Path to the ``conda_build_config.yaml`` file to use
-    python_version: String with the python version to build for, in the format
-      ``x.y`` (should be passed even if not building a python package).  It
-      can also be set to ``noarch``, or ``None``.  If set to ``None``, then we
-      don't assume there is a python-specific version being built.  If set to
-      ``noarch``, then it is a python package without a specific build.
-    condarc_options: Pre-parsed condarc options loaded from the respective YAML
-      file
-
-
-  Returns:
-
-    list: The list of built packages, as returned by
-    ``conda_build.api.build()``
-
-  '''
-
-  # if you get to this point, tries to build the package
-  public_channels = bootstrap.get_channels(public=True, stable=True,
-    server=server, intranet=intranet, group=group)
-
-  all_channels = public_channels + ['defaults']
-  logger.info('Using the following channels during (potential) build:\n  - %s',
-      '\n  - '.join(all_channels))
-  condarc_options['channels'] = all_channels
-
-  logger.info('Merging conda configuration files...')
-  if python_version not in ('noarch', None):
-    conda_config = make_conda_config(conda_build_config, python_version,
-        None, condarc_options)
-  else:
-    conda_config = make_conda_config(conda_build_config, None, None,
-        condarc_options)
-
-  metadata = get_rendered_metadata(recipe_dir, conda_config)
-
-  # handles different cases as explained on the description of
-  # ``python_version``
-  py_ver = python_version.replace('.', '') if python_version else None
-  if py_ver == 'noarch': py_ver = ''
-  arch = conda_arch()
-
-  # checks we should actually build this recipe
-  if should_skip_build(metadata):
-    if py_ver is None:
-      logger.warn('Skipping UNSUPPORTED build of "%s" on %s', recipe_dir, arch)
-    elif not py_ver:
-      logger.warn('Skipping UNSUPPORTED build of "%s" for (noarch) python ' \
-          'on %s', recipe_dir, arch)
-    else:
-      logger.warn('Skipping UNSUPPORTED build of "%s" for python-%s ' \
-          'on %s', recipe_dir, python_version, arch)
-    return
-
-  path = get_output_path(metadata, conda_config)
-
-  url = exists_on_channel(public_channels[0], os.path.basename(path))
-  if url is not None:
-    logger.info('Skipping build for %s as it exists (at %s)', path, url)
-    return
-
-  # if you get to this point, just builds the package
-  logger.info('Building %s', path)
-  return conda_build.api.build(recipe_dir, config=conda_config)
-
-
-if __name__ == '__main__':
-
-  import argparse
-
-  parser = argparse.ArgumentParser(description='Builds bob.devtools on the CI')
-  parser.add_argument('-g', '--group',
-      default=os.environ.get('CI_PROJECT_NAMESPACE', 'bob'),
-      help='The namespace of the project being built [default: %(default)s]')
-  parser.add_argument('-n', '--name',
-      default=os.environ.get('CI_PROJECT_NAME', 'bob.devtools'),
-      help='The name of the project being built [default: %(default)s]')
-  parser.add_argument('-c', '--conda-root',
-      default=os.environ.get('CONDA_ROOT',
-        os.path.realpath(os.path.join(os.curdir, 'miniconda'))),
-      help='The location where we should install miniconda ' \
-          '[default: %(default)s]')
-  parser.add_argument('-V', '--visibility',
-      choices=['public', 'internal', 'private'],
-      default=os.environ.get('CI_PROJECT_VISIBILITY', 'public'),
-      help='The visibility level for this project [default: %(default)s]')
-  parser.add_argument('-t', '--tag',
-      default=os.environ.get('CI_COMMIT_TAG', None),
-      help='If building a tag, pass it with this flag [default: %(default)s]')
-  parser.add_argument('-w', '--work-dir',
-      default=os.environ.get('CI_PROJECT_DIR', os.path.realpath(os.curdir)),
-      help='The directory where the repo was cloned [default: %(default)s]')
-  parser.add_argument('-p', '--python-version',
-      default=os.environ.get('PYTHON_VERSION', '%d.%d' % sys.version_info[:2]),
-      help='The version of python to build for [default: %(default)s]')
-  parser.add_argument('-T', '--twine-check', action='store_true',
-      default=False, help='If set, then performs the equivalent of a ' \
-          '"twine check" on the generated python package (zip file)')
-  parser.add_argument('--internet', '-i', default=False, action='store_true',
-      help='If executing on an internet-connected server, unset this flag')
-  parser.add_argument('--verbose', '-v', action='count', default=0,
-      help='Increases the verbosity level.  We always prints error and ' \
-          'critical messages. Use a single ``-v`` to enable warnings, ' \
-          'two ``-vv`` to enable information messages and three ``-vvv`` ' \
-          'to enable debug messages [default: %(default)s]')
-
-  args = parser.parse_args()
-
-  # loads the "adjacent" bootstrap module
-  import importlib.util
-  mydir = os.path.dirname(os.path.realpath(sys.argv[0]))
-  bootstrap_file = os.path.join(mydir, 'bootstrap.py')
-  spec = importlib.util.spec_from_file_location("bootstrap", bootstrap_file)
-  bootstrap = importlib.util.module_from_spec(spec)
-  spec.loader.exec_module(bootstrap)
-  server = bootstrap._SERVER
-
-  bootstrap.setup_logger(logger, args.verbose)
-
-  bootstrap.set_environment('DOCSERVER', server)
-  bootstrap.set_environment('LANG', 'en_US.UTF-8')
-  bootstrap.set_environment('LC_ALL', os.environ['LANG'])
-
-  # get information about the version of the package being built
-  version, is_prerelease = check_version(args.work_dir, args.tag)
-  bootstrap.set_environment('BOB_PACKAGE_VERSION', version)
-
-  # create the build configuration
-  conda_build_config = os.path.join(mydir, 'data', 'conda_build_config.yaml')
-  recipe_append = os.path.join(mydir, 'data', 'recipe_append.yaml')
-
-  condarc = os.path.join(args.conda_root, 'condarc')
-  logger.info('Loading (this build\'s) CONDARC file from %s...', condarc)
-  with open(condarc, 'rb') as f:
-    condarc_options = yaml.load(f, Loader=yaml.FullLoader)
-
-  # dump packages at conda_root
-  prefix = get_env_directory(os.environ['CONDA_EXE'], 'base')
-  if condarc_options.get('conda-build', {}).get('root-dir') is None:
-    condarc_options['croot'] = os.path.join(prefix, 'conda-bld')
-
-
-  # builds all dependencies in the 'deps' subdirectory - or at least checks
-  # these dependencies are already available; these dependencies go directly to
-  # the public channel once built
-  for recipe in glob.glob(os.path.join('deps', '*')):
-    if not os.path.exists(os.path.join(recipe, 'meta.yaml')):
-      # ignore - not a conda package
-      continue
-    base_build(bootstrap, server, not args.internet, args.group, recipe,
-        conda_build_config, args.python_version, condarc_options)
-
-  # notice this condarc typically will only contain the defaults channel - we
-  # need to boost this up with more channels to get it right for this package's
-  # build
-  public = ( args.visibility == 'public' )
-  channels = bootstrap.get_channels(public=public, stable=(not is_prerelease),
-      server=server, intranet=(not args.internet), group=args.group)
-  logger.info('Using the following channels during build:\n  - %s',
-      '\n  - '.join(channels + ['defaults']))
-  condarc_options['channels'] = channels + ['defaults']
-
-  logger.info('Merging conda configuration files...')
-  conda_config = make_conda_config(conda_build_config, args.python_version,
-      recipe_append, condarc_options)
-
-  recipe_dir = os.path.join(args.work_dir, 'conda')
-  metadata = get_rendered_metadata(recipe_dir, conda_config)
-  path = get_output_path(metadata, conda_config)
-
-  # asserts we're building at the right location
-  assert path.startswith(os.path.join(args.conda_root, 'conda-bld')), \
-      'Output path for build (%s) does not start with "%s" - this ' \
-      'typically means this build is running on a shared builder and ' \
-      'the file ~/.conda/environments.txt is polluted with other ' \
-      'environment paths.  To fix, empty that file and set its mode ' \
-      'to read-only for all.' % (path, os.path.join(args.conda_root,
-        'conda-bld'))
-
-  # retrieve the current build number for this build
-  build_number, _ = next_build_number(channels[0], os.path.basename(path))
-
-  # runs the build using the conda-build API
-  arch = conda_arch()
-  logger.info('Building %s-%s-py%s (build: %d) for %s',
-      args.name, version, args.python_version.replace('.',''), build_number,
-      arch)
-
-  # notice we cannot build from the pre-parsed metadata because it has already
-  # resolved the "wrong" build number.  We'll have to reparse after setting the
-  # environment variable BOB_BUILD_NUMBER.
-  bootstrap.set_environment('BOB_BUILD_NUMBER', str(build_number))
-  conda_build.api.build(recipe_dir, config=conda_config)
-
-  # checks if long_description of python package renders fine
-  if args.twine_check:
-    from twine.commands.check import check
-    package = glob.glob('dist/*.zip')
-    failed = check(package)
-
-    if failed:
-      raise RuntimeError('twine check (a.k.a. readme check) %s: FAILED' % \
-          package[0])
+    """Runs git-clean to clean-up build products.
+
+    Args:
+
+      runner: A pointer to the ``run_cmdline()`` function
+      verbose: A boolean flag indicating if the git command should report erased
+        files or not
+    """
+
+    # glob wildcard entries we'd like to keep
+    exclude_from_cleanup = [
+        "miniconda.sh",  # the installer, cached
+        "miniconda/pkgs/urls.txt",  # download index, cached
+        "sphinx",  # build artifact -- documentation
+    ]
+
+    # cache
+    exclude_from_cleanup += ["miniconda/pkgs/"]
+
+    # artifacts
+    exclude_from_cleanup += ["miniconda/conda-bld/"]
+    exclude_from_cleanup += glob.glob("dist/*.zip")
+
+    logger.debug(
+        "Excluding the following paths from git-clean:\n  - %s",
+        "  - ".join(exclude_from_cleanup),
+    )
+
+    # decide on verbosity
+    flags = "-ffdx"
+    if not verbose:
+        flags += "q"
+
+    runner(
+        ["git", "clean", flags]
+        + ["--exclude=%s" % k for k in exclude_from_cleanup]
+    )
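
For reference, a sketch of the command line the call above assembles in the
non-verbose case (exclusion list abbreviated):

    cmd = ["git", "clean", "-ffdxq"] + [
        "--exclude=%s" % k
        for k in ("miniconda.sh", "miniconda/pkgs/", "miniconda/conda-bld/")
    ]
    # git clean -ffdxq --exclude=miniconda.sh --exclude=miniconda/pkgs/ ...
    print(" ".join(cmd))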
+
+
+def base_build(
+    bootstrap,
+    server,
+    intranet,
+    group,
+    recipe_dir,
+    conda_build_config,
+    python_version,
+    condarc_options,
+):
+    """Builds a non-beat/non-bob software dependence that doesn't exist on
+    defaults.
+
+    This function will build a software dependence that is required for our
+    software stack, but does not (yet) exist on the defaults channels.  It first
+    check if the build should run for the current architecture, checks if the
+    package is not already built on our public channel and, if that is true, then
+    proceeds with the build of the dependence.
+
+
+    Args:
+
+      bootstrap: Module that should be pre-loaded so this function can be used
+        in a pre-bdt build
+      server: The base address of the server containing our conda channels
+      intranet: Boolean indicating if we should add "private"/"public" prefixes
+        on the returned paths
+      group: The group of packages (gitlab namespace) the package we're compiling
+        is part of.  Values should match URL namespaces currently available on
+        our internal webserver.  Currently, only "bob" or "beat" will work.
+      recipe_dir: The directory containing the recipe's ``meta.yaml`` file
+      conda_build_config: Path to the ``conda_build_config.yaml`` file to use
+      python_version: String with the python version to build for, in the format
+        ``x.y`` (should be passed even if not building a python package).  It
+        can also be set to ``noarch``, or ``None``.  If set to ``None``, then we
+        don't assume there is a python-specific version being built.  If set to
+        ``noarch``, then it is a python package without a specific build.
+      condarc_options: Pre-parsed condarc options loaded from the respective YAML
+        file
+
+
+    Returns:
+
+      list: The list of built packages, as returned by
+      ``conda_build.api.build()``
+    """
+
+    # if you get to this point, tries to build the package
+    public_channels = bootstrap.get_channels(
+        public=True, stable=True, server=server, intranet=intranet, group=group
+    )
+
+    all_channels = public_channels + ["defaults"]
+    logger.info(
+        "Using the following channels during (potential) build:\n  - %s",
+        "\n  - ".join(all_channels),
+    )
+    condarc_options["channels"] = all_channels
+
+    logger.info("Merging conda configuration files...")
+    if python_version not in ("noarch", None):
+        conda_config = make_conda_config(
+            conda_build_config, python_version, None, condarc_options
+        )
     else:
-      logger.info('twine check (a.k.a. readme check) %s: OK', package[0])
-
-  git_clean_build(bootstrap.run_cmdline, verbose=(args.verbose >= 3))
+        conda_config = make_conda_config(
+            conda_build_config, None, None, condarc_options
+        )
+
+    metadata = get_rendered_metadata(recipe_dir, conda_config)
+
+    # handles different cases as explained on the description of
+    # ``python_version``
+    py_ver = python_version.replace(".", "") if python_version else None
+    if py_ver == "noarch":
+        py_ver = ""
+    arch = conda_arch()
+
+    # checks we should actually build this recipe
+    if should_skip_build(metadata):
+        if py_ver is None:
+            logger.warning(
+                'Skipping UNSUPPORTED build of "%s" on %s', recipe_dir, arch
+            )
+        elif not py_ver:
+            logger.warning(
+                'Skipping UNSUPPORTED build of "%s" for (noarch) python '
+                "on %s",
+                recipe_dir,
+                arch,
+            )
+        else:
+            logger.warning(
+                'Skipping UNSUPPORTED build of "%s" for python-%s on %s',
+                recipe_dir,
+                python_version,
+                arch,
+            )
+        return
+
+    path = get_output_path(metadata, conda_config)
+
+    url = exists_on_channel(public_channels[0], os.path.basename(path))
+    if url is not None:
+        logger.info("Skipping build for %s as it exists (at %s)", path, url)
+        return
+
+    # if you get to this point, just builds the package
+    logger.info("Building %s", path)
+    return conda_build.api.build(recipe_dir, config=conda_config)
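
The ``python_version`` normalization above collapses to three cases; a
minimal sketch:

    def normalize(python_version):
        py_ver = python_version.replace(".", "") if python_version else None
        if py_ver == "noarch":
            py_ver = ""
        return py_ver

    assert normalize("3.8") == "38"   # python-specific build
    assert normalize("noarch") == ""  # python package, no specific build
    assert normalize(None) is None    # not a python build at all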
+
+
+if __name__ == "__main__":
+
+    import argparse
+
+    parser = argparse.ArgumentParser(
+        description="Builds bob.devtools on the CI"
+    )
+    parser.add_argument(
+        "-g",
+        "--group",
+        default=os.environ.get("CI_PROJECT_NAMESPACE", "bob"),
+        help="The namespace of the project being built [default: %(default)s]",
+    )
+    parser.add_argument(
+        "-n",
+        "--name",
+        default=os.environ.get("CI_PROJECT_NAME", "bob.devtools"),
+        help="The name of the project being built [default: %(default)s]",
+    )
+    parser.add_argument(
+        "-c",
+        "--conda-root",
+        default=os.environ.get(
+            "CONDA_ROOT", os.path.realpath(os.path.join(os.curdir, "miniconda"))
+        ),
+        help="The location where we should install miniconda "
+        "[default: %(default)s]",
+    )
+    parser.add_argument(
+        "-V",
+        "--visibility",
+        choices=["public", "internal", "private"],
+        default=os.environ.get("CI_PROJECT_VISIBILITY", "public"),
+        help="The visibility level for this project [default: %(default)s]",
+    )
+    parser.add_argument(
+        "-t",
+        "--tag",
+        default=os.environ.get("CI_COMMIT_TAG", None),
+        help="If building a tag, pass it with this flag [default: %(default)s]",
+    )
+    parser.add_argument(
+        "-w",
+        "--work-dir",
+        default=os.environ.get("CI_PROJECT_DIR", os.path.realpath(os.curdir)),
+        help="The directory where the repo was cloned [default: %(default)s]",
+    )
+    parser.add_argument(
+        "-p",
+        "--python-version",
+        default=os.environ.get(
+            "PYTHON_VERSION", "%d.%d" % sys.version_info[:2]
+        ),
+        help="The version of python to build for [default: %(default)s]",
+    )
+    parser.add_argument(
+        "-T",
+        "--twine-check",
+        action="store_true",
+        default=False,
+        help="If set, then performs the equivalent of a "
+        '"twine check" on the generated python package (zip file)',
+    )
+    parser.add_argument(
+        "--internet",
+        "-i",
+        default=False,
+        action="store_true",
+        help="If executing on an internet-connected server, unset this flag",
+    )
+    parser.add_argument(
+        "--verbose",
+        "-v",
+        action="count",
+        default=0,
+        help="Increases the verbosity level.  We always prints error and "
+        "critical messages. Use a single ``-v`` to enable warnings, "
+        "two ``-vv`` to enable information messages and three ``-vvv`` "
+        "to enable debug messages [default: %(default)s]",
+    )
+
+    args = parser.parse_args()
+
+    # loads the "adjacent" bootstrap module
+    import importlib.util
+
+    mydir = os.path.dirname(os.path.realpath(sys.argv[0]))
+    bootstrap_file = os.path.join(mydir, "bootstrap.py")
+    spec = importlib.util.spec_from_file_location("bootstrap", bootstrap_file)
+    bootstrap = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(bootstrap)
+    server = bootstrap._SERVER
+
+    bootstrap.setup_logger(logger, args.verbose)
+
+    bootstrap.set_environment("DOCSERVER", server)
+    bootstrap.set_environment("LANG", "en_US.UTF-8")
+    bootstrap.set_environment("LC_ALL", os.environ["LANG"])
+
+    # get information about the version of the package being built
+    version, is_prerelease = check_version(args.work_dir, args.tag)
+    bootstrap.set_environment("BOB_PACKAGE_VERSION", version)
+
+    # create the build configuration
+    conda_build_config = os.path.join(mydir, "data", "conda_build_config.yaml")
+    recipe_append = os.path.join(mydir, "data", "recipe_append.yaml")
+
+    condarc = os.path.join(args.conda_root, "condarc")
+    logger.info("Loading (this build's) CONDARC file from %s...", condarc)
+    with open(condarc, "rb") as f:
+        condarc_options = yaml.load(f, Loader=yaml.FullLoader)
+
+    # dump packages at conda_root
+    prefix = get_env_directory(os.environ["CONDA_EXE"], "base")
+    if condarc_options.get("conda-build", {}).get("root-dir") is None:
+        condarc_options["croot"] = os.path.join(prefix, "conda-bld")
+
+    # builds all dependencies in the 'deps' subdirectory - or at least checks
+    # these dependencies are already available; these dependencies go directly to
+    # the public channel once built
+    recipes = load_order_file(os.path.join("deps", "order.txt"))
+    for recipe in [os.path.join("deps", k) for k in recipes]:
+
+        if not os.path.exists(os.path.join(recipe, "meta.yaml")):
+            # ignore - not a conda package
+            continue
+        base_build(
+            bootstrap,
+            server,
+            not args.internet,
+            args.group,
+            recipe,
+            conda_build_config,
+            args.python_version,
+            condarc_options,
+        )
+
+    # notice this condarc typically will only contain the defaults channel - we
+    # need to boost this up with more channels to get it right for this package's
+    # build
+    public = args.visibility == "public"
+    channels = bootstrap.get_channels(
+        public=public,
+        stable=(not is_prerelease),
+        server=server,
+        intranet=(not args.internet),
+        group=args.group,
+    )
+    logger.info(
+        "Using the following channels during build:\n  - %s",
+        "\n  - ".join(channels + ["defaults"]),
+    )
+    condarc_options["channels"] = channels + ["defaults"]
+
+    logger.info("Merging conda configuration files...")
+    conda_config = make_conda_config(
+        conda_build_config, args.python_version, recipe_append, condarc_options
+    )
+
+    recipe_dir = os.path.join(args.work_dir, "conda")
+    metadata = get_rendered_metadata(recipe_dir, conda_config)
+    path = get_output_path(metadata, conda_config)
+
+    # asserts we're building at the right location
+    assert path.startswith(os.path.join(args.conda_root, "conda-bld")), (
+        'Output path for build (%s) does not start with "%s" - this '
+        "typically means this build is running on a shared builder and "
+        "the file ~/.conda/environments.txt is polluted with other "
+        "environment paths.  To fix, empty that file and set its mode "
+        "to read-only for all."
+        % (path, os.path.join(args.conda_root, "conda-bld"))
+    )
+
+    # retrieve the current build number for this build
+    build_number, _ = next_build_number(channels[0], os.path.basename(path))
+
+    # runs the build using the conda-build API
+    arch = conda_arch()
+    logger.info(
+        "Building %s-%s-py%s (build: %d) for %s",
+        args.name,
+        version,
+        args.python_version.replace(".", ""),
+        build_number,
+        arch,
+    )
+
+    # notice we cannot build from the pre-parsed metadata because it has already
+    # resolved the "wrong" build number.  We'll have to reparse after setting the
+    # environment variable BOB_BUILD_NUMBER.
+    bootstrap.set_environment("BOB_BUILD_NUMBER", str(build_number))
+    conda_build.api.build(recipe_dir, config=conda_config)
+
+    # checks if long_description of python package renders fine
+    if args.twine_check:
+        from twine.commands.check import check
+
+        package = glob.glob("dist/*.zip")
+        failed = check(package)
+
+        if failed:
+            raise RuntimeError(
+                "twine check (a.k.a. readme check) %s: FAILED" % package[0]
+            )
+        else:
+            logger.info("twine check (a.k.a. readme check) %s: OK", package[0])
+
+    git_clean_build(bootstrap.run_cmdline, verbose=(args.verbose >= 3))
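
The parser above pre-fills its defaults from GitLab CI variables, so flags
are rarely needed on the CI itself; a hedged, minimal sketch of that
pattern:

    import argparse
    import os

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-n",
        "--name",
        default=os.environ.get("CI_PROJECT_NAME", "bob.devtools"),
    )
    # with no flags passed, the value comes from the environment (or the
    # hard-coded fallback)
    args = parser.parse_args([])
    print(args.name)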
diff --git a/bob/devtools/changelog.py b/bob/devtools/changelog.py
index aec6760bde66b60e731ae8ba07a875a3e0f7515b..ddba8b29fb077415c633b0ab5a892d4d301861a8 100644
--- a/bob/devtools/changelog.py
+++ b/bob/devtools/changelog.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-'''Utilities for retrieving, parsing and auto-generating changelogs'''
+"""Utilities for retrieving, parsing and auto-generating changelogs."""
 
 import io
 import datetime
@@ -10,42 +10,44 @@ import pytz
 import dateutil.parser
 
 from .log import get_logger
+
 logger = get_logger(__name__)
 
 
 def parse_date(d):
-    '''Parses any date supported by :py:func:`dateutil.parser.parse`'''
+    """Parses any date supported by :py:func:`dateutil.parser.parse`"""
 
     return dateutil.parser.parse(d, ignoretz=True).replace(
-                tzinfo=pytz.timezone("Europe/Zurich"))
+        tzinfo=pytz.timezone("Europe/Zurich")
+    )
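
A hedged usage sketch.  Note that attaching a pytz timezone via
``datetime.replace()`` (as above) pins the zone's raw LMT offset (+00:34
for Zurich) instead of CET/CEST - a known pytz caveat that appears harmless
here, since every parsed date gets the same constant offset and dates are
only compared against each other.

    import dateutil.parser
    import pytz

    d = dateutil.parser.parse("27 June 2018 10:00", ignoretz=True).replace(
        tzinfo=pytz.timezone("Europe/Zurich")
    )
    print(d)  # 2018-06-27 10:00:00+00:34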
 
 
 def _sort_commits(commits, reverse):
-    '''Sorts gitlab commit objects using their ``committed_date`` attribute'''
+    """Sorts gitlab commit objects using their ``committed_date`` attribute."""
 
-    return sorted(commits,
-        key=lambda x: parse_date(x.committed_date),
-        reverse=reverse,
-        )
+    return sorted(
+        commits, key=lambda x: parse_date(x.committed_date), reverse=reverse
+    )
 
 
 def _sort_tags(tags, reverse):
-    '''Sorts gitlab tag objects using their ``committed_date`` attribute'''
+    """Sorts gitlab tag objects using their ``committed_date`` attribute."""
 
-    return sorted(tags,
-        key=lambda x: parse_date(x.commit['committed_date']),
+    return sorted(
+        tags,
+        key=lambda x: parse_date(x.commit["committed_date"]),
         reverse=reverse,
-        )
+    )
 
 
-def get_file_from_gitlab(gitpkg, path, ref='master'):
-    '''Retrieves a file from a Gitlab repository, returns a (StringIO) file'''
+def get_file_from_gitlab(gitpkg, path, ref="master"):
+    """Retrieves a file from a Gitlab repository, returns a (StringIO) file."""
 
     return io.StringIO(gitpkg.files.get(file_path=path, ref=ref).decode())
 
 
 def get_last_tag(package):
-    '''Returns the last (gitlab object) tag for the given package
+    """Returns the last (gitlab object) tag for the given package.
 
     Args:
 
@@ -54,7 +56,7 @@ def get_last_tag(package):
 
 
     Returns: a tag object
-    '''
+    """
 
     # according to the Gitlab API documentation, tags are sorted from the last
     # updated to the first, by default - no need to do further sorting!
@@ -66,7 +68,7 @@ def get_last_tag(package):
 
 
 def get_last_tag_date(package):
-    '''Returns the last release date for the given package
+    """Returns the last release date for the given package.
 
     Falls back to the first commit date if the package has not yet been tagged
 
@@ -80,7 +82,7 @@ def get_last_tag_date(package):
     Returns: a datetime object that refers to the last date the package was
              released.  If the package was never released, then returns the
              date just before the first commit.
-    '''
+    """
 
     # according to the Gitlab API documentation, tags are sorted from the last
     # updated to the first, by default - no need to do further sorting!
@@ -89,10 +91,15 @@ def get_last_tag_date(package):
     if tag_list:
         # there are tags, use these
         last = tag_list[0]
-        logger.debug('Last tag for package %s (id=%d) is %s', package.name,
-            package.id, last.name)
-        return parse_date(last.commit['committed_date']) + \
-            datetime.timedelta(milliseconds=500)
+        logger.debug(
+            "Last tag for package %s (id=%d) is %s",
+            package.name,
+            package.id,
+            last.name,
+        )
+        return parse_date(last.commit["committed_date"]) + datetime.timedelta(
+            milliseconds=500
+        )
 
     else:
         commit_list = package.commits.list(all=True)
@@ -100,112 +107,130 @@ def get_last_tag_date(package):
         if commit_list:
             # there are commits, use these
             first = _sort_commits(commit_list, reverse=False)[0]
-            logger.debug('First commit for package %s (id=%d) is from %s',
-                package.name, package.id, first.committed_date)
-            return parse_date(first.committed_date) - \
-                datetime.timedelta(milliseconds=500)
+            logger.debug(
+                "First commit for package %s (id=%d) is from %s",
+                package.name,
+                package.id,
+                first.committed_date,
+            )
+            return parse_date(first.committed_date) - datetime.timedelta(
+                milliseconds=500
+            )
 
         else:
             # there are no commits nor tags - abort
-            raise RuntimeError('package %s (id=%d) does not have commits ' \
-                'or tags so I cannot devise a good starting date' % \
-                (package.name, package.id))
+            raise RuntimeError(
+                "package %s (id=%d) does not have commits "
+                "or tags so I cannot devise a good starting date"
+                % (package.name, package.id)
+            )
 
 
 def _get_tag_changelog(tag):
 
     try:
-        return tag.release['description']
+        return tag.release["description"]
     except Exception:
-        return ''
+        return ""
 
 
 def _write_one_tag(f, pkg_name, tag):
-    '''Prints commit information for a single tag of a given package
+    """Prints commit information for a single tag of a given package.
 
     Args:
 
         f: A :py:class:`File` ready to be written at
         pkg_name: The name of the package we are writing tags of
         tag: The tag value
+    """
 
-    '''
-
-    git_date = parse_date(tag.commit['committed_date'])
-    f.write('  * %s (%s)\n' % (tag.name,  git_date.strftime('%b %d, %Y %H:%M')))
+    git_date = parse_date(tag.commit["committed_date"])
+    f.write("  * %s (%s)\n" % (tag.name, git_date.strftime("%b %d, %Y %H:%M")))
 
-    for line in _get_tag_changelog(tag).replace('\r\n', '\n').split('\n'):
+    for line in _get_tag_changelog(tag).replace("\r\n", "\n").split("\n"):
 
         line = line.strip()
-        if line.startswith('* ') or line.startswith('- '):
+        if line.startswith("* ") or line.startswith("- "):
             line = line[2:]
 
-        line = line.replace('!', pkg_name + '!').replace(pkg_name + \
-            pkg_name, pkg_name)
-        line = line.replace('#', pkg_name + '#')
+        line = line.replace("!", pkg_name + "!").replace(
+            pkg_name + pkg_name, pkg_name
+        )
+        line = line.replace("#", pkg_name + "#")
         if not line:
             continue
-        f.write('%s* %s' % (5*' ', line))
+        f.write("%s* %s" % (5 * " ", line))
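
A hedged sketch of the reference rewriting above: bare GitLab references
like "!7" or "#12" get qualified with the package path, and the
double-prefix replace protects references that were already qualified.

    pkg_name = "bob/bob.devtools"
    line = "Closes #12, see !7 and bob/bob.devtools!3"
    line = line.replace("!", pkg_name + "!").replace(
        pkg_name + pkg_name, pkg_name
    )
    line = line.replace("#", pkg_name + "#")
    # Closes bob/bob.devtools#12, see bob/bob.devtools!7 and
    # bob/bob.devtools!3
    print(line)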
 
 
 def _write_commits_range(f, pkg_name, commits):
-    '''Writes all commits of a given package within a range, to the output file
+    """Writes all commits of a given package within a range, to the output
+    file.
 
     Args:
 
         f: A :py:class:`File` ready to be written at
         pkg_name: The name of the package we are writing tags of
         commits: List of commits to be written
-
-    '''
-
+    """
 
     for commit in commits:
         commit_title = commit.title
 
         # skip commits that do not carry much useful information
-        if '[skip ci]' in commit_title or \
-                        'Merge branch' in commit_title or \
-                        'Increased stable' in commit_title:
+        if (
+            "[skip ci]" in commit_title
+            or "Merge branch" in commit_title
+            or "Increased stable" in commit_title
+        ):
             continue
 
         commit_title = commit_title.strip()
-        commit_title = commit_title.replace('!', pkg_name + '!').replace(pkg_name + pkg_name, pkg_name)
-        commit_title = commit_title.replace('#', pkg_name + '#')
-        f.write('%s- %s\n' % (' ' * 5, commit_title))
+        commit_title = commit_title.replace("!", pkg_name + "!").replace(
+            pkg_name + pkg_name, pkg_name
+        )
+        commit_title = commit_title.replace("#", pkg_name + "#")
+        f.write("%s- %s\n" % (" " * 5, commit_title))
 
 
 def _write_mergerequests_range(f, pkg_name, mrs):
-    '''Writes all merge-requests of a given package, with a range, to the
-    output file
+    """Writes all merge-requests of a given package, with a range, to the
+    output file.
 
     Args:
 
         f: A :py:class:`File` ready to be written at
         pkg_name: The name of the package we are writing tags of
         mrs: The list of merge requests to write
-
-    '''
+    """
 
     for mr in mrs:
-        title = mr.title.strip().replace('\r','').replace('\n', '  ')
-        title = title.replace(' !', ' ' + pkg_name + '!')
-        title = title.replace(' #', ' ' + pkg_name + '#')
+        title = mr.title.strip().replace("\r", "").replace("\n", "  ")
+        title = title.replace(" !", " " + pkg_name + "!")
+        title = title.replace(" #", " " + pkg_name + "#")
         if mr.description is not None:
-          description = \
-              mr.description.strip().replace('\r','').replace('\n', '  ')
-          description = description.replace(' !', ' ' + pkg_name + '!')
-          description = description.replace(' #', ' ' + pkg_name + '#')
+            description = (
+                mr.description.strip().replace("\r", "").replace("\n", "  ")
+            )
+            description = description.replace(" !", " " + pkg_name + "!")
+            description = description.replace(" #", " " + pkg_name + "#")
         else:
-          description = 'No description for this MR'
-        space = ': ' if description else ''
-        log = '''     - {pkg}!{iid} {title}{space}{description}'''
-        f.write(log.format(pkg=pkg_name, iid=mr.iid, title=title, space=space, description=description))
-        f.write('\n')
+            description = "No description for this MR"
+        space = ": " if description else ""
+        log = """     - {pkg}!{iid} {title}{space}{description}"""
+        f.write(
+            log.format(
+                pkg=pkg_name,
+                iid=mr.iid,
+                title=title,
+                space=space,
+                description=description,
+            )
+        )
+        f.write("\n")
 
 
 def write_tags_with_commits(f, gitpkg, since, mode):
-    '''Writes all tags and commits of a given package to the output file
+    """Writes all tags and commits of a given package to the output file.
 
     Args:
 
@@ -214,14 +239,13 @@ def write_tags_with_commits(f, gitpkg, since, mode):
         since: Starting date (as a datetime object)
         mode: One of mrs (merge-requests), commits or tags indicating how to
               list entries in the changelog for this package
-
-    '''
+    """
 
     # get tags since release and sort them
     tags = gitpkg.tags.list()
 
     # sort tags by date
-    tags = [k for k in tags if parse_date(k.commit['committed_date']) >= since]
+    tags = [k for k in tags if parse_date(k.commit["committed_date"]) >= since]
     tags = _sort_tags(tags, reverse=False)
 
     # get commits since release date and sort them too
@@ -231,8 +255,17 @@ def write_tags_with_commits(f, gitpkg, since, mode):
     commits = _sort_commits(commits, reverse=False)
 
     # get merge requests since the release data
-    mrs = list(reversed(gitpkg.mergerequests.list(state='merged', updated_after=since, order_by='updated_at', all=True)))
-    f.write('* %s\n' % (gitpkg.attributes['path_with_namespace'],))
+    mrs = list(
+        reversed(
+            gitpkg.mergerequests.list(
+                state="merged",
+                updated_after=since,
+                order_by="updated_at",
+                all=True,
+            )
+        )
+    )
+    f.write("* %s\n" % (gitpkg.attributes["path_with_namespace"],))
 
     # go through tags and writes each with its message and corresponding
     # commits
@@ -240,65 +273,76 @@ def write_tags_with_commits(f, gitpkg, since, mode):
     for tag in tags:
 
         # write tag name and its text
-        _write_one_tag(f, gitpkg.attributes['path_with_namespace'], tag)
-        end_date = parse_date(tag.commit['committed_date'])
+        _write_one_tag(f, gitpkg.attributes["path_with_namespace"], tag)
+        end_date = parse_date(tag.commit["committed_date"])
 
-        if mode == 'commits':
+        if mode == "commits":
             # write commits from the previous tag up to this one
-            commits4tag = [k for k in commits \
-                if (start_date < parse_date(k.committed_date) <= end_date)]
-            _write_commits_range(f, gitpkg.attributes['path_with_namespace'],
-                commits4tag)
-
-        elif mode == 'mrs':
+            commits4tag = [
+                k
+                for k in commits
+                if (start_date < parse_date(k.committed_date) <= end_date)
+            ]
+            _write_commits_range(
+                f, gitpkg.attributes["path_with_namespace"], commits4tag
+            )
+
+        elif mode == "mrs":
             # write merge requests from the previous tag up to this one
             # the attribute 'merged_at' is not available in GitLab API as of 27
             # June 2018
-            mrs4tag = [k for k in mrs \
-                if (start_date < parse_date(k.updated_at) <= end_date)]
-            _write_mergerequests_range(f,
-                gitpkg.attributes['path_with_namespace'], mrs4tag)
+            mrs4tag = [
+                k
+                for k in mrs
+                if (start_date < parse_date(k.updated_at) <= end_date)
+            ]
+            _write_mergerequests_range(
+                f, gitpkg.attributes["path_with_namespace"], mrs4tag
+            )
 
         start_date = end_date
 
-    if mode != 'tags':
+    if mode != "tags":
 
         # write the tentative patch version bump for the future tag
-        f.write('  * patch\n')
+        f.write("  * patch\n")
 
-        if mode == 'mrs':
+        if mode == "mrs":
             # write leftover merge requests
             # the attribute 'merged_at' is not available in GitLab API as of 27
             # June 2018
-            leftover_mrs = [k for k in mrs \
-                if parse_date(k.updated_at) > start_date]
-            _write_mergerequests_range(f,
-                gitpkg.attributes['path_with_namespace'], leftover_mrs)
+            leftover_mrs = [
+                k for k in mrs if parse_date(k.updated_at) > start_date
+            ]
+            _write_mergerequests_range(
+                f, gitpkg.attributes["path_with_namespace"], leftover_mrs
+            )
 
         else:
             # write leftover commits that were not tagged yet
-            leftover_commits = [k for k in commits \
-                if parse_date(k.committed_date) > start_date]
-            _write_commits_range(f, gitpkg.attributes['path_with_namespace'],
-                leftover_commits)
+            leftover_commits = [
+                k for k in commits if parse_date(k.committed_date) > start_date
+            ]
+            _write_commits_range(
+                f, gitpkg.attributes["path_with_namespace"], leftover_commits
+            )
 
 
 def write_tags(f, gitpkg, since):
-    '''Writes all tags of a given package to the output file
+    """Writes all tags of a given package to the output file.
 
     Args:
 
         f: A :py:class:`File` ready to be written at
         gitpkg: A pointer to the gitlab package object
         since: Starting date as a datetime object
-
-    '''
+    """
 
     tags = gitpkg.tags.list()
     # sort tags by date
-    tags = [k for k in tags if parse_date(k.commit['committed_date']) >= since]
+    tags = [k for k in tags if parse_date(k.commit["committed_date"]) >= since]
     tags = _sort_tags(tags, reverse=False)
-    f.write('* %s\n')
+    f.write("* %s\n")
 
     for tag in tags:
-        _write_one_tag(gitpkg.attributes['path_with_namespace'], tag)
+        _write_one_tag(f, gitpkg.attributes["path_with_namespace"], tag)
diff --git a/bob/devtools/ci.py b/bob/devtools/ci.py
index 5285730d34a5e9603c95896f5e24d752fe2c79bc..0dea4c9cdb705627346f20c5017f9454cdc201d7 100644
--- a/bob/devtools/ci.py
+++ b/bob/devtools/ci.py
@@ -1,207 +1,215 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-'''Tools to help CI-based builds and artifact deployment'''
+"""Tools to help CI-based builds and artifact deployment."""
 
 
 import git
 import distutils.version
 
 from .log import get_logger
+from .build import load_order_file
+
 logger = get_logger(__name__)
 
 
 def is_master(refname, tag, repodir):
-  '''Tells if we're on the master branch via ref_name or tag
+    """Tells if we're on the master branch via ref_name or tag.
 
-  This function checks if the name of the branch being built is "master".  If a
-  tag is set, then it checks if the tag is on the master branch.  If so, then
-  also returns ``True``, otherwise, ``False``.
+    This function checks if the name of the branch being built is "master".  If a
+    tag is set, then it checks if the tag is on the master branch.  If so, then
+    also returns ``True``, otherwise, ``False``.
 
-  Args:
+    Args:
 
-    refname: The value of the environment variable ``CI_COMMIT_REF_NAME``
-    tag: The value of the environment variable ``CI_COMMIT_TAG`` - (may be
-      ``None``)
+      refname: The value of the environment variable ``CI_COMMIT_REF_NAME``
+      tag: The value of the environment variable ``CI_COMMIT_TAG`` (may be
+        ``None``)
+      repodir: The directory that contains the clone of the git repository
 
-  Returns: a boolean, indicating we're building the master branch **or** that
-  the tag being built was issued on the master branch.
-  '''
+    Returns: a boolean, indicating we're building the master branch **or** that
+    the tag being built was issued on the master branch.
+    """
 
-  if tag is not None:
-    repo = git.Repo(repodir)
-    _tag = repo.tag('refs/tags/%s' % tag)
-    return _tag.commit in repo.iter_commits(rev='master')
+    if tag is not None:
+        repo = git.Repo(repodir)
+        _tag = repo.tag("refs/tags/%s" % tag)
+        return _tag.commit in repo.iter_commits(rev="master")
 
-  return refname == 'master'
+    return refname == "master"
 
 
 def is_stable(package, refname, tag, repodir):
-  '''Determines if the package being published is stable
+    """Determines if the package being published is stable.
 
-  This is done by checking if a tag was set for the package.  If that is the
-  case, we still cross-check the tag is on the "master" branch.  If everything
-  checks out, we return ``True``.  Else, ``False``.
+    This is done by checking if a tag was set for the package.  If that is the
+    case, we still cross-check the tag is on the "master" branch.  If everything
+    checks out, we return ``True``.  Else, ``False``.
 
-  Args:
+    Args:
 
-    package: Package name in the format "group/name"
-    refname: The current value of the environment ``CI_COMMIT_REF_NAME``
-    tag: The current value of the enviroment ``CI_COMMIT_TAG`` (may be
-      ``None``)
-    repodir: The directory that contains the clone of the git repository
+      package: Package name in the format "group/name"
+      refname: The current value of the environment ``CI_COMMIT_REF_NAME``
+      tag: The current value of the environment ``CI_COMMIT_TAG`` (may be
+        ``None``)
+      repodir: The directory that contains the clone of the git repository
 
-  Returns: a boolean, indicating if the current build is for a stable release
-  '''
+    Returns: a boolean, indicating if the current build is for a stable release
+    """
 
-  if tag is not None:
-    logger.info('Project %s tag is "%s"', package, tag)
-    parsed_tag = distutils.version.LooseVersion(tag[1:]).version  #remove 'v'
-    is_prerelease = any([isinstance(k, str) for k in parsed_tag])
+    if tag is not None:
+        logger.info('Project %s tag is "%s"', package, tag)
+        parsed_tag = distutils.version.LooseVersion(
+            tag[1:]
+        ).version  # remove 'v'
+        is_prerelease = any([isinstance(k, str) for k in parsed_tag])
 
-    if is_prerelease:
-      logger.warn('Pre-release detected - not publishing to stable channels')
-      return False
+        if is_prerelease:
+            logger.warning(
+                "Pre-release detected - not publishing to stable channels"
+            )
+            return False
 
-    if is_master(refname, tag, repodir):
-      return True
-    else:
-      logger.warn('Tag %s in non-master branch will be ignored', tag)
-      return False
+        if is_master(refname, tag, repodir):
+            return True
+        else:
+            logger.warn("Tag %s in non-master branch will be ignored", tag)
+            return False
 
-  logger.info('No tag information available at build')
-  logger.info('Considering this to be a pre-release build')
-  return False
+    logger.info("No tag information available at build")
+    logger.info("Considering this to be a pre-release build")
+    return False
 
 
-def comment_cleanup(lines):
-  """Cleans-up comments and empty lines from textual data read from files"""
+def read_packages(filename):
+    """Return a python list of tuples (repository, branch), given a file
+    containing one package (and branch) per line.
 
-  no_comments = [k.partition('#')[0].strip() for k in lines]
-  return [k for k in no_comments if k]
+    Comments and empty lines are excluded.
+    """
 
+    lines = load_order_file(filename)
 
-def read_packages(filename):
-  """
-  Return a python list of tuples (repository, branch), given a file containing
-  one package (and branch) per line.  Comments are excluded
+    packages = []
+    for line in lines:
+        if "," in line:  # user specified a branch
+            path, branch = [k.strip() for k in line.split(",", 1)]
+            packages.append((path, branch))
+        else:
+            packages.append((line, "master"))
 
-  """
-  # loads dirnames from order file (accepts # comments and empty lines)
-  with open(filename, 'rt') as f:
-    lines = comment_cleanup(f.readlines())
+    return packages
 
-  packages = []
-  for line in lines:
-    if ',' in line:  #user specified a branch
-      path, branch = [k.strip() for k in line.split(',', 1)]
-      packages.append((path, branch))
-    else:
-      packages.append((line, 'master'))
 
-  return packages
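
A hedged sketch of the order-file format consumed above, with the branch
defaulting to "master" when omitted:

    lines = ["bob/bob.extension", "bob/bob.io.base, 1.2.x"]
    packages = []
    for line in lines:
        if "," in line:  # user specified a branch
            path, branch = [k.strip() for k in line.split(",", 1)]
            packages.append((path, branch))
        else:
            packages.append((line, "master"))
    # [("bob/bob.extension", "master"), ("bob/bob.io.base", "1.2.x")]
    print(packages)
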
+def uniq(seq, idfun=None):
+    """Very fast, order preserving uniq function."""
 
+    # order preserving
+    if idfun is None:
 
-def uniq(seq, idfun=None):
-  """Very fast, order preserving uniq function"""
-
-  # order preserving
-  if idfun is None:
-      def idfun(x): return x
-  seen = {}
-  result = []
-  for item in seq:
-      marker = idfun(item)
-      # in old Python versions:
-      # if seen.has_key(marker)
-      # but in new ones:
-      if marker in seen: continue
-      seen[marker] = 1
-      result.append(item)
-  return result
+        def idfun(x):
+            return x
+
+    seen = {}
+    result = []
+    for item in seq:
+        marker = idfun(item)
+        # in old Python versions:
+        # if seen.has_key(marker)
+        # but in new ones:
+        if marker in seen:
+            continue
+        seen[marker] = 1
+        result.append(item)
+    return result
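
Usage sketch for the function just defined: the first occurrence wins,
order is preserved, and ``idfun`` deduplicates by a derived key.

    print(uniq([3, 1, 3, 2, 1]))                   # [3, 1, 2]
    print(uniq(["a", "A", "b"], idfun=str.lower))  # ["a", "b"]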
 
 
 def select_build_file(basename, paths, branch):
-  '''Selects the file to use for a build
+    """Selects the file to use for a build.
 
-  This method will return the name of the most adequate build-accessory file
-  (conda_build_config.yaml, recipe_append.yaml) for a given build, in this
-  order of priority:
+    This method will return the name of the most adequate build-accessory file
+    (conda_build_config.yaml, recipe_append.yaml) for a given build, in this
+    order of priority:
 
-  1. The first file found is returned
-  2. We first search for a *specific* file if ``branch`` is set
-  3. If that file does not exist, returns the unbranded filename if that exists
-     in one of the paths
-  4. If no candidates exists, returns ``None``
+    1. The first file found is returned
+    2. We first search for a *specific* file if ``branch`` is set
+    3. If that file does not exist, returns the plain (non-branch-specific)
+       filename if that exists in one of the paths
+    4. If no candidate exists, returns ``None``
 
-  The candidate filename is built using
-  ``os.path.splitext(os.path.basename(basename))[0]``.
+    The candidate filename is built using
+    ``os.path.splitext(os.path.basename(basename))[0]``.
 
-  Args:
+    Args:
 
-    basename: Name of the file to use for the search
-    paths (list): A list of paths leading to the location of the variants file
-      to use.  Priority is given to paths that come first
-    branch (str): Optional key to be set when searching for the variants file
-      to use.  This is typically the git-branch name of the current branch of
-      the repo being built.
+      basename: Name of the file to use for the search
+      paths (list): A list of paths leading to the location of the variants file
+        to use.  Priority is given to paths that come first
+      branch (str): Optional key to be set when searching for the variants file
+        to use.  This is typically the git-branch name of the current branch of
+        the repo being built.
 
 
-  Returns:
+    Returns:
 
-    str: A string containing the full, resolved path of the file to use.
-    Returns ``None``, if no candidate is found
+      str: A string containing the full, resolved path of the file to use.
+      Returns ``None``, if no candidate is found
+    """
 
-  '''
+    import os
 
-  import os
+    basename, extension = os.path.splitext(os.path.basename(basename))
 
-  basename, extension = os.path.splitext(os.path.basename(basename))
+    if branch:
+        specific_basename = "%s-%s" % (basename, branch)
+        for path in paths:
+            path = os.path.realpath(path)
+            candidate = os.path.join(
+                path, "%s%s" % (specific_basename, extension)
+            )
+            if os.path.exists(candidate):
+                return candidate
 
-  if branch:
-    specific_basename = '%s-%s' % (basename, branch)
     for path in paths:
-      path = os.path.realpath(path)
-      candidate = os.path.join(path, '%s%s' % (specific_basename, extension))
-      if os.path.exists(candidate):
-        return candidate
-
-  for path in paths:
-    path = os.path.realpath(path)
-    candidate = os.path.join(path, '%s%s' % (basename, extension))
-    if os.path.exists(candidate):
-      return candidate
+        path = os.path.realpath(path)
+        candidate = os.path.join(path, "%s%s" % (basename, extension))
+        if os.path.exists(candidate):
+            return candidate
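
A sketch of the probing order implied above, assuming
``basename="conda_build_config.yaml"``, two search paths and
``branch="mybranch"`` (branch-specific candidates are tried first, in path
order):

    import os

    basename, extension = os.path.splitext("conda_build_config.yaml")
    paths, branch = ["/home/user/project", "/home/user"], "mybranch"
    candidates = [
        os.path.join(p, "%s-%s%s" % (basename, branch, extension))
        for p in paths
    ] + [os.path.join(p, "%s%s" % (basename, extension)) for p in paths]
    print("\n".join(candidates))
    # /home/user/project/conda_build_config-mybranch.yaml
    # /home/user/conda_build_config-mybranch.yaml
    # /home/user/project/conda_build_config.yaml
    # /home/user/conda_build_config.yaml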
 
 
 def select_conda_build_config(paths, branch):
-  '''Selects the default conda_build_config.yaml.
+    """Selects the default conda_build_config.yaml.
 
-  See :py:func:`select_build_file` for implementation details.  If no build
-  config file is found by :py:func:`select_build_file`, then returns the
-  default ``conda_build_config.yaml`` shipped with this package.
-  '''
+    See :py:func:`select_build_file` for implementation details.  If no
+    build config file is found by :py:func:`select_build_file`, then
+    returns the default ``conda_build_config.yaml`` shipped with this
+    package.
+    """
 
-  from .constants import CONDA_BUILD_CONFIG as default
-  return select_build_file(default, paths, branch) or default
+    from .constants import CONDA_BUILD_CONFIG as default
+
+    return select_build_file(default, paths, branch) or default
 
 
 def select_conda_recipe_append(paths, branch):
-  '''Selects the default recipe_append.yaml.
+    """Selects the default recipe_append.yaml.
+
+    See :py:func:`select_build_file` for implementation details.  If no
+    recipe append file is found by :py:func:`select_build_file`, then
+    returns the default ``recipe_append.yaml`` shipped with this
+    package.
+    """
 
-  See :py:func:`select_build_file` for implementation details.  If no recipe
-  append file is found by :py:func:`select_build_file`, then returns the
-  default ``recipe_append.yaml`` shipped with this package.
-  '''
+    from .constants import CONDA_RECIPE_APPEND as default
 
-  from .constants import CONDA_RECIPE_APPEND as default
-  return select_build_file(default, paths, branch) or default
+    return select_build_file(default, paths, branch) or default
 
 
 def select_user_condarc(paths, branch):
-  '''Selects the user condarc file to read (if any)
+    """Selects the user condarc file to read (if any)
 
-  See :py:func:`select_build_file` for implementation details.  If no recipe
-  condarc is found by :py:func:`select_build_file`, then returns ``None``.
-  '''
+    See :py:func:`select_build_file` for implementation details.  If no
+    user condarc is found by :py:func:`select_build_file`, then
+    returns ``None``.
+    """
 
-  return select_build_file('condarc', paths, branch)
+    return select_build_file("condarc", paths, branch)
diff --git a/bob/devtools/constants.py b/bob/devtools/constants.py
index 847a0b65fb6cdd7a374343aee639eaf8e716509e..32b0194820ea5adbc1914ab4afc4726d8d6a3d06 100644
--- a/bob/devtools/constants.py
+++ b/bob/devtools/constants.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-'''Constants used for building and more'''
+"""Constants used for building and more."""
 
 import os
 import pkg_resources
@@ -9,57 +9,60 @@ import pkg_resources
 from . import bootstrap
 
 from .log import get_logger
+
 logger = get_logger(__name__)
 
 
 BASE_CONDARC = bootstrap._BASE_CONDARC
-'''Default setup for conda builds'''
+"""Default setup for conda builds"""
 
 
-CONDA_BUILD_CONFIG = pkg_resources.resource_filename(__name__,
-    os.path.join('data', 'conda_build_config.yaml'))
-'''Configuration variants we like building'''
+CONDA_BUILD_CONFIG = pkg_resources.resource_filename(
+    __name__, os.path.join("data", "conda_build_config.yaml")
+)
+"""Configuration variants we like building"""
 
 
-CONDA_RECIPE_APPEND = pkg_resources.resource_filename(__name__,
-    os.path.join('data', 'recipe_append.yaml'))
-'''Extra information to be appended to every recipe upon building'''
+CONDA_RECIPE_APPEND = pkg_resources.resource_filename(
+    __name__, os.path.join("data", "recipe_append.yaml")
+)
+"""Extra information to be appended to every recipe upon building"""
 
 
 SERVER = bootstrap._SERVER
-'''This is the default server use use to store data and build artifacts'''
+"""This is the default server use use to store data and build artifacts"""
 
 
 WEBDAV_PATHS = {
-    True: {  #stable?
-      False: {  #visible?
-        'root': '/private-upload',
-        'conda': '/conda',
-        'docs': '/docs',
+    True: {  # stable?
+        False: {  # visible?
+            "root": "/private-upload",
+            "conda": "/conda",
+            "docs": "/docs",
         },
-      True: {  #visible?
-        'root': '/public-upload',
-        'conda': '/conda',
-        'docs': '/docs',
+        True: {  # visible?
+            "root": "/public-upload",
+            "conda": "/conda",
+            "docs": "/docs",
         },
-      },
-    False: {  #stable?
-      False: {  #visible?
-        'root': '/private-upload',
-        'conda': '/conda/label/beta',
-        'docs': '/docs',
+    },
+    False: {  # stable?
+        False: {  # visible?
+            "root": "/private-upload",
+            "conda": "/conda/label/beta",
+            "docs": "/docs",
         },
-      True: {  #visible?
-        'root': '/public-upload',
-        'conda': '/conda/label/beta',
-        'docs': '/docs',
+        True: {  # visible?
+            "root": "/public-upload",
+            "conda": "/conda/label/beta",
+            "docs": "/docs",
         },
-      },
-    }
-'''Default locations of our webdav upload paths'''
+    },
+}
+"""Default locations of our webdav upload paths"""
 
 
-IDIAP_ROOT_CA = b'''
+IDIAP_ROOT_CA = b"""
 Idiap Root CA 2016 - for internal use
 =====================================
 -----BEGIN CERTIFICATE-----
@@ -102,25 +105,26 @@ NqU3Xei/78W+eLh9HZvVqXpi4s/fF6z+lXKDHpqVRh5kNAKJbYQUfcV2H7FEtCux
 NIDS6J1GnHJKCmYPuwFSrQ5VXM/1p7w+A9MkJktsxw2kxsRUvJn7Ka+bp7M6wERU
 JHsX
 -----END CERTIFICATE-----
-'''
+"""
 
 
-CACERT_URL = 'https://curl.haxx.se/ca/cacert.pem'
-'''Location of the most up-to-date CA certificate bundle'''
+CACERT_URL = "https://curl.haxx.se/ca/cacert.pem"
+"""Location of the most up-to-date CA certificate bundle"""
 
 
-CACERT = pkg_resources.resource_filename(__name__,
-    os.path.join('data', 'cacert.pem'))
-'''We keep a copy of the CA certificates we trust here
+CACERT = pkg_resources.resource_filename(
+    __name__, os.path.join("data", "cacert.pem")
+)
+"""We keep a copy of the CA certificates we trust here
 
    To update this file use: ``curl --remote-name --time-cond cacert.pem https://curl.haxx.se/ca/cacert.pem``
 
    More information here: https://curl.haxx.se/docs/caextract.html
-'''
+"""
 
 
-MATPLOTLIB_RCDIR = pkg_resources.resource_filename(__name__, 'data')
-'''Base directory where the file matplotlibrc lives
+MATPLOTLIB_RCDIR = pkg_resources.resource_filename(__name__, "data")
+"""Base directory where the file matplotlibrc lives
 
 It is required for certain builds that use matplotlib functionality.
-'''
+"""
diff --git a/bob/devtools/deploy.py b/bob/devtools/deploy.py
index 5a7c21fd1103a2a630620ff13fef7362520aad98..9c7782902346e8f2914d71b88321a949ddd3af4d 100644
--- a/bob/devtools/deploy.py
+++ b/bob/devtools/deploy.py
@@ -1,144 +1,167 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-'''Deployment utilities for conda packages and documentation via webDAV'''
+"""Deployment utilities for conda packages and documentation via webDAV."""
 
 
 import os
 
 from .constants import WEBDAV_PATHS, SERVER
 from .log import get_logger
+
 logger = get_logger(__name__)
 
 
 def _setup_webdav_client(server, root, username, password):
-  '''Configures and checks the webdav client'''
-
-  # setup webdav connection
-  webdav_options = dict(webdav_hostname=server, webdav_root=root,
-      webdav_login=username, webdav_password=password)
-
-  from .webdav3 import client as webdav
-
-  retval = webdav.Client(webdav_options)
-  assert retval.valid()
-
-  return retval
-
-
-def deploy_conda_package(package, arch, stable, public, username, password,
-    overwrite, dry_run):
-  '''Deploys a single conda package on the appropriate path
-
-  Args:
-
-    package (str): Path leading to the conda package to be deployed
-    arch (str): The conda architecture to deploy to (``linux-64``, ``osx-64``,
-      ``noarch``, or ``None`` - in which case the architecture is going to be
-      guessed from the directory where the package sits)
-    stable (bool): Indicates if the package should be deployed on a stable
-      (``True``) or beta (``False``) channel
-    public (bool): Indicates if the package is supposed to be distributed
-      publicly or privatly (within Idiap network)
-    username (str): The name of the user on the webDAV server to use for
-      uploading the package
-    password (str): The password of the user on the webDAV server to use for
-      uploading the package
-    overwrite (bool): If we should overwrite a package with equal name existing
-      on the destination directory.  Otherwise, an exception is raised.
-    dry_run (bool): If we're supposed to really do the actions, or just log
-      messages.
-
-  '''
-
-  server_info = WEBDAV_PATHS[stable][public]
-  davclient = _setup_webdav_client(SERVER, server_info['root'], username,
-      password)
-
-  basename = os.path.basename(package)
-  arch = arch or os.path.basename(os.path.dirname(package))
-  remote_path = '%s/%s/%s' % (server_info['conda'], arch, basename)
-
-  if davclient.check(remote_path):
-    if not overwrite:
-      raise RuntimeError('The file %s/%s already exists on the server ' \
-          '- this can be due to more than one build with deployment ' \
-          'running at the same time.  Re-running the broken builds ' \
-          'normally fixes it' % (SERVER, remote_path))
-
-    else:
-      logger.info('[dav] rm -f %s%s%s', SERVER, server_info['root'],
-          remote_path)
-      if not dry_run:
-        davclient.clean(remote_path)
-
-  logger.info('[dav] %s -> %s%s%s', package, SERVER, server_info['root'],
-      remote_path)
-  if not dry_run:
-    davclient.upload(local_path=package, remote_path=remote_path)
-
-
-def deploy_documentation(path, package, stable, latest, public, branch, tag,
-    username, password, dry_run):
-  '''Deploys sphinx documentation to the appropriate webdav locations
-
-  Args:
-
-    path (str): Path leading to the root of the documentation to be deployed
-    package (str): Full name (with namespace) of the package being treated
-    stable (bool): Indicates if the documentation corresponds to the latest
-      stable build
-    latest (bool): Indicates if the documentation being deployed correspond to
-      the latest stable for the package or not.  In case the documentation
-      comes from a patch release which is not on the master branch, please set
-      this flag to ``False``, which will make us avoid deployment of the
-      documentation to ``master`` and ``stable`` sub-directories.
-    public (bool): Indicates if the documentation is supposed to be distributed
-      publicly or privatly (within Idiap network)
-    branch (str): The name of the branch for the current build
-    tag (str): The name of the tag currently built (may be ``None``)
-    username (str): The name of the user on the webDAV server to use for
-      uploading the package
-    password (str): The password of the user on the webDAV server to use for
-      uploading the package
-    dry_run (bool): If we're supposed to really do the actions, or just log
-      messages.
-
-  '''
-
-  # uploads documentation artifacts
-  if not os.path.exists(path):
-    raise RuntimeError('Documentation is not available at %s - ' \
-        'ensure documentation is being produced for your project!' % path)
-
-  server_info = WEBDAV_PATHS[stable][public]
-  davclient = _setup_webdav_client(SERVER, server_info['root'], username,
-      password)
-
-  remote_path_prefix = '%s/%s' % (server_info['docs'], package)
-
-  # finds out the correct mixture of sub-directories we should deploy to.
-  # 1. if ref-name is a tag, don't forget to publish to 'master' as well -
-  # all tags are checked to come from that branch
-  # 2. if ref-name is a branch name, deploy to it
-  # 3. in case a tag is being published, make sure to deploy to the special
-  # "stable" subdir as well
-  deploy_docs_to = set([branch])
-  if stable:
-    if tag is not None:
-      deploy_docs_to.add(tag)
-    if latest:
-      deploy_docs_to.add('master')
-      deploy_docs_to.add('stable')
-
-  # creates package directory, and then uploads directory there
-  for k in deploy_docs_to:
-    if not davclient.check(remote_path_prefix):  #base package directory
-      logger.info('[dav] mkdir %s', remote_path_prefix)
-      if not dry_run:
-        davclient.mkdir(remote_path_prefix)
-    remote_path = '%s/%s' % (remote_path_prefix, k)
-    logger.info('[dav] %s -> %s%s%s', path, SERVER, server_info['root'],
-        remote_path)
+    """Configures and checks the webdav client."""
+
+    # setup webdav connection
+    webdav_options = dict(
+        webdav_hostname=server,
+        webdav_root=root,
+        webdav_login=username,
+        webdav_password=password,
+    )
+
+    from .webdav3 import client as webdav
+
+    retval = webdav.Client(webdav_options)
+    assert retval.valid()
+
+    return retval
+
+
+def deploy_conda_package(
+    package, arch, stable, public, username, password, overwrite, dry_run
+):
+    """Deploys a single conda package on the appropriate path.
+
+    Args:
+
+      package (str): Path leading to the conda package to be deployed
+      arch (str): The conda architecture to deploy to (``linux-64``, ``osx-64``,
+        ``noarch``, or ``None`` - in which case the architecture is going to be
+        guessed from the directory where the package sits)
+      stable (bool): Indicates if the package should be deployed on a stable
+        (``True``) or beta (``False``) channel
+      public (bool): Indicates if the package is supposed to be distributed
+        publicly or privately (within the Idiap network)
+      username (str): The name of the user on the webDAV server to use for
+        uploading the package
+      password (str): The password of the user on the webDAV server to use for
+        uploading the package
+      overwrite (bool): If we should overwrite a package with the same name
+        existing in the destination directory.  Otherwise, an exception is
+        raised.
+      dry_run (bool): If we're supposed to really do the actions, or just log
+        messages.
+    """
+
+    server_info = WEBDAV_PATHS[stable][public]
+    davclient = _setup_webdav_client(
+        SERVER, server_info["root"], username, password
+    )
+
+    basename = os.path.basename(package)
+    arch = arch or os.path.basename(os.path.dirname(package))
+    remote_path = "%s/%s/%s" % (server_info["conda"], arch, basename)
+
+    if davclient.check(remote_path):
+        if not overwrite:
+            raise RuntimeError(
+                "The file %s/%s already exists on the server "
+                "- this can be due to more than one build with deployment "
+                "running at the same time.  Re-running the broken builds "
+                "normally fixes it" % (SERVER, remote_path)
+            )
+
+        else:
+            logger.info(
+                "[dav] rm -f %s%s%s", SERVER, server_info["root"], remote_path
+            )
+            if not dry_run:
+                davclient.clean(remote_path)
+
+    logger.info(
+        "[dav] %s -> %s%s%s", package, SERVER, server_info["root"], remote_path
+    )
     if not dry_run:
-      davclient.upload_directory(local_path=path, remote_path=remote_path)
+        davclient.upload(local_path=package, remote_path=remote_path)
+
+
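When ``arch`` is ``None``, the architecture is guessed from the package's parent directory and the remote path becomes ``<conda>/<arch>/<basename>``; a sketch of that computation with a hypothetical conda-build output path:

import os

# hypothetical local path: only the last two components matter here
package = "/tmp/conda-bld/linux-64/bob.devtools-1.0.0-py37_0.tar.bz2"

arch = None  # as if the caller did not specify one
arch = arch or os.path.basename(os.path.dirname(package))
remote_path = "%s/%s/%s" % ("/conda", arch, os.path.basename(package))
assert remote_path == "/conda/linux-64/bob.devtools-1.0.0-py37_0.tar.bz2"
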
+def deploy_documentation(
+    path,
+    package,
+    stable,
+    latest,
+    public,
+    branch,
+    tag,
+    username,
+    password,
+    dry_run,
+):
+    """Deploys sphinx documentation to the appropriate webdav locations.
+
+    Args:
+
+      path (str): Path leading to the root of the documentation to be deployed
+      package (str): Full name (with namespace) of the package being treated
+      stable (bool): Indicates if the documentation corresponds to the latest
+        stable build
+      latest (bool): Indicates if the documentation being deployed corresponds
+        to the latest stable release of the package.  In case the
+        documentation comes from a patch release which is not on the master
+        branch, set this flag to ``False``, which avoids deploying the
+        documentation to the ``master`` and ``stable`` sub-directories.
+      public (bool): Indicates if the documentation is supposed to be distributed
+        publicly or privately (within the Idiap network)
+      branch (str): The name of the branch for the current build
+      tag (str): The name of the tag currently built (may be ``None``)
+      username (str): The name of the user on the webDAV server to use for
+        uploading the package
+      password (str): The password of the user on the webDAV server to use for
+        uploading the package
+      dry_run (bool): If we're supposed to really do the actions, or just log
+        messages.
+    """
+
+    # uploads documentation artifacts
+    if not os.path.exists(path):
+        raise RuntimeError(
+            "Documentation is not available at %s - "
+            "ensure documentation is being produced for your project!" % path
+        )
+
+    server_info = WEBDAV_PATHS[stable][public]
+    davclient = _setup_webdav_client(
+        SERVER, server_info["root"], username, password
+    )
+
+    remote_path_prefix = "%s/%s" % (server_info["docs"], package)
+
+    # finds out the correct mixture of sub-directories we should deploy to.
+    # 1. if ref-name is a tag, don't forget to publish to 'master' as well -
+    # all tags are checked to come from that branch
+    # 2. if ref-name is a branch name, deploy to it
+    # 3. in case a tag is being published, make sure to deploy to the special
+    # "stable" subdir as well
+    deploy_docs_to = set([branch])
+    if stable:
+        if tag is not None:
+            deploy_docs_to.add(tag)
+        if latest:
+            deploy_docs_to.add("master")
+            deploy_docs_to.add("stable")
+
+    # creates package directory, and then uploads directory there
+    for k in deploy_docs_to:
+        if not davclient.check(remote_path_prefix):  # base package directory
+            logger.info("[dav] mkdir %s", remote_path_prefix)
+            if not dry_run:
+                davclient.mkdir(remote_path_prefix)
+        remote_path = "%s/%s" % (remote_path_prefix, k)
+        logger.info(
+            "[dav] %s -> %s%s%s", path, SERVER, server_info["root"], remote_path
+        )
+        if not dry_run:
+            davclient.upload_directory(local_path=path, remote_path=remote_path)
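
The set of documentation sub-directories only grows for stable builds: the current branch is always included, a tag adds itself, and ``latest`` adds the ``master`` and ``stable`` pointers.  The same rule in standalone form:

def docs_targets(branch, stable, latest, tag):
    # mirrors the deploy_docs_to computation in deploy_documentation()
    targets = {branch}
    if stable:
        if tag is not None:
            targets.add(tag)
        if latest:
            targets.update(["master", "stable"])
    return targets

# a stable release tagged v1.2.3 on master refreshes all three pointers
assert docs_targets("master", True, True, "v1.2.3") == {"master", "v1.2.3", "stable"}
# a beta build from a feature branch only touches its own sub-directory
assert docs_targets("fix-foo", False, False, None) == {"fix-foo"}
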
diff --git a/bob/devtools/log.py b/bob/devtools/log.py
index 8ccad76117833b12e0e5f9f98a6157b67d2076dc..894f74ee685f906d75d0101f565ca0dc3508d934 100644
--- a/bob/devtools/log.py
+++ b/bob/devtools/log.py
@@ -1,8 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-"""Logging utilities
-"""
+"""Logging utilities."""
 
 import sys
 import logging
@@ -12,7 +11,7 @@ import termcolor
 
 
 # get the default root logger of Bob
-_logger = logging.getLogger('bob')
+_logger = logging.getLogger("bob")
 
 # by default, warning and error messages should be written to sys.stderr
 _warn_err = logging.StreamHandler(sys.stderr)
@@ -21,9 +20,10 @@ _logger.addHandler(_warn_err)
 
 # debug and info messages are written to sys.stdout
 
+
 class _InfoFilter:
-  def filter(self, record):
-    return record.levelno <= logging.INFO
+    def filter(self, record):
+        return record.levelno <= logging.INFO
 
 
 _debug_info = logging.StreamHandler(sys.stdout)
@@ -34,168 +34,177 @@ _logger.addHandler(_debug_info)
 
 COLORMAP = dict(
     debug=dict(),
-    info=dict(attrs=['bold']),
-    warn=dict(color='yellow', attrs=['bold']),
-    warning=dict(color='yellow', attrs=['bold']),
-    error=dict(color='red'),
-    exception=dict(color='red', attrs=['bold']),
-    critical=dict(color='red', attrs=['bold']),
-    )
-'''Default color map for homogenized color display'''
+    info=dict(attrs=["bold"]),
+    warn=dict(color="yellow", attrs=["bold"]),
+    warning=dict(color="yellow", attrs=["bold"]),
+    error=dict(color="red"),
+    exception=dict(color="red", attrs=["bold"]),
+    critical=dict(color="red", attrs=["bold"]),
+)
+"""Default color map for homogenized color display"""
 
 
 def _supports_color():
-  """
-  Returns True if the running system's terminal supports color, and False
-  otherwise.
-  """
-  plat = sys.platform
-  supported_platform = plat != 'Pocket PC' and (plat != 'win32' or
-                                                'ANSICON' in os.environ)
-  # isatty is not always implemented, #6223.
-  is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
-  if not supported_platform or not is_a_tty:
-    return False
-  return True
+    """Returns True if the running system's terminal supports color, and False
+    otherwise."""
+    plat = sys.platform
+    supported_platform = plat != "Pocket PC" and (
+        plat != "win32" or "ANSICON" in os.environ
+    )
+    # isatty is not always implemented, #6223.
+    is_a_tty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty()
+    if not supported_platform or not is_a_tty:
+        return False
+    return True
 
 
 class ColorLog(object):
-  '''Colorizes logging colors'''
-
-  def __init__(self, logger):
-    self._log = logger
-
-  def __getattr__(self, name):
-    if name in ['debug', 'info', 'warn', 'warning', 'error', 'exception',
-        'critical']:
-      if _supports_color():
-        return lambda s, *args: getattr(self._log, name)(
-            termcolor.colored(s, **COLORMAP[name]), *args)
-      else:
-        return lambda s, *args: getattr(self._log, name)(s, *args)
-
-    return getattr(self._log, name)
+    """Colorizes logging colors."""
+
+    def __init__(self, logger):
+        self._log = logger
+
+    def __getattr__(self, name):
+        if name in [
+            "debug",
+            "info",
+            "warn",
+            "warning",
+            "error",
+            "exception",
+            "critical",
+        ]:
+            if _supports_color():
+                return lambda s, *args: getattr(self._log, name)(
+                    termcolor.colored(s, **COLORMAP[name]), *args
+                )
+            else:
+                return lambda s, *args: getattr(self._log, name)(s, *args)
+
+        return getattr(self._log, name)
 
 
 def get_logger(name):
-  """Returns the default logger as setup by this module"""
+    """Returns the default logger as setup by this module."""
 
-  return ColorLog(logging.getLogger(name))
+    return ColorLog(logging.getLogger(name))
 
 
 def _echo(text, *args, **kwargs):
-  """Provides a colorized version of :py:func:`click.echo` (for terminals)
+    """Provides a colorized version of :py:func:`click.echo` (for terminals)
 
-  The color is stripped off if outputting to a file or piping the results of
-  a command using this function.
+    The color is stripped off if outputting to a file or piping the results of
+    a command using this function.
 
-  Parameters:
+    Parameters:
 
-    text (str): The text to be printed
-    args (tuple): Tuple of attributes directly passed to
-      :py:func:`termcolor.colored`
-    kwargs (dict): Dictionary of attributes directly passed to
-      :py:func:`termcolor.colored`
-  """
+      text (str): The text to be printed
+      args (tuple): Tuple of attributes directly passed to
+        :py:func:`termcolor.colored`
+      kwargs (dict): Dictionary of attributes directly passed to
+        :py:func:`termcolor.colored`
+    """
 
-  click.echo(termcolor.colored(text, *args, **kwargs))
+    click.echo(termcolor.colored(text, *args, **kwargs))
 
 
 def echo_normal(text):
-  """Color preset for normal text output for :py:func:`click.echo`"""
+    """Color preset for normal text output for :py:func:`click.echo`"""
 
-  click.echo(text)
+    click.echo(text)
 
 
 def echo_info(text):
-  """Color preset for normal text output for :py:func:`click.echo`"""
+    """Color preset for normal text output for :py:func:`click.echo`"""
 
-  _echo(text, 'green')
+    _echo(text, "green")
 
 
 def echo_warning(text):
-  """Color preset for normal warning output for :py:func:`click.echo`"""
+    """Color preset for normal warning output for :py:func:`click.echo`"""
 
-  _echo(text, **COLORMAP['warn'])
+    _echo(text, **COLORMAP["warn"])
 
 
 # helper functions to instantiate and set-up logging
-def setup(logger_name,
-    format="%(levelname)s:%(name)s@%(asctime)s: %(message)s"):
-  """This function returns a logger object that is set up to perform logging
-  using Bob loggers.
-
-  Parameters
-  ----------
-  logger_name : str
-      The name of the module to generate logs for
-  format : :obj:`str`, optional
-      The format of the logs, see :py:class:`logging.LogRecord` for more
-      details. By default, the log contains the logger name, the log time, the
-      log level and the massage.
-
-  Returns
-  -------
-  logger : :py:class:`logging.Logger`
-      The logger configured for logging. The same logger can be retrieved using
-      the :py:func:`logging.getLogger` function.
-  """
-  # generate new logger object
-  logger = logging.getLogger(logger_name)
-
-  # add log the handlers if not yet done
-  if not logger_name.startswith("bob") and not logger.handlers:
-    logger.addHandler(_warn_err)
-    logger.addHandler(_debug_info)
-
-  # this formats the logger to print the desired information
-  formatter = logging.Formatter(format)
-  # we have to set the formatter to all handlers registered in the current
-  # logger
-  for handler in logger.handlers:
-    handler.setFormatter(formatter)
-
-  # set the same formatter for bob loggers
-  for handler in _logger.handlers:
-    handler.setFormatter(formatter)
-
-  return ColorLog(logger)
+def setup(
+    logger_name, format="%(levelname)s:%(name)s@%(asctime)s: %(message)s"
+):
+    """This function returns a logger object that is set up to perform logging
+    using Bob loggers.
+
+    Parameters
+    ----------
+    logger_name : str
+        The name of the module to generate logs for
+    format : :obj:`str`, optional
+        The format of the logs, see :py:class:`logging.LogRecord` for more
+        details. By default, the log contains the logger name, the log time, the
+        log level and the message.
+
+    Returns
+    -------
+    logger : :py:class:`logging.Logger`
+        The logger configured for logging. The same logger can be retrieved using
+        the :py:func:`logging.getLogger` function.
+    """
+    # generate new logger object
+    logger = logging.getLogger(logger_name)
+
+    # add the log handlers if not yet done
+    if not logger_name.startswith("bob") and not logger.handlers:
+        logger.addHandler(_warn_err)
+        logger.addHandler(_debug_info)
+
+    # this formats the logger to print the desired information
+    formatter = logging.Formatter(format)
+    # we have to set the formatter to all handlers registered in the current
+    # logger
+    for handler in logger.handlers:
+        handler.setFormatter(formatter)
+
+    # set the same formatter for bob loggers
+    for handler in _logger.handlers:
+        handler.setFormatter(formatter)
+
+    return ColorLog(logger)
 
 
 def set_verbosity_level(logger, level):
-  """Sets the log level for the given logger.
-
-  Parameters
-  ----------
-  logger : :py:class:`logging.Logger` or str
-      The logger to generate logs for, or the name  of the module to generate
-      logs for.
-  level : int
-      Possible log levels are: 0: Error; 1: Warning; 2: Info; 3: Debug.
-
-  Raises
-  ------
-  ValueError
-      If the level is not in range(0, 4).
-  """
-  if level not in range(0, 4):
-    raise ValueError(
-        "The verbosity level %d does not exist. Please reduce the number of "
-        "'--verbose' parameters in your command line" % level)
-  # set up the verbosity level of the logging system
-  log_level = {
-      0: logging.ERROR,
-      1: logging.WARNING,
-      2: logging.INFO,
-      3: logging.DEBUG
-  }[level]
-
-  # set this log level to the logger with the specified name
-  if isinstance(logger, str):
-    logger = logging.getLogger(logger)
-  logger.setLevel(log_level)
-  # set the same log level for the bob logger
-  _logger.setLevel(log_level)
+    """Sets the log level for the given logger.
+
+    Parameters
+    ----------
+    logger : :py:class:`logging.Logger` or str
+        The logger to generate logs for, or the name of the module to generate
+        logs for.
+    level : int
+        Possible log levels are: 0: Error; 1: Warning; 2: Info; 3: Debug.
+
+    Raises
+    ------
+    ValueError
+        If the level is not in range(0, 4).
+    """
+    if level not in range(0, 4):
+        raise ValueError(
+            "The verbosity level %d does not exist. Please reduce the number of "
+            "'--verbose' parameters in your command line" % level
+        )
+    # set up the verbosity level of the logging system
+    log_level = {
+        0: logging.ERROR,
+        1: logging.WARNING,
+        2: logging.INFO,
+        3: logging.DEBUG,
+    }[level]
+
+    # set this log level to the logger with the specified name
+    if isinstance(logger, str):
+        logger = logging.getLogger(logger)
+    logger.setLevel(log_level)
+    # set the same log level for the bob logger
+    _logger.setLevel(log_level)
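
A usage sketch tying setup() and set_verbosity_level() together; ``myproject`` is a placeholder module name:

from bob.devtools.log import setup, set_verbosity_level

logger = setup("myproject")     # attaches handlers/formatter, returns a ColorLog
set_verbosity_level(logger, 2)  # 0: ERROR, 1: WARNING, 2: INFO, 3: DEBUG
logger.info("visible on stdout at level 2")
logger.debug("filtered out until level 3")
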
 
 
 def verbosity_option(**kwargs):
@@ -216,16 +225,23 @@ def verbosity_option(**kwargs):
 
     def custom_verbosity_option(f):
         def callback(ctx, param, value):
-            ctx.meta['verbosity'] = value
+            ctx.meta["verbosity"] = value
             set_verbosity_level(_logger, value)
             _logger.debug("`bob' logging level set to %d", value)
             return value
+
         return click.option(
-            '-v', '--verbose', count=True,
-            expose_value=False, default=0,
+            "-v",
+            "--verbose",
+            count=True,
+            expose_value=False,
+            default=0,
             help="Increase the verbosity level from 0 (only error messages) "
             "to 1 (warnings), 2 (info messages), 3 (debug information) by "
             "adding the --verbose option as often as desired "
             "(e.g. '-vvv' for debug).",
-            callback=callback, **kwargs)(f)
+            callback=callback,
+            **kwargs,
+        )(f)
+
     return custom_verbosity_option
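
A sketch of attaching the option to a click command; because the flag is counted, ``-v``, ``-vv`` and ``-vvv`` map directly to the levels accepted by set_verbosity_level():

import click
from bob.devtools.log import verbosity_option

@click.command()
@verbosity_option()
def hello():
    # the option is not exposed as an argument: its callback has already
    # adjusted the 'bob' logger and stored the count in ctx.meta["verbosity"]
    click.echo("hello")
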
diff --git a/bob/devtools/release.py b/bob/devtools/release.py
index 8ca8eb3bd2557332e0fb4044ac49643c898395db..707c144c96467e06b22aff281e9c12e7f3f37460 100644
--- a/bob/devtools/release.py
+++ b/bob/devtools/release.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-'''Utilities to needed to release packages'''
+"""Utilities to needed to release packages."""
 
 import os
 import re
@@ -10,13 +10,14 @@ import shutil
 import gitlab
 
 from .log import get_logger
+
 logger = get_logger(__name__)
 
 from distutils.version import StrictVersion
 
 
-def download_path(package, path, output=None, ref='master'):
-    '''Downloads paths from gitlab, with an optional recurse
+def download_path(package, path, output=None, ref="master"):
+    """Downloads paths from gitlab, with an optional recurse.
 
     This method will download an archive of the repository from the chosen
     reference, and then it will search inside the archive for the path to be
@@ -32,52 +33,56 @@ def download_path(package, path, output=None, ref='master'):
         the basename of ``path`` as storage point with respect to the current
         directory
       ref: the name of the git reference (branch, tag or commit hash) to use
-
-    '''
+    """
     from io import BytesIO
     import tarfile
     import tempfile
 
     output = output or os.path.realpath(os.curdir)
 
-    logger.debug('Downloading archive of "%s" from "%s"...', ref,
-        package.attributes['path_with_namespace'])
+    logger.debug(
+        'Downloading archive of "%s" from "%s"...',
+        ref,
+        package.attributes["path_with_namespace"],
+    )
     archive = package.repository_archive(ref=ref)
-    logger.debug('Archive has %d bytes', len(archive))
+    logger.debug("Archive has %d bytes", len(archive))
     logger.debug('Searching for "%s" within archive...', path)
 
     with tempfile.TemporaryDirectory() as d:
-      with tarfile.open(fileobj=BytesIO(archive), mode='r:gz') as f:
-        f.extractall(path=d)
+        with tarfile.open(fileobj=BytesIO(archive), mode="r:gz") as f:
+            f.extractall(path=d)
 
-      # move stuff to "output"
-      basedir = os.listdir(d)[0]
-      shutil.move(os.path.join(d, basedir, path), output)
+        # move stuff to "output"
+        basedir = os.listdir(d)[0]
+        shutil.move(os.path.join(d, basedir, path), output)
 
 
 def get_gitlab_instance():
-    '''Returns an instance of the gitlab object for remote operations'''
+    """Returns an instance of the gitlab object for remote operations."""
 
     # tries to figure if we can authenticate using a global configuration
-    cfgs = ['~/.python-gitlab.cfg', '/etc/python-gitlab.cfg']
+    cfgs = ["~/.python-gitlab.cfg", "/etc/python-gitlab.cfg"]
     cfgs = [os.path.expanduser(k) for k in cfgs]
     if any([os.path.exists(k) for k in cfgs]):
-        gl = gitlab.Gitlab.from_config('idiap', cfgs)
-    else: #ask the user for a token or use one from the current runner
+        gl = gitlab.Gitlab.from_config("idiap", cfgs)
+    else:  # ask the user for a token or use one from the current runner
         server = "https://gitlab.idiap.ch"
-        token = os.environ.get('CI_JOB_TOKEN')
+        token = os.environ.get("CI_JOB_TOKEN")
         if token is None:
-          logger.debug('Did not find any of %s nor CI_JOB_TOKEN is defined. ' \
-              'Asking for user token on the command line...', '|'.join(cfgs))
-          token = input("Your %s (private) token: " % server)
+            logger.debug(
+                "Did not find any of %s nor CI_JOB_TOKEN is defined. "
+                "Asking for user token on the command line...",
+                "|".join(cfgs),
+            )
+            token = input("Your %s (private) token: " % server)
         gl = gitlab.Gitlab(server, private_token=token, api_version=4)
 
     return gl
 
 
 def _update_readme(readme, version):
-    """
-    Inside text of the readme, replaces parts of the links to the provided
+    """Inside text of the readme, replaces parts of the links to the provided
     version. If version is not provided, replaces them with `stable` or `master`.
 
     Args:
@@ -89,32 +94,37 @@ def _update_readme(readme, version):
     """
 
     # replace the badge in the readme's text with the given version
-    DOC_IMAGE = re.compile(r'\-(stable|(v\d+\.\d+\.\d+([abc]\d+)?))\-')
-    BRANCH_RE = re.compile(r'/(stable|master|(v\d+\.\d+\.\d+([abc]\d+)?))')
+    DOC_IMAGE = re.compile(r"\-(stable|(v\d+\.\d+\.\d+([abc]\d+)?))\-")
+    BRANCH_RE = re.compile(r"/(stable|master|(v\d+\.\d+\.\d+([abc]\d+)?))")
 
     new_readme = []
     for line in readme.splitlines():
         if BRANCH_RE.search(line) is not None:
             if "gitlab" in line:  # gitlab links
-                replacement = "/v%s" % version if version is not None \
-                    else "/master"
+                replacement = (
+                    "/v%s" % version if version is not None else "/master"
+                )
                 line = BRANCH_RE.sub(replacement, line)
-            if ("software/bob" in line) or \
-                ("software/beat" in line):  # our doc server
-                if 'master' not in line:  # don't replace 'latest' pointer
-                    replacement = "/v%s" % version if version is not None \
-                        else "/stable"
+            if ("software/bob" in line) or (
+                "software/beat" in line
+            ):  # our doc server
+                if "master" not in line:  # don't replace 'latest' pointer
+                    replacement = (
+                        "/v%s" % version if version is not None else "/stable"
+                    )
                     line = BRANCH_RE.sub(replacement, line)
         if DOC_IMAGE.search(line) is not None:
-            replacement = '-v%s-' % version if version is not None \
-                else '-stable-'
+            replacement = (
+                "-v%s-" % version if version is not None else "-stable-"
+            )
             line = DOC_IMAGE.sub(replacement, line)
         new_readme.append(line)
-    return '\n'.join(new_readme) + '\n'
+    return "\n".join(new_readme) + "\n"
 
 
 def get_latest_tag_name(gitpkg):
-    """Find the name of the latest tag for a given package in the format '#.#.#'
+    """Find the name of the latest tag for a given package in the format
+    '#.#.#'.
 
     Args:
         gitpkg: gitlab package object
@@ -129,8 +139,11 @@ def get_latest_tag_name(gitpkg):
         return None
     # create list of tags' names but ignore the first 'v' character in each name
     # also filter out non version tags
-    tag_names = [tag.name[1:] for tag in latest_tags \
-        if StrictVersion.version_re.match(tag.name[1:])]
+    tag_names = [
+        tag.name[1:]
+        for tag in latest_tags
+        if StrictVersion.version_re.match(tag.name[1:])
+    ]
     # sort them correctly according to each subversion number
     tag_names.sort(key=StrictVersion)
     # take the last one, as it is the latest tag in the sorted tags
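
StrictVersion compares version components numerically, which is why it is used as the sort key instead of plain string order:

from distutils.version import StrictVersion

tags = ["1.10.0", "1.2.0", "1.9.1"]
assert sorted(tags)[-1] == "1.9.1"  # lexicographic order gets it wrong
tags.sort(key=StrictVersion)
assert tags[-1] == "1.10.0"  # numeric component order gets it right
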
@@ -139,9 +152,8 @@ def get_latest_tag_name(gitpkg):
 
 
 def get_parsed_tag(gitpkg, tag):
-    """
-    An older tag is formatted as 'v2.1.3 (Sep 22, 2017 10:37)', from which we
-    need only v2.1.3
+    """An older tag is formatted as 'v2.1.3 (Sep 22, 2017 10:37)', from which
+    we need only v2.1.3.
 
     The latest tag is either patch, minor, major, or none
     """
@@ -153,7 +165,7 @@ def get_parsed_tag(gitpkg, tag):
 
     # if we bump the version, we need to find the latest released version for
     # this package
-    if tag in ('major', 'minor', 'patch'):
+    if tag in ("major", "minor", "patch"):
 
         # find the correct latest tag of this package (without 'v' in front),
         # None if there are no tags yet
@@ -161,48 +173,57 @@ def get_parsed_tag(gitpkg, tag):
 
         # if there were no tags yet, assume the very first version
         if latest_tag_name is None:
-            if tag == 'major':
-              assume_version = 'v1.0.0'
-            elif tag == 'minor':
-              assume_version = 'v0.1.0'
-            elif tag == 'patch':
-              assume_version = 'v0.0.1'
-            logger.warn('Package %s does not have any tags. I\'m assuming ' \
+            if tag == "major":
+                assume_version = "v1.0.0"
+            elif tag == "minor":
+                assume_version = "v0.1.0"
+            elif tag == "patch":
+                assume_version = "v0.0.1"
+            logger.warn(
+                "Package %s does not have any tags. I'm assuming "
                 'version will be %s since you proposed a "%s" bump',
-                gitpkg.attributes['path_with_namespace'], assume_version,
-                tag)
+                gitpkg.attributes["path_with_namespace"],
+                assume_version,
+                tag,
+            )
             return assume_version
 
         # check that it has expected format #.#.#
         # latest_tag_name = Version(latest_tag_name)
         m = re.match(r"(\d.\d.\d)", latest_tag_name)
         if not m:
-            raise ValueError('The latest tag name {0} in package {1} has ' \
-                'unknown format'.format('v' + latest_tag_name,
-                  gitpkg.attributes['path_with_namespace']))
+            raise ValueError(
+                "The latest tag name {0} in package {1} has "
+                "unknown format".format(
+                    "v" + latest_tag_name,
+                    gitpkg.attributes["path_with_namespace"],
+                )
+            )
 
         # increase the version accordingly
-        major, minor, patch = latest_tag_name.split('.')
+        major, minor, patch = latest_tag_name.split(".")
 
-        if 'major' == tag:
+        if "major" == tag:
             # increment the first number in 'v#.#.#' but make minor and patch
             # to be 0
-            return 'v' + str(int(major) + 1) + '.0.0'
+            return "v" + str(int(major) + 1) + ".0.0"
 
-        if 'minor' == tag:
+        if "minor" == tag:
             # increment the second number in 'v#.#.#' but make patch to be 0
-            return 'v' + major + '.' + str(int(minor) + 1) + '.0'
+            return "v" + major + "." + str(int(minor) + 1) + ".0"
 
-        if 'patch' == tag:
+        if "patch" == tag:
             # increment the last number in 'v#.#.#'
-            return 'v' + major + '.' + minor + '.' + str(int(patch) + 1)
+            return "v" + major + "." + minor + "." + str(int(patch) + 1)
 
-    if 'none' == tag:
+    if "none" == tag:
         # we do nothing in this case
         return tag
 
-    raise ValueError('Cannot parse changelog tag {0} of the ' \
-        'package {1}'.format(tag, gitpkg.attributes['path_with_namespace']))
+    raise ValueError(
+        "Cannot parse changelog tag {0} of the "
+        "package {1}".format(tag, gitpkg.attributes["path_with_namespace"])
+    )
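
The bump arithmetic in standalone form, mirroring the major/minor/patch branches above, starting from a latest tag of ``1.2.3``:

def bump(latest, kind):
    # mirrors get_parsed_tag(): reset the lower components on a higher bump
    major, minor, patch = latest.split(".")
    if kind == "major":
        return "v%d.0.0" % (int(major) + 1)
    if kind == "minor":
        return "v%s.%d.0" % (major, int(minor) + 1)
    return "v%s.%s.%d" % (major, minor, int(patch) + 1)  # patch

assert bump("1.2.3", "major") == "v2.0.0"
assert bump("1.2.3", "minor") == "v1.3.0"
assert bump("1.2.3", "patch") == "v1.2.4"
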
 
 
 def update_tag_comments(gitpkg, tag_name, tag_comments_list, dry_run=False):
@@ -221,16 +242,19 @@ def update_tag_comments(gitpkg, tag_name, tag_comments_list, dry_run=False):
     # get tag and update its description
     logger.info(tag_name)
     tag = gitpkg.tags.get(tag_name)
-    tag_comments = '\n'.join(tag_comments_list)
-    logger.info('Found tag %s, updating its comments with:\n%s', tag.name,
-        tag_comments)
-    if not dry_run: tag.set_release_description(tag_comments)
+    tag_comments = "\n".join(tag_comments_list)
+    logger.info(
+        "Found tag %s, updating its comments with:\n%s", tag.name, tag_comments
+    )
+    if not dry_run:
+        tag.set_release_description(tag_comments)
     return tag
 
 
-def update_files_with_mr(gitpkg, files_dict, message, branch, automerge,
-    dry_run, user_id):
-    """Update (via a commit) files of a given gitlab package, through an MR
+def update_files_with_mr(
+    gitpkg, files_dict, message, branch, automerge, dry_run, user_id
+):
+    """Update (via a commit) files of a given gitlab package, through an MR.
 
     This function can update a file in a gitlab package, but will do this
     through a formal merge request.  You can auto-merge this request
@@ -245,49 +269,66 @@ def update_files_with_mr(gitpkg, files_dict, message, branch, automerge,
           created MR
         dry_run: If True, nothing will be pushed to gitlab
         user_id: The integer which numbers the user to attribute this MR to
-
     """
 
     data = {
-        'branch': branch,
-        'start_branch': 'master',
-        'commit_message': message,
-        'actions': []
+        "branch": branch,
+        "start_branch": "master",
+        "commit_message": message,
+        "actions": [],
     }
 
     # add files to update
     for filename in files_dict.keys():
-        update_action = dict(action='update', file_path=filename)
-        update_action['content'] = files_dict[filename]
-        data['actions'].append(update_action)
-
-    logger.debug("Committing changes in files (%s) to branch '%s'",
-        ', '.join(files_dict.keys()), branch)
+        update_action = dict(action="update", file_path=filename)
+        update_action["content"] = files_dict[filename]
+        data["actions"].append(update_action)
+
+    logger.debug(
+        "Committing changes in files (%s) to branch '%s'",
+        ", ".join(files_dict.keys()),
+        branch,
+    )
     if not dry_run:
         commit = gitpkg.commits.create(data)
-        logger.info('Created commit %s at %s (branch=%s)',
-            commit.short_id, gitpkg.attributes['path_with_namespace'], branch)
+        logger.info(
+            "Created commit %s at %s (branch=%s)",
+            commit.short_id,
+            gitpkg.attributes["path_with_namespace"],
+            branch,
+        )
 
     logger.debug("Creating merge request %s -> master", branch)
     if not dry_run:
-        mr = gitpkg.mergerequests.create({
-          'source_branch': branch,
-          'target_branch': 'master',
-          'title': message,
-          'remove_source_branch': True,
-          'assignee_id': user_id,
-          })
-        logger.info('Created merge-request !%d (%s -> %s) at %s', mr.iid,
-            branch, 'master', gitpkg.attributes['path_with_namespace'])
+        mr = gitpkg.mergerequests.create(
+            {
+                "source_branch": branch,
+                "target_branch": "master",
+                "title": message,
+                "remove_source_branch": True,
+                "assignee_id": user_id,
+            }
+        )
+        logger.info(
+            "Created merge-request !%d (%s -> %s) at %s",
+            mr.iid,
+            branch,
+            "master",
+            gitpkg.attributes["path_with_namespace"],
+        )
 
         if automerge:
-          if '[ci skip]' in message.lower() or '[skip ci]' in message.lower():
-            logger.info('Merging !%d immediately - CI was skipped', mr.iid)
-            mr.merge()
-          else:
-            logger.info('Auto-merging !%d only if pipeline succeeds', mr.iid)
-            time.sleep(0.5)  # to avoid the MR to be merged automatically - bug?
-            mr.merge(merge_when_pipeline_succeeds=True)
+            if "[ci skip]" in message.lower() or "[skip ci]" in message.lower():
+                logger.info("Merging !%d immediately - CI was skipped", mr.iid)
+                mr.merge()
+            else:
+                logger.info(
+                    "Auto-merging !%d only if pipeline succeeds", mr.iid
+                )
+                time.sleep(
+                    0.5
+                )  # to avoid the MR being merged automatically - bug?
+                mr.merge(merge_when_pipeline_succeeds=True)
 
 
 def update_files_at_master(gitpkg, files_dict, message, dry_run):
@@ -300,31 +341,32 @@ def update_files_at_master(gitpkg, files_dict, message, dry_run):
         files_dict: Dictionary of file names and their contents (as text)
         message: Commit message
         dry_run: If True, nothing will be committed or pushed to GitLab
-
     """
 
-    data = {
-        'branch': 'master',  # v4
-        'commit_message': message,
-        'actions': []
-    }
+    data = {"branch": "master", "commit_message": message, "actions": []}  # v4
 
     # add files to update
     for filename in files_dict.keys():
-        update_action = dict(action='update', file_path=filename)
-        update_action['content'] = files_dict[filename]
-        data['actions'].append(update_action)
-
-    logger.debug("Committing changes in files (%s) to branch 'master'",
-        ', '.join(files_dict.keys()))
+        update_action = dict(action="update", file_path=filename)
+        update_action["content"] = files_dict[filename]
+        data["actions"].append(update_action)
+
+    logger.debug(
+        "Committing changes in files (%s) to branch 'master'",
+        ", ".join(files_dict.keys()),
+    )
     if not dry_run:
         commit = gitpkg.commits.create(data)
-        logger.info('Created commit %s at %s (branch=%s)',
-            commit.short_id, gitpkg.attributes['path_with_namespace'], 'master')
+        logger.info(
+            "Created commit %s at %s (branch=%s)",
+            commit.short_id,
+            gitpkg.attributes["path_with_namespace"],
+            "master",
+        )
 
 
 def get_last_pipeline(gitpkg):
-    """Returns the last pipeline of the project
+    """Returns the last pipeline of the project.
 
     Args:
 
@@ -342,7 +384,7 @@ def get_last_pipeline(gitpkg):
 
 
 def just_build_package(gitpkg, dry_run=False):
-    """Creates the pipeline with the latest tag and starts it
+    """Creates the pipeline with the latest tag and starts it.
 
     Args:
 
@@ -350,17 +392,16 @@ def just_build_package(gitpkg, dry_run=False):
         dry_run: If True, the pipeline will not be created on GitLab
 
     Returns:
-
     """
 
     # get the latest tag
-    latest_tag_name = 'v' + get_latest_tag_name(gitpkg)
+    latest_tag_name = "v" + get_latest_tag_name(gitpkg)
 
     # create the pipeline with this tag and start it
     logger.info("Creating and starting pipeline for tag %s", latest_tag_name)
 
     if not dry_run:
-        new_pipeline = gitpkg.pipelines.create({'ref': latest_tag_name})
+        new_pipeline = gitpkg.pipelines.create({"ref": latest_tag_name})
         return new_pipeline.id
 
     return None
@@ -378,18 +419,21 @@ def wait_for_pipeline_to_finish(gitpkg, pipeline_id, dry_run=False):
         pipeline_id: id of the pipeline for which we are waiting to finish
         dry_run: If True, outputs a log message and exits. There will be no
                  waiting.
-
     """
 
     sleep_step = 30
     max_sleep = 120 * 60  # two hours
     # pipeline = get_last_pipeline(gitpkg, before_last=before_last)
 
-    logger.warn('Waiting for the pipeline %s of "%s" to finish',
-        pipeline_id, gitpkg.attributes['path_with_namespace'])
-    logger.warn('Do **NOT** interrupt!')
+    logger.warn(
+        'Waiting for the pipeline %s of "%s" to finish',
+        pipeline_id,
+        gitpkg.attributes["path_with_namespace"],
+    )
+    logger.warn("Do **NOT** interrupt!")
 
-    if dry_run: return
+    if dry_run:
+        return
 
     # retrieve the pipeline we are waiting for
     pipeline = gitpkg.pipelines.get(pipeline_id)
@@ -397,44 +441,55 @@ def wait_for_pipeline_to_finish(gitpkg, pipeline_id, dry_run=False):
     # probe and wait for the pipeline to finish
     slept_so_far = 0
 
-    while pipeline.status == 'running' or pipeline.status == 'pending':
+    while pipeline.status == "running" or pipeline.status == "pending":
 
         time.sleep(sleep_step)
         slept_so_far += sleep_step
         if slept_so_far > max_sleep:
-            raise ValueError('I cannot wait longer than {0} seconds for '
-                'pipeline {1} to finish running!'.format(max_sleep, pipeline_id))
+            raise ValueError(
+                "I cannot wait longer than {0} seconds for "
+                "pipeline {1} to finish running!".format(max_sleep, pipeline_id)
+            )
         # probe gitlab to update the status of the pipeline
         pipeline = gitpkg.pipelines.get(pipeline_id)
 
     # finished running, now check if it succeeded
-    if pipeline.status != 'success':
-        raise ValueError('Pipeline {0} of project {1} exited with ' \
-            'undesired status "{2}". Release is not possible.' \
-            .format(pipeline_id, gitpkg.attributes['path_with_namespace'],
-              pipeline.status))
-
-    logger.info('Pipeline %s of package %s SUCCEEDED. Continue processing.',
-        pipeline_id, gitpkg.attributes['path_with_namespace'])
+    if pipeline.status != "success":
+        raise ValueError(
+            "Pipeline {0} of project {1} exited with "
+            'undesired status "{2}". Release is not possible.'.format(
+                pipeline_id,
+                gitpkg.attributes["path_with_namespace"],
+                pipeline.status,
+            )
+        )
+
+    logger.info(
+        "Pipeline %s of package %s SUCCEEDED. Continue processing.",
+        pipeline_id,
+        gitpkg.attributes["path_with_namespace"],
+    )
 
 
 def cancel_last_pipeline(gitpkg):
-    """ Cancel the last started pipeline of a package
+    """Cancel the last started pipeline of a package.
 
     Args:
 
         gitpkg: gitlab package object
-
     """
 
     pipeline = get_last_pipeline(gitpkg)
-    logger.info('Cancelling the last pipeline %s of project %s', pipeline.id,
-      gitpkg.attributes['path_with_namespace'])
+    logger.info(
+        "Cancelling the last pipeline %s of project %s",
+        pipeline.id,
+        gitpkg.attributes["path_with_namespace"],
+    )
     pipeline.cancel()
 
 
 def release_package(gitpkg, tag_name, tag_comments_list, dry_run=False):
-    """Release package
+    """Release package.
 
     The provided tag will be annotated with a given list of comments.
     README.rst and version.txt files will also be updated according to the
@@ -446,30 +501,31 @@ def release_package(gitpkg, tag_name, tag_comments_list, dry_run=False):
         tag_name: The name of the release tag
         tag_comments_list: New annotations for this tag in a form of list
         dry_run: If True, nothing will be committed or pushed to GitLab
-
     """
 
     # if there is nothing to release, just rebuild the package
     latest_tag = get_latest_tag_name(gitpkg)
 
-    if tag_name == 'none' or (latest_tag and ('v' + latest_tag) == tag_name):
-        logger.warn("Since the tag is 'none' or already exists, we just " \
-            "re-build the last pipeline")
+    if tag_name == "none" or (latest_tag and ("v" + latest_tag) == tag_name):
+        logger.warn(
+            "Since the tag is 'none' or already exists, we just "
+            "re-build the last pipeline"
+        )
         return just_build_package(gitpkg, dry_run)
 
     # 1. Replace branch tag in Readme to new tag, change version file to new
     # version tag. Add and commit to gitlab
     version_number = tag_name[1:]  # remove 'v' in front
-    readme_file = gitpkg.files.get(file_path='README.rst', ref='master')
+    readme_file = gitpkg.files.get(file_path="README.rst", ref="master")
     readme_content = readme_file.decode().decode()
     readme_content = _update_readme(readme_content, version_number)
     # commit and push changes
-    update_files_at_master(gitpkg,
-        {
-          'README.rst': readme_content,
-          'version.txt': version_number
-          },
-        'Increased stable version to %s' % version_number, dry_run)
+    update_files_at_master(
+        gitpkg,
+        {"README.rst": readme_content, "version.txt": version_number},
+        "Increased stable version to %s" % version_number,
+        dry_run,
+    )
 
     if not dry_run:
         # cancel running the pipeline triggered by the last commit
@@ -477,10 +533,10 @@ def release_package(gitpkg, tag_name, tag_comments_list, dry_run=False):
 
     # 2. Tag package with new tag and push
     logger.info('Tagging "%s"', tag_name)
-    tag_comments = '\n'.join(tag_comments_list)
+    tag_comments = "\n".join(tag_comments_list)
     logger.debug("Updating tag comments with:\n%s", tag_comments)
     if not dry_run:
-        tag = gitpkg.tags.create({'tag_name': tag_name, 'ref': 'master'})
+        tag = gitpkg.tags.create({"tag_name": tag_name, "ref": "master"})
         # update tag with comments
         if tag_comments:
             tag.set_release_description(tag_comments)
@@ -491,21 +547,21 @@ def release_package(gitpkg, tag_name, tag_comments_list, dry_run=False):
     # 3. Replace branch tag in Readme to master, change version file to beta
     # version tag. Git add, commit, and push.
     readme_content = _update_readme(readme_content, None)
-    major, minor, patch = version_number.split('.')
-    version_number = '{}.{}.{}b0'.format(major, minor, int(patch)+1)
+    major, minor, patch = version_number.split(".")
+    version_number = "{}.{}.{}b0".format(major, minor, int(patch) + 1)
     # commit and push changes
-    update_files_at_master(gitpkg, {
-      'README.rst': readme_content,
-      'version.txt': version_number,
-      },
-      'Increased latest version to %s [skip ci]' % version_number, dry_run)
+    update_files_at_master(
+        gitpkg,
+        {"README.rst": readme_content, "version.txt": version_number},
+        "Increased latest version to %s [skip ci]" % version_number,
+        dry_run,
+    )
 
     return running_pipeline.id
 
 
-def parse_and_process_package_changelog(gl, gitpkg,
-    package_changelog, dry_run):
-    """Process the changelog of a single package
+def parse_and_process_package_changelog(gl, gitpkg, package_changelog, dry_run):
+    """Process the changelog of a single package.
 
     Parse the log following specific format.  Update annotations of the
     provided older tags and release the package by following the last tag
@@ -520,7 +576,6 @@ def parse_and_process_package_changelog(gl, gitpkg,
 
     Returns: the name of the latest tag, and tag's
     comments
-
     """
 
     cur_tag = None
@@ -529,7 +584,7 @@ def parse_and_process_package_changelog(gl, gitpkg,
     # we assume that changelog is formatted as structured text
     # first line is the name of the package
     for line in package_changelog:
-        if '  *' == line[:3]:  # a tag level
+        if "  *" == line[:3]:  # a tag level
             # write the comments collected for the previous tag
             if cur_tag:
                 update_tag_comments(gitpkg, cur_tag, cur_tag_comments, dry_run)
@@ -546,10 +601,12 @@ def parse_and_process_package_changelog(gl, gitpkg,
 
 
 def release_bob(changelog_file):
-    """Process the changelog and releases the ``bob`` metapackage"""
+    """Process the changelog and releases the ``bob`` metapackage."""
 
-    logger.info('Read the section "Releasing the Bob meta package" ' \
-        'on the documentation')
+    logger.info(
+        'Read the section "Releasing the Bob meta package" '
+        "on the documentation"
+    )
 
     # get the list of bob's dependencies.
     # Get their latest tags (since bob's last release) and the tag's changelog
@@ -558,18 +615,18 @@ def release_bob(changelog_file):
     latest_pkg = None
     for line in changelog_file:
         # if saw_a_new_package:
-        if line.startswith('*'):
+        if line.startswith("*"):
             pkg = line[2:].strip()
             saw_a_new_package = True
-            logger.info('%s == %s', latest_pkg, latest_tag)
+            logger.info("%s == %s", latest_pkg, latest_tag)
             latest_pkg = pkg
             latest_tag = None
             continue
-        if line.startswith('  *'):
+        if line.startswith("  *"):
             latest_tag = line.split()[1][1:]
         saw_a_new_package = False
-    logger.info('%s == %s', latest_pkg, latest_tag)
-    readme = open('../../bob/README.rst').read()
+    logger.info("%s == %s", latest_pkg, latest_tag)
+    readme = open("../../bob/README.rst").read()
     readme = _update_readme(readme, bob_version)
-    open('../../bob/README.rst', 'wt').write(readme)
-    open('../../bob/version.txt', 'wt').write(bob_version)
+    open("../../bob/README.rst", "wt").write(readme)
+    open("../../bob/version.txt", "wt").write(bob_version)
diff --git a/bob/devtools/scripts/bdt.py b/bob/devtools/scripts/bdt.py
index cb65ed19d2dd58016476d0c221897dd788d6b797..2943070b772ccd90e761cf95c512496b7fe342f1 100644
--- a/bob/devtools/scripts/bdt.py
+++ b/bob/devtools/scripts/bdt.py
@@ -1,8 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-"""Main entry point for bdt
-"""
+"""Main entry point for bdt."""
 
 import os
 import pkg_resources
@@ -11,29 +10,30 @@ import click
 from click_plugins import with_plugins
 
 from ..log import setup
-logger = setup('bob')
+
+logger = setup("bob")
 
 
 class AliasedGroup(click.Group):
-  ''' Class that handles prefix aliasing for commands '''
-  def get_command(self, ctx, cmd_name):
-    rv = click.Group.get_command(self, ctx, cmd_name)
-    if rv is not None:
-      return rv
-    matches = [x for x in self.list_commands(ctx)
-               if x.startswith(cmd_name)]
-    if not matches:
-      return None
-    elif len(matches) == 1:
-      return click.Group.get_command(self, ctx, matches[0])
-    ctx.fail('Too many matches: %s' % ', '.join(sorted(matches)))
+    """Class that handles prefix aliasing for commands."""
+
+    def get_command(self, ctx, cmd_name):
+        rv = click.Group.get_command(self, ctx, cmd_name)
+        if rv is not None:
+            return rv
+        matches = [x for x in self.list_commands(ctx) if x.startswith(cmd_name)]
+        if not matches:
+            return None
+        elif len(matches) == 1:
+            return click.Group.get_command(self, ctx, matches[0])
+        ctx.fail("Too many matches: %s" % ", ".join(sorted(matches)))
 
 
 def raise_on_error(view_func):
     """Raise a click exception if returned value is not zero.
 
-    Click exits successfully if anything is returned, in order to exit properly
-    when something went wrong an exception must be raised.
+    Click exits successfully if anything is returned; to exit properly
+    when something goes wrong, an exception must be raised.
     """
 
     from functools import wraps
@@ -45,19 +45,23 @@ def raise_on_error(view_func):
             exception.exit_code = value
             raise exception
         return value
+
     return wraps(view_func)(_decorator)
 
 
 # warning: must set LANG and LC_ALL before using click
 # see: https://click.palletsprojects.com/en/7.x/python3/
-if 'LANG' not in os.environ:
-  os.environ['LANG'] = 'en_US.UTF-8'
-if 'LC_ALL' not in os.environ:
-  os.environ['LC_ALL'] = 'en_US.UTF-8'
-
-@with_plugins(pkg_resources.iter_entry_points('bdt.cli'))
-@click.group(cls=AliasedGroup,
-             context_settings=dict(help_option_names=['-?', '-h', '--help']))
+if "LANG" not in os.environ:
+    os.environ["LANG"] = "en_US.UTF-8"
+if "LC_ALL" not in os.environ:
+    os.environ["LC_ALL"] = "en_US.UTF-8"
+
+
+@with_plugins(pkg_resources.iter_entry_points("bdt.cli"))
+@click.group(
+    cls=AliasedGroup,
+    context_settings=dict(help_option_names=["-?", "-h", "--help"]),
+)
 def main():
     """Bob Development Tools - see available commands below"""
 
@@ -65,5 +69,5 @@ def main():
     from ..bootstrap import set_environment
 
     # certificate setup: required for gitlab API interaction
-    set_environment('SSL_CERT_FILE', CACERT, os.environ)
-    set_environment('REQUESTS_CA_BUNDLE', CACERT, os.environ)
+    set_environment("SSL_CERT_FILE", CACERT, os.environ)
+    set_environment("REQUESTS_CA_BUNDLE", CACERT, os.environ)
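
The prefix aliasing in ``AliasedGroup`` above lets any unambiguous command
prefix resolve to the full command.  A self-contained sketch of the same
mechanism (the group and command names here are illustrative, not part of
bdt):

import click

class PrefixGroup(click.Group):
    """Stand-in for AliasedGroup: resolves unambiguous command prefixes."""

    def get_command(self, ctx, cmd_name):
        rv = click.Group.get_command(self, ctx, cmd_name)
        if rv is not None:
            return rv  # exact match wins
        matches = [x for x in self.list_commands(ctx) if x.startswith(cmd_name)]
        if len(matches) == 1:
            return click.Group.get_command(self, ctx, matches[0])
        if matches:
            ctx.fail("Too many matches: %s" % ", ".join(sorted(matches)))

@click.group(cls=PrefixGroup)
def cli():
    pass

@cli.command()
def build():
    click.echo("building")

@cli.command()
def bootstrap():
    click.echo("bootstrapping")

# "cli bu" resolves to build; "cli b" aborts with "Too many matches:
# bootstrap, build".
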
diff --git a/bob/devtools/scripts/build.py b/bob/devtools/scripts/build.py
index 9a79a64b6f1d7ce9e4613b0282446a590fb9059c..193b8e596a93f9fc8cda03d538c55d26e91f92d9 100644
--- a/bob/devtools/scripts/build.py
+++ b/bob/devtools/scripts/build.py
@@ -10,18 +10,33 @@ import pkg_resources
 import conda_build.api
 
 from . import bdt
-from ..build import next_build_number, conda_arch, should_skip_build, \
-    get_rendered_metadata, get_parsed_recipe, make_conda_config, \
-    get_docserver_setup, get_env_directory, get_output_path
-from ..constants import CONDA_BUILD_CONFIG, CONDA_RECIPE_APPEND, \
-    SERVER, MATPLOTLIB_RCDIR, BASE_CONDARC
+from ..build import (
+    next_build_number,
+    conda_arch,
+    should_skip_build,
+    get_rendered_metadata,
+    get_parsed_recipe,
+    make_conda_config,
+    get_docserver_setup,
+    get_env_directory,
+    get_output_path,
+)
+from ..constants import (
+    CONDA_BUILD_CONFIG,
+    CONDA_RECIPE_APPEND,
+    SERVER,
+    MATPLOTLIB_RCDIR,
+    BASE_CONDARC,
+)
 from ..bootstrap import set_environment, get_channels
 
 from ..log import verbosity_option, get_logger, echo_info
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Builds recipe from one of our build dependencies (inside bob.conda):
@@ -39,130 +54,219 @@ Examples:
   3. To build multiple recipes, just pass the paths to them:
 
      $ bdt build --python=3.6 -vv path/to/recipe-dir1 path/to/recipe-dir2
-''')
-@click.argument('recipe-dir', required=False, type=click.Path(file_okay=False,
-  dir_okay=True, exists=True), nargs=-1)
-@click.option('-p', '--python', default=('%d.%d' % sys.version_info[:2]),
-    show_default=True, help='Version of python to build the environment for')
-@click.option('-r', '--condarc',
-    help='Use custom conda configuration file instead of our own',)
-@click.option('-m', '--config', '--variant-config-files', show_default=True,
-    default=CONDA_BUILD_CONFIG, help='overwrites the path leading to ' \
-        'variant configuration file to use')
-@click.option('-n', '--no-test', is_flag=True,
-    help='Do not test the package, only builds it')
-@click.option('-a', '--append-file', show_default=True,
-    default=CONDA_RECIPE_APPEND, help='overwrites the path leading to ' \
-        'appended configuration file to use')
-@click.option('-S', '--server', show_default=True, default=SERVER,
-    help='Server used for downloading conda packages and documentation ' \
-        'indexes of required packages')
-@click.option('-g', '--group', show_default=True, default='bob',
-    help='Group of packages (gitlab namespace) this package belongs to')
-@click.option('-P', '--private/--no-private', default=False,
-    help='Set this to **include** private channels on your build - ' \
-        'you **must** be at Idiap to execute this build in this case - ' \
-        'you **must** also use the correct server name through --server - ' \
-        'notice this option has no effect to conda if you also pass --condarc')
-@click.option('-X', '--stable/--no-stable', default=False,
-    help='Set this to **exclude** beta channels from your build - ' \
-        'notice this option has no effect if you also pass --condarc')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
-@click.option('-C', '--ci/--no-ci', default=False, hidden=True,
-    help='Use this flag to indicate the build will be running on the CI')
+"""
+)
+@click.argument(
+    "recipe-dir",
+    required=False,
+    type=click.Path(file_okay=False, dir_okay=True, exists=True),
+    nargs=-1,
+)
+@click.option(
+    "-p",
+    "--python",
+    default=("%d.%d" % sys.version_info[:2]),
+    show_default=True,
+    help="Version of python to build the environment for",
+)
+@click.option(
+    "-r",
+    "--condarc",
+    help="Use custom conda configuration file instead of our own",
+)
+@click.option(
+    "-m",
+    "--config",
+    "--variant-config-files",
+    show_default=True,
+    default=CONDA_BUILD_CONFIG,
+    help="overwrites the path leading to " "variant configuration file to use",
+)
+@click.option(
+    "-n",
+    "--no-test",
+    is_flag=True,
+    help="Do not test the package, only builds it",
+)
+@click.option(
+    "-a",
+    "--append-file",
+    show_default=True,
+    default=CONDA_RECIPE_APPEND,
+    help="overwrites the path leading to " "appended configuration file to use",
+)
+@click.option(
+    "-S",
+    "--server",
+    show_default=True,
+    default=SERVER,
+    help="Server used for downloading conda packages and documentation "
+    "indexes of required packages",
+)
+@click.option(
+    "-g",
+    "--group",
+    show_default=True,
+    default="bob",
+    help="Group of packages (gitlab namespace) this package belongs to",
+)
+@click.option(
+    "-P",
+    "--private/--no-private",
+    default=False,
+    help="Set this to **include** private channels on your build - "
+    "you **must** be at Idiap to execute this build in this case - "
+    "you **must** also use the correct server name through --server - "
+    "notice this option has no effect to conda if you also pass --condarc",
+)
+@click.option(
+    "-X",
+    "--stable/--no-stable",
+    default=False,
+    help="Set this to **exclude** beta channels from your build - "
+    "notice this option has no effect if you also pass --condarc",
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
+@click.option(
+    "-C",
+    "--ci/--no-ci",
+    default=False,
+    hidden=True,
+    help="Use this flag to indicate the build will be running on the CI",
+)
 @verbosity_option()
 @bdt.raise_on_error
-def build(recipe_dir, python, condarc, config, no_test, append_file,
-    server, group, private, stable, dry_run, ci):
-  """Builds package through conda-build with stock configuration
-
-  This command wraps the execution of conda-build so that you use the same
-  conda configuration we use for our CI.  It always set
-  ``--no-anaconda-upload``.
-  """
-
-  # if we are in a dry-run mode, let's let it be known
-  if dry_run:
-    logger.warn('!!!! DRY RUN MODE !!!!')
-    logger.warn('Nothing will be really built')
-
-  recipe_dir = recipe_dir or [os.path.join(os.path.realpath('.'), 'conda')]
-
-  logger.debug('This package is considered part of group "%s" - tunning ' \
-      'conda package and documentation URLs for this...', group)
-
-  # get potential channel upload and other auxiliary channels
-  channels = get_channels(public=(not private), stable=stable, server=server,
-      intranet=ci, group=group)
-
-  if condarc is not None:
-    logger.info('Loading CONDARC file from %s...', condarc)
-    with open(condarc, 'rb') as f:
-      condarc_options = yaml.load(f, Loader=yaml.FullLoader)
-  else:
-    # use default and add channels
-    all_channels = []
-    all_channels += channels + ['defaults']
-    condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
-    logger.info('Using the following channels during build:\n  - %s',
-        '\n  - '.join(all_channels))
-    condarc_options['channels'] = all_channels
-
-  # dump packages at base environment
-  prefix = get_env_directory(os.environ['CONDA_EXE'], 'base')
-  condarc_options['croot'] = os.path.join(prefix, 'conda-bld')
-
-  conda_config = make_conda_config(config, python, append_file,
-      condarc_options)
-
-  set_environment('MATPLOTLIBRC', MATPLOTLIB_RCDIR)
-
-  # setup BOB_DOCUMENTATION_SERVER environment variable (used for bob.extension
-  # and derived documentation building via Sphinx)
-  set_environment('DOCSERVER', server)
-  doc_urls = get_docserver_setup(public=(not private), stable=stable,
-      server=server, intranet=ci, group=group)
-  set_environment('BOB_DOCUMENTATION_SERVER', doc_urls)
-
-  arch = conda_arch()
-
-  for d in recipe_dir:
-
-    if not os.path.exists(d):
-      raise RuntimeError("The directory %s does not exist" % recipe_dir)
-
-    version_candidate = os.path.join(d, '..', 'version.txt')
-    if os.path.exists(version_candidate):
-      version = open(version_candidate).read().rstrip()
-      set_environment('BOB_PACKAGE_VERSION', version)
-
-    # pre-renders the recipe - figures out the destination
-    metadata = get_rendered_metadata(d, conda_config)
-
-    # checks if we should actually build this recipe
-    if should_skip_build(metadata):
-      logger.info('Skipping UNSUPPORTED build of %s for %s', recipe_dir, arch)
-      continue
-
-    rendered_recipe = get_parsed_recipe(metadata)
-    path = get_output_path(metadata, conda_config)
-
-    # gets the next build number
-    build_number, _ = next_build_number(channels[0], os.path.basename(path))
-
-    logger.info('Building %s-%s-py%s (build: %d) for %s',
-        rendered_recipe['package']['name'],
-        rendered_recipe['package']['version'], python.replace('.',''),
-        build_number, arch)
-
-    if not dry_run:
-      # set $BOB_BUILD_NUMBER and force conda_build to reparse recipe to get it
-      # right
-      set_environment('BOB_BUILD_NUMBER', str(build_number))
-      paths = conda_build.api.build(d, config=conda_config, notest=no_test)
-      # if you get to this point, the package was successfully rebuilt
-      # set environment to signal caller we may dispose of it
-      os.environ['BDT_BUILD'] = ':'.join(paths)
+def build(
+    recipe_dir,
+    python,
+    condarc,
+    config,
+    no_test,
+    append_file,
+    server,
+    group,
+    private,
+    stable,
+    dry_run,
+    ci,
+):
+    """Builds package through conda-build with stock configuration.
+
+    This command wraps the execution of conda-build so that you use the
+    same conda configuration we use for our CI.  It always sets
+    ``--no-anaconda-upload``.
+    """
+
+    # if we are in a dry-run mode, let's let it be known
+    if dry_run:
+        logger.warn("!!!! DRY RUN MODE !!!!")
+        logger.warn("Nothing will be really built")
+
+    recipe_dir = recipe_dir or [os.path.join(os.path.realpath("."), "conda")]
+
+    logger.debug(
+        'This package is considered part of group "%s" - tuning '
+        "conda package and documentation URLs for this...",
+        group,
+    )
+
+    # get potential channel upload and other auxiliary channels
+    channels = get_channels(
+        public=(not private),
+        stable=stable,
+        server=server,
+        intranet=ci,
+        group=group,
+    )
+
+    if condarc is not None:
+        logger.info("Loading CONDARC file from %s...", condarc)
+        with open(condarc, "rb") as f:
+            condarc_options = yaml.load(f, Loader=yaml.FullLoader)
+    else:
+        # use default and add channels
+        all_channels = []
+        all_channels += channels + ["defaults"]
+        condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
+        logger.info(
+            "Using the following channels during build:\n  - %s",
+            "\n  - ".join(all_channels),
+        )
+        condarc_options["channels"] = all_channels
+
+    # dump packages at base environment
+    prefix = get_env_directory(os.environ["CONDA_EXE"], "base")
+    condarc_options["croot"] = os.path.join(prefix, "conda-bld")
+
+    conda_config = make_conda_config(
+        config, python, append_file, condarc_options
+    )
+
+    set_environment("MATPLOTLIBRC", MATPLOTLIB_RCDIR)
+
+    # setup BOB_DOCUMENTATION_SERVER environment variable (used for bob.extension
+    # and derived documentation building via Sphinx)
+    set_environment("DOCSERVER", server)
+    doc_urls = get_docserver_setup(
+        public=(not private),
+        stable=stable,
+        server=server,
+        intranet=ci,
+        group=group,
+    )
+    set_environment("BOB_DOCUMENTATION_SERVER", doc_urls)
+
+    arch = conda_arch()
+
+    for d in recipe_dir:
+
+        if not os.path.exists(d):
+            raise RuntimeError("The directory %s does not exist" % d)
+
+        version_candidate = os.path.join(d, "..", "version.txt")
+        if os.path.exists(version_candidate):
+            version = open(version_candidate).read().rstrip()
+            set_environment("BOB_PACKAGE_VERSION", version)
+
+        # pre-renders the recipe - figures out the destination
+        metadata = get_rendered_metadata(d, conda_config)
+
+        # checks if we should actually build this recipe
+        if should_skip_build(metadata):
+            logger.info(
+                "Skipping UNSUPPORTED build of %s for %s", d, arch
+            )
+            continue
+
+        rendered_recipe = get_parsed_recipe(metadata)
+        path = get_output_path(metadata, conda_config)
+
+        # gets the next build number
+        build_number, _ = next_build_number(channels[0], os.path.basename(path))
+
+        logger.info(
+            "Building %s-%s-py%s (build: %d) for %s",
+            rendered_recipe["package"]["name"],
+            rendered_recipe["package"]["version"],
+            python.replace(".", ""),
+            build_number,
+            arch,
+        )
+
+        if not dry_run:
+            # set $BOB_BUILD_NUMBER and force conda_build to reparse recipe to get it
+            # right
+            set_environment("BOB_BUILD_NUMBER", str(build_number))
+            paths = conda_build.api.build(
+                d, config=conda_config, notest=no_test
+            )
+            # if you get to this point, the package was successfully rebuilt
+            # set environment to signal caller we may dispose of it
+            os.environ["BDT_BUILD"] = ":".join(paths)
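
The condarc handling above follows one rule: a user-supplied file wins
wholesale; otherwise the baked-in ``BASE_CONDARC`` is loaded and the computed
channel list is spliced in, with ``defaults`` last.  A hedged sketch of just
that decision (``BASE_CONDARC`` and the channel URL below are stand-ins; the
real constant lives in ``bob.devtools.constants``):

import yaml

BASE_CONDARC = "channels:\n  - defaults\n"  # stand-in for the real constant

def condarc_for_build(condarc_path=None, channels=()):
    if condarc_path is not None:
        # a custom condarc replaces our defaults entirely
        with open(condarc_path, "rb") as f:
            return yaml.load(f, Loader=yaml.FullLoader)
    options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
    options["channels"] = list(channels) + ["defaults"]
    return options

print(condarc_for_build(channels=["http://www.idiap.ch/some-channel"]))
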
diff --git a/bob/devtools/scripts/caupdate.py b/bob/devtools/scripts/caupdate.py
index b6629a5ae09a56854a3f0135c965a9a90a97958b..9330c81bfb6809029b4f61e1b13c22f4adb474f5 100644
--- a/bob/devtools/scripts/caupdate.py
+++ b/bob/devtools/scripts/caupdate.py
@@ -7,10 +7,12 @@ import click
 from . import bdt
 
 from ..log import verbosity_option, get_logger
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Update the root certificate authority bundle on the distribution:
@@ -19,27 +21,28 @@ Examples:
      $ git status  #to check if bundle changed
      $ git commit -m '[data] Update CA bundle'  #if need be
 
-''')
+"""
+)
 @verbosity_option()
 @bdt.raise_on_error
 def caupdate():
-    """Updates the root certificate authority bundle on the distribution
+    """Updates the root certificate authority bundle on the distribution.
 
     This script will download the latest CA bundle from curl at
-    https://curl.haxx.se/ca/cacert.pem and will append Idiap's Root CA to the
-    bundle, so we can use https://gitlab.idiap.ch transparently.
+    https://curl.haxx.se/ca/cacert.pem and will append Idiap's Root CA
+    to the bundle, so we can use https://gitlab.idiap.ch transparently.
     """
 
     import requests
     from ..constants import CACERT, CACERT_URL, IDIAP_ROOT_CA
 
-    logger.info('Retrieving %s...', CACERT_URL)
+    logger.info("Retrieving %s...", CACERT_URL)
     r = requests.get(CACERT_URL, allow_redirects=True)
 
-    logger.info('Writing %s...', CACERT)
-    with open(CACERT, 'wb') as f:
-      f.write(r.content)
-      f.write(IDIAP_ROOT_CA)
+    logger.info("Writing %s...", CACERT)
+    with open(CACERT, "wb") as f:
+        f.write(r.content)
+        f.write(IDIAP_ROOT_CA)
 
-    logger.warn('CA bundle is updated')
-    logger.warn('Run git status, commit and push (if need be)')
+    logger.warn("CA bundle is updated")
+    logger.warn("Run git status, commit and push (if need be)")
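
The whole of ``caupdate`` reduces to a download-and-append, sketched here with
illustrative stand-ins for ``CACERT``, ``CACERT_URL`` and ``IDIAP_ROOT_CA``
(the real values live in ``bob.devtools.constants``):

import requests

CACERT_URL = "https://curl.haxx.se/ca/cacert.pem"
CACERT = "cacert.pem"  # output path - stand-in
IDIAP_ROOT_CA = b""  # extra root CA blob to append - stand-in

r = requests.get(CACERT_URL, allow_redirects=True)
r.raise_for_status()  # guard not present in the original
with open(CACERT, "wb") as f:
    f.write(r.content)
    f.write(IDIAP_ROOT_CA)
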
diff --git a/bob/devtools/scripts/changelog.py b/bob/devtools/scripts/changelog.py
index 1edb25ed66d90f2e13d82ddc92c0fb892220961c..186b4e35eb0a2886b48d4c6d9b68ea82ed7124d3 100644
--- a/bob/devtools/scripts/changelog.py
+++ b/bob/devtools/scripts/changelog.py
@@ -12,10 +12,12 @@ from ..changelog import parse_date
 from ..release import get_gitlab_instance
 
 from ..log import verbosity_option, get_logger
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Generates the changelog for a single package using merge requests:
@@ -47,25 +49,44 @@ Examples:
      $ bdt gitlab lasttag bob/bob
      # copy and paste date to next command
      $ bdt gitlab changelog --since="2018-07-17 10:23:40" order.txt changelog.md
-''')
-@click.argument('target')
-@click.argument('changelog', type=click.Path(exists=False, dir_okay=False,
-  file_okay=True, writable=True), required=False)
-@click.option('-g', '--group', default='bob', show_default=True,
-    help='Gitlab default group name where packages are located (if not ' \
-        'specified using a "/" on the package name - e.g. ' \
-        '"bob/bob.extension")')
-@click.option('-m', '--mode', type=click.Choice(['mrs', 'tags', 'commits']),
-    default='mrs', show_default=True,
-    help='Changes the way we produce the changelog.  By default, uses the ' \
-        'text in every merge request (mode "mrs"). To use tag annotations, ' \
-        'use mode "tags". If you use "commits" as mode, we use the text ' \
-        'in commits to produce the changelog')
-@click.option('-s', '--since',
-    help='A starting date in any format accepted by dateutil.parser.parse() ' \
-    '(see https://dateutil.readthedocs.io/en/stable/parser.html) from ' \
-    'which you want to generate the changelog.  If not set, the package\'s' \
-    'last release date will be used')
+"""
+)
+@click.argument("target")
+@click.argument(
+    "changelog",
+    type=click.Path(
+        exists=False, dir_okay=False, file_okay=True, writable=True
+    ),
+    required=False,
+)
+@click.option(
+    "-g",
+    "--group",
+    default="bob",
+    show_default=True,
+    help="Gitlab default group name where packages are located (if not "
+    'specified using a "/" on the package name - e.g. '
+    '"bob/bob.extension")',
+)
+@click.option(
+    "-m",
+    "--mode",
+    type=click.Choice(["mrs", "tags", "commits"]),
+    default="mrs",
+    show_default=True,
+    help="Changes the way we produce the changelog.  By default, uses the "
+    'text in every merge request (mode "mrs"). To use tag annotations, '
+    'use mode "tags". If you use "commits" as mode, we use the text '
+    "in commits to produce the changelog",
+)
+@click.option(
+    "-s",
+    "--since",
+    help="A starting date in any format accepted by dateutil.parser.parse() "
+    "(see https://dateutil.readthedocs.io/en/stable/parser.html) from "
+    "which you want to generate the changelog.  If not set, the package's "
+    "last release date will be used",
+)
 @verbosity_option()
 @bdt.raise_on_error
 def changelog(target, changelog, group, mode, since):
@@ -81,68 +102,84 @@ def changelog(target, changelog, group, mode, since):
     an existing file containing a list of packages that will be iterated on.
 
     For each package, we will contact the Gitlab server and create a changelog
-		using merge-requests (default), tags or commits since a given date.  If a
-		starting date is not passed, we'll use the date of the last tagged value or
-		the date of the first commit, if no tags are available in the package.
+    using merge-requests (default), tags or commits since a given date.  If a
+    starting date is not passed, we'll use the date of the last tagged value or
+    the date of the first commit, if no tags are available in the package.
     """
 
     gl = get_gitlab_instance()
 
     # reads package list or considers name to be a package name
     if os.path.exists(target) and os.path.isfile(target):
-        logger.info('Reading package names from file %s...', target)
-        with open(target, 'rt') as f:
-            packages = [k.strip() for k in f.readlines() if k.strip() and not \
-                k.strip().startswith('#')]
+        logger.info("Reading package names from file %s...", target)
+        with open(target, "rt") as f:
+            packages = [
+                k.strip()
+                for k in f.readlines()
+                if k.strip() and not k.strip().startswith("#")
+            ]
     else:
-        logger.info('Assuming %s is a package name (file does not exist)...',
-            target)
+        logger.info(
+            "Assuming %s is a package name (file does not exist)...", target
+        )
         packages = [target]
 
     # if the user passed a date, convert it
-    if since: since = parse_date(since)
+    if since:
+        since = parse_date(since)
 
     # iterates over the packages and dumps required information
     for package in packages:
 
-        if '/' not in package:
-            package = '/'.join((group, package))
+        if "/" not in package:
+            package = "/".join((group, package))
 
         # retrieves the gitlab package object
         use_package = gl.projects.get(package)
-        logger.info('Found gitlab project %s (id=%d)',
-            use_package.attributes['path_with_namespace'], use_package.id)
+        logger.info(
+            "Found gitlab project %s (id=%d)",
+            use_package.attributes["path_with_namespace"],
+            use_package.id,
+        )
 
         last_release_date = since or get_last_tag_date(use_package)
-        logger.info('Retrieving data (mode=%s) since %s', mode,
-            last_release_date.strftime('%b %d, %Y %H:%M'))
+        logger.info(
+            "Retrieving data (mode=%s) since %s",
+            mode,
+            last_release_date.strftime("%b %d, %Y %H:%M"),
+        )
 
         # add 1s to avoid us retrieving previous release data
         last_release_date += datetime.timedelta(seconds=1)
 
-        if mode == 'tags':
-            visibility = ('public',)
+        if mode == "tags":
+            visibility = ("public",)
         else:
-            visibility = ('public', 'private', 'internal')
+            visibility = ("public", "private", "internal")
 
-        if use_package.attributes['namespace'] == use_package.name:
+        if use_package.attributes["namespace"] == use_package.name:
             # skip system meta-package
-            logger.warn('Skipping meta package %s...',
-                use_package.attributes['path_with_namespace'])
+            logger.warn(
+                "Skipping meta package %s...",
+                use_package.attributes["path_with_namespace"],
+            )
             continue
 
-        if use_package.attributes['visibility'] not in visibility:
-            logger.warn('Skipping package %s (visibility not in "%s")...',
-                use_package.attributes['path_with_namespace'],
-                '|'.join(visibility))
+        if use_package.attributes["visibility"] not in visibility:
+            logger.warn(
+                'Skipping package %s (visibility not in "%s")...',
+                use_package.attributes["path_with_namespace"],
+                "|".join(visibility),
+            )
             continue
 
-        if (not changelog) or (changelog == '-'):
-          changelog_file = sys.stdout
+        if (not changelog) or (changelog == "-"):
+            changelog_file = sys.stdout
         else:
-          changelog_file = open(changelog, 'at')
+            changelog_file = open(changelog, "at")
 
         # write_tags(f, use_package, last_release_date)
-        write_tags_with_commits(changelog_file, use_package, last_release_date,
-          mode)
+        write_tags_with_commits(
+            changelog_file, use_package, last_release_date, mode
+        )
         changelog_file.flush()
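
The target handling above accepts either a single package name or a text file
listing one package per line: blank lines and ``#`` comments are skipped, and
bare names gain the default group prefix.  A compact sketch of that parsing
(the helper name is hypothetical):

import os

def resolve_packages(target, group="bob"):
    if os.path.isfile(target):
        with open(target, "rt") as f:
            packages = [
                k.strip()
                for k in f.readlines()
                if k.strip() and not k.strip().startswith("#")
            ]
    else:
        packages = [target]  # treat the argument as a package name
    # names without a "/" belong to the default group
    return [p if "/" in p else "/".join((group, p)) for p in packages]
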
diff --git a/bob/devtools/scripts/ci.py b/bob/devtools/scripts/ci.py
index da4473971353a9ba4f895b9c14c35434b4d2e4d7..cffe0c09ec437a8a4317c4ecf05ad10f2b031a64 100644
--- a/bob/devtools/scripts/ci.py
+++ b/bob/devtools/scripts/ci.py
@@ -13,74 +13,97 @@ from click_plugins import with_plugins
 from . import bdt
 from ..constants import SERVER, WEBDAV_PATHS, BASE_CONDARC
 from ..deploy import deploy_conda_package, deploy_documentation
-from ..ci import read_packages, comment_cleanup, uniq, \
-    select_conda_build_config, select_conda_recipe_append, select_user_condarc
+from ..build import comment_cleanup, load_order_file
+from ..ci import (
+    read_packages,
+    uniq,
+    select_conda_build_config,
+    select_conda_recipe_append,
+    select_user_condarc,
+)
 
 from ..log import verbosity_option, get_logger, echo_normal
+
 logger = get_logger(__name__)
 
 
-@with_plugins(pkg_resources.iter_entry_points('bdt.ci.cli'))
+@with_plugins(pkg_resources.iter_entry_points("bdt.ci.cli"))
 @click.group(cls=bdt.AliasedGroup)
 def ci():
-  """Commands for building packages and handling CI activities
+    """Commands for building packages and handling CI activities.
 
-  Commands defined here are supposed to run on our CI, where a number of
-  variables that define their behavior is correctly defined.  Do **NOT**
-  attempt to run these commands in your own installation.  Unexpected errors
-  may occur.
-  """
-  pass
+    Commands defined here are supposed to run on our CI, where a number
+    of variables that define their behavior are correctly defined.  Do
+    **NOT** attempt to run these commands in your own installation.
+    Unexpected errors may occur.
+    """
+    pass
 
 
-@ci.command(epilog='''
+@ci.command(
+    epilog="""
 Examples:
 
   1. Deploys base build artifacts (dependencies) to the appropriate channels:
 
      $ bdt ci base-deploy -vv
 
-''')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
 @verbosity_option()
 @bdt.raise_on_error
 def base_deploy(dry_run):
-    """Deploys dependencies not available at the defaults channel
+    """Deploys dependencies not available at the defaults channel.
 
     Deployment happens to our public channel directly, as these are
-    dependencies are required for proper bob/beat package runtime environments.
+    dependencies required for proper bob/beat package runtime
+    environments.
     """
 
     if dry_run:
-        logger.warn('!!!! DRY RUN MODE !!!!')
-        logger.warn('Nothing is being deployed to server')
+        logger.warn("!!!! DRY RUN MODE !!!!")
+        logger.warn("Nothing is being deployed to server")
 
-    package = os.environ['CI_PROJECT_PATH']
-    group, name = package.split('/')
+    package = os.environ["CI_PROJECT_PATH"]
+    group, name = package.split("/")
 
     # deploys all conda package artefacts currently available (erases them
     # afterwards)
-    for arch in ('linux-64', 'osx-64', 'noarch'):
-      # finds conda dependencies and uploads what we can find
-      package_path = os.path.join(os.environ['CONDA_ROOT'], 'conda-bld', arch,
-          '*.tar.bz2')
-      deploy_packages = glob.glob(package_path)
+    for arch in ("linux-64", "osx-64", "noarch"):
+        # finds conda dependencies and uploads what we can find
+        package_path = os.path.join(
+            os.environ["CONDA_ROOT"], "conda-bld", arch, "*.tar.bz2"
+        )
+        deploy_packages = glob.glob(package_path)
 
-      for k in deploy_packages:
+        for k in deploy_packages:
 
-        if os.path.basename(k).startswith(name):
-          logger.debug('Skipping deploying of %s - not a base package', k)
-          continue
+            if os.path.basename(k).startswith(name):
+                logger.debug("Skipping deploying of %s - not a base package", k)
+                continue
 
-        deploy_conda_package(k, arch=arch, stable=True, public=True,
-            username=os.environ['DOCUSER'], password=os.environ['DOCPASS'],
-            overwrite=False, dry_run=dry_run)
+            deploy_conda_package(
+                k,
+                arch=arch,
+                stable=True,
+                public=True,
+                username=os.environ["DOCUSER"],
+                password=os.environ["DOCPASS"],
+                overwrite=False,
+                dry_run=dry_run,
+            )
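
The selection rule in ``base_deploy`` is worth spelling out: every artefact
under ``conda-bld/<arch>`` is deployed except those of the package being
built, which the regular deploy step handles.  A sketch (the package name and
fallback path are illustrative):

import glob
import os

name = "bob.devtools"  # illustrative - normally derived from $CI_PROJECT_PATH
for arch in ("linux-64", "osx-64", "noarch"):
    pattern = os.path.join(
        os.environ.get("CONDA_ROOT", "."), "conda-bld", arch, "*.tar.bz2"
    )
    for k in glob.glob(pattern):
        if os.path.basename(k).startswith(name):
            continue  # not a base package - skip it
        print("would deploy", k)
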
 
 
-@ci.command(epilog='''
+@ci.command(
+    epilog="""
 Examples:
 
   1. Deploys current build artifacts to the appropriate channels:
@@ -92,19 +115,28 @@ Examples:
 
      $ bdt ci deploy -vv --no-latest
 
-''')
-@click.option('-n', '--latest/--no-latest', default=True,
-    help='If set (the default), for stable builds, deploy documentation ' \
-        'to both "stable" and "master" branches, besides "<branch>" and ' \
-        '"<tag>" - otherwise, only deploys documentation to "<branch>" ' \
-        'and "<tag>".  This option is useful if you are publishing ' \
-        'corrections of a release from a stable branch which is **NOT** ' \
-        'the master branch, so you would not like to overwrite ' \
-        'documentation deployments for "stable" and "master"')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
+@click.option(
+    "-n",
+    "--latest/--no-latest",
+    default=True,
+    help="If set (the default), for stable builds, deploy documentation "
+    'to both "stable" and "master" branches, besides "<branch>" and '
+    '"<tag>" - otherwise, only deploys documentation to "<branch>" '
+    'and "<tag>".  This option is useful if you are publishing '
+    "corrections of a release from a stable branch which is **NOT** "
+    "the master branch, so you would not like to overwrite "
+    'documentation deployments for "stable" and "master"',
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
 @verbosity_option()
 @bdt.raise_on_error
 def deploy(latest, dry_run):
@@ -120,36 +152,53 @@ def deploy(latest, dry_run):
     """
 
     if dry_run:
-        logger.warn('!!!! DRY RUN MODE !!!!')
-        logger.warn('Nothing is being deployed to server')
+        logger.warn("!!!! DRY RUN MODE !!!!")
+        logger.warn("Nothing is being deployed to server")
 
-    package = os.environ['CI_PROJECT_PATH']
-    group, name = package.split('/')
+    package = os.environ["CI_PROJECT_PATH"]
+    group, name = package.split("/")
 
     # determine if building branch or tag, and project visibility
-    stable = ('CI_COMMIT_TAG' in os.environ)
-    public = (os.environ['CI_PROJECT_VISIBILITY'] == 'public')
+    stable = "CI_COMMIT_TAG" in os.environ
+    public = os.environ["CI_PROJECT_VISIBILITY"] == "public"
 
     # deploys all conda package artefacts currently available (erases them
     # afterwards)
-    for arch in ('linux-64', 'osx-64', 'noarch'):
-      # finds conda packages and uploads what we can find
-      package_path = os.path.join(os.environ['CONDA_ROOT'], 'conda-bld', arch,
-          name + '*.tar.bz2')
-      deploy_packages = glob.glob(package_path)
-      for k in deploy_packages:
-        deploy_conda_package(k, arch=arch, stable=stable, public=public,
-            username=os.environ['DOCUSER'], password=os.environ['DOCPASS'],
-            overwrite=False, dry_run=dry_run)
-
-    local_docs = os.path.join(os.environ['CI_PROJECT_DIR'], 'sphinx')
-    deploy_documentation(local_docs, package, stable=stable, latest=latest,
-        public=public, branch=os.environ['CI_COMMIT_REF_NAME'],
-        tag=os.environ.get('CI_COMMIT_TAG'), username=os.environ['DOCUSER'],
-        password=os.environ['DOCPASS'], dry_run=dry_run)
-
-
-@ci.command(epilog='''
+    for arch in ("linux-64", "osx-64", "noarch"):
+        # finds conda packages and uploads what we can find
+        package_path = os.path.join(
+            os.environ["CONDA_ROOT"], "conda-bld", arch, name + "*.tar.bz2"
+        )
+        deploy_packages = glob.glob(package_path)
+        for k in deploy_packages:
+            deploy_conda_package(
+                k,
+                arch=arch,
+                stable=stable,
+                public=public,
+                username=os.environ["DOCUSER"],
+                password=os.environ["DOCPASS"],
+                overwrite=False,
+                dry_run=dry_run,
+            )
+
+    local_docs = os.path.join(os.environ["CI_PROJECT_DIR"], "sphinx")
+    deploy_documentation(
+        local_docs,
+        package,
+        stable=stable,
+        latest=latest,
+        public=public,
+        branch=os.environ["CI_COMMIT_REF_NAME"],
+        tag=os.environ.get("CI_COMMIT_TAG"),
+        username=os.environ["DOCUSER"],
+        password=os.environ["DOCPASS"],
+        dry_run=dry_run,
+    )
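
Two CI variables drive everything in ``deploy``: a tag marks the build stable,
and project visibility gates the public channels.  Unlike ``base-deploy``,
only the package's own artefacts are matched.  Reduced to its essence (the
fallback values are illustrative):

import glob
import os

stable = "CI_COMMIT_TAG" in os.environ  # tag => stable channel
public = os.environ.get("CI_PROJECT_VISIBILITY") == "public"
name = os.environ.get("CI_PROJECT_PATH", "bob/bob.devtools").split("/")[1]

pattern = os.path.join(
    os.environ.get("CONDA_ROOT", "."), "conda-bld", "*", name + "*.tar.bz2"
)
print(stable, public, glob.glob(pattern))
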
+
+
+@ci.command(
+    epilog="""
 Examples:
 
   1. Checks the long description of setup.py (correctly parseable and will
@@ -158,91 +207,113 @@ Examples:
 
      $ bdt ci readme -vv dist/*.zip
 
-''')
-@click.argument('package', required=True, type=click.Path(file_okay=True,
-  dir_okay=False, exists=True), nargs=-1)
+"""
+)
+@click.argument(
+    "package",
+    required=True,
+    type=click.Path(file_okay=True, dir_okay=False, exists=True),
+    nargs=-1,
+)
 @verbosity_option()
 @bdt.raise_on_error
 def readme(package):
-    """Checks setup.py's ``long_description`` syntax
+    """Checks setup.py's ``long_description`` syntax.
 
-    This program checks the syntax of the contents of the ``long_description``
-    field at the package's ``setup()`` function.  It verifies it will be
-    correctly displayed at PyPI.
+    This program checks the syntax of the contents of the
+    ``long_description`` field at the package's ``setup()`` function.
+    It verifies it will be correctly displayed at PyPI.
     """
 
     for k in package:
 
-      logger.info('Checking python package %s', k)
-      #twine check dist/*.zip
+        logger.info("Checking python package %s", k)
+        # twine check dist/*.zip
+
+        from twine.commands.check import check
+
+        failed = check([k])
 
-      from twine.commands.check import check
-      failed = check([k])
+        if failed:
+            raise RuntimeError(
+                "twine check (a.k.a. readme check) %s: FAILED" % k
+            )
+        else:
+            logger.info("twine check (a.k.a. readme check) %s: OK", k)
 
-      if failed:
-        raise RuntimeError('twine check (a.k.a. readme check) %s: FAILED' % k)
-      else:
-        logger.info('twine check (a.k.a. readme check) %s: OK', k)
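
The readme check above is ``twine check`` driven through its Python API;
``check()`` returns a truthy value when any distribution fails to render.  A
sketch (the archive path is a placeholder):

from twine.commands.check import check

failed = check(["dist/sample-1.0.zip"])  # placeholder path
if failed:
    raise RuntimeError("twine check (a.k.a. readme check) FAILED")
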
 
-@ci.command(epilog='''
+@ci.command(
+    epilog="""
 Examples:
 
   1. Deploys current build artifacts to the Python Package Index (PyPI):
 
      $ bdt ci pypi -vv dist/*.zip
 
-''')
-@click.argument('package', required=True, type=click.Path(file_okay=True,
-  dir_okay=False, exists=True), nargs=-1)
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
+@click.argument(
+    "package",
+    required=True,
+    type=click.Path(file_okay=True, dir_okay=False, exists=True),
+    nargs=-1,
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
 @verbosity_option()
 @bdt.raise_on_error
 def pypi(package, dry_run):
     """Deploys build artifacts (python packages) to PyPI.
 
     Deployment is only allowed for packages in which the visibility is
-    "public".  This check prevents publishing of private resources to the
-    (public) PyPI webserver.
+    "public".  This check prevents publishing of private resources to
+    the (public) PyPI webserver.
     """
 
     if dry_run:
-        logger.warn('!!!! DRY RUN MODE !!!!')
-        logger.warn('Nothing is being deployed to server')
+        logger.warn("!!!! DRY RUN MODE !!!!")
+        logger.warn("Nothing is being deployed to server")
 
     # determine project visibility
-    public = (os.environ['CI_PROJECT_VISIBILITY'] == 'public')
+    public = os.environ["CI_PROJECT_VISIBILITY"] == "public"
 
     if not public:
-      raise RuntimeError('The repository %s is not public - a package ' \
-          'deriving from it therefore, CANNOT be published to PyPI. ' \
-          'You must follow the relevant software disclosure procedures ' \
-          'and set this repository to "public" before trying again.' % \
-          os.environ['CI_PROJECT_PATH'])
+        raise RuntimeError(
+            "The repository %s is not public - a package "
+            "deriving from it, therefore, CANNOT be published to PyPI. "
+            "You must follow the relevant software disclosure procedures "
+            'and set this repository to "public" before trying again.'
+            % os.environ["CI_PROJECT_PATH"]
+        )
 
     from ..constants import CACERT
     from twine.settings import Settings
 
     settings = Settings(
-        username=os.environ['PYPIUSER'],
-        password=os.environ['PYPIPASS'],
+        username=os.environ["PYPIUSER"],
+        password=os.environ["PYPIPASS"],
         skip_existing=True,
         cacert=CACERT,
-        )
+    )
 
     if not dry_run:
-      from twine.commands.upload import upload
+        from twine.commands.upload import upload
 
-      for k in package:
+        for k in package:
 
-        logger.info('Deploying python package %s to PyPI', k)
-        upload(settings, [k])
-        logger.info('%s: Deployed to PyPI - OK', k)
+            logger.info("Deploying python package %s to PyPI", k)
+            upload(settings, [k])
+            logger.info("%s: Deployed to PyPI - OK", k)
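
And the PyPI step, minus the CI plumbing, assuming the same twine
``Settings``/``upload`` API used above (credentials and the archive path are
placeholders):

import os
from twine.settings import Settings
from twine.commands.upload import upload

settings = Settings(
    username=os.environ["PYPIUSER"],
    password=os.environ["PYPIPASS"],
    skip_existing=True,  # an already-published version is not an error
)
upload(settings, ["dist/sample-1.0.zip"])  # placeholder path
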
 
 
-@ci.command(epilog='''
+@ci.command(
+    epilog="""
 Examples:
 
   1. Builds a list of non-python packages (base dependencies) defined in a text
@@ -256,482 +327,622 @@ Examples:
 
      $ bdt ci base-build -vv --python=3.6 --python=3.7 order.txt
 
-''')
-@click.argument('order', required=True, type=click.Path(file_okay=True,
-  dir_okay=False, exists=True), nargs=1)
-@click.option('-g', '--group', show_default=True, default='bob',
-    help='Group of packages (gitlab namespace) this package belongs to')
-@click.option('-p', '--python', multiple=True,
-    help='Versions of python in the format "x.y" we should build for.  Pass ' \
-        'various times this option to build for multiple python versions')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
+@click.argument(
+    "order",
+    required=True,
+    type=click.Path(file_okay=True, dir_okay=False, exists=True),
+    nargs=1,
+)
+@click.option(
+    "-g",
+    "--group",
+    show_default=True,
+    default="bob",
+    help="Group of packages (gitlab namespace) this package belongs to",
+)
+@click.option(
+    "-p",
+    "--python",
+    multiple=True,
+    help='Versions of python in the format "x.y" we should build for.  Pass '
+    "various times this option to build for multiple python versions",
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
 @verbosity_option()
 @bdt.raise_on_error
 def base_build(order, group, python, dry_run):
-  """Builds base (dependence) packages
-
-  This command builds dependence packages (packages that are not Bob/BEAT
-  packages) in the CI infrastructure.  It is **not** meant to be used outside
-  this context.
-  """
-
-  condarc = select_user_condarc(paths=[os.curdir],
-        branch=os.environ.get('CI_COMMIT_REF_NAME'))
-
-  condarc = condarc or os.path.join(os.environ['CONDA_ROOT'], 'condarc')
-
-  if os.path.exists(condarc):
-    logger.info('Loading (this build\'s) CONDARC file from %s...', condarc)
-    with open(condarc, 'rb') as f:
-      condarc_options = yaml.load(f, Loader=yaml.FullLoader)
-
-  else:  #not building on the CI? - use defaults
-    from ..bootstrap import get_channels
-
-    # get potential channel upload and other auxiliary channels
-    channels = get_channels(public=True, stable=True, server=SERVER,
-        intranet='True', group='bob')
-
-    # use default and add channels
-    condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
-    channels = ['local'] + channels + ['defaults']
-    logger.info('Using the following channels during build:\n  - %s',
-        '\n  - '.join(channels))
-    condarc_options['channels'] = channels
-
-  # dump packages at conda_root
-  condarc_options['croot'] = os.path.join(os.environ['CONDA_ROOT'],
-      'conda-bld')
-
-  # loads dirnames from order file (accepts # comments and empty lines)
-  recipes = []
-  with open(order, 'rt') as f:
-    for line in f:
-      line = line.partition('#')[0].strip()
-      if line: recipes.append(line)
-
-  import itertools
-  from .. import bootstrap
-  from ..build import base_build as _build
-
-  # combine all versions of python with recipes
-  if python:
-    recipes = list(itertools.product(python, recipes))
-  else:
-    recipes = list(itertools.product([None], recipes))
-
-  for order, (pyver, recipe) in enumerate(recipes):
-    echo_normal('\n' + (80*'='))
-    pytext = 'for python-%s ' % pyver if pyver is not None else ''
-    echo_normal('Building "%s" %s(%d/%d)' % \
-        (recipe, pytext, order+1, len(recipes)))
-    echo_normal((80*'=') + '\n')
-    if not os.path.exists(os.path.join(recipe, 'meta.yaml')):
-      logger.info('Ignoring directory "%s" - no meta.yaml found' % recipe)
-      continue
-
-    variants_file = select_conda_build_config(paths=[recipe, os.curdir],
-        branch=os.environ.get('CI_COMMIT_REF_NAME'))
-    logger.info('Conda build configuration file: %s', variants_file)
-
-    _build(
-        bootstrap=bootstrap,
-        server=SERVER,
-        intranet=True,
-        group=group,
-        recipe_dir=recipe,
-        conda_build_config=variants_file,
-        python_version=pyver,
-        condarc_options=condarc_options,
+    """Builds base (dependency) packages.
+
+    This command builds dependency packages (packages that are not
+    Bob/BEAT packages) in the CI infrastructure.  It is **not** meant to
+    be used outside this context.
+    """
+
+    condarc = select_user_condarc(
+        paths=[os.curdir], branch=os.environ.get("CI_COMMIT_REF_NAME")
+    )
+
+    condarc = condarc or os.path.join(os.environ["CONDA_ROOT"], "condarc")
+
+    if os.path.exists(condarc):
+        logger.info("Loading (this build's) CONDARC file from %s...", condarc)
+        with open(condarc, "rb") as f:
+            condarc_options = yaml.load(f, Loader=yaml.FullLoader)
+
+    else:  # not building on the CI? - use defaults
+        from ..bootstrap import get_channels
+
+        # get potential channel upload and other auxiliary channels
+        channels = get_channels(
+            public=True,
+            stable=True,
+            server=SERVER,
+            intranet="True",
+            group="bob",
         )
 
+        # use default and add channels
+        condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
+        channels = ["local"] + channels + ["defaults"]
+        logger.info(
+            "Using the following channels during build:\n  - %s",
+            "\n  - ".join(channels),
+        )
+        condarc_options["channels"] = channels
+
+    # dump packages at conda_root
+    condarc_options["croot"] = os.path.join(
+        os.environ["CONDA_ROOT"], "conda-bld"
+    )
+
+    recipes = load_order_file(order)
+
+    import itertools
+    from .. import bootstrap
+    from ..build import base_build as _build
+
+    # combine all versions of python with recipes
+    if python:
+        recipes = list(itertools.product(python, recipes))
+    else:
+        recipes = list(itertools.product([None], recipes))
+
+    for k, (pyver, recipe) in enumerate(recipes):
+        echo_normal("\n" + (80 * "="))
+        pytext = "for python-%s " % pyver if pyver is not None else ""
+        echo_normal(
+            'Building "%s" %s(%d/%d)'
+            % (recipe, pytext, k + 1, len(recipes))
+        )
+        echo_normal((80 * "=") + "\n")
+        if not os.path.exists(os.path.join(recipe, "meta.yaml")):
+            logger.info('Ignoring directory "%s" - no meta.yaml found' % recipe)
+            continue
+
+        variants_file = select_conda_build_config(
+            paths=[recipe, os.curdir],
+            branch=os.environ.get("CI_COMMIT_REF_NAME"),
+        )
+        logger.info("Conda build configuration file: %s", variants_file)
+
+        _build(
+            bootstrap=bootstrap,
+            server=SERVER,
+            intranet=True,
+            group=group,
+            recipe_dir=recipe,
+            conda_build_config=variants_file,
+            python_version=pyver,
+            condarc_options=condarc_options,
+        )
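
The ``(pyver, recipe)`` expansion in ``base_build`` is a plain cartesian
product, with ``None`` standing in when no python versions were requested:

import itertools

def expand(recipes, python_versions):
    versions = python_versions or [None]
    return list(itertools.product(versions, recipes))

assert expand(["conda/a", "conda/b"], ["3.6", "3.7"]) == [
    ("3.6", "conda/a"), ("3.6", "conda/b"),
    ("3.7", "conda/a"), ("3.7", "conda/b"),
]
assert expand(["conda/a"], []) == [(None, "conda/a")]
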
 
-@ci.command(epilog='''
+
+@ci.command(
+    epilog="""
 Examples:
 
   1. Tests the current package
 
      $ bdt ci test -vv
 
-''')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
 @verbosity_option()
 @bdt.raise_on_error
 @click.pass_context
 def test(ctx, dry_run):
-  """Tests packages
-
-  This command tests packages in the CI infrastructure.  It is **not** meant
-  to be used outside this context.
-  """
-
-  group = os.environ['CI_PROJECT_NAMESPACE']
-  if group not in ('bob', 'beat'):
-    # defaults back to bob - no other server setups are available as of now
-    group = 'bob'
-
-  # Use custom variants and append files if available on recipe-dir
-  recipe_dir = os.path.join(os.path.realpath(os.curdir), 'conda')
-
-  condarc = select_user_condarc(paths=[recipe_dir, os.curdir],
-        branch=os.environ.get('CI_COMMIT_REF_NAME'))
-  if condarc is not None:
-    logger.info('Condarc configuration file: %s', condarc)
-
-  variants_file = select_conda_build_config(paths=[recipe_dir, os.curdir],
-      branch=os.environ.get('CI_COMMIT_REF_NAME'))
-  logger.info('Conda build configuration file: %s', variants_file)
-
-  append_file = select_conda_recipe_append(paths=[recipe_dir, os.curdir],
-      branch=os.environ.get('CI_COMMIT_REF_NAME'))
-  logger.info('Conda build recipe-append file: %s', append_file)
-
-  from .test import test
-  ctx.invoke(test,
-      package = glob.glob(os.path.join(os.environ['CONDA_ROOT'], 'conda-bld',
-        '*', os.environ['CI_PROJECT_NAME'] + '*.tar.bz2')),
-      condarc=condarc,
-      config=variants_file,
-      append_file=append_file,
-      server=SERVER,
-      group=group,
-      private=(os.environ['CI_PROJECT_VISIBILITY'] != 'public'),
-      stable='CI_COMMIT_TAG' in os.environ,
-      dry_run=dry_run,
-      ci=True,
-      )
-
-
-@ci.command(epilog='''
+    """Tests packages.
+
+    This command tests packages in the CI infrastructure.  It is **not**
+    meant to be used outside this context.
+    """
+
+    group = os.environ["CI_PROJECT_NAMESPACE"]
+    if group not in ("bob", "beat"):
+        # defaults back to bob - no other server setups are available as of now
+        group = "bob"
+
+    # Use custom variants and append files if available on recipe-dir
+    recipe_dir = os.path.join(os.path.realpath(os.curdir), "conda")
+
+    condarc = select_user_condarc(
+        paths=[recipe_dir, os.curdir],
+        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+    )
+    if condarc is not None:
+        logger.info("Condarc configuration file: %s", condarc)
+
+    variants_file = select_conda_build_config(
+        paths=[recipe_dir, os.curdir],
+        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+    )
+    logger.info("Conda build configuration file: %s", variants_file)
+
+    append_file = select_conda_recipe_append(
+        paths=[recipe_dir, os.curdir],
+        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+    )
+    logger.info("Conda build recipe-append file: %s", append_file)
+
+    from .test import test
+
+    ctx.invoke(
+        test,
+        package=glob.glob(
+            os.path.join(
+                os.environ["CONDA_ROOT"],
+                "conda-bld",
+                "*",
+                os.environ["CI_PROJECT_NAME"] + "*.tar.bz2",
+            )
+        ),
+        condarc=condarc,
+        config=variants_file,
+        append_file=append_file,
+        server=SERVER,
+        group=group,
+        private=(os.environ["CI_PROJECT_VISIBILITY"] != "public"),
+        stable="CI_COMMIT_TAG" in os.environ,
+        dry_run=dry_run,
+        ci=True,
+    )
+
+
+@ci.command(
+    epilog="""
 Examples:
 
   1. Builds the current package
 
      $ bdt ci build -vv
 
-''')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
 @click.option(
-  '-r',
-  '--recipe-dir',
-  default=os.path.join(os.path.realpath(os.curdir), 'conda'),
-  help="Custom recipe folder for build. Useful for debugging."
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
+@click.option(
+    "-r",
+    "--recipe-dir",
+    default=os.path.join(os.path.realpath(os.curdir), "conda"),
+    help="Custom recipe folder for build. Useful for debugging.",
 )
 @verbosity_option()
 @bdt.raise_on_error
 @click.pass_context
 def build(ctx, dry_run, recipe_dir):
-  """Builds packages
-
-  This command builds packages in the CI infrastructure.  It is **not** meant
-  to be used outside this context.
-  """
-
-  group = os.environ['CI_PROJECT_NAMESPACE']
-  if group not in ('bob', 'beat'):
-    # defaults back to bob - no other server setups are available as of now
-    group = 'bob'
-
-  # Use custom variants and append files if available on recipe-dir
-  condarc = select_user_condarc(paths=[recipe_dir, os.curdir],
-        branch=os.environ.get('CI_COMMIT_REF_NAME'))
-  if condarc is not None:
-    logger.info('Condarc configuration file: %s', condarc)
-
-  variants_file = select_conda_build_config(paths=[recipe_dir, os.curdir],
-      branch=os.environ.get('CI_COMMIT_REF_NAME'))
-  logger.info('Conda build configuration file: %s', variants_file)
-
-  append_file = select_conda_recipe_append(paths=[recipe_dir, os.curdir],
-      branch=os.environ.get('CI_COMMIT_REF_NAME'))
-  logger.info('Conda build recipe-append file: %s', append_file)
-
-  from .build import build
-  ctx.invoke(build,
-      recipe_dir=[recipe_dir],
-      python=os.environ['PYTHON_VERSION'],  #python version
-      condarc=condarc,
-      config=variants_file,
-      no_test=False,
-      append_file=append_file,
-      server=SERVER,
-      group=group,
-      private=(os.environ['CI_PROJECT_VISIBILITY'] != 'public'),
-      stable='CI_COMMIT_TAG' in os.environ,
-      dry_run=dry_run,
-      ci=True,
-      )
-
-
-@ci.command(epilog='''
+    """Builds packages.
+
+    This command builds packages in the CI infrastructure.  It is
+    **not** meant to be used outside this context.
+    """
+
+    group = os.environ["CI_PROJECT_NAMESPACE"]
+    if group not in ("bob", "beat"):
+        # defaults back to bob - no other server setups are available as of now
+        group = "bob"
+
+    # Use custom variants and append files if available on recipe-dir
+    condarc = select_user_condarc(
+        paths=[recipe_dir, os.curdir],
+        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+    )
+    if condarc is not None:
+        logger.info("Condarc configuration file: %s", condarc)
+
+    variants_file = select_conda_build_config(
+        paths=[recipe_dir, os.curdir],
+        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+    )
+    logger.info("Conda build configuration file: %s", variants_file)
+
+    append_file = select_conda_recipe_append(
+        paths=[recipe_dir, os.curdir],
+        branch=os.environ.get("CI_COMMIT_REF_NAME"),
+    )
+    logger.info("Conda build recipe-append file: %s", append_file)
+
+    from .build import build
+
+    ctx.invoke(
+        build,
+        recipe_dir=[recipe_dir],
+        python=os.environ["PYTHON_VERSION"],  # python version
+        condarc=condarc,
+        config=variants_file,
+        no_test=False,
+        append_file=append_file,
+        server=SERVER,
+        group=group,
+        private=(os.environ["CI_PROJECT_VISIBILITY"] != "public"),
+        stable="CI_COMMIT_TAG" in os.environ,
+        dry_run=dry_run,
+        ci=True,
+    )
+
+
+@ci.command(
+    epilog="""
 Examples:
 
   1. Cleans the current build (and prints what it cleans)
 
      $ bdt ci clean -vv
 
-''')
+"""
+)
 @verbosity_option()
 @bdt.raise_on_error
 @click.pass_context
 def clean(ctx):
-  """Cleans builds
+    """Cleans builds.
 
-  This command cleans builds in the CI infrastructure.  It is **not** meant
-  to be used outside this context.
-  """
+    This command cleans builds in the CI infrastructure.  It is **not**
+    meant to be used outside this context.
+    """
 
-  from ..build import git_clean_build
-  from ..bootstrap import run_cmdline
+    from ..build import git_clean_build
+    from ..bootstrap import run_cmdline
 
-  git_clean_build(run_cmdline, verbose=(ctx.meta['verbosity']>=3))
+    git_clean_build(run_cmdline, verbose=(ctx.meta["verbosity"] >= 3))
 
 
-@ci.command(epilog='''
+@ci.command(
+    epilog="""
 Examples:
 
   1. Runs the nightly builds following a list of packages in a file:
 
      $ bdt ci nightlies -vv order.txt
 
-''')
-@click.argument('order', required=True, type=click.Path(file_okay=True,
-  dir_okay=False, exists=True), nargs=1)
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
+@click.argument(
+    "order",
+    required=True,
+    type=click.Path(file_okay=True, dir_okay=False, exists=True),
+    nargs=1,
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
 @verbosity_option()
 @bdt.raise_on_error
 @click.pass_context
 def nightlies(ctx, order, dry_run):
-  """Runs nightly builds
-
-  This command can run nightly builds for packages listed on a file.
-
-  The build or each package happens in a few phases:
+    """Runs nightly builds.
 
-  1. Package is checked out and switched to the requested branch (master if not
-     set otherwise)
-  2. A build string is calculated from current dependencies.  If the package
-     has already been compiled, it is downloaded from the respective conda
-     channel and tested.  If the test does not pass, the package is completely
-     rebuilt
-  3. If the rebuild is successful, the new package is uploaded to the
-     respective conda channel, and the program continues with the next package
+    This command can run nightly builds for packages listed on a file.
 
-  Dependencies are searched with priority to locally built packages.  For this
-  reason, the input file **must** be provided in the right dependence order.
-  """
+    The build of each package happens in a few phases:
 
-  # loads dirnames from order file (accepts # comments and empty lines)
-  packages = read_packages(order)
+    1. Package is checked out and switched to the requested branch (master if not
+       set otherwise)
+    2. A build string is calculated from current dependencies.  If the package
+       has already been compiled, it is downloaded from the respective conda
+       channel and tested.  If the test does not pass, the package is completely
+       rebuilt
+    3. If the rebuild is successful, the new package is uploaded to the
+       respective conda channel, and the program continues with the next package
 
-  token = os.environ['CI_JOB_TOKEN']
+    Dependencies are searched with priority given to locally built packages.
+    For this reason, the input file **must** list packages in the correct
+    dependency order.
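+
+    \b
+    A hypothetical ``order.txt`` (package names are purely illustrative)
+    lists one ``group/name`` entry per line; ``#`` comments and blank lines
+    are ignored, and any per-package branch selection is parsed by
+    ``read_packages``:
+
+    \b
+      # base dependencies first, their dependents afterwards
+      bob/bob.extension
+      bob/bob.blitz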
+    """
 
-  import git
-  from .build import build
-  from urllib.request import urlopen
+    # loads dirnames from order file (accepts # comments and empty lines)
+    packages = read_packages(order)
 
-  # loaded all recipes, now cycle through them implementing what is described
-  # in the documentation of this function
-  for n, (package, branch) in enumerate(packages):
+    token = os.environ["CI_JOB_TOKEN"]
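+    # (CI_JOB_TOKEN is only valid while this job runs; it authenticates the
+    # HTTPS clones below against repositories on the same gitlab server)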
 
-    echo_normal('\n' + (80*'='))
-    echo_normal('Building %s@%s (%d/%d)' % (package, branch, n+1,
-      len(packages)))
-    echo_normal((80*'=') + '\n')
+    import git
+    from .build import build
+    from urllib.request import urlopen
 
-    group, name = package.split('/', 1)
+    # loaded all recipes, now cycle through them implementing what is described
+    # in the documentation of this function
+    for n, (package, branch) in enumerate(packages):
 
-    clone_to = os.path.join(os.environ['CI_PROJECT_DIR'], 'src', group, name)
-    dirname = os.path.dirname(clone_to)
-    if not os.path.exists(dirname):
-      os.makedirs(dirname)
+        echo_normal("\n" + (80 * "="))
+        echo_normal(
+            "Building %s@%s (%d/%d)" % (package, branch, n + 1, len(packages))
+        )
+        echo_normal((80 * "=") + "\n")
 
-    # clone the repo, shallow version, on the specified branch
-    logger.info('Cloning "%s", branch "%s" (depth=1)...', package, branch)
-    git.Repo.clone_from('https://gitlab-ci-token:%s@gitlab.idiap.ch/%s' % \
-        (token, package), clone_to, branch=branch, depth=1)
+        group, name = package.split("/", 1)
 
-    # determine package visibility
-    private = urlopen('https://gitlab.idiap.ch/%s' % package).getcode() != 200
-    stable = 'STABLE' in os.environ
+        clone_to = os.path.join(
+            os.environ["CI_PROJECT_DIR"], "src", group, name
+        )
+        dirname = os.path.dirname(clone_to)
+        if not os.path.exists(dirname):
+            os.makedirs(dirname)
+
+        # clone the repo, shallow version, on the specified branch
+        logger.info('Cloning "%s", branch "%s" (depth=1)...', package, branch)
+        git.Repo.clone_from(
+            "https://gitlab-ci-token:%s@gitlab.idiap.ch/%s" % (token, package),
+            clone_to,
+            branch=branch,
+            depth=1,
+        )
 
-    # Use custom variants and append files if available on recipe-dir
-    recipe_dir = os.path.join(clone_to, 'conda')
+        # determine package visibility
+        private = (
+            urlopen("https://gitlab.idiap.ch/%s" % package).getcode() != 200
+        )
+        stable = "STABLE" in os.environ
 
-    condarc = select_user_condarc(paths=[recipe_dir, os.curdir],
-          branch=os.environ.get('CI_COMMIT_REF_NAME'))
-    if condarc is not None:
-      logger.info('Condarc configuration file: %s', condarc)
+        # Use custom variants and append files if available on recipe-dir
+        recipe_dir = os.path.join(clone_to, "conda")
 
-    variants_file = select_conda_build_config(paths=[recipe_dir, os.curdir],
-        branch=os.environ.get('CI_COMMIT_REF_NAME'))
-    logger.info('Conda build configuration file: %s', variants_file)
+        condarc = select_user_condarc(
+            paths=[recipe_dir, os.curdir],
+            branch=os.environ.get("CI_COMMIT_REF_NAME"),
+        )
+        if condarc is not None:
+            logger.info("Condarc configuration file: %s", condarc)
 
-    append_file = select_conda_recipe_append(paths=[recipe_dir, os.curdir],
-        branch=os.environ.get('CI_COMMIT_REF_NAME'))
-    logger.info('Conda build recipe-append file: %s', append_file)
+        variants_file = select_conda_build_config(
+            paths=[recipe_dir, os.curdir],
+            branch=os.environ.get("CI_COMMIT_REF_NAME"),
+        )
+        logger.info("Conda build configuration file: %s", variants_file)
 
-    ctx.invoke(build,
-        recipe_dir=[recipe_dir],
-        python=os.environ['PYTHON_VERSION'],  #python version
-        condarc=condarc,
-        config=variants_file,
-        no_test=False,
-        append_file=append_file,
-        server=SERVER,
-        group=group,
-        private=private,
-        stable=stable,
-        dry_run=dry_run,
-        ci=True,
+        append_file = select_conda_recipe_append(
+            paths=[recipe_dir, os.curdir],
+            branch=os.environ.get("CI_COMMIT_REF_NAME"),
+        )
+        logger.info("Conda build recipe-append file: %s", append_file)
+
+        ctx.invoke(
+            build,
+            recipe_dir=[recipe_dir],
+            python=os.environ["PYTHON_VERSION"],  # python version
+            condarc=condarc,
+            config=variants_file,
+            no_test=False,
+            append_file=append_file,
+            server=SERVER,
+            group=group,
+            private=private,
+            stable=stable,
+            dry_run=dry_run,
+            ci=True,
         )
 
-    is_master = os.environ['CI_COMMIT_REF_NAME'] == 'master'
-
-    # re-deploys a new conda package if it was rebuilt and it is the master
-    # branch
-    # n.b.: can only arrive here if dry_run was ``False`` (no need to check
-    # again)
-    if 'BDT_BUILD' in os.environ and is_master:
-      tarball = os.environ['BDT_BUILD']
-      del os.environ['BDT_BUILD']
-      deploy_conda_package(tarball, arch=None, stable=stable,
-          public=(not private), username=os.environ['DOCUSER'],
-          password=os.environ['DOCPASS'], overwrite=False, dry_run=dry_run)
-
-    # removes the documentation to avoid permissions issues with the following
-    # projects being built
-    local_docs = os.path.join(os.environ['CI_PROJECT_DIR'], 'sphinx')
-    if os.path.exists(local_docs):
-      logger.debug('Sphinx output was generated during test/rebuild ' \
-          'of %s - Erasing...', package)
-      shutil.rmtree(local_docs)
-
-
-@ci.command(epilog='''
+        is_master = os.environ["CI_COMMIT_REF_NAME"] == "master"
+
+        # re-deploys a new conda package if it was rebuilt and it is the master
+        # branch
+        # n.b.: can only arrive here if dry_run was ``False`` (no need to check
+        # again)
+        if "BDT_BUILD" in os.environ and is_master:
+            tarball = os.environ["BDT_BUILD"]
+            del os.environ["BDT_BUILD"]
+            deploy_conda_package(
+                tarball,
+                arch=None,
+                stable=stable,
+                public=(not private),
+                username=os.environ["DOCUSER"],
+                password=os.environ["DOCPASS"],
+                overwrite=False,
+                dry_run=dry_run,
+            )
+
+        # removes the documentation to avoid permissions issues with the following
+        # projects being built
+        local_docs = os.path.join(os.environ["CI_PROJECT_DIR"], "sphinx")
+        if os.path.exists(local_docs):
+            logger.debug(
+                "Sphinx output was generated during test/rebuild "
+                "of %s - Erasing...",
+                package,
+            )
+            shutil.rmtree(local_docs)
+
+
+@ci.command(
+    epilog="""
 Examples:
 
   1. Prepares the docs for the subsequent `bdt ci build ...`:
 
      $ bdt ci docs -vv requirements.txt
 
-''')
-@click.argument('requirement', required=True, type=click.Path(file_okay=True,
-  dir_okay=False, exists=True), nargs=1)
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
+@click.argument(
+    "requirement",
+    required=True,
+    type=click.Path(file_okay=True, dir_okay=False, exists=True),
+    nargs=1,
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
 @verbosity_option()
 @bdt.raise_on_error
 @click.pass_context
 def docs(ctx, requirement, dry_run):
-  """Prepares documentation build
-
-  This command:
-    \b
-
-    1. Clones all the necessary packages necessary to build the bob/beat
-       documentation
-    \b
-
-    2. Generates the `extra-intersphinx.txt` and `nitpick-exceptions.txt` file
-    \b
+    """Prepares documentation build.
 
-  This command is supposed to be run **instead** of `bdt ci build...`
+    This command:
+      \b
 
-  """
+      1. Clones all the packages necessary to build the bob/beat
+         documentation
+      \b
 
-  packages = read_packages(requirement)
+      2. Generates the `extra-intersphinx.txt` and `nitpick-exceptions.txt` files
+      \b
 
-  import git
-  token = os.environ['CI_JOB_TOKEN']
-
-  # loaded all recipes, now cycle through them implementing what is described
-  # in the documentation of this function
-  extra_intersphinx = []
-  nitpick = []
-  doc_path = os.path.join(os.environ['CI_PROJECT_DIR'], 'doc')
-
-  for n, (package, branch) in enumerate(packages):
-
-    group, name = package.split('/', 1)
+    This command is supposed to be run **instead** of `bdt ci build...`
+    """
 
-    clone_to = os.path.join(doc_path, group, name)
-    dirname = os.path.dirname(clone_to)
-    if not os.path.exists(dirname):
-      os.makedirs(dirname)
+    packages = read_packages(requirement)
+
+    import git
+
+    token = os.environ["CI_JOB_TOKEN"]
+
+    # loaded all recipes, now cycle through them implementing what is described
+    # in the documentation of this function
+    extra_intersphinx = []
+    nitpick = []
+    doc_path = os.path.join(os.environ["CI_PROJECT_DIR"], "doc")
+
+    for n, (package, branch) in enumerate(packages):
+
+        group, name = package.split("/", 1)
+
+        clone_to = os.path.join(doc_path, group, name)
+        dirname = os.path.dirname(clone_to)
+        if not os.path.exists(dirname):
+            os.makedirs(dirname)
+
+        # clone the repo, shallow version, on the specified branch
+        if dry_run:
+            logger.info(
+                'Cloning "%s" [%d/%d], branch "%s" (depth=1) to %s...',
+                package,
+                n + 1,
+                len(packages),
+                branch,
+                clone_to,
+            )
+        else:
+            if os.path.exists(clone_to):
+                logger.info(
+                    'Repo "%s" [%d/%d] already cloned at %s; '
+                    'updating branch "%s"...',
+                    package,
+                    n + 1,
+                    len(packages),
+                    clone_to,
+                    branch,
+                )
+                git.Git(clone_to).pull("origin", branch)
+            else:
+                logger.info(
+                    'Cloning "%s" [%d/%d], branch "%s" (depth=1) to %s...',
+                    package,
+                    n + 1,
+                    len(packages),
+                    branch,
+                    clone_to,
+                )
+                git.Repo.clone_from(
+                    "https://gitlab-ci-token:%s@gitlab.idiap.ch/%s"
+                    % (token, package),
+                    clone_to,
+                    branch=branch,
+                    depth=1,
+                )
+
+            # collect the contents of doc/extra-intersphinx.txt, if present
+            extra_intersphinx_path = os.path.join(
+                clone_to, "doc", "extra-intersphinx.txt"
+            )
+            if os.path.exists(extra_intersphinx_path):
+                with open(extra_intersphinx_path) as f:
+                    extra_intersphinx += comment_cleanup(f.readlines())
+
+            test_requirements_path = os.path.join(
+                clone_to, "doc", "test-requirements.txt"
+            )
+            if os.path.exists(test_requirements_path):
+                with open(test_requirements_path) as f:
+                    extra_intersphinx += comment_cleanup(f.readlines())
+
+            requirements_path = os.path.join(clone_to, "requirements.txt")
+            if os.path.exists(requirements_path):
+                with open(requirements_path) as f:
+                    extra_intersphinx += comment_cleanup(f.readlines())
+
+            nitpick_path = os.path.join(
+                clone_to, "doc", "nitpick-exceptions.txt"
+            )
+            if os.path.exists(nitpick_path):
+                with open(nitpick_path) as f:
+                    nitpick += comment_cleanup(f.readlines())
+
+    logger.info("Generating (extra) sphinx files...")
+
+    # Making unique lists and removing all bob/beat references
+    if not dry_run:
 
-    # clone the repo, shallow version, on the specified branch
-    if dry_run:
-      logger.info('Cloning "%s" [%d/%d], branch "%s" (depth=1) to %s...',
-          package, n+1, len(packages), branch, clone_to)
-    else:
-      if os.path.exists(clone_to):
-         logger.info('Repo "%s" [%d/%d], already cloned at %s; ' \
-             'updating branch "%s"...', package, n+1, len(packages), clone_to,
-             branch)
-         git.Git(clone_to).pull("origin", branch)
-      else:
-        logger.info('Cloning "%s" [%d/%d], branch "%s" (depth=1) to %s...',
-            package, n+1, len(packages), branch, clone_to)
-        git.Repo.clone_from('https://gitlab-ci-token:%s@gitlab.idiap.ch/%s' % \
-                (token, package), clone_to, branch=branch, depth=1)
-
-      # Copying the content from extra_intersphinx
-      extra_intersphinx_path = os.path.join(clone_to, "doc",
-          "extra-intersphinx.txt")
-      if os.path.exists(extra_intersphinx_path):
-        with open(extra_intersphinx_path) as f:
-          extra_intersphinx += comment_cleanup(f.readlines())
-
-      test_requirements_path = os.path.join(clone_to, "doc",
-          "test-requirements.txt")
-      if os.path.exists(test_requirements_path):
-        with open(test_requirements_path) as f:
-          extra_intersphinx += comment_cleanup(f.readliens())
-
-      requirements_path = os.path.join(clone_to, "requirements.txt")
-      if os.path.exists(requirements_path):
-        with open(requirements_path) as f:
-          extra_intersphinx += comment_cleanup(f.readlines())
-
-      nitpick_path = os.path.join(clone_to, "doc", "nitpick-exceptions.txt")
-      if os.path.exists(nitpick_path):
-        with open(nitpick_path) as f:
-          nitpick += comment_cleanup(f.readlines())
-
-  logger.info('Generating (extra) sphinx files...')
-
-  # Making unique lists and removing all bob/beat references
-  if not dry_run:
-
-    # extra requirements for sphinx
-    group = os.environ['CI_PROJECT_NAMESPACE']
-    extra_intersphinx = set([k.strip() for k in extra_intersphinx \
-        if not k.strip().startswith((group, "gridtk"))])
-    data = '\n'.join(uniq(sorted(extra_intersphinx)))
-    logger.info('Contents of "doc/extra-intersphinx.txt":\n%s', data)
-    with open(os.path.join(doc_path, 'extra-intersphinx.txt'), 'w') as f:
-      f.write(data)
-
-    # nitpick exceptions
-    data = '\n'.join(uniq(sorted(nitpick)))
-    logger.info('Contents of "doc/nitpick-exceptions.txt":\n%s', data)
-    with open(os.path.join(doc_path, 'nitpick-exceptions.txt'), 'w') as f:
-      f.write(data)
-
-  logger.info('Building documentation...')
-  ctx.invoke(build, dry_run=dry_run)
+        # extra requirements for sphinx
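+        # entries from our own namespace (and gridtk) are dropped: their
+        # documentation is built locally, so no remote intersphinx inventory
+        # is required for them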
+        group = os.environ["CI_PROJECT_NAMESPACE"]
+        extra_intersphinx = set(
+            [
+                k.strip()
+                for k in extra_intersphinx
+                if not k.strip().startswith((group, "gridtk"))
+            ]
+        )
+        data = "\n".join(uniq(sorted(extra_intersphinx)))
+        logger.info('Contents of "doc/extra-intersphinx.txt":\n%s', data)
+        with open(os.path.join(doc_path, "extra-intersphinx.txt"), "w") as f:
+            f.write(data)
+
+        # nitpick exceptions
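+        # (aggregated verbatim; each line is expected to hold a sphinx
+        # nitpick_ignore pair such as "py:class object" - a hypothetical
+        # example)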
+        data = "\n".join(uniq(sorted(nitpick)))
+        logger.info('Contents of "doc/nitpick-exceptions.txt":\n%s', data)
+        with open(os.path.join(doc_path, "nitpick-exceptions.txt"), "w") as f:
+            f.write(data)
+
+    logger.info("Building documentation...")
+    ctx.invoke(build, dry_run=dry_run)
diff --git a/bob/devtools/scripts/commitfile.py b/bob/devtools/scripts/commitfile.py
index 154e8fc57edfc99b5e814c3d16ec630226462adf..678bd7ea843374db736f9bd94a10d9a16bec958a 100644
--- a/bob/devtools/scripts/commitfile.py
+++ b/bob/devtools/scripts/commitfile.py
@@ -5,14 +5,19 @@ import os
 import click
 
 from . import bdt
-from ..release import get_gitlab_instance, update_files_with_mr, \
-    update_files_at_master
+from ..release import (
+    get_gitlab_instance,
+    update_files_with_mr,
+    update_files_at_master,
+)
 
 from ..log import verbosity_option, get_logger
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Replaces the README.rst file on the package bob/bob.extension, through a direct commit to the master branch, using the contents of the local file with the same name:
@@ -31,63 +36,87 @@ Examples:
 \b
      $ bdt gitlab commitfile -vv bob/bob.blitz --path=conda/meta.yaml --branch=conda-changes --auto-merge new.yaml
 
-''')
-@click.argument('package')
-@click.argument('file', type=click.Path(file_okay=True, dir_okay=False,
-  exists=True))
-@click.option('-m', '--message',
-    help='Message to set for this commit',)
-@click.option('-p', '--path',
-    help='Which path to replace on the remote package',)
-@click.option('-b', '--branch', default='master',
-    help='Name of the branch to create for this commit.  If the branch ' \
-        'name is not "master", then create a new branch and propose the ' \
-        'merge through a proper merge-request.  Otherwise, the default ' \
-        'behaviour is to commit directly to the master branch ' \
-        '[default: %(default)s',)
-@click.option('-a', '--auto-merge/--no-auto-merge', default=False,
-    help='If set, then the created merge request will be merged when ' \
-        'a potentially associated pipeline succeeds')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
+@click.argument("package")
+@click.argument(
+    "file", type=click.Path(file_okay=True, dir_okay=False, exists=True)
+)
+@click.option("-m", "--message", help="Message to set for this commit")
+@click.option(
+    "-p", "--path", help="Which path to replace on the remote package"
+)
+@click.option(
+    "-b",
+    "--branch",
+    default="master",
+    help="Name of the branch to create for this commit.  If the branch "
+    'name is not "master", then create a new branch and propose the '
+    "merge through a proper merge-request.  Otherwise, the default "
+    "behaviour is to commit directly to the master branch "
+    "[default: %(default)s",
+)
+@click.option(
+    "-a",
+    "--auto-merge/--no-auto-merge",
+    default=False,
+    help="If set, then the created merge request will be merged when "
+    "a potentially associated pipeline succeeds",
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
 @verbosity_option()
 @bdt.raise_on_error
 def commitfile(package, message, file, path, branch, auto_merge, dry_run):
-    """Changes a file on a given package, directly on master or through MR
-    """
+    """Changes a file on a given package, directly on master or through MR."""
 
-    if '/' not in package:
+    if "/" not in package:
         raise RuntimeError('PACKAGE should be specified as "group/name"')
 
     gl = get_gitlab_instance()
     gl.auth()
-    user_id = gl.user.attributes['id']
+    user_id = gl.user.attributes["id"]
 
     # we lookup the gitlab package once
     use_package = gl.projects.get(package)
-    logger.debug('Found gitlab project %s (id=%d)',
-        use_package.attributes['path_with_namespace'], use_package.id)
+    logger.debug(
+        "Found gitlab project %s (id=%d)",
+        use_package.attributes["path_with_namespace"],
+        use_package.id,
+    )
 
     # if we are in a dry-run mode, let's let it be known
     if dry_run:
-        logger.warn('!!!! DRY RUN MODE !!!!')
-        logger.warn('Nothing is being committed to Gitlab')
+        logger.warn("!!!! DRY RUN MODE !!!!")
+        logger.warn("Nothing is being committed to Gitlab")
 
     path = path or file
 
     # load file contents
-    with open(file, 'rt') as f:
-      contents = f.read()
+    with open(file, "rt") as f:
+        contents = f.read()
 
     components = os.path.splitext(path)[0].split(os.sep)
-    message = message or ("%s update" % \
-        ''.join(['[%s]' % k.lower() for k in components]))
+    message = message or (
+        "%s update" % "".join(["[%s]" % k.lower() for k in components])
+    )
 
     # commit and push changes
-    if branch == 'master':
-      update_files_at_master(use_package, {path: contents}, message, dry_run)
+    if branch == "master":
+        update_files_at_master(use_package, {path: contents}, message, dry_run)
     else:
-      update_files_with_mr(use_package, {path: contents}, message, branch,
-        auto_merge, dry_run, user_id)
+        update_files_with_mr(
+            use_package,
+            {path: contents},
+            message,
+            branch,
+            auto_merge,
+            dry_run,
+            user_id,
+        )
diff --git a/bob/devtools/scripts/create.py b/bob/devtools/scripts/create.py
index 1c707d26c518a52105115046e6a3e88fd51eb2e6..835064d01ac5fd083dccb5cd113aa7c08dc0c05e 100644
--- a/bob/devtools/scripts/create.py
+++ b/bob/devtools/scripts/create.py
@@ -10,15 +10,21 @@ import yaml
 
 from . import bdt
 from ..build import parse_dependencies, conda_create, make_conda_config
-from ..constants import BASE_CONDARC, CONDA_BUILD_CONFIG, \
-    CONDA_RECIPE_APPEND, SERVER
+from ..constants import (
+    BASE_CONDARC,
+    CONDA_BUILD_CONFIG,
+    CONDA_RECIPE_APPEND,
+    SERVER,
+)
 from ..bootstrap import set_environment, get_channels
 
 from ..log import verbosity_option, get_logger, echo_normal
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Creates an environment called `myenv' for developing the currently checked-out package (N.B.: first activate the base environment):
@@ -51,98 +57,175 @@ Examples:
 
 
      $ bdt create -vvv --dry-run myenv
-''')
-@click.argument('name')
-@click.argument('recipe-dir', required=False, type=click.Path(file_okay=False,
-  dir_okay=True, exists=True))
-@click.option('-p', '--python', default=('%d.%d' % sys.version_info[:2]),
-    show_default=True, help='Version of python to build the ' \
-        'environment for [default: %(default)s]')
-@click.option('-o', '--overwrite/--no-overwrite', default=False,
-      help='If set and an environment with the same name exists, ' \
-          'deletes it first before creating the new environment',
-          show_default=True)
-@click.option('-r', '--condarc',
-    help='Use custom conda configuration file instead of our own',)
-@click.option('-l', '--use-local', default=False,
-    help='Allow the use of local channels for package retrieval')
-@click.option('-m', '--config', '--variant-config-files', show_default=True,
-      default=CONDA_BUILD_CONFIG, help='overwrites the path leading to ' \
-          'variant configuration file to use')
-@click.option('-a', '--append-file', show_default=True,
-      default=CONDA_RECIPE_APPEND, help='overwrites the path leading to ' \
-          'appended configuration file to use')
-@click.option('-S', '--server', show_default=True, default=SERVER,
-    help='Server used for downloading conda packages and documentation ' \
-        'indexes of required packages')
-@click.option('-g', '--group', show_default=True, default='bob',
-    help='Group of packages (gitlab namespace) this package belongs to')
-@click.option('-P', '--private/--no-private', default=False,
-    help='Set this to **include** private channels on your build - ' \
-        'you **must** be at Idiap to execute this build in this case - ' \
-        'you **must** also use the correct server name through --server - ' \
-        'notice this option has no effect if you also pass --condarc')
-@click.option('-X', '--stable/--no-stable', default=False,
-    help='Set this to **exclude** beta channels from your build - ' \
-        'notice this option has no effect if you also pass --condarc')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
+@click.argument("name")
+@click.argument(
+    "recipe-dir",
+    required=False,
+    type=click.Path(file_okay=False, dir_okay=True, exists=True),
+)
+@click.option(
+    "-p",
+    "--python",
+    default=("%d.%d" % sys.version_info[:2]),
+    show_default=True,
+    help="Version of python to build the "
+    "environment for [default: %(default)s]",
+)
+@click.option(
+    "-o",
+    "--overwrite/--no-overwrite",
+    default=False,
+    help="If set and an environment with the same name exists, "
+    "deletes it first before creating the new environment",
+    show_default=True,
+)
+@click.option(
+    "-r",
+    "--condarc",
+    help="Use custom conda configuration file instead of our own",
+)
+@click.option(
+    "-l",
+    "--use-local",
+    default=False,
+    help="Allow the use of local channels for package retrieval",
+)
+@click.option(
+    "-m",
+    "--config",
+    "--variant-config-files",
+    show_default=True,
+    default=CONDA_BUILD_CONFIG,
+    help="overwrites the path leading to " "variant configuration file to use",
+)
+@click.option(
+    "-a",
+    "--append-file",
+    show_default=True,
+    default=CONDA_RECIPE_APPEND,
+    help="overwrites the path leading to " "appended configuration file to use",
+)
+@click.option(
+    "-S",
+    "--server",
+    show_default=True,
+    default=SERVER,
+    help="Server used for downloading conda packages and documentation "
+    "indexes of required packages",
+)
+@click.option(
+    "-g",
+    "--group",
+    show_default=True,
+    default="bob",
+    help="Group of packages (gitlab namespace) this package belongs to",
+)
+@click.option(
+    "-P",
+    "--private/--no-private",
+    default=False,
+    help="Set this to **include** private channels on your build - "
+    "you **must** be at Idiap to execute this build in this case - "
+    "you **must** also use the correct server name through --server - "
+    "notice this option has no effect if you also pass --condarc",
+)
+@click.option(
+    "-X",
+    "--stable/--no-stable",
+    default=False,
+    help="Set this to **exclude** beta channels from your build - "
+    "notice this option has no effect if you also pass --condarc",
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
 @verbosity_option()
 @bdt.raise_on_error
-def create(name, recipe_dir, python, overwrite, condarc, use_local, config,
-    append_file, server, group, private, stable, dry_run):
-  """Creates a development environment for a recipe
-
-  It uses the conda render API to render a recipe and install an environment
-  containing all build/host, run and test dependencies of a package. It does
-  **not** build the package itself, just install dependencies so you can build
-  the package by hand, possibly using buildout or similar. If you'd like to
-  conda-build your package, just use `conda build` instead.
-
-  Once the environment is created, a copy of the used `condarc' file is placed
-  on the root of the environment. Installing or updating packages on the newly
-  created environment should be possible without further configuration. Notice
-  that beta packages quickly get outdated and upgrading may no longer be
-  possible for aging development environments. You're advised to always re-use
-  this app and use the flag `--overwrite` to re-create from scratch the
-  development environment.
-  """
-
-  recipe_dir = recipe_dir or os.path.join(os.path.realpath('.'), 'conda')
-
-  if not os.path.exists(recipe_dir):
-    raise RuntimeError("The directory %s does not exist" % recipe_dir)
-
-  # this is not used to conda-build, just to create the final environment
-  conda = os.environ.get('CONDA_EXE')
-  if conda is None:
-    raise RuntimeError("Cannot find `conda' executable (${CONDA_EXEC}) - " \
-        "have you activated the build environment containing bob.devtools " \
-        "properly?")
-
-  # set some environment variables before continuing
-  set_environment('DOCSERVER', server, os.environ)
-
-  logger.debug('This package is considered part of group "%s" - tunning ' \
-      'conda package URLs for this...', group)
-
-  if condarc is not None:
-    logger.info('Loading CONDARC file from %s...', condarc)
-    with open(condarc, 'rb') as f:
-      condarc_options = yaml.load(f, Loader=yaml.FullLoader)
-  else:
-    # use default and add channels
-    condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
-    channels = get_channels(public=(not private), stable=stable, server=server,
-        intranet=private, group=group)
-    condarc_options['channels'] = channels + ['defaults']
-
-  conda_config = make_conda_config(config, python, append_file, condarc_options)
-  deps = parse_dependencies(recipe_dir, conda_config)
-  # when creating a local development environment, remove the always_yes option
-  del condarc_options["always_yes"]
-  status = conda_create(conda, name, overwrite, condarc_options, deps,
-      dry_run, use_local)
-  echo_normal('Execute on your shell: "conda activate %s"' % name)
+def create(
+    name,
+    recipe_dir,
+    python,
+    overwrite,
+    condarc,
+    use_local,
+    config,
+    append_file,
+    server,
+    group,
+    private,
+    stable,
+    dry_run,
+):
+    """Creates a development environment for a recipe.
+
+    It uses the conda render API to render a recipe and install an environment
+    containing all build/host, run and test dependencies of a package. It does
+    **not** build the package itself, just install dependencies so you can build
+    the package by hand, possibly using buildout or similar. If you'd like to
+    conda-build your package, just use `conda build` instead.
+
+    Once the environment is created, a copy of the used `condarc' file is placed
+    at the root of the environment. Installing or updating packages on the newly
+    created environment should be possible without further configuration. Notice
+    that beta packages quickly get outdated and upgrading may no longer be
+    possible for aging development environments. You're advised to always re-use
+    this app and use the flag `--overwrite` to re-create from scratch the
+    development environment.
+    """
+
+    recipe_dir = recipe_dir or os.path.join(os.path.realpath("."), "conda")
+
+    if not os.path.exists(recipe_dir):
+        raise RuntimeError("The directory %s does not exist" % recipe_dir)
+
+    # this is not used to conda-build, just to create the final environment
+    conda = os.environ.get("CONDA_EXE")
+    if conda is None:
+        raise RuntimeError(
+            "Cannot find `conda' executable (${CONDA_EXEC}) - "
+            "have you activated the build environment containing bob.devtools "
+            "properly?"
+        )
+
+    # set some environment variables before continuing
+    set_environment("DOCSERVER", server, os.environ)
+
+    logger.debug(
+        'This package is considered part of group "%s" - tuning '
+        "conda package URLs for this...",
+        group,
+    )
+
+    if condarc is not None:
+        logger.info("Loading CONDARC file from %s...", condarc)
+        with open(condarc, "rb") as f:
+            condarc_options = yaml.load(f, Loader=yaml.FullLoader)
+    else:
+        # use default and add channels
+        condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
+        channels = get_channels(
+            public=(not private),
+            stable=stable,
+            server=server,
+            intranet=private,
+            group=group,
+        )
+        condarc_options["channels"] = channels + ["defaults"]
+
+    conda_config = make_conda_config(
+        config, python, append_file, condarc_options
+    )
+    deps = parse_dependencies(recipe_dir, conda_config)
+    # when creating a local development environment, remove the always_yes option
+    del condarc_options["always_yes"]
+    status = conda_create(
+        conda, name, overwrite, condarc_options, deps, dry_run, use_local
+    )
+    echo_normal('Execute on your shell: "conda activate %s"' % name)
diff --git a/bob/devtools/scripts/dumpsphinx.py b/bob/devtools/scripts/dumpsphinx.py
index 51b399a864b31f2202b61b932e3d2a509e978a32..1882802fe7ffe24fed3e91fe4a5581e2f84b0c88 100644
--- a/bob/devtools/scripts/dumpsphinx.py
+++ b/bob/devtools/scripts/dumpsphinx.py
@@ -9,10 +9,12 @@ import click
 from . import bdt
 
 from ..log import verbosity_option, get_logger
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Dumps objects documented in python 3.x:
@@ -28,14 +30,15 @@ Examples:
   3. Dumps objects documented in matplotlib:
 
      $ bdt dumpsphinx http://matplotlib.org/objects.inv
-''')
-@click.argument('url')
+"""
+)
+@click.argument("url")
 @verbosity_option()
 @bdt.raise_on_error
 def dumpsphinx(url):
-  """Dumps all the objects given an sphinx catalog/inventory URL
+    """Dumps all the objects given an sphinx catalog/inventory URL.
 
-  This command is useful when you are struggling to do proper links from your
-  documentation.
-  """
-  intersphinx.inspect_main([url])
+    This command is useful when you are struggling to do proper links
+    from your documentation.
+    """
+    intersphinx.inspect_main([url])
diff --git a/bob/devtools/scripts/getpath.py b/bob/devtools/scripts/getpath.py
index 22f80164a99fdd1bd23e9d2cb944ccf54604249c..04cba7715af3049bebe0451dacd2d00c5a79f821 100644
--- a/bob/devtools/scripts/getpath.py
+++ b/bob/devtools/scripts/getpath.py
@@ -8,10 +8,12 @@ from . import bdt
 from ..release import get_gitlab_instance, download_path
 
 from ..log import verbosity_option, get_logger
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Get the file ``order.txt`` from bob.nightlies master branch:
@@ -27,28 +29,37 @@ Examples:
   3. Get the directory ``gitlab`` (and eventual sub-directories) from bob.admin, save outputs in directory ``_ci``:
 
      $ bdt gitlab getpath bob/bob.admin master gitlab _ci
-''')
-@click.argument('package')
-@click.argument('path')
-@click.argument('output', type=click.Path(exists=False), required=False)
-@click.option('-r', '--ref', default='master', show_default=True,
-    help='Download path from the provided git reference (may be a branch, tag or commit hash)')
+"""
+)
+@click.argument("package")
+@click.argument("path")
+@click.argument("output", type=click.Path(exists=False), required=False)
+@click.option(
+    "-r",
+    "--ref",
+    default="master",
+    show_default=True,
+    help="Download path from the provided git reference (may be a branch, tag or commit hash)",
+)
 @verbosity_option()
 @bdt.raise_on_error
 def getpath(package, path, output, ref):
-    """Downloads files and directories from gitlab
+    """Downloads files and directories from gitlab.
 
     Files are downloaded and stored.  Directories are recursed and fully
     downloaded to the client.
     """
 
-    if '/' not in package:
+    if "/" not in package:
         raise RuntimeError('PACKAGE should be specified as "group/name"')
 
     gl = get_gitlab_instance()
 
     # we lookup the gitlab package once
     use_package = gl.projects.get(package)
-    logger.info('Found gitlab project %s (id=%d)',
-        use_package.attributes['path_with_namespace'], use_package.id)
+    logger.info(
+        "Found gitlab project %s (id=%d)",
+        use_package.attributes["path_with_namespace"],
+        use_package.id,
+    )
     download_path(use_package, path, output, ref=ref)
diff --git a/bob/devtools/scripts/gitlab.py b/bob/devtools/scripts/gitlab.py
index 537a5e6475f1458bd529eeb7b90a93d6dd009c21..90afda5183a82fd33a0c10c8866e8c12ebaeed05 100644
--- a/bob/devtools/scripts/gitlab.py
+++ b/bob/devtools/scripts/gitlab.py
@@ -10,12 +10,12 @@ from click_plugins import with_plugins
 from . import bdt
 
 
-@with_plugins(pkg_resources.iter_entry_points('bdt.gitlab.cli'))
+@with_plugins(pkg_resources.iter_entry_points("bdt.gitlab.cli"))
 @click.group(cls=bdt.AliasedGroup)
 def gitlab():
-  """Commands for that interact with gitlab
+    """Commands for that interact with gitlab.
 
-  Commands defined here are supposed to interact with gitlab, and
-  add/modify/remove resources on it directly.
-  """
-  pass
+    Commands defined here are supposed to interact with gitlab, and
+    add/modify/remove resources on it directly.
+    """
+    pass
diff --git a/bob/devtools/scripts/jobs.py b/bob/devtools/scripts/jobs.py
index df0d8b5c6d950dc036e20f31d4faa69e7757bcfd..05352f30a70e5dc7740c1fdfa198e5c54819d085 100644
--- a/bob/devtools/scripts/jobs.py
+++ b/bob/devtools/scripts/jobs.py
@@ -8,10 +8,12 @@ from . import bdt
 from ..release import get_gitlab_instance
 
 from ..log import verbosity_option, get_logger, echo_normal, echo_info
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. List running jobs on any of our runners
@@ -23,47 +25,67 @@ Examples:
 
      $ bdt gitlab jobs -vv macmini
 
-''')
-@click.argument('name', nargs=-1)
-@click.option('-s', '--status', type=click.Choice(['running', 'success',
-  'failed', 'canceled']),
-    default='running', show_default=True,
-    help='The status of jobs we are searching for - one of "running", ' \
-        '"success", "failed" or "canceled"')
+"""
+)
+@click.argument("name", nargs=-1)
+@click.option(
+    "-s",
+    "--status",
+    type=click.Choice(["running", "success", "failed", "canceled"]),
+    default="running",
+    show_default=True,
+    help='The status of jobs we are searching for - one of "running", '
+    '"success", "failed" or "canceled"',
+)
 @verbosity_option()
 @bdt.raise_on_error
 def jobs(name, status):
-    """Lists jobs on a given runner identified by description
-    """
+    """Lists jobs on a given runner identified by description."""
 
     gl = get_gitlab_instance()
     gl.auth()
-    user_id = gl.user.attributes['id']
+    user_id = gl.user.attributes["id"]
 
     names = name or [
-        'linux-desktop-shell',
-        'linux-desktop-docker',
-        'linux-server-shell',
-        'linux-server-docker',
-        'macpro',
-        'macmini',
-        ]
+        "linux-desktop-shell",
+        "linux-desktop-docker",
+        "linux-server-shell",
+        "linux-server-docker",
+        "macpro",
+        "macmini",
+    ]
 
     # search for the runner(s) to affect
-    runners = [k for k in gl.runners.list(all=True) if \
-        k.attributes['description'] in names]
+    runners = [
+        k
+        for k in gl.runners.list(all=True)
+        if k.attributes["description"] in names
+    ]
 
     if not runners:
-      raise RuntimeError('Cannot find runner with description = %s' % \
-          '|'.join(names))
+        raise RuntimeError(
+            "Cannot find runner with description = %s" % "|".join(names)
+        )
 
     for runner in runners:
-      jobs = runner.jobs.list(all=True, status=status)
-      echo_normal('Runner %s (id=%d) -- %d running' % \
-          (runner.attributes['description'], runner.attributes['id'],
-          len(jobs)))
-      for k in jobs:
-        echo_info('** job %d: %s (%s), since %s, by %s [%s]' % \
-            (k.id, k.attributes['project']['path_with_namespace'],
-          k.attributes['name'], k.attributes['started_at'],
-          k.attributes['user']['username'], k.attributes['web_url']))
+        jobs = runner.jobs.list(all=True, status=status)
+        echo_normal(
+            "Runner %s (id=%d) -- %d running"
+            % (
+                runner.attributes["description"],
+                runner.attributes["id"],
+                len(jobs),
+            )
+        )
+        for k in jobs:
+            echo_info(
+                "** job %d: %s (%s), since %s, by %s [%s]"
+                % (
+                    k.id,
+                    k.attributes["project"]["path_with_namespace"],
+                    k.attributes["name"],
+                    k.attributes["started_at"],
+                    k.attributes["user"]["username"],
+                    k.attributes["web_url"],
+                )
+            )
diff --git a/bob/devtools/scripts/lasttag.py b/bob/devtools/scripts/lasttag.py
index e7d0d63cbd4bc2669be83ef06d4e7908a9d159a8..356f3ee766606b73727ce137ab49cc1d4fc9de84 100644
--- a/bob/devtools/scripts/lasttag.py
+++ b/bob/devtools/scripts/lasttag.py
@@ -10,10 +10,12 @@ from ..changelog import get_last_tag, parse_date
 from ..release import get_gitlab_instance
 
 from ..log import verbosity_option, get_logger, echo_normal, echo_warning
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Get the last tag information of the bob/bob package
@@ -25,30 +27,34 @@ Examples:
 
      $ bdt gitlab lasttag beat/beat.core
 
-''')
-@click.argument('package')
+"""
+)
+@click.argument("package")
 @verbosity_option()
 @bdt.raise_on_error
 def lasttag(package):
-    """Returns the last tag information on a given PACKAGE
-    """
+    """Returns the last tag information on a given PACKAGE."""
 
-    if '/' not in package:
+    if "/" not in package:
         raise RuntimeError('PACKAGE should be specified as "group/name"')
 
     gl = get_gitlab_instance()
 
     # we lookup the gitlab package once
     try:
-      use_package = gl.projects.get(package)
-      logger.info('Found gitlab project %s (id=%d)',
-          use_package.attributes['path_with_namespace'], use_package.id)
-
-      tag = get_last_tag(use_package)
-      date = parse_date(tag.commit['committed_date'])
-      echo_normal('%s: %s (%s)' % \
-          (package, tag.name, date.strftime('%Y-%m-%d %H:%M:%S')))
+        use_package = gl.projects.get(package)
+        logger.info(
+            "Found gitlab project %s (id=%d)",
+            use_package.attributes["path_with_namespace"],
+            use_package.id,
+        )
+
+        tag = get_last_tag(use_package)
+        date = parse_date(tag.commit["committed_date"])
+        echo_normal(
+            "%s: %s (%s)"
+            % (package, tag.name, date.strftime("%Y-%m-%d %H:%M:%S"))
+        )
     except gitlab.GitlabGetError as e:
-      logger.warn('Gitlab access error - package %s does not exist?',
-          package)
-      echo_warning('%s: unknown' % (package,))
+        logger.warn("Gitlab access error - package %s does not exist?", package)
+        echo_warning("%s: unknown" % (package,))
diff --git a/bob/devtools/scripts/local.py b/bob/devtools/scripts/local.py
index c8ca3aac5726cc6b4142878b3a57a22ff33a1950..d7bacd6a9598e0bcf080832852dcaff69a1469ce 100644
--- a/bob/devtools/scripts/local.py
+++ b/bob/devtools/scripts/local.py
@@ -2,30 +2,17 @@
 
 import os
 import sys
-import re
-import glob
-import shutil
 
 import gitlab
 
-import yaml
 import click
 import pkg_resources
 from click_plugins import with_plugins
 
 from . import bdt
 from . import ci
-from ..constants import (
-    SERVER,
-    CONDA_BUILD_CONFIG,
-    CONDA_RECIPE_APPEND,
-    WEBDAV_PATHS,
-    BASE_CONDARC,
-)
-from ..deploy import deploy_conda_package, deploy_documentation
-from ..ci import read_packages, comment_cleanup, uniq
 
-from ..log import verbosity_option, get_logger, echo_normal
+from ..log import verbosity_option, get_logger
 
 logger = get_logger(__name__)
 
@@ -33,11 +20,11 @@ logger = get_logger(__name__)
 def set_up_environment_variables(
     python, name_space, project_dir=".", project_visibility="public"
 ):
-    """
-  This function sets up the proper environment variables when user wants to run the commands usually run on ci
-  locally
-  """
-    os.environ["CI_JOB_TOKEN"] = gitlab.Gitlab.from_config("idiap").private_token
+    """This function sets up the proper environment variables when user wants
+    to run the commands usually run on ci locally."""
+    os.environ["CI_JOB_TOKEN"] = gitlab.Gitlab.from_config(
+        "idiap"
+    ).private_token
     os.environ["CI_PROJECT_DIR"] = project_dir
     os.environ["CI_PROJECT_NAMESPACE"] = name_space
     os.environ["CI_PROJECT_VISIBILITY"] = project_visibility
@@ -49,10 +36,10 @@ def set_up_environment_variables(
 @click.group(cls=bdt.AliasedGroup)
 def local():
     """Commands for building packages and handling certain activities locally
-  it requires a proper set up for ~/.python-gitlab.cfg
+    (requires a properly configured ~/.python-gitlab.cfg).
 
-  Commands defined here can be run in your own installation.
-  """
+    Commands defined here can be run in your own installation.
+    """
     pass
 
 
@@ -98,21 +85,20 @@ Examples:
 @bdt.raise_on_error
 @click.pass_context
 def docs(ctx, requirement, dry_run, python, group):
-    """Prepares documentation build
-
-  This command:
-    \b
+    """Prepares documentation build.
 
-    1. Clones all the necessary packages necessary to build the bob/beat
-       documentation
+    This command:
+      \b
 
-    \b
+      1. Clones all the packages necessary to build the bob/beat
+         documentation
 
-    2. Generates the `extra-intersphinx.txt` and `nitpick-exceptions.txt` file
+      \b
 
-    \b
+      2. Generates the `extra-intersphinx.txt` and `nitpick-exceptions.txt` files
 
-  """
+      \b
+    """
     set_up_environment_variables(python=python, name_space=group)
 
     ctx.invoke(ci.docs, requirement=requirement, dry_run=dry_run)
@@ -160,8 +146,7 @@ Examples:
 @bdt.raise_on_error
 @click.pass_context
 def build(ctx, dry_run, recipe_dir, python, group):
-    """Run the CI build step locally
-    """
+    """Run the CI build step locally."""
     set_up_environment_variables(python=python, name_space=group)
 
     ctx.invoke(ci.build, dry_run=dry_run, recipe_dir=recipe_dir)
@@ -209,8 +194,9 @@ Examples:
 @bdt.raise_on_error
 @click.pass_context
 def base_build(ctx, order, dry_run, python, group):
-    """Run the CI build step locally
-    """
+    """Run the CI build step locally."""
     set_up_environment_variables(python=python, name_space=group)
 
-    ctx.invoke(ci.base_build, order=order, dry_run=dry_run, group=group, python=python)
+    ctx.invoke(
+        ci.base_build, order=order, dry_run=dry_run, group=group, python=python
+    )
diff --git a/bob/devtools/scripts/new.py b/bob/devtools/scripts/new.py
index 2c8ce0b6fc9e7f9398ce4ed39f363e6324c5b711..2fca5cb0279ffd6e865fd8160516ba34a0150ced 100644
--- a/bob/devtools/scripts/new.py
+++ b/bob/devtools/scripts/new.py
@@ -11,170 +11,200 @@ import pkg_resources
 from . import bdt
 
 from ..log import verbosity_option, get_logger
+
 logger = get_logger(__name__)
 
 
 def copy_file(template, output_dir):
-  '''Copies a file from the template directory to the output directory
+    """Copies a file from the template directory to the output directory.
 
-  Args:
+    Args:
 
-    template: The path to the template, from the internal templates directory
-    output_dir: Where to save the output
-  '''
+      template: The path to the template, from the internal templates directory
+      output_dir: Where to save the output
+    """
 
-  template_file = pkg_resources.resource_filename(__name__, os.path.join('..',
-    'templates', template))
-  output_file = os.path.join(output_dir, template)
+    template_file = pkg_resources.resource_filename(
+        __name__, os.path.join("..", "templates", template)
+    )
+    output_file = os.path.join(output_dir, template)
 
-  basedir = os.path.dirname(output_file)
-  if not os.path.exists(basedir):
-    logger.info('mkdir %s', basedir)
-    os.makedirs(basedir)
+    basedir = os.path.dirname(output_file)
+    if not os.path.exists(basedir):
+        logger.info("mkdir %s", basedir)
+        os.makedirs(basedir)
 
-  logger.info('cp -a %s %s', template_file, output_file)
-  shutil.copy2(template_file, output_file)
+    logger.info("cp -a %s %s", template_file, output_file)
+    shutil.copy2(template_file, output_file)
 
 
 def render_template(jenv, template, context, output_dir):
-  '''Renders a template to the output directory using specific context
+    """Renders a template to the output directory using specific context.
 
-  Args:
+    Args:
 
-    jenv: The Jinja2 environment to use for rendering the template
-    template: The path to the template, from the internal templates directory
-    context: A dictionary with the context to render the template with
-    output_dir: Where to save the output
-  '''
+      jenv: The Jinja2 environment to use for rendering the template
+      template: The path to the template, from the internal templates directory
+      context: A dictionary with the context to render the template with
+      output_dir: Where to save the output
+    """
 
-  output_file = os.path.join(output_dir, template)
+    output_file = os.path.join(output_dir, template)
 
-  basedir = os.path.dirname(output_file)
-  if not os.path.exists(basedir):
-    logger.info('mkdir %s', basedir)
-    os.makedirs(basedir)
+    basedir = os.path.dirname(output_file)
+    if not os.path.exists(basedir):
+        logger.info("mkdir %s", basedir)
+        os.makedirs(basedir)
 
-  with open(output_file, 'wt') as f:
-    logger.info('rendering %s', output_file)
-    T = jenv.get_template(template)
-    f.write(T.render(**context))
+    with open(output_file, "wt") as f:
+        logger.info("rendering %s", output_file)
+        T = jenv.get_template(template)
+        f.write(T.render(**context))
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Generates a new project for Bob:
 
      $ bdt new -vv bob/bob.newpackage "John Doe" "joe@example.com"
-''')
-@click.argument('package')
-@click.argument('author')
-@click.argument('email')
-@click.option('-t', '--title', show_default=True,
-    default='New package', help='This entry defines the package title. ' \
-        'The package title should be a few words only.  It will appear ' \
-        'at the description of your package and as the title of your ' \
-        'documentation')
-@click.option('-l', '--license', type=click.Choice(['bsd', 'gplv3']),
-    default='gplv3', show_default=True,
-    help='Changes the default licensing scheme to use for your package')
-@click.option('-o', '--output-dir', help='Directory where to dump the new ' \
-          'project - must not exist')
+"""
+)
+@click.argument("package")
+@click.argument("author")
+@click.argument("email")
+@click.option(
+    "-t",
+    "--title",
+    show_default=True,
+    default="New package",
+    help="This entry defines the package title. "
+    "The package title should be a few words only.  It will appear "
+    "at the description of your package and as the title of your "
+    "documentation",
+)
+@click.option(
+    "-l",
+    "--license",
+    type=click.Choice(["bsd", "gplv3"]),
+    default="gplv3",
+    show_default=True,
+    help="Changes the default licensing scheme to use for your package",
+)
+@click.option(
+    "-o",
+    "--output-dir",
+    help="Directory where to dump the new " "project - must not exist",
+)
 @verbosity_option()
 @bdt.raise_on_error
 def new(package, author, email, title, license, output_dir):
-    """Creates a folder structure for a new Bob/BEAT package
-    """
+    """Creates a folder structure for a new Bob/BEAT package."""
 
-    if '/' not in package:
+    if "/" not in package:
         raise RuntimeError('PACKAGE should be specified as "group/name"')
 
-    group, name = package.split('/')
+    group, name = package.split("/")
 
     # creates the rst title, which looks like this:
     # =======
     #  Title
     # =======
-    rst_title = ('=' * (2+len(title))) + '\n ' + title + '\n' + \
-        ('=' * (2+len(title)))
+    rst_title = (
+        ("=" * (2 + len(title)))
+        + "\n "
+        + title
+        + "\n"
+        + ("=" * (2 + len(title)))
+    )
 
     # the jinja context defines the substitutions to be performed
     today = datetime.datetime.today()
     context = dict(
-        package = package,
-        group = group,
-        name = name,
-        author = author,
-        email = email,
-        title = title,
-        rst_title = rst_title,
-        license = license,
-        year = today.strftime('%Y'),
-        date = today.strftime('%c'),
-        )
+        package=package,
+        group=group,
+        name=name,
+        author=author,
+        email=email,
+        title=title,
+        rst_title=rst_title,
+        license=license,
+        year=today.strftime("%Y"),
+        date=today.strftime("%c"),
+    )
 
     # copy the whole template structure and de-templatize the needed files
     if output_dir is None:
-      output_dir = os.path.join(os.path.realpath(os.curdir), name)
-    logger.info('Creating structure for %s at directory %s', package,
-        output_dir)
+        output_dir = os.path.join(os.path.realpath(os.curdir), name)
+    logger.info(
+        "Creating structure for %s at directory %s", package, output_dir
+    )
 
     if os.path.exists(output_dir):
-      raise IOError('The package directory %s already exists - cannot '
-          'overwrite!' % output_dir)
+        raise IOError(
+            "The package directory %s already exists - cannot "
+            "overwrite!" % output_dir
+        )
 
-    logger.info('mkdir %s', output_dir)
+    logger.info("mkdir %s", output_dir)
     os.makedirs(output_dir)
 
     # base jinja2 engine
     env = jinja2.Environment(
-        loader=jinja2.PackageLoader('bob.devtools', 'templates'),
-        autoescape=jinja2.select_autoescape(['html', 'xml'])
-        )
+        loader=jinja2.PackageLoader("bob.devtools", "templates"),
+        autoescape=jinja2.select_autoescape(["html", "xml"]),
+    )
 
     # other standard files
     simple = [
-        'requirements.txt',
-        'buildout.cfg',
-        'MANIFEST.in',
-        'setup.py',
-        '.gitignore',
-        'doc/index.rst',
-        'doc/conf.py',
-        'doc/links.rst',
-        '.gitlab-ci.yml',
-        'README.rst',
-        'version.txt',
-        ]
+        "requirements.txt",
+        "buildout.cfg",
+        "MANIFEST.in",
+        "setup.py",
+        ".gitignore",
+        "doc/index.rst",
+        "doc/conf.py",
+        "doc/links.rst",
+        ".gitlab-ci.yml",
+        "README.rst",
+        "version.txt",
+    ]
     for k in simple:
-      render_template(env, k, context, output_dir)
+        render_template(env, k, context, output_dir)
 
     # handles the license file
-    if license == 'gplv3':
-      render_template(env, 'COPYING', context, output_dir)
+    if license == "gplv3":
+        render_template(env, "COPYING", context, output_dir)
     else:
-      render_template(env, 'LICENSE', context, output_dir)
+        render_template(env, "LICENSE", context, output_dir)
 
     # creates the base python module structure
-    template_dir = pkg_resources.resource_filename(__name__, os.path.join('..',
-      'templates'))
-    logger.info('Creating base %s python module', group)
-    shutil.copytree(os.path.join(template_dir, 'pkg'),
-        os.path.join(output_dir, group))
+    template_dir = pkg_resources.resource_filename(
+        __name__, os.path.join("..", "templates")
+    )
+    logger.info("Creating base %s python module", group)
+    shutil.copytree(
+        os.path.join(template_dir, "pkg"), os.path.join(output_dir, group)
+    )
 
     # copies specific images to the right spot
-    copy_file(os.path.join('doc', 'img', '%s-favicon.ico' % group), output_dir)
-    copy_file(os.path.join('doc', 'img', '%s-128x128.png' % group), output_dir)
-    copy_file(os.path.join('doc', 'img', '%s-logo.png' % group), output_dir)
+    copy_file(os.path.join("doc", "img", "%s-favicon.ico" % group), output_dir)
+    copy_file(os.path.join("doc", "img", "%s-128x128.png" % group), output_dir)
+    copy_file(os.path.join("doc", "img", "%s-logo.png" % group), output_dir)
 
     # finally, render the conda recipe template-template!
     # this one is special since it is already a jinja2 template
     conda_env = jinja2.Environment(
-        loader=jinja2.PackageLoader('bob.devtools', 'templates'),
-        autoescape=jinja2.select_autoescape(['html', 'xml']),
-        block_start_string='(%', block_end_string='%)',
-        variable_start_string='((', variable_end_string='))',
-        comment_start_string='(#', comment_end_string='#)',
-        )
-    render_template(conda_env, os.path.join('conda', 'meta.yaml'), context,
-        output_dir)
+        loader=jinja2.PackageLoader("bob.devtools", "templates"),
+        autoescape=jinja2.select_autoescape(["html", "xml"]),
+        block_start_string="(%",
+        block_end_string="%)",
+        variable_start_string="((",
+        variable_end_string="))",
+        comment_start_string="(#",
+        comment_end_string="#)",
+    )
+    render_template(
+        conda_env, os.path.join("conda", "meta.yaml"), context, output_dir
+    )
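
Note on the hunk above: conda's meta.yaml is itself a Jinja2 template that
uses "{{ }}" markers, so the generator has to render it with different
delimiters or it would expand conda-build's own placeholders. A minimal,
self-contained sketch of the same idea (the recipe string and package name
below are invented for illustration):

    import jinja2

    # "((name))" is ours to expand now; "{{ python }}" must survive
    # untouched so that conda-build can expand it later.
    recipe = "package:\n  name: ((name))\n  version: {{ python }}\n"

    env = jinja2.Environment(
        block_start_string="(%",
        block_end_string="%)",
        variable_start_string="((",
        variable_end_string="))",
        comment_start_string="(#",
        comment_end_string="#)",
    )
    print(env.from_string(recipe).render(name="bob.newpackage"))
    # package:
    #   name: bob.newpackage
    #   version: {{ python }}
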
diff --git a/bob/devtools/scripts/rebuild.py b/bob/devtools/scripts/rebuild.py
index b97f5bea89bf52a0d1fe6d768817c2318b78ee6a..17c019edf59a485c74d5eefe64ad9edcb39deecb 100644
--- a/bob/devtools/scripts/rebuild.py
+++ b/bob/devtools/scripts/rebuild.py
@@ -11,18 +11,33 @@ import pkg_resources
 import conda_build.api
 
 from . import bdt
-from ..build import next_build_number, conda_arch, should_skip_build, \
-    get_rendered_metadata, get_parsed_recipe, make_conda_config, \
-    get_docserver_setup, get_env_directory, get_output_path
-from ..constants import CONDA_BUILD_CONFIG, CONDA_RECIPE_APPEND, \
-    SERVER, MATPLOTLIB_RCDIR, BASE_CONDARC
+from ..build import (
+    next_build_number,
+    conda_arch,
+    should_skip_build,
+    get_rendered_metadata,
+    get_parsed_recipe,
+    make_conda_config,
+    get_docserver_setup,
+    get_env_directory,
+    get_output_path,
+)
+from ..constants import (
+    CONDA_BUILD_CONFIG,
+    CONDA_RECIPE_APPEND,
+    SERVER,
+    MATPLOTLIB_RCDIR,
+    BASE_CONDARC,
+)
 from ..bootstrap import set_environment, get_channels
 
 from ..log import verbosity_option, get_logger, echo_normal
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
   1. Rebuilds a recipe from one of our packages, checked-out at "bob/bob.extension", for python 3.6:
 
@@ -34,169 +49,264 @@ Examples:
 
 \b
      $ bdt rebuild -vv --python=3.6 path/to/recipe-dir1 path/to/recipe-dir2
-''')
-@click.argument('recipe-dir', required=False, type=click.Path(file_okay=False,
-  dir_okay=True, exists=True), nargs=-1)
-@click.option('-p', '--python', default=('%d.%d' % sys.version_info[:2]),
-    show_default=True, help='Version of python to build the environment for')
-@click.option('-r', '--condarc',
-    help='Use custom conda configuration file instead of our own',)
-@click.option('-m', '--config', '--variant-config-files', show_default=True,
-    default=CONDA_BUILD_CONFIG, help='overwrites the path leading to ' \
-        'variant configuration file to use')
-@click.option('-a', '--append-file', show_default=True,
-    default=CONDA_RECIPE_APPEND, help='overwrites the path leading to ' \
-        'appended configuration file to use')
-@click.option('-S', '--server', show_default=True, default=SERVER,
-    help='Server used for downloading conda packages and documentation ' \
-        'indexes of required packages')
-@click.option('-g', '--group', show_default=True, default='bob',
-    help='Group of packages (gitlab namespace) this package belongs to')
-@click.option('-P', '--private/--no-private', default=False,
-    help='Set this to **include** private channels on your build - ' \
-        'you **must** be at Idiap to execute this build in this case - ' \
-        'you **must** also use the correct server name through --server - ' \
-        'notice this option has no effect to conda if you also pass --condarc')
-@click.option('-X', '--stable/--no-stable', default=False,
-    help='Set this to **exclude** beta channels from your build - ' \
-        'notice this option has no effect if you also pass --condarc')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
-@click.option('-C', '--ci/--no-ci', default=False, hidden=True,
-    help='Use this flag to indicate the build will be running on the CI')
+"""
+)
+@click.argument(
+    "recipe-dir",
+    required=False,
+    type=click.Path(file_okay=False, dir_okay=True, exists=True),
+    nargs=-1,
+)
+@click.option(
+    "-p",
+    "--python",
+    default=("%d.%d" % sys.version_info[:2]),
+    show_default=True,
+    help="Version of python to build the environment for",
+)
+@click.option(
+    "-r",
+    "--condarc",
+    help="Use custom conda configuration file instead of our own",
+)
+@click.option(
+    "-m",
+    "--config",
+    "--variant-config-files",
+    show_default=True,
+    default=CONDA_BUILD_CONFIG,
+    help="overwrites the path leading to " "variant configuration file to use",
+)
+@click.option(
+    "-a",
+    "--append-file",
+    show_default=True,
+    default=CONDA_RECIPE_APPEND,
+    help="overwrites the path leading to " "appended configuration file to use",
+)
+@click.option(
+    "-S",
+    "--server",
+    show_default=True,
+    default=SERVER,
+    help="Server used for downloading conda packages and documentation "
+    "indexes of required packages",
+)
+@click.option(
+    "-g",
+    "--group",
+    show_default=True,
+    default="bob",
+    help="Group of packages (gitlab namespace) this package belongs to",
+)
+@click.option(
+    "-P",
+    "--private/--no-private",
+    default=False,
+    help="Set this to **include** private channels on your build - "
+    "you **must** be at Idiap to execute this build in this case - "
+    "you **must** also use the correct server name through --server - "
+    "notice this option has no effect to conda if you also pass --condarc",
+)
+@click.option(
+    "-X",
+    "--stable/--no-stable",
+    default=False,
+    help="Set this to **exclude** beta channels from your build - "
+    "notice this option has no effect if you also pass --condarc",
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
+@click.option(
+    "-C",
+    "--ci/--no-ci",
+    default=False,
+    hidden=True,
+    help="Use this flag to indicate the build will be running on the CI",
+)
 @verbosity_option()
 @bdt.raise_on_error
-def rebuild(recipe_dir, python, condarc, config, append_file,
-    server, group, private, stable, dry_run, ci):
-  """Tests and rebuilds packages through conda-build with stock configuration
-
-  This command wraps the execution of conda-build in two stages: first, from
-  the original package recipe and some channel look-ups, it figures out what is
-  the lastest version of the package available.  It downloads such file and
-  runs a test.  If the test suceeds, then it proceeds to the next recipe.
-  Otherwise, it rebuilds the package and uploads a new version to the channel.
-  """
-
-  # if we are in a dry-run mode, let's let it be known
-  if dry_run:
-      logger.warn('!!!! DRY RUN MODE !!!!')
-      logger.warn('Nothing will be really built')
-
-  recipe_dir = recipe_dir or [os.path.join(os.path.realpath('.'), 'conda')]
-
-  logger.debug('This package is considered part of group "%s" - tunning ' \
-      'conda package and documentation URLs for this...', group)
-
-  # get potential channel upload and other auxiliary channels
-  channels = get_channels(public=(not private), stable=stable, server=server,
-      intranet=ci, group=group)
-
-  if condarc is not None:
-    logger.info('Loading CONDARC file from %s...', condarc)
-    with open(condarc, 'rb') as f:
-      condarc_options = yaml.load(f, Loader=yaml.FullLoader)
-  else:
-    # use default and add channels
-    condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
-    logger.info('Using the following channels during build:\n  - %s',
-        '\n  - '.join(channels + ['defaults']))
-    condarc_options['channels'] = channels + ['defaults']
-
-  # dump packages at base environment
-  prefix = get_env_directory(os.environ['CONDA_EXE'], 'base')
-  condarc_options['croot'] = os.path.join(prefix, 'conda-bld')
-
-  conda_config = make_conda_config(config, python, append_file,
-      condarc_options)
-
-  set_environment('MATPLOTLIBRC', MATPLOTLIB_RCDIR)
-
-  # setup BOB_DOCUMENTATION_SERVER environment variable (used for bob.extension
-  # and derived documentation building via Sphinx)
-  set_environment('DOCSERVER', server)
-  doc_urls = get_docserver_setup(public=(not private), stable=stable,
-      server=server, intranet=ci, group=group)
-  set_environment('BOB_DOCUMENTATION_SERVER', doc_urls)
-
-  arch = conda_arch()
-
-  for d in recipe_dir:
-
-    if not os.path.exists(d):
-      raise RuntimeError("The directory %s does not exist" % recipe_dir)
-
-    version_candidate = os.path.join(d, '..', 'version.txt')
-    if os.path.exists(version_candidate):
-      version = open(version_candidate).read().rstrip()
-      set_environment('BOB_PACKAGE_VERSION', version)
-
-    # pre-renders the recipe - figures out the destination
-    metadata = get_rendered_metadata(d, conda_config)
-
-    # checks if we should actually build this recipe
-    if should_skip_build(metadata):
-      logger.info('Skipping UNSUPPORTED build of %s for %s', recipe_dir, arch)
-      continue
-
-    rendered_recipe = get_parsed_recipe(metadata)
-    path = get_output_path(metadata, conda_config)
-
-    # Get the latest build number
-    build_number, existing = next_build_number(channels[0],
-        os.path.basename(path))
-
-    should_build = True
-
-    if existing:  #other builds exist, get the latest and see if it still works
-
-      destpath = os.path.join(condarc_options['croot'], arch,
-          os.path.basename(existing[0]))
-      if not os.path.exists(os.path.dirname(destpath)):
-        os.makedirs(os.path.dirname(destpath))
-      src = channels[0] + existing[0]
-      logger.info('Downloading %s -> %s', src, destpath)
-      urllib.request.urlretrieve(src, destpath)
-
-      # conda_build may either raise an exception or return ``False`` in case
-      # the build fails, depending on the reason.  This bit of code tries to
-      # accomodate both code paths and decides if we should rebuild the package
-      # or not
-      logger.info('Testing %s', src)
-      try:
-        result = conda_build.api.test(destpath, config=conda_config)
-        should_build = not result
-      except Exception as error:
-        logger.exception(error)
-      except:
-        logger.error('conda_build.api.test() threw an unknown exception - ' \
-            'looks like bad programming, but not on our side this time...')
-
-      if should_build:
-        logger.warn('Test for %s: FAILED. Building...', src)
-      else:
-        logger.info('Test for %s: SUCCESS (package is up-to-date)', src)
-
-
-    if should_build:  #something wrong happened, run a full build
-
-      logger.info('Re-building %s-%s-py%s (build: %d) for %s',
-          rendered_recipe['package']['name'],
-          rendered_recipe['package']['version'], python.replace('.',''),
-          build_number, arch)
-
-      if not dry_run:
-        # set $BOB_BUILD_NUMBER and force conda_build to reparse recipe to get it
-        # right
-        set_environment('BOB_BUILD_NUMBER', str(build_number))
-        paths = conda_build.api.build(d, config=conda_config, notest=False)
-        # if you get to this point, the package was successfully rebuilt
-        # set environment to signal caller we may dispose of it
-        os.environ['BDT_BUILD'] = ':'.join(paths)
-
-    else:  #skip build, test worked
-      logger.info('Skipping rebuild of %s-%s-py%s (build: %d) for %s',
-          rendered_recipe['package']['name'],
-          rendered_recipe['package']['version'], python.replace('.',''),
-          build_number, arch)
+def rebuild(
+    recipe_dir,
+    python,
+    condarc,
+    config,
+    append_file,
+    server,
+    group,
+    private,
+    stable,
+    dry_run,
+    ci,
+):
+    """Tests and rebuilds packages through conda-build with stock
+    configuration.
+
+    This command wraps the execution of conda-build in two stages:
+    first, from the original package recipe and some channel look-ups,
+    it figures out what is the latest version of the package available.
+    It downloads that file and runs a test.  If the test succeeds, then
+    it proceeds to the next recipe. Otherwise, it rebuilds the package
+    and uploads a new version to the channel.
+    """
+
+    # if we are in a dry-run mode, let's let it be known
+    if dry_run:
+        logger.warn("!!!! DRY RUN MODE !!!!")
+        logger.warn("Nothing will be really built")
+
+    recipe_dir = recipe_dir or [os.path.join(os.path.realpath("."), "conda")]
+
+    logger.debug(
+        'This package is considered part of group "%s" - tuning '
+        "conda package and documentation URLs for this...",
+        group,
+    )
+
+    # get potential channel upload and other auxiliary channels
+    channels = get_channels(
+        public=(not private),
+        stable=stable,
+        server=server,
+        intranet=ci,
+        group=group,
+    )
+
+    if condarc is not None:
+        logger.info("Loading CONDARC file from %s...", condarc)
+        with open(condarc, "rb") as f:
+            condarc_options = yaml.load(f, Loader=yaml.FullLoader)
+    else:
+        # use default and add channels
+        condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
+        logger.info(
+            "Using the following channels during build:\n  - %s",
+            "\n  - ".join(channels + ["defaults"]),
+        )
+        condarc_options["channels"] = channels + ["defaults"]
+
+    # dump packages at base environment
+    prefix = get_env_directory(os.environ["CONDA_EXE"], "base")
+    condarc_options["croot"] = os.path.join(prefix, "conda-bld")
+
+    conda_config = make_conda_config(
+        config, python, append_file, condarc_options
+    )
+
+    set_environment("MATPLOTLIBRC", MATPLOTLIB_RCDIR)
+
+    # setup BOB_DOCUMENTATION_SERVER environment variable (used for
+    # bob.extension and derived documentation building via Sphinx)
+    set_environment("DOCSERVER", server)
+    doc_urls = get_docserver_setup(
+        public=(not private),
+        stable=stable,
+        server=server,
+        intranet=ci,
+        group=group,
+    )
+    set_environment("BOB_DOCUMENTATION_SERVER", doc_urls)
+
+    arch = conda_arch()
+
+    for d in recipe_dir:
+
+        if not os.path.exists(d):
+            raise RuntimeError("The directory %s does not exist" % recipe_dir)
+
+        version_candidate = os.path.join(d, "..", "version.txt")
+        if os.path.exists(version_candidate):
+            version = open(version_candidate).read().rstrip()
+            set_environment("BOB_PACKAGE_VERSION", version)
+
+        # pre-renders the recipe - figures out the destination
+        metadata = get_rendered_metadata(d, conda_config)
+
+        # checks if we should actually build this recipe
+        if should_skip_build(metadata):
+            logger.info(
+                "Skipping UNSUPPORTED build of %s for %s", recipe_dir, arch
+            )
+            continue
+
+        rendered_recipe = get_parsed_recipe(metadata)
+        path = get_output_path(metadata, conda_config)
+
+        # Get the latest build number
+        build_number, existing = next_build_number(
+            channels[0], os.path.basename(path)
+        )
+
+        should_build = True
+
+        # other builds exist - get the latest and see if it still works
+        if existing:
+
+            destpath = os.path.join(
+                condarc_options["croot"], arch, os.path.basename(existing[0])
+            )
+            if not os.path.exists(os.path.dirname(destpath)):
+                os.makedirs(os.path.dirname(destpath))
+            src = channels[0] + existing[0]
+            logger.info("Downloading %s -> %s", src, destpath)
+            urllib.request.urlretrieve(src, destpath)
+
+            # conda_build may either raise an exception or return
+            # ``False`` in case the build fails, depending on the reason.
+            # This bit of code tries to accommodate both code paths and
+            # decides if we should rebuild the package or not
+            logger.info("Testing %s", src)
+            try:
+                result = conda_build.api.test(destpath, config=conda_config)
+                should_build = not result
+            except Exception as error:
+                logger.exception(error)
+            except:
+                logger.error(
+                    "conda_build.api.test() threw an unknown exception - "
+                    "looks like bad programming, but not on our side this time..."
+                )
+
+            if should_build:
+                logger.warn("Test for %s: FAILED. Building...", src)
+            else:
+                logger.info("Test for %s: SUCCESS (package is up-to-date)", src)
+
+        if should_build:  # test failed or no build exists - run a full build
+
+            logger.info(
+                "Re-building %s-%s-py%s (build: %d) for %s",
+                rendered_recipe["package"]["name"],
+                rendered_recipe["package"]["version"],
+                python.replace(".", ""),
+                build_number,
+                arch,
+            )
+
+            if not dry_run:
+                # set $BOB_BUILD_NUMBER and force conda_build to reparse
+                # the recipe to get it right
+                set_environment("BOB_BUILD_NUMBER", str(build_number))
+                paths = conda_build.api.build(
+                    d, config=conda_config, notest=False
+                )
+                # if you get to this point, the package was successfully rebuilt
+                # set environment to signal caller we may dispose of it
+                os.environ["BDT_BUILD"] = ":".join(paths)
+
+        else:  # skip build, test worked
+            logger.info(
+                "Skipping rebuild of %s-%s-py%s (build: %d) for %s",
+                rendered_recipe["package"]["name"],
+                rendered_recipe["package"]["version"],
+                python.replace(".", ""),
+                build_number,
+                arch,
+            )
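
A compressed, runnable sketch of the per-recipe decision implemented above,
with the conda_build.api calls stubbed out (the stubs, file names and
results below are placeholders, not the real API):

    # stand-ins for conda_build.api.test / conda_build.api.build
    def test_existing(path):
        return False  # pretend the downloaded package fails its test

    def build(recipe_dir):
        return ["/conda-bld/noarch/pkg-1.0-py36_1.tar.bz2"]

    existing = ["pkg-1.0-py36_0.tar.bz2"]  # latest build on the channel
    should_build = True
    if existing:
        # the real code wraps this call in try/except because conda-build
        # may either raise or return False when the test fails
        try:
            should_build = not test_existing(existing[0])
        except Exception:
            pass
    if should_build:
        print("rebuilding:", build("path/to/recipe"))
    else:
        print("existing package still passes - skipping rebuild")
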
diff --git a/bob/devtools/scripts/release.py b/bob/devtools/scripts/release.py
index 502f5001c163566d88fa38501611a4eae9a24148..f666bc57f636ef07dc7d35eeb90fb95d44cbc8bc 100644
--- a/bob/devtools/scripts/release.py
+++ b/bob/devtools/scripts/release.py
@@ -12,10 +12,12 @@ from ..release import release_package, wait_for_pipeline_to_finish
 from ..release import get_gitlab_instance
 
 from ..log import verbosity_option, get_logger
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Releases a single package:
@@ -42,27 +44,43 @@ Examples:
   4. The option `-dry-run` can be used to let the script print what it would do instead of actually doing it:
 
      $ bdt gitlab release --dry-run changelog_since_last_release.md
-'''
+"""
+)
+@click.argument("changelog", type=click.File("rt", lazy=False))
+@click.option(
+    "-g",
+    "--group",
+    default="bob",
+    show_default=True,
+    help="Group name where all packages are located (if not provided with the package)",
+)
+@click.option(
+    "-p",
+    "--package",
+    help="If the name of a package is provided, then this package will be "
+    "found in the changelog file and the release will resume from it "
+    "(if option ``--resume`` is set) or only this package will be "
+    "released.  If there is only a single package in the changelog, "
+    "then you do NOT need to set this flag",
+)
+@click.option(
+    "-r",
+    "--resume/--no-resume",
+    default=False,
+    help="The overall release will resume from the provided package name",
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
 )
-@click.argument('changelog', type=click.File('rt', lazy=False))
-@click.option('-g', '--group', default='bob', show_default=True,
-    help='Group name where all packages are located (if not provided with the package)')
-@click.option('-p', '--package',
-    help='If the name of a package is provided, then this package will be ' \
-        'found in the changelog file and the release will resume from it ' \
-        '(if option ``--resume`` is set) or only this package will be ' \
-        'released.  If there is only a single package in the changelog, ' \
-        'then you do NOT need to set this flag')
-@click.option('-r', '--resume/--no-resume', default=False,
-    help='The overall release will resume from the provided package name')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
 @verbosity_option()
 @bdt.raise_on_error
 def release(changelog, group, package, resume, dry_run):
-    """Tags packages on gitlab from an input CHANGELOG in markdown formatting
+    """Tags packages on gitlab from an input CHANGELOG in markdown formatting.
 
     By using a CHANGELOG file as an input (that can be generated with the ``bdt
     changelog`` command), this script goes through all packages in CHANGELOG
@@ -127,7 +145,7 @@ def release(changelog, group, package, resume, dry_run):
 
     # if we are releasing 'bob' metapackage, it's a simple thing, no GitLab
     # API
-    if package == 'bob':
+    if package == "bob":
         release_bob(changelog)
         return
 
@@ -137,26 +155,28 @@ def release(changelog, group, package, resume, dry_run):
     changelogs = changelog.readlines()
 
     # find the starts of each package's description in the changelog
-    pkgs = [i for i, line in enumerate(changelogs) if line.startswith('*')]
-    pkgs.append(len(changelogs)) #the end
+    pkgs = [i for i, line in enumerate(changelogs) if line.startswith("*")]
+    pkgs.append(len(changelogs))  # the end
     start_idx = 0
 
     if package:
         # get the index where the package first appears in the list
-        start_idx = [i for i, line in enumerate(changelogs) \
-            if line[1:].strip() == package]
+        start_idx = [
+            i
+            for i, line in enumerate(changelogs)
+            if line[1:].strip() == package
+        ]
 
         if not start_idx:
-            logger.error('Package %s was not found in the changelog',
-                package)
+            logger.error("Package %s was not found in the changelog", package)
             return
 
         start_idx = pkgs.index(start_idx[0])
 
     # if we are in a dry-run mode, let's let it be known
     if dry_run:
-        logger.warn('!!!! DRY RUN MODE !!!!')
-        logger.warn('Nothing is being committed to Gitlab')
+        logger.warn("!!!! DRY RUN MODE !!!!")
+        logger.warn("Nothing is being committed to Gitlab")
 
     # go through the list of packages and release them starting from the
     # start_idx
@@ -164,21 +184,26 @@ def release(changelog, group, package, resume, dry_run):
 
         cur_package_name = changelogs[pkgs[i]][1:].strip()
 
-        if '/' not in cur_package_name:
-            cur_package_name = '/'.join((group, cur_package_name))
+        if "/" not in cur_package_name:
+            cur_package_name = "/".join((group, cur_package_name))
 
         # retrieves the gitlab package object
         use_package = gl.projects.get(cur_package_name)
-        logger.info('Processing %s (gitlab id=%d)',
-            use_package.attributes['path_with_namespace'], use_package.id)
+        logger.info(
+            "Processing %s (gitlab id=%d)",
+            use_package.attributes["path_with_namespace"],
+            use_package.id,
+        )
 
-        tag, tag_comments = parse_and_process_package_changelog(gl,
-            use_package, changelogs[pkgs[i] + 1: pkgs[i + 1]], dry_run)
+        tag, tag_comments = parse_and_process_package_changelog(
+            gl, use_package, changelogs[pkgs[i] + 1 : pkgs[i + 1]], dry_run
+        )
 
         # release the package with the found tag and its comments
         if use_package:
-            pipeline_id = release_package(use_package, tag, tag_comments,
-                dry_run)
+            pipeline_id = release_package(
+                use_package, tag, tag_comments, dry_run
+            )
             # now, wait for the pipeline to finish, before we can release the
             # next package
             wait_for_pipeline_to_finish(use_package, pipeline_id, dry_run)
@@ -188,4 +213,4 @@ def release(changelog, group, package, resume, dry_run):
         if package == cur_package_name and not resume:
             break
 
-    logger.info('Finished processing %s', changelog.name)
+    logger.info("Finished processing %s", changelog.name)
diff --git a/bob/devtools/scripts/runners.py b/bob/devtools/scripts/runners.py
index d0a0c4a9676536fd9e7b338aa3439c58b07d597c..3ca015ba65bcba156f51abca8a3cf2fc8c1e7cca 100644
--- a/bob/devtools/scripts/runners.py
+++ b/bob/devtools/scripts/runners.py
@@ -8,10 +8,12 @@ from . import bdt
 from ..release import get_gitlab_instance
 
 from ..log import verbosity_option, get_logger
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Disables the runner with description "macmini" for all active projects in group "bob":
@@ -28,89 +30,127 @@ Examples:
 
      $ bdt gitlab runners -vv bob/bob.extension enable linux-srv02
 
-''')
-@click.argument('target')
-@click.argument('cmd', type=click.Choice(['enable', 'disable']))
-@click.argument('name')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
+"""
+)
+@click.argument("target")
+@click.argument("cmd", type=click.Choice(["enable", "disable"]))
+@click.argument("name")
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
 @verbosity_option()
 @bdt.raise_on_error
 def runners(target, cmd, name, dry_run):
-    """Enables and disables runners on whole gitlab groups or single projects
-    """
+    """Enables and disables runners on whole gitlab groups or single
+    projects."""
 
     gl = get_gitlab_instance()
     gl.auth()
-    user_id = gl.user.attributes['id']
-
-    if '/' in target:  #it is a specific project
-      packages = [gl.projects.get(target)]
-      logger.debug('Found gitlab project %s (id=%d)',
-          packages[0].attributes['path_with_namespace'], packages[0].id)
-
-    else:  #it is a group - get all projects
-      logger.warn('Retrieving group by name - may take long...')
-      group = gl.groups.get(target)
-      logger.debug('Found gitlab group %s (id=%d)', group.attributes['path'],
-          group.id)
-      logger.warn('Retrieving all projects (with details) from group ' \
-          '%s (id=%d)...', group.attributes['path'], group.id)
-      packages = [gl.projects.get(k.id) for k in \
-          group.projects.list(all=True, simple=True)]
-      logger.info('Found %d projects under group %s', len(packages),
-          group.attributes['path'])
+    user_id = gl.user.attributes["id"]
+
+    if "/" in target:  # it is a specific project
+        packages = [gl.projects.get(target)]
+        logger.debug(
+            "Found gitlab project %s (id=%d)",
+            packages[0].attributes["path_with_namespace"],
+            packages[0].id,
+        )
+
+    else:  # it is a group - get all projects
+        logger.warn("Retrieving group by name - may take long...")
+        group = gl.groups.get(target)
+        logger.debug(
+            "Found gitlab group %s (id=%d)", group.attributes["path"], group.id
+        )
+        logger.warn(
+            "Retrieving all projects (with details) from group "
+            "%s (id=%d)...",
+            group.attributes["path"],
+            group.id,
+        )
+        packages = [
+            gl.projects.get(k.id)
+            for k in group.projects.list(all=True, simple=True)
+        ]
+        logger.info(
+            "Found %d projects under group %s",
+            len(packages),
+            group.attributes["path"],
+        )
 
     # search for the runner to affect
-    the_runner = [k for k in gl.runners.list(all=True) if \
-        k.attributes['description'] == name]
+    the_runner = [
+        k
+        for k in gl.runners.list(all=True)
+        if k.attributes["description"] == name
+    ]
     if not the_runner:
-      raise RuntimeError('Cannot find runner with description = %s', name)
+        raise RuntimeError("Cannot find runner with description = %s", name)
     the_runner = the_runner[0]
-    logger.info('Found runner %s (id=%d)',
-        the_runner.attributes['description'], the_runner.attributes['id'])
+    logger.info(
+        "Found runner %s (id=%d)",
+        the_runner.attributes["description"],
+        the_runner.attributes["id"],
+    )
 
     for k in packages:
-      logger.info('Processing project %s (id=%d)',
-          k.attributes['path_with_namespace'], k.id)
-
-      if cmd == 'enable':
-
-        #checks if runner is not enabled first
-        enabled = False
-        for l in k.runners.list(all=True):
-          if l.id == the_runner.id:  #it is there already
-            logger.warn('Runner %s (id=%d) is already enabled for project %s',
-                l.attributes['description'], l.id,
-                k.attributes['path_with_namespace'])
-            enabled = True
-            break
-
-        if not enabled:  #enable it
-          if not dry_run:
-            k.runners.create({'runner_id': the_runner.id})
-          logger.info('Enabled runner %s (id=%d) for project %s',
-              the_runner.attributes['description'], the_runner.id,
-              k.attributes['path_with_namespace'])
-
-
-      elif cmd == 'disable':
-
-        #checks if runner is not already disabled first
-        disabled = True
-        for l in k.runners.list(all=True):
-          if l.id == the_runner.id:  #it is there already
-            logger.debug('Runner %s (id=%d) is enabled for project %s',
-                l.attributes['description'], l.id,
-                k.attributes['path_with_namespace'])
-            disabled = False
-            break
-
-        if not disabled:  #enable it
-          if not dry_run:
-            k.runners.delete(the_runner.id)
-          logger.info('Disabled runner %s (id=%d) for project %s',
-              the_runner.attributes['description'], the_runner.id,
-              k.attributes['path_with_namespace'])
+        logger.info(
+            "Processing project %s (id=%d)",
+            k.attributes["path_with_namespace"],
+            k.id,
+        )
+
+        if cmd == "enable":
+
+            # check if the runner is already enabled for this project
+            enabled = False
+            for l in k.runners.list(all=True):
+                if l.id == the_runner.id:  # it is there already
+                    logger.warn(
+                        "Runner %s (id=%d) is already enabled for project %s",
+                        l.attributes["description"],
+                        l.id,
+                        k.attributes["path_with_namespace"],
+                    )
+                    enabled = True
+                    break
+
+            if not enabled:  # enable it
+                if not dry_run:
+                    k.runners.create({"runner_id": the_runner.id})
+                logger.info(
+                    "Enabled runner %s (id=%d) for project %s",
+                    the_runner.attributes["description"],
+                    the_runner.id,
+                    k.attributes["path_with_namespace"],
+                )
+
+        elif cmd == "disable":
+
+            # check if the runner is currently enabled for this project
+            disabled = True
+            for l in k.runners.list(all=True):
+                if l.id == the_runner.id:  # it is there already
+                    logger.debug(
+                        "Runner %s (id=%d) is enabled for project %s",
+                        l.attributes["description"],
+                        l.id,
+                        k.attributes["path_with_namespace"],
+                    )
+                    disabled = False
+                    break
+
+            if not disabled:  # disable it
+                if not dry_run:
+                    k.runners.delete(the_runner.id)
+                logger.info(
+                    "Disabled runner %s (id=%d) for project %s",
+                    the_runner.attributes["description"],
+                    the_runner.id,
+                    k.attributes["path_with_namespace"],
+                )
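
Both branches above follow one idempotency pattern: list the project's
runners first and only create/delete when the state actually needs to
change. A stub-level sketch (FakeRunner and the ids are hypothetical):

    class FakeRunner:
        def __init__(self, id):
            self.id = id

    project_runners = [FakeRunner(3), FakeRunner(7)]  # already enabled
    the_runner = FakeRunner(7)  # the one we were asked to enable

    if any(r.id == the_runner.id for r in project_runners):
        print("runner already enabled - nothing to do")
    else:
        print("would call k.runners.create({'runner_id': %d})"
              % the_runner.id)
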
diff --git a/bob/devtools/scripts/test.py b/bob/devtools/scripts/test.py
index e2eb6a4f4cb794b88d1673f55f5f297dc2aeb127..3265020173e7c0d5e3da5146a88a8db2e163bfbc 100644
--- a/bob/devtools/scripts/test.py
+++ b/bob/devtools/scripts/test.py
@@ -10,17 +10,28 @@ import pkg_resources
 import conda_build.api
 
 from . import bdt
-from ..build import conda_arch, make_conda_config, get_docserver_setup, \
-    get_env_directory
-from ..constants import CONDA_BUILD_CONFIG, CONDA_RECIPE_APPEND, \
-    SERVER, MATPLOTLIB_RCDIR, BASE_CONDARC
+from ..build import (
+    conda_arch,
+    make_conda_config,
+    get_docserver_setup,
+    get_env_directory,
+)
+from ..constants import (
+    CONDA_BUILD_CONFIG,
+    CONDA_RECIPE_APPEND,
+    SERVER,
+    MATPLOTLIB_RCDIR,
+    BASE_CONDARC,
+)
 from ..bootstrap import set_environment, get_channels
 
 from ..log import verbosity_option, get_logger
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Tests conda package:
@@ -34,85 +45,154 @@ Examples:
 \b
      $ bdt test -vv /path/to/conda-package-v1.0.0.tar.bz2 /path/to/other-conda-package-v2.0.0.tar.bz2
 
-''')
-@click.argument('package', required=True, type=click.Path(file_okay=True,
-  dir_okay=False, exists=True), nargs=-1)
-@click.option('-r', '--condarc',
-    help='Use custom conda configuration file instead of our own',)
-@click.option('-m', '--config', '--variant-config-files', show_default=True,
-    default=CONDA_BUILD_CONFIG, help='overwrites the path leading to ' \
-        'variant configuration file to use')
-@click.option('-a', '--append-file', show_default=True,
-    default=CONDA_RECIPE_APPEND, help='overwrites the path leading to ' \
-        'appended configuration file to use')
-@click.option('-S', '--server', show_default=True, default=SERVER,
-    help='Server used for downloading conda packages and documentation ' \
-        'indexes of required packages')
-@click.option('-g', '--group', show_default=True, default='bob',
-    help='Group of packages (gitlab namespace) this package belongs to')
-@click.option('-P', '--private/--no-private', default=False,
-    help='Set this to **include** private channels on your build - ' \
-        'you **must** be at Idiap to execute this build in this case - ' \
-        'you **must** also use the correct server name through --server - ' \
-        'notice this option has no effect to conda if you also pass --condarc')
-@click.option('-X', '--stable/--no-stable', default=False,
-    help='Set this to **exclude** beta channels from your build - ' \
-        'notice this option has no effect if you also pass --condarc')
-@click.option('-d', '--dry-run/--no-dry-run', default=False,
-    help='Only goes through the actions, but does not execute them ' \
-        '(combine with the verbosity flags - e.g. ``-vvv``) to enable ' \
-        'printing to help you understand what will be done')
-@click.option('-C', '--ci/--no-ci', default=False, hidden=True,
-    help='Use this flag to indicate the build will be running on the CI')
+"""
+)
+@click.argument(
+    "package",
+    required=True,
+    type=click.Path(file_okay=True, dir_okay=False, exists=True),
+    nargs=-1,
+)
+@click.option(
+    "-r",
+    "--condarc",
+    help="Use custom conda configuration file instead of our own",
+)
+@click.option(
+    "-m",
+    "--config",
+    "--variant-config-files",
+    show_default=True,
+    default=CONDA_BUILD_CONFIG,
+    help="overwrites the path leading to " "variant configuration file to use",
+)
+@click.option(
+    "-a",
+    "--append-file",
+    show_default=True,
+    default=CONDA_RECIPE_APPEND,
+    help="overwrites the path leading to " "appended configuration file to use",
+)
+@click.option(
+    "-S",
+    "--server",
+    show_default=True,
+    default=SERVER,
+    help="Server used for downloading conda packages and documentation "
+    "indexes of required packages",
+)
+@click.option(
+    "-g",
+    "--group",
+    show_default=True,
+    default="bob",
+    help="Group of packages (gitlab namespace) this package belongs to",
+)
+@click.option(
+    "-P",
+    "--private/--no-private",
+    default=False,
+    help="Set this to **include** private channels on your build - "
+    "you **must** be at Idiap to execute this build in this case - "
+    "you **must** also use the correct server name through --server - "
+    "notice this option has no effect to conda if you also pass --condarc",
+)
+@click.option(
+    "-X",
+    "--stable/--no-stable",
+    default=False,
+    help="Set this to **exclude** beta channels from your build - "
+    "notice this option has no effect if you also pass --condarc",
+)
+@click.option(
+    "-d",
+    "--dry-run/--no-dry-run",
+    default=False,
+    help="Only goes through the actions, but does not execute them "
+    "(combine with the verbosity flags - e.g. ``-vvv``) to enable "
+    "printing to help you understand what will be done",
+)
+@click.option(
+    "-C",
+    "--ci/--no-ci",
+    default=False,
+    hidden=True,
+    help="Use this flag to indicate the build will be running on the CI",
+)
 @verbosity_option()
 @bdt.raise_on_error
-def test(package, condarc, config, append_file, server, group, private, stable, dry_run, ci):
-  """Tests (pre-built) package through conda-build with stock configuration
-
-  This command wraps the execution of conda-build so that you use the same
-  conda configuration we use for our CI.  It always set
-  ``--no-anaconda-upload``.
-  """
-
-  # if we are in a dry-run mode, let's let it be known
-  if dry_run:
-      logger.warn('!!!! DRY RUN MODE !!!!')
-      logger.warn('Nothing will be really built')
-
-  logger.debug('This package is considered part of group "%s" - tunning ' \
-      'conda package and documentation URLs for this...', group)
-
-  # get potential channel upload and other auxiliary channels
-  channels = get_channels(public=(not private), stable=stable, server=server,
-      intranet=ci, group=group)
-
-  if condarc is not None:
-    logger.info('Loading CONDARC file from %s...', condarc)
-    with open(condarc, 'rb') as f:
-      condarc_options = yaml.load(f, Loader=yaml.FullLoader)
-  else:
-    # use default and add channels
-    all_channels = []
-    all_channels += channels + ['defaults']
-    condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
-    logger.info('Using the following channels during build:\n  - %s',
-        '\n  - '.join(all_channels))
-    condarc_options['channels'] = all_channels
-
-  conda_config = make_conda_config(config, None, append_file,
-      condarc_options)
-
-  set_environment('MATPLOTLIBRC', MATPLOTLIB_RCDIR)
-
-  # setup BOB_DOCUMENTATION_SERVER environment variable (used for bob.extension
-  # and derived documentation building via Sphinx)
-  set_environment('DOCSERVER', server)
-  doc_urls = get_docserver_setup(public=(not private), stable=stable,
-      server=server, intranet=ci, group=group)
-  set_environment('BOB_DOCUMENTATION_SERVER', doc_urls)
-
-  arch = conda_arch()
-  for p in package:
-    logger.info('Testing %s at %s', p, arch)
-    if not dry_run:
-      conda_build.api.test(p, config=conda_config)
+def test(
+    package,
+    condarc,
+    config,
+    append_file,
+    server,
+    group,
+    private,
+    stable,
+    dry_run,
+    ci,
+):
+    """Tests (pre-built) package through conda-build with stock configuration.
+
+    This command wraps the execution of conda-build so that you use the
+    same conda configuration we use for our CI.  It always set ``--no-
+    anaconda-upload``.
+    """
+
+    # if we are in a dry-run mode, let's let it be known
+    if dry_run:
+        logger.warn("!!!! DRY RUN MODE !!!!")
+        logger.warn("Nothing will be really built")
+
+    logger.debug(
+        'This package is considered part of group "%s" - tuning '
+        "conda package and documentation URLs for this...",
+        group,
+    )
+
+    # get potential channel upload and other auxiliary channels
+    channels = get_channels(
+        public=(not private),
+        stable=stable,
+        server=server,
+        intranet=ci,
+        group=group,
+    )
+
+    if condarc is not None:
+        logger.info("Loading CONDARC file from %s...", condarc)
+        with open(condarc, "rb") as f:
+            condarc_options = yaml.load(f, Loader=yaml.FullLoader)
+    else:
+        # use default and add channels
+        all_channels = []
+        all_channels += channels + ["defaults"]
+        condarc_options = yaml.load(BASE_CONDARC, Loader=yaml.FullLoader)
+        logger.info(
+            "Using the following channels during build:\n  - %s",
+            "\n  - ".join(all_channels),
+        )
+        condarc_options["channels"] = all_channels
+
+    conda_config = make_conda_config(config, None, append_file, condarc_options)
+
+    set_environment("MATPLOTLIBRC", MATPLOTLIB_RCDIR)
+
+    # setup BOB_DOCUMENTATION_SERVER environment variable (used for
+    # bob.extension and derived documentation building via Sphinx)
+    set_environment("DOCSERVER", server)
+    doc_urls = get_docserver_setup(
+        public=(not private),
+        stable=stable,
+        server=server,
+        intranet=ci,
+        group=group,
+    )
+    set_environment("BOB_DOCUMENTATION_SERVER", doc_urls)
+
+    arch = conda_arch()
+    for p in package:
+        logger.info("Testing %s at %s", p, arch)
+        if not dry_run:
+            conda_build.api.test(p, config=conda_config)
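
The condarc handling above mirrors rebuild.py: parse a base YAML document,
then override its channel list before handing everything to conda-build.
In isolation (the YAML snippet and channel URL are illustrative, not the
real BASE_CONDARC):

    import yaml

    base = "channels:\n  - defaults\n"  # stand-in for BASE_CONDARC
    condarc_options = yaml.load(base, Loader=yaml.FullLoader)
    condarc_options["channels"] = ["http://example.org/beta", "defaults"]
    print(condarc_options["channels"])
    # ['http://example.org/beta', 'defaults']
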
diff --git a/bob/devtools/scripts/visibility.py b/bob/devtools/scripts/visibility.py
index ee24cebb14bd5b6cd2f4b00cbc11d7e97c9971bd..1ff96dbd4b52230efd2d27f1c0b85ba4139406fe 100644
--- a/bob/devtools/scripts/visibility.py
+++ b/bob/devtools/scripts/visibility.py
@@ -10,10 +10,12 @@ from . import bdt
 from ..release import get_gitlab_instance
 
 from ..log import verbosity_option, get_logger, echo_normal, echo_warning
+
 logger = get_logger(__name__)
 
 
-@click.command(epilog='''
+@click.command(
+    epilog="""
 Examples:
 
   1. Check the visibility of a package you can access
@@ -26,50 +28,66 @@ Examples:
 \b
      $ bdt gitlab getpath bob/bob.nightlies order.txt
      $ bdt gitlab visibility order.txt
-''')
-@click.argument('target')
-@click.option('-g', '--group', default='bob', show_default=True,
-    help='Gitlab default group name where packages are located (if not ' \
-        'specified using a "/" on the package name - e.g. ' \
-        '"bob/bob.extension")')
+"""
+)
+@click.argument("target")
+@click.option(
+    "-g",
+    "--group",
+    default="bob",
+    show_default=True,
+    help="Gitlab default group name where packages are located (if not "
+    'specified using a "/" on the package name - e.g. '
+    '"bob/bob.extension")',
+)
 @verbosity_option()
 @bdt.raise_on_error
 def visibility(target, group):
-    '''Reports visibility of gitlab repository
+    """Reports visibility of gitlab repository.
 
-    This command checks if the named package is visible to the currently logged
-    in user, and reports its visibility level ('public', 'internal',
-    'private').  If the package does not exist or it is private to the current
-    user, it says 'unknown' instead.
-    '''
+    This command checks if the named package is visible to the currently
+    logged in user, and reports its visibility level ('public',
+    'internal', 'private').  If the package does not exist or it is
+    private to the current user, it says 'unknown' instead.
+    """
 
     gl = get_gitlab_instance()
 
     # reads package list or considers name to be a package name
     if os.path.exists(target) and os.path.isfile(target):
-        logger.debug('Reading package names from file %s...', target)
-        with open(target, 'rt') as f:
-            packages = [k.strip() for k in f.readlines() if k.strip() and not \
-                k.strip().startswith('#')]
+        logger.debug("Reading package names from file %s...", target)
+        with open(target, "rt") as f:
+            packages = [
+                k.strip()
+                for k in f.readlines()
+                if k.strip() and not k.strip().startswith("#")
+            ]
     else:
-        logger.debug('Assuming %s is a package name (file does not ' \
-            'exist)...', target)
+        logger.debug(
+            "Assuming %s is a package name (file does not " "exist)...", target
+        )
         packages = [target]
 
     # iterates over the packages and dumps required information
     for package in packages:
 
-        if '/' not in package:
-            package = '/'.join((group, package))
+        if "/" not in package:
+            package = "/".join((group, package))
 
         # retrieves the gitlab package object
         try:
-          use_package = gl.projects.get(package)
-          logger.debug('Found gitlab project %s (id=%d)',
-              use_package.attributes['path_with_namespace'], use_package.id)
-          echo_normal('%s: %s' % (package,
-            use_package.attributes['visibility'].lower()))
+            use_package = gl.projects.get(package)
+            logger.debug(
+                "Found gitlab project %s (id=%d)",
+                use_package.attributes["path_with_namespace"],
+                use_package.id,
+            )
+            echo_normal(
+                "%s: %s"
+                % (package, use_package.attributes["visibility"].lower())
+            )
         except gitlab.GitlabGetError as e:
-          logger.warn('Gitlab access error - package %s does not exist?',
-              package)
-          echo_warning('%s: unknown' % (package,))
+            logger.warn(
+                "Gitlab access error - package %s does not exist?", package
+            )
+            echo_warning("%s: unknown" % (package,))
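
The target handling above (a file listing package names vs. a single
package name) is easy to check standalone; blank lines and "#" comments
are dropped exactly as in the hunk (the file name is hypothetical):

    import os

    target = "order.txt"  # may be a file of names or a single package
    if os.path.exists(target) and os.path.isfile(target):
        with open(target, "rt") as f:
            packages = [
                k.strip()
                for k in f.readlines()
                if k.strip() and not k.strip().startswith("#")
            ]
    else:
        packages = [target]
    print(packages)  # e.g. ['order.txt'] when no such file exists
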
diff --git a/bob/devtools/templates/doc/conf.py b/bob/devtools/templates/doc/conf.py
index 906b971e3fdb8fe9e3f144ba078fbb295d9f5cfc..1545f41605231d75d5b600e42729dad864b38bd3 100644
--- a/bob/devtools/templates/doc/conf.py
+++ b/bob/devtools/templates/doc/conf.py
@@ -7,22 +7,22 @@ import pkg_resources
 # -- General configuration -----------------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = '1.3'
+needs_sphinx = "1.3"
 
 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
 extensions = [
-    'sphinx.ext.todo',
-    'sphinx.ext.coverage',
-    'sphinx.ext.ifconfig',
-    'sphinx.ext.autodoc',
-    'sphinx.ext.autosummary',
-    'sphinx.ext.doctest',
-    'sphinx.ext.graphviz',
-    'sphinx.ext.intersphinx',
-    'sphinx.ext.napoleon',
-    'sphinx.ext.viewcode',
-    'sphinx.ext.mathjax',
+    "sphinx.ext.todo",
+    "sphinx.ext.coverage",
+    "sphinx.ext.ifconfig",
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.doctest",
+    "sphinx.ext.graphviz",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.viewcode",
+    "sphinx.ext.mathjax",
     #'matplotlib.sphinxext.plot_directive'
 ]
 
@@ -33,8 +33,8 @@ nitpicky = True
 nitpick_ignore = []
 
 # Allows the user to override warnings from a separate file
-if os.path.exists('nitpick-exceptions.txt'):
-    for line in open('nitpick-exceptions.txt'):
+if os.path.exists("nitpick-exceptions.txt"):
+    for line in open("nitpick-exceptions.txt"):
         if line.strip() == "" or line.startswith("#"):
             continue
         dtype, target = line.split(None, 1)
@@ -51,26 +51,27 @@ autosummary_generate = True
 numfig = True
 
 # If we are on OSX, the 'dvipng' path maybe different
-dvipng_osx = '/Library/TeX/texbin/dvipng'
+dvipng_osx = "/Library/TeX/texbin/dvipng"
 if os.path.exists(dvipng_osx):
     pngmath_dvipng = dvipng_osx
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
 
 # The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # General information about the project.
-project = u'{{ name }}'
+project = "{{ name }}"
 import time
-copyright = u'%s, Idiap Research Institute' % time.strftime('%Y')
+
+copyright = "%s, Idiap Research Institute" % time.strftime("%Y")
 
 # Grab the setup entry
 distribution = pkg_resources.require(project)[0]
@@ -86,122 +87,123 @@ release = distribution.version
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
-#language = None
+# language = None
 
 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-#today = ''
+# today = ''
 # Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ['links.rst']
+exclude_patterns = ["links.rst"]
 
 # The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
+# default_role = None
 
 # If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True
 
 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True
 
 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-#show_authors = False
+# show_authors = False
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []
 
 # Some variables which are useful for generated material
-project_variable = project.replace('.', '_')
-short_description = u'{{ title }}'
-owner = [u'Idiap Research Institute']
+project_variable = project.replace(".", "_")
+short_description = "{{ title }}"
+owner = ["Idiap Research Institute"]
 
 # -- Options for HTML output ---------------------------------------------------
 
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
 import sphinx_rtd_theme
-html_theme = 'sphinx_rtd_theme'
+
+html_theme = "sphinx_rtd_theme"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further.  For a list of options available for each theme, see the
 # documentation.
-#html_theme_options = {}
+# html_theme_options = {}
 
 # Add any paths that contain custom themes here, relative to this directory.
 html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
 
 # The name for this set of Sphinx documents.  If None, it defaults to
 # "<project> v<release> documentation".
-#html_title = None
+# html_title = None
 
 # A shorter title for the navigation bar.  Default is the same as html_title.
-#html_short_title = project_variable
+# html_short_title = project_variable
 
 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-html_logo = 'img/{{ group }}-logo.png'
+html_logo = "img/{{ group }}-logo.png"
 
 # The name of an image file (within the static path) to use as favicon of the
 # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-html_favicon = 'img/{{ group }}-favicon.ico'
+html_favicon = "img/{{ group }}-favicon.ico"
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-#html_static_path = ['_static']
+# html_static_path = ['_static']
 
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'
 
 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True
 
 # Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}
 
 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-#html_additional_pages = {}
+# html_additional_pages = {}
 
 # If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True
 
 # If false, no index is generated.
-#html_use_index = True
+# html_use_index = True
 
 # If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False
 
 # If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True
 
 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True
 
 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True
 
 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it.  The value of this option must be the
 # base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''
 
 # This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = project_variable + u'_doc'
+htmlhelp_basename = project_variable + "_doc"
 
 # -- Post configuration --------------------------------------------------------
 
@@ -210,47 +212,47 @@ rst_epilog = """
 .. |project| replace:: Bob
 .. |version| replace:: %s
 .. |current-year| date:: %%Y
-""" % (version, )
+""" % (
+    version,
+)
 
 
 # Default processing flags for sphinx
-autoclass_content = 'class'
-autodoc_member_order = 'bysource'
-autodoc_default_flags = [
-    'members',
-    'undoc-members',
-    'show-inheritance',
-]
+autoclass_content = "class"
+autodoc_member_order = "bysource"
+autodoc_default_flags = ["members", "undoc-members", "show-inheritance"]
 
 
 # For inter-documentation mapping:
 from bob.extension.utils import link_documentation, load_requirements
+
 sphinx_requirements = "extra-intersphinx.txt"
 if os.path.exists(sphinx_requirements):
     intersphinx_mapping = link_documentation(
-        additional_packages=['python', 'numpy'] + \
-            load_requirements(sphinx_requirements))
+        additional_packages=["python", "numpy"]
+        + load_requirements(sphinx_requirements)
+    )
 else:
     intersphinx_mapping = link_documentation()
 
 # We want to remove all private (i.e. _. or __.__) members
 # that are not in the list of accepted functions
-accepted_private_functions = ['__array__']
+accepted_private_functions = ["__array__"]
 
 
 def member_function_test(app, what, name, obj, skip, options):
     # test if we have a private function
-    if len(name) > 1 and name[0] == '_':
+    if len(name) > 1 and name[0] == "_":
         # test if this private function should be allowed
         if name not in accepted_private_functions:
             # omit private functions that are not in the list of accepted private functions
             return skip
         else:
             # test if the method is documented
-            if not hasattr(obj, '__doc__') or not obj.__doc__:
+            if not hasattr(obj, "__doc__") or not obj.__doc__:
                 return skip
     return False
 
 
 def setup(app):
-    app.connect('autodoc-skip-member', member_function_test)
+    app.connect("autodoc-skip-member", member_function_test)
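For the record, `autodoc-skip-member` handlers return True to skip a member, False to force inclusion, or the incoming `skip` value to keep Sphinx's default. A quick sanity sketch of the logic above with dummy arguments (the handler ignores `app`, `what` and `options`; it assumes conf.py's names are in scope):

```python
def documented():
    """Has a docstring."""

class Undocumented:
    pass

# private name outside the accepted list -> defer to Sphinx's default (skip)
assert member_function_test(None, "method", "_hidden", documented, True, None)
# accepted private name with a docstring -> never skipped
assert not member_function_test(
    None, "method", "__array__", documented, True, None
)
# accepted private name without a docstring -> defer to the default again
assert member_function_test(
    None, "method", "__array__", Undocumented, True, None
)
```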
diff --git a/bob/devtools/templates/pkg/__init__.py b/bob/devtools/templates/pkg/__init__.py
index 2ab1e28b150f0549def9963e9e87de3fdd6b2579..edbb4090fca046b19d22d3982711084621bff3be 100644
--- a/bob/devtools/templates/pkg/__init__.py
+++ b/bob/devtools/templates/pkg/__init__.py
@@ -1,3 +1,4 @@
 # see https://docs.python.org/3/library/pkgutil.html
 from pkgutil import extend_path
+
 __path__ = extend_path(__path__, __name__)
diff --git a/bob/devtools/webdav3/client.py b/bob/devtools/webdav3/client.py
index b91e630ef4cda566090c9ffd2ccd06be1cd5fe03..f42d0274e203205b218d25b8d585e8cc44cb924b 100644
--- a/bob/devtools/webdav3/client.py
+++ b/bob/devtools/webdav3/client.py
@@ -15,6 +15,7 @@ from .exceptions import *
 from .urn import Urn
 
 from ..log import get_logger
+
 logger = get_logger(__name__)
 
 
@@ -28,7 +29,8 @@ __version__ = "0.2"
 
 
 def listdir(directory):
-    """Returns list of nested files and directories for local directory by path
+    """Returns list of nested files and directories for local directory by
+    path.
 
     :param directory: absolute or relative path to local directory
     :return: list of nested file or directory names
@@ -37,13 +39,15 @@ def listdir(directory):
     for filename in os.listdir(directory):
         file_path = os.path.join(directory, filename)
         if os.path.isdir(file_path):
-            filename = "{filename}{separate}".format(filename=filename, separate=os.path.sep)
+            filename = "{filename}{separate}".format(
+                filename=filename, separate=os.path.sep
+            )
         file_names.append(filename)
     return file_names
 
 
 def get_options(option_type, from_options):
-    """Extract options for specified option type from all options
+    """Extract options for specified option type from all options.
 
     :param option_type: the class defining the option type
     :param from_options: the dictionary of all options
@@ -53,7 +57,9 @@ def get_options(option_type, from_options):
     _options = dict()
 
     for key in option_type.keys:
-        key_with_prefix = "{prefix}{key}".format(prefix=option_type.prefix, key=key)
+        key_with_prefix = "{prefix}{key}".format(
+            prefix=option_type.prefix, key=key
+        )
         if key not in from_options and key_with_prefix not in from_options:
             _options[key] = ""
         elif key in from_options:
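This hunk stops before the remaining branches of the lookup; the resolution presumably prefers the bare key, then the prefixed key, then an empty string. A hypothetical usage sketch (`_Opts` is a made-up stand-in for WebDAVSettings/ProxySettings; only `.prefix` and `.keys` are used):

```python
class _Opts:  # hypothetical option type
    prefix = "webdav_"
    keys = {"hostname", "login", "password"}

opts = get_options(
    option_type=_Opts,
    from_options={"webdav_hostname": "https://example.com", "login": "alice"},
)
assert opts == {
    "hostname": "https://example.com",
    "login": "alice",
    "password": "",
}
```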
@@ -76,14 +82,16 @@ def wrap_connection_error(fn):
             raise ConnectionException(re)
         else:
             return res
+
     return _wrapper
 
 
 class Client(object):
-    """The client for WebDAV servers provides an ability to control files on remote WebDAV server.
-    """
+    """The client for WebDAV servers provides an ability to control files on
+    remote WebDAV server."""
+
     # path to root directory of WebDAV
-    root = '/'
+    root = "/"
 
     # Max size of file for uploading
     large_size = 2 * 1024 * 1024 * 1024
@@ -93,16 +101,24 @@ class Client(object):
 
     # HTTP headers for different actions
     http_header = {
-        'list': ["Accept: */*", "Depth: 1"],
-        'free': ["Accept: */*", "Depth: 0", "Content-Type: text/xml"],
-        'copy': ["Accept: */*"],
-        'move': ["Accept: */*"],
-        'mkdir': ["Accept: */*", "Connection: Keep-Alive"],
-        'clean': ["Accept: */*", "Connection: Keep-Alive"],
-        'check': ["Accept: */*"],
-        'info': ["Accept: */*", "Depth: 1"],
-        'get_property': ["Accept: */*", "Depth: 1", "Content-Type: application/x-www-form-urlencoded"],
-        'set_property': ["Accept: */*", "Depth: 1", "Content-Type: application/x-www-form-urlencoded"]
+        "list": ["Accept: */*", "Depth: 1"],
+        "free": ["Accept: */*", "Depth: 0", "Content-Type: text/xml"],
+        "copy": ["Accept: */*"],
+        "move": ["Accept: */*"],
+        "mkdir": ["Accept: */*", "Connection: Keep-Alive"],
+        "clean": ["Accept: */*", "Connection: Keep-Alive"],
+        "check": ["Accept: */*"],
+        "info": ["Accept: */*", "Depth: 1"],
+        "get_property": [
+            "Accept: */*",
+            "Depth: 1",
+            "Content-Type: application/x-www-form-urlencoded",
+        ],
+        "set_property": [
+            "Accept: */*",
+            "Depth: 1",
+            "Content-Type: application/x-www-form-urlencoded",
+        ],
     }
 
     def get_headers(self, action, headers_ext=None):
@@ -125,9 +141,11 @@ class Client(object):
             headers.extend(headers_ext)
 
         if self.webdav.token:
-            webdav_token = "Authorization: OAuth {token}".format(token=self.webdav.token)
+            webdav_token = "Authorization: OAuth {token}".format(
+                token=self.webdav.token
+            )
             headers.append(webdav_token)
-        return dict([map(lambda s: s.strip(), i.split(':')) for i in headers])
+        return dict([map(lambda s: s.strip(), i.split(":")) for i in headers])
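The reformatted return above is still the same one-liner: each "Name: value" string is split on ":" and both halves are stripped. A small illustration (note that a value containing a second ":" would break the dict() call, since each entry must yield exactly two items):

```python
headers = ["Accept: */*", "Depth: 1"]
parsed = dict([map(lambda s: s.strip(), i.split(":")) for i in headers])
assert parsed == {"Accept": "*/*", "Depth": "1"}
```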
 
     def get_url(self, path):
         """Generates url by uri path.
@@ -135,7 +153,11 @@ class Client(object):
         :param path: uri path.
         :return: the url string.
         """
-        url = {'hostname': self.webdav.hostname, 'root': self.webdav.root, 'path': path}
+        url = {
+            "hostname": self.webdav.hostname,
+            "root": self.webdav.root,
+            "path": path,
+        }
         return "{hostname}{root}{path}".format(**url)
 
     def get_full_path(self, urn):
@@ -147,7 +169,8 @@ class Client(object):
         return "{root}{path}".format(root=self.webdav.root, path=urn.path())
 
     def execute_request(self, action, path, data=None, headers_ext=None):
-        """Generate request to WebDAV server for specified action and path and execute it.
+        """Generate request to WebDAV server for specified action and path and
+        execute it.
 
         :param action: the action for WebDAV server which should be executed.
         :param path: the path to resource for action
@@ -163,39 +186,41 @@ class Client(object):
             auth=(self.webdav.login, self.webdav.password),
             headers=self.get_headers(action, headers_ext),
             timeout=self.timeout,
-            data=data
+            data=data,
         )
         if response.status_code == 507:
             raise NotEnoughSpace()
         if response.status_code >= 400:
-            raise ResponseErrorCode(url=self.get_url(path), code=response.status_code, message=response.content)
+            raise ResponseErrorCode(
+                url=self.get_url(path),
+                code=response.status_code,
+                message=response.content,
+            )
         return response
 
     # mapping of actions to WebDAV methods
     requests = {
-        'download': "GET",
-        'upload': "PUT",
-        'copy': "COPY",
-        'move': "MOVE",
-        'mkdir': "MKCOL",
-        'clean': "DELETE",
-        'check': "HEAD",
-        'list': "PROPFIND",
-        'free': "PROPFIND",
-        'info': "PROPFIND",
-        'publish': "PROPPATCH",
-        'unpublish': "PROPPATCH",
-        'published': "PROPPATCH",
-        'get_property': "PROPFIND",
-        'set_property': "PROPPATCH"
+        "download": "GET",
+        "upload": "PUT",
+        "copy": "COPY",
+        "move": "MOVE",
+        "mkdir": "MKCOL",
+        "clean": "DELETE",
+        "check": "HEAD",
+        "list": "PROPFIND",
+        "free": "PROPFIND",
+        "info": "PROPFIND",
+        "publish": "PROPPATCH",
+        "unpublish": "PROPPATCH",
+        "published": "PROPPATCH",
+        "get_property": "PROPFIND",
+        "set_property": "PROPPATCH",
     }
 
-    meta_xmlns = {
-        'https://webdav.yandex.ru': "urn:yandex:disk:meta",
-    }
+    meta_xmlns = {"https://webdav.yandex.ru": "urn:yandex:disk:meta"}
 
     def __init__(self, options):
-        """Constructor of WebDAV client
+        """Constructor of WebDAV client.
 
         :param options: the dictionary of connection options for WebDAV; may include proxy server options.
             WebDAV settings:
@@ -218,8 +243,12 @@ class Client(object):
              `proxy_login`: login name for proxy server.
              `proxy_password`: password for proxy server.
         """
-        webdav_options = get_options(option_type=WebDAVSettings, from_options=options)
-        proxy_options = get_options(option_type=ProxySettings, from_options=options)
+        webdav_options = get_options(
+            option_type=WebDAVSettings, from_options=options
+        )
+        proxy_options = get_options(
+            option_type=ProxySettings, from_options=options
+        )
 
         self.webdav = WebDAVSettings(webdav_options)
         self.proxy = ProxySettings(proxy_options)
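A minimal, hypothetical construction of the client, using the `webdav_` prefix that WebDAVSettings expects (hostname, credentials and paths are placeholders):

```python
options = {
    "webdav_hostname": "https://webdav.example.com",
    "webdav_login": "alice",
    "webdav_password": "secret",
}
client = Client(options)
if client.check("/reports/"):
    print(client.list("/reports/"))
```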
@@ -234,8 +263,9 @@ class Client(object):
 
     @wrap_connection_error
     def list(self, remote_path=root):
-        """Returns list of nested files and directories for remote WebDAV directory by path.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND
+        """Returns list of nested files and directories for remote WebDAV
+        directory by path. More information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND.
 
         :param remote_path: path to remote directory.
         :return: list of nested file or directory names.
@@ -245,34 +275,44 @@ class Client(object):
             if not self.check(directory_urn.path()):
                 raise RemoteResourceNotFound(directory_urn.path())
 
-        response = self.execute_request(action='list', path=directory_urn.quote())
+        response = self.execute_request(
+            action="list", path=directory_urn.quote()
+        )
         urns = WebDavXmlUtils.parse_get_list_response(response.content)
 
         path = Urn.normalize_path(self.get_full_path(directory_urn))
-        return [urn.filename() for urn in urns if Urn.compare_path(path, urn.path()) is False]
+        return [
+            urn.filename()
+            for urn in urns
+            if Urn.compare_path(path, urn.path()) is False
+        ]
 
     @wrap_connection_error
     def free(self):
-        """Returns an amount of free space on remote WebDAV server.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND
+        """Returns an amount of free space on remote WebDAV server. More
+        information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND.
 
         :return: the amount of free space in bytes.
         """
         data = WebDavXmlUtils.create_free_space_request_content()
-        response = self.execute_request(action='free', path='', data=data)
-        return WebDavXmlUtils.parse_free_space_response(response.content, self.webdav.hostname)
+        response = self.execute_request(action="free", path="", data=data)
+        return WebDavXmlUtils.parse_free_space_response(
+            response.content, self.webdav.hostname
+        )
 
     @wrap_connection_error
     def check(self, remote_path=root):
-        """Checks an existence of remote resource on WebDAV server by remote path.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#rfc.section.9.4
+        """Checks an existence of remote resource on WebDAV server by remote
+        path. More information you can find by link
+        http://webdav.org/specs/rfc4918.html#rfc.section.9.4.
 
         :param remote_path: (optional) path to resource on WebDAV server. Default is the root directory of WebDAV.
         :return: True if the resource exists, False otherwise
         """
         urn = Urn(remote_path)
         try:
-            response = self.execute_request(action='check', path=urn.quote())
+            response = self.execute_request(action="check", path=urn.quote())
         except ResponseErrorCode:
             return False
 
@@ -282,18 +322,19 @@ class Client(object):
 
     @wrap_connection_error
     def mkdir(self, remote_path):
-        """Makes new directory on WebDAV server.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_MKCOL
+        """Makes new directory on WebDAV server. More information you can find
+        by link http://webdav.org/specs/rfc4918.html#METHOD_MKCOL.
 
         :param remote_path: path to directory
         :return: True if the request returned code 200 or 201, False otherwise.
-
         """
         directory_urn = Urn(remote_path, directory=True)
         if not self.check(directory_urn.parent()):
             raise RemoteParentNotFound(directory_urn.path())
 
-        response = self.execute_request(action='mkdir', path=directory_urn.quote())
+        response = self.execute_request(
+            action="mkdir", path=directory_urn.quote()
+        )
         return response.status_code in (200, 201)
 
     @wrap_connection_error
@@ -310,12 +351,13 @@ class Client(object):
         if not self.check(urn.path()):
             raise RemoteResourceNotFound(urn.path())
 
-        response = self.execute_request(action='download', path=urn.quote())
+        response = self.execute_request(action="download", path=urn.quote())
         buff.write(response.content)
 
     def download(self, remote_path, local_path, progress=None):
         """Downloads remote resource from WebDAV and save it in local path.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#rfc.section.9.4
+        More information can be found at
+        http://webdav.org/specs/rfc4918.html#rfc.section.9.4.
 
         :param remote_path: the path to the remote resource for downloading; can be a file or a directory.
         :param local_path: the path to save resource locally.
@@ -323,13 +365,22 @@ class Client(object):
         """
         urn = Urn(remote_path)
         if self.is_dir(urn.path()):
-            self.download_directory(local_path=local_path, remote_path=remote_path, progress=progress)
+            self.download_directory(
+                local_path=local_path,
+                remote_path=remote_path,
+                progress=progress,
+            )
         else:
-            self.download_file(local_path=local_path, remote_path=remote_path, progress=progress)
+            self.download_file(
+                local_path=local_path,
+                remote_path=remote_path,
+                progress=progress,
+            )
 
     def download_directory(self, remote_path, local_path, progress=None):
-        """Downloads directory and downloads all nested files and directories from remote WebDAV to local.
-        If there is something on local path it deletes directories and files then creates new.
+        """Downloads directory and downloads all nested files and directories
+        from remote WebDAV to local. If there is something on local path it
+        deletes directories and files then creates new.
 
         :param remote_path: the path to directory for downloading from the WebDAV server.
         :param local_path: the path to local directory for saving downloaded files and directories.
@@ -345,14 +396,21 @@ class Client(object):
         os.makedirs(local_path)
 
         for resource_name in self.list(urn.path()):
-            _remote_path = "{parent}{name}".format(parent=urn.path(), name=resource_name)
+            _remote_path = "{parent}{name}".format(
+                parent=urn.path(), name=resource_name
+            )
             _local_path = os.path.join(local_path, resource_name)
-            self.download(local_path=_local_path, remote_path=_remote_path, progress=progress)
+            self.download(
+                local_path=_local_path,
+                remote_path=_remote_path,
+                progress=progress,
+            )
 
     @wrap_connection_error
     def download_file(self, remote_path, local_path, progress=None):
-        """Downloads file from WebDAV server and save it locally.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#rfc.section.9.4
+        """Downloads file from WebDAV server and save it locally. More
+        information you can find by link
+        http://webdav.org/specs/rfc4918.html#rfc.section.9.4.
 
         :param remote_path: the path to remote file for downloading.
         :param local_path: the path to save file locally.
@@ -368,8 +426,8 @@ class Client(object):
         if not self.check(urn.path()):
             raise RemoteResourceNotFound(urn.path())
 
-        with open(local_path, 'wb') as local_file:
-            response = self.execute_request('download', urn.quote())
+        with open(local_path, "wb") as local_file:
+            response = self.execute_request("download", urn.quote())
             for block in response.iter_content(1024):
                 local_file.write(block)
 
@@ -385,19 +443,22 @@ class Client(object):
             callback()
 
     def download_async(self, remote_path, local_path, callback=None):
-        """Downloads remote resources from WebDAV server asynchronously
+        """Downloads remote resources from WebDAV server asynchronously.
 
         :param remote_path: the path to the remote resource on the WebDAV server. Can be a file or a directory.
         :param local_path: the path to save resource locally.
         :param callback: the callback which will be invoked when downloading is complete.
         """
-        target = (lambda: self.download_sync(local_path=local_path, remote_path=remote_path, callback=callback))
+        target = lambda: self.download_sync(
+            local_path=local_path, remote_path=remote_path, callback=callback
+        )
         threading.Thread(target=target).start()
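Hypothetical usage of the asynchronous variant above, continuing the earlier placeholder client (paths are placeholders); the callback runs on the worker thread once the transfer completes:

```python
client.download_async(
    remote_path="/reports/summary.pdf",
    local_path="/tmp/summary.pdf",
    callback=lambda: print("download finished"),
)
```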
 
     @wrap_connection_error
     def upload_to(self, buff, remote_path):
-        """Uploads file from buffer to remote path on WebDAV server.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PUT
+        """Uploads file from buffer to remote path on WebDAV server. More
+        information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_PUT.
 
         :param buff: the buffer with content for file.
         :param remote_path: the path to save file remotely on WebDAV server.
@@ -409,26 +470,31 @@ class Client(object):
         if not self.check(urn.parent()):
             raise RemoteParentNotFound(urn.path())
 
-        self.execute_request(action='upload', path=urn.quote(), data=buff)
+        self.execute_request(action="upload", path=urn.quote(), data=buff)
 
     def upload(self, remote_path, local_path, progress=None):
-        """Uploads resource to remote path on WebDAV server.
-        In case resource is directory it will upload all nested files and directories.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PUT
+        """Uploads resource to remote path on WebDAV server. In case resource
+        is directory it will upload all nested files and directories. More
+        information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_PUT.
 
         :param remote_path: the path for uploading resources on the WebDAV server. Can be a file or a directory.
         :param local_path: the path to local resource for uploading.
         :param progress: Progress function. Not supported now.
         """
         if os.path.isdir(local_path):
-            self.upload_directory(local_path=local_path, remote_path=remote_path, progress=progress)
+            self.upload_directory(
+                local_path=local_path,
+                remote_path=remote_path,
+                progress=progress,
+            )
         else:
             self.upload_file(local_path=local_path, remote_path=remote_path)
 
     def upload_directory(self, remote_path, local_path, progress=None):
-        """Uploads directory to remote path on WebDAV server.
-        In case directory is exist on remote server it will delete it and then upload directory with nested files and
-        directories.
+        """Uploads directory to remote path on WebDAV server. In case directory
+        is exist on remote server it will delete it and then upload directory
+        with nested files and directories.
 
         :param remote_path: the path to directory for uploading on WebDAV server.
         :param local_path: the path to local directory for uploading.
@@ -450,14 +516,21 @@ class Client(object):
         self.mkdir(remote_path)
 
         for resource_name in listdir(local_path):
-            _remote_path = "{parent}{name}".format(parent=urn.path(), name=resource_name)
+            _remote_path = "{parent}{name}".format(
+                parent=urn.path(), name=resource_name
+            )
             _local_path = os.path.join(local_path, resource_name)
-            self.upload(local_path=_local_path, remote_path=_remote_path, progress=progress)
+            self.upload(
+                local_path=_local_path,
+                remote_path=_remote_path,
+                progress=progress,
+            )
 
     @wrap_connection_error
     def upload_file(self, remote_path, local_path, progress=None):
-        """Uploads file to remote path on WebDAV server. File should be 2Gb or less.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PUT
+        """Uploads file to remote path on WebDAV server. File should be 2Gb or
+        less. More information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_PUT.
 
         :param remote_path: the path to uploading file on WebDAV server.
         :param local_path: the path to local file for uploading.
@@ -479,13 +552,18 @@ class Client(object):
         with open(local_path, "rb") as local_file:
             file_size = os.path.getsize(local_path)
             if file_size > self.large_size:
-                raise ResourceTooBig(path=local_path, size=file_size, max_size=self.large_size)
+                raise ResourceTooBig(
+                    path=local_path, size=file_size, max_size=self.large_size
+                )
 
-            self.execute_request(action='upload', path=urn.quote(), data=local_file)
+            self.execute_request(
+                action="upload", path=urn.quote(), data=local_file
+            )
 
     def upload_sync(self, remote_path, local_path, callback=None):
-        """Uploads resource to remote path on WebDAV server synchronously.
-        In case resource is directory it will upload all nested files and directories.
+        """Uploads resource to remote path on WebDAV server synchronously. In
+        case resource is directory it will upload all nested files and
+        directories.
 
         :param remote_path: the path for uploading resources on the WebDAV server. Can be a file or a directory.
         :param local_path: the path to local resource for uploading.
@@ -497,20 +575,24 @@ class Client(object):
             callback()
 
     def upload_async(self, remote_path, local_path, callback=None):
-        """Uploads resource to remote path on WebDAV server asynchronously.
-        In case resource is directory it will upload all nested files and directories.
+        """Uploads resource to remote path on WebDAV server asynchronously. In
+        case resource is directory it will upload all nested files and
+        directories.
 
         :param remote_path: the path for uploading resources on the WebDAV server. Can be a file or a directory.
         :param local_path: the path to local resource for uploading.
         :param callback: the callback which will be invoked when uploading is complete.
         """
-        target = (lambda: self.upload_sync(local_path=local_path, remote_path=remote_path, callback=callback))
+        target = lambda: self.upload_sync(
+            local_path=local_path, remote_path=remote_path, callback=callback
+        )
         threading.Thread(target=target).start()
 
     @wrap_connection_error
     def copy(self, remote_path_from, remote_path_to):
-        """Copies resource from one place to another on WebDAV server.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_COPY
+        """Copies resource from one place to another on WebDAV server. More
+        information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_COPY.
 
         :param remote_path_from: the path to resource which will be copied,
         :param remote_path_to: the path where resource will be copied.
@@ -523,13 +605,20 @@ class Client(object):
         if not self.check(urn_to.parent()):
             raise RemoteParentNotFound(urn_to.path())
 
-        header_destination = "Destination: {path}".format(path=self.get_full_path(urn_to))
-        self.execute_request(action='copy', path=urn_from.quote(), headers_ext=[header_destination])
+        header_destination = "Destination: {path}".format(
+            path=self.get_full_path(urn_to)
+        )
+        self.execute_request(
+            action="copy",
+            path=urn_from.quote(),
+            headers_ext=[header_destination],
+        )
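Both copy() above and move() below drive the server purely through extension headers; a sketch of the strings they build (the destination path is a placeholder for what get_full_path() returns):

```python
overwrite = False
header_destination = "Destination: {path}".format(path="/files/new.txt")
header_overwrite = "Overwrite: {flag}".format(flag="T" if overwrite else "F")
assert header_destination == "Destination: /files/new.txt"
assert header_overwrite == "Overwrite: F"
```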
 
     @wrap_connection_error
     def move(self, remote_path_from, remote_path_to, overwrite=False):
-        """Moves resource from one place to another on WebDAV server.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_MOVE
+        """Moves resource from one place to another on WebDAV server. More
+        information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_MOVE.
 
         :param remote_path_from: the path to resource which will be moved,
         :param remote_path_to: the path where resource will be moved.
@@ -543,25 +632,34 @@ class Client(object):
         if not self.check(urn_to.parent()):
             raise RemoteParentNotFound(urn_to.path())
 
-        header_destination = "Destination: {path}".format(path=self.get_full_path(urn_to))
-        header_overwrite = "Overwrite: {flag}".format(flag="T" if overwrite else "F")
-        self.execute_request(action='move', path=urn_from.quote(), headers_ext=[header_destination, header_overwrite])
+        header_destination = "Destination: {path}".format(
+            path=self.get_full_path(urn_to)
+        )
+        header_overwrite = "Overwrite: {flag}".format(
+            flag="T" if overwrite else "F"
+        )
+        self.execute_request(
+            action="move",
+            path=urn_from.quote(),
+            headers_ext=[header_destination, header_overwrite],
+        )
 
     @wrap_connection_error
     def clean(self, remote_path):
-        """Cleans (Deletes) a remote resource on WebDAV server. The name of method is not changed for back compatibility
-        with original library.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_DELETE
+        """Cleans (Deletes) a remote resource on WebDAV server. The name of
+        method is not changed for back compatibility with original library.
+        More information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_DELETE.
 
         :param remote_path: the remote resource which will be deleted.
         """
         urn = Urn(remote_path)
-        self.execute_request(action='clean', path=urn.quote())
+        self.execute_request(action="clean", path=urn.quote())
 
     @wrap_connection_error
     def info(self, remote_path):
-        """Gets information about resource on WebDAV.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND
+        """Gets information about resource on WebDAV. More information you can
+        find by link http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND.
 
         :param remote_path: the path to remote resource.
         :return: a dictionary of information attributes and their values with the following keys:
@@ -571,34 +669,43 @@ class Client(object):
                  `modified`: date of resource modification.
         """
         urn = Urn(remote_path)
-        if not self.check(urn.path()) and not self.check(Urn(remote_path, directory=True).path()):
+        if not self.check(urn.path()) and not self.check(
+            Urn(remote_path, directory=True).path()
+        ):
             raise RemoteResourceNotFound(remote_path)
 
-        response = self.execute_request(action='info', path=urn.quote())
+        response = self.execute_request(action="info", path=urn.quote())
         path = self.get_full_path(urn)
-        return WebDavXmlUtils.parse_info_response(content=response.content, path=path, hostname=self.webdav.hostname)
+        return WebDavXmlUtils.parse_info_response(
+            content=response.content, path=path, hostname=self.webdav.hostname
+        )
 
     @wrap_connection_error
     def is_dir(self, remote_path):
-        """Checks is the remote resource directory.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND
+        """Checks is the remote resource directory. More information you can
+        find by link http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND.
 
         :param remote_path: the path to remote resource.
         :return: True in case the remote resource is directory and False otherwise.
         """
         urn = Urn(remote_path)
         parent_urn = Urn(urn.parent())
-        if not self.check(urn.path()) and not self.check(Urn(remote_path, directory=True).path()):
+        if not self.check(urn.path()) and not self.check(
+            Urn(remote_path, directory=True).path()
+        ):
             raise RemoteResourceNotFound(remote_path)
 
-        response = self.execute_request(action='info', path=parent_urn.quote())
+        response = self.execute_request(action="info", path=parent_urn.quote())
         path = self.get_full_path(urn)
-        return WebDavXmlUtils.parse_is_dir_response(content=response.content, path=path, hostname=self.webdav.hostname)
+        return WebDavXmlUtils.parse_is_dir_response(
+            content=response.content, path=path, hostname=self.webdav.hostname
+        )
 
     @wrap_connection_error
     def get_property(self, remote_path, option):
-        """Gets metadata property of remote resource on WebDAV server.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND
+        """Gets metadata property of remote resource on WebDAV server. More
+        information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_PROPFIND.
 
         :param remote_path: the path to remote resource.
         :param option: the property attribute as dictionary with following keys:
@@ -611,13 +718,18 @@ class Client(object):
             raise RemoteResourceNotFound(urn.path())
 
         data = WebDavXmlUtils.create_get_property_request_content(option)
-        response = self.execute_request(action='get_property', path=urn.quote(), data=data)
-        return WebDavXmlUtils.parse_get_property_response(response.content, option['name'])
+        response = self.execute_request(
+            action="get_property", path=urn.quote(), data=data
+        )
+        return WebDavXmlUtils.parse_get_property_response(
+            response.content, option["name"]
+        )
 
     @wrap_connection_error
     def set_property(self, remote_path, option):
-        """Sets metadata property of remote resource on WebDAV server.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PROPPATCH
+        """Sets metadata property of remote resource on WebDAV server. More
+        information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_PROPPATCH.
 
         :param remote_path: the path to remote resource.
         :param option: the property attribute as dictionary with following keys:
@@ -629,8 +741,9 @@ class Client(object):
 
     @wrap_connection_error
     def set_property_batch(self, remote_path, option):
-        """Sets batch metadata properties of remote resource on WebDAV server in batch.
-        More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PROPPATCH
+        """Sets batch metadata properties of remote resource on WebDAV server
+        in batch. More information you can find by link
+        http://webdav.org/specs/rfc4918.html#METHOD_PROPPATCH.
 
         :param remote_path: the path to remote resource.
         :param option: the property attributes as list of dictionaries with following keys:
@@ -643,14 +756,13 @@ class Client(object):
             raise RemoteResourceNotFound(urn.path())
 
         data = WebDavXmlUtils.create_set_property_batch_request_content(option)
-        self.execute_request(action='set_property', path=urn.quote(), data=data)
+        self.execute_request(action="set_property", path=urn.quote(), data=data)
 
     def resource(self, remote_path):
         urn = Urn(remote_path)
         return Resource(self, urn)
 
     def push(self, remote_directory, local_directory):
-
         def prune(src, exp):
             return [sub(exp, "", item) for item in src]
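The prune() helper is re.sub applied item-wise; for a hypothetical trailing-separator expression it strips the directory marker that listdir() appends:

```python
from re import sub

def prune(src, exp):
    return [sub(exp, "", item) for item in src]

assert prune(["docs/", "README.md"], r"/$") == ["docs", "README.md"]
```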
 
@@ -672,20 +784,22 @@ class Client(object):
         for local_resource_name in listdir(local_directory):
 
             local_path = os.path.join(local_directory, local_resource_name)
-            remote_path = "{remote_directory}{resource_name}".format(remote_directory=urn.path(),
-                                                                     resource_name=local_resource_name)
+            remote_path = "{remote_directory}{resource_name}".format(
+                remote_directory=urn.path(), resource_name=local_resource_name
+            )
 
             if os.path.isdir(local_path):
                 if not self.check(remote_path=remote_path):
                     self.mkdir(remote_path=remote_path)
-                self.push(remote_directory=remote_path, local_directory=local_path)
+                self.push(
+                    remote_directory=remote_path, local_directory=local_path
+                )
             else:
                 if local_resource_name in remote_resource_names:
                     continue
                 self.upload_file(remote_path=remote_path, local_path=local_path)
 
     def pull(self, remote_directory, local_directory):
-
         def prune(src, exp):
             return [sub(exp, "", item) for item in src]
 
@@ -706,24 +820,33 @@ class Client(object):
         for remote_resource_name in remote_resource_names:
 
             local_path = os.path.join(local_directory, remote_resource_name)
-            remote_path = "{remote_directory}{resource_name}".format(remote_directory=urn.path(),
-                                                                     resource_name=remote_resource_name)
+            remote_path = "{remote_directory}{resource_name}".format(
+                remote_directory=urn.path(), resource_name=remote_resource_name
+            )
 
             remote_urn = Urn(remote_path)
 
             if self.is_dir(remote_urn.path()):
                 if not os.path.exists(local_path):
                     os.mkdir(local_path)
-                self.pull(remote_directory=remote_path, local_directory=local_path)
+                self.pull(
+                    remote_directory=remote_path, local_directory=local_path
+                )
             else:
                 if remote_resource_name in local_resource_names:
                     continue
-                self.download_file(remote_path=remote_path, local_path=local_path)
+                self.download_file(
+                    remote_path=remote_path, local_path=local_path
+                )
 
     def sync(self, remote_directory, local_directory):
 
-        self.pull(remote_directory=remote_directory, local_directory=local_directory)
-        self.push(remote_directory=remote_directory, local_directory=local_directory)
+        self.pull(
+            remote_directory=remote_directory, local_directory=local_directory
+        )
+        self.push(
+            remote_directory=remote_directory, local_directory=local_directory
+        )
 
 
 class Resource(object):
@@ -741,19 +864,25 @@ class Resource(object):
         old_path = self.urn.path()
         parent_path = self.urn.parent()
         new_name = Urn(new_name).filename()
-        new_path = "{directory}{filename}".format(directory=parent_path, filename=new_name)
+        new_path = "{directory}{filename}".format(
+            directory=parent_path, filename=new_name
+        )
 
         self.client.move(remote_path_from=old_path, remote_path_to=new_path)
         self.urn = Urn(new_path)
 
     def move(self, remote_path):
         new_urn = Urn(remote_path)
-        self.client.move(remote_path_from=self.urn.path(), remote_path_to=new_urn.path())
+        self.client.move(
+            remote_path_from=self.urn.path(), remote_path_to=new_urn.path()
+        )
         self.urn = new_urn
 
     def copy(self, remote_path):
         urn = Urn(remote_path)
-        self.client.copy(remote_path_from=self.urn.path(), remote_path_to=remote_path)
+        self.client.copy(
+            remote_path_from=self.urn.path(), remote_path_to=remote_path
+        )
         return Resource(self.client, urn)
 
     def info(self, params=None):
@@ -773,19 +902,31 @@ class Resource(object):
         self.client.upload_to(buff=buff, remote_path=self.urn.path())
 
     def read(self, local_path):
-        return self.client.upload_sync(local_path=local_path, remote_path=self.urn.path())
+        return self.client.upload_sync(
+            local_path=local_path, remote_path=self.urn.path()
+        )
 
     def read_async(self, local_path, callback=None):
-        return self.client.upload_async(local_path=local_path, remote_path=self.urn.path(), callback=callback)
+        return self.client.upload_async(
+            local_path=local_path,
+            remote_path=self.urn.path(),
+            callback=callback,
+        )
 
     def write_to(self, buff):
         return self.client.download_from(buff=buff, remote_path=self.urn.path())
 
     def write(self, local_path):
-        return self.client.download_sync(local_path=local_path, remote_path=self.urn.path())
+        return self.client.download_sync(
+            local_path=local_path, remote_path=self.urn.path()
+        )
 
     def write_async(self, local_path, callback=None):
-        return self.client.download_async(local_path=local_path, remote_path=self.urn.path(), callback=callback)
+        return self.client.download_async(
+            local_path=local_path,
+            remote_path=self.urn.path(),
+            callback=callback,
+        )
 
     def publish(self):
         return self.client.publish(self.urn.path())
@@ -795,11 +936,13 @@ class Resource(object):
 
     @property
     def property(self, option):
-        return self.client.get_property(remote_path=self.urn.path(), option=option)
+        return self.client.get_property(
+            remote_path=self.urn.path(), option=option
+        )
 
     @property.setter
     def property(self, option, value):
-        option['value'] = value.__str__()
+        option["value"] = value.__str__()
         self.client.set_property(remote_path=self.urn.path(), option=option)
 
 
@@ -809,14 +952,18 @@ class WebDavXmlUtils:
 
     @staticmethod
     def parse_get_list_response(content):
-        """Parses of response content XML from WebDAV server and extract file and directory names.
+        """Parses of response content XML from WebDAV server and extract file
+        and directory names.
 
         :param content: the XML content of HTTP response from WebDAV server for getting list of files by remote path.
         :return: list of extracted file or directory names.
         """
         try:
             tree = etree.fromstring(content)
-            hrees = [Urn.separate + unquote(urlsplit(hree.text).path) for hree in tree.findall(".//{DAV:}href")]
+            hrees = [
+                Urn.separate + unquote(urlsplit(hree.text).path)
+                for hree in tree.findall(".//{DAV:}href")
+            ]
             return [Urn(hree) for hree in hrees]
         except etree.XMLSyntaxError:
             return list()
@@ -836,7 +983,8 @@ class WebDavXmlUtils:
 
     @staticmethod
     def parse_free_space_response(content, hostname):
-        """Parses of response content XML from WebDAV server and extract an amount of free space.
+        """Parses of response content XML from WebDAV server and extract an
+        amount of free space.
 
         :param content: the XML content of HTTP response from WebDAV server for getting free space.
         :param hostname: the server hostname.
@@ -844,19 +992,20 @@ class WebDavXmlUtils:
         """
         try:
             tree = etree.fromstring(content)
-            node = tree.find('.//{DAV:}quota-available-bytes')
+            node = tree.find(".//{DAV:}quota-available-bytes")
             if node is not None:
                 return int(node.text)
             else:
-                raise MethodNotSupported(name='free', server=hostname)
+                raise MethodNotSupported(name="free", server=hostname)
         except TypeError:
-            raise MethodNotSupported(name='free', server=hostname)
+            raise MethodNotSupported(name="free", server=hostname)
         except etree.XMLSyntaxError:
             return str()
 
     @staticmethod
     def parse_info_response(content, path, hostname):
-        """Parses of response content XML from WebDAV server and extract an information about resource.
+        """Parses of response content XML from WebDAV server and extract an
+        information about resource.
 
         :param content: the XML content of HTTP response from WebDAV server.
         :param path: the path to resource.
@@ -867,12 +1016,14 @@ class WebDavXmlUtils:
                  `size`: size of resource,
                  `modified`: date of resource modification.
         """
-        response = WebDavXmlUtils.extract_response_for_path(content=content, path=path, hostname=hostname)
+        response = WebDavXmlUtils.extract_response_for_path(
+            content=content, path=path, hostname=hostname
+        )
         find_attributes = {
-            'created': ".//{DAV:}creationdate",
-            'name': ".//{DAV:}displayname",
-            'size': ".//{DAV:}getcontentlength",
-            'modified': ".//{DAV:}getlastmodified"
+            "created": ".//{DAV:}creationdate",
+            "name": ".//{DAV:}displayname",
+            "size": ".//{DAV:}getcontentlength",
+            "modified": ".//{DAV:}getlastmodified",
         }
         info = dict()
         for (name, value) in find_attributes.items():
@@ -881,14 +1032,17 @@ class WebDavXmlUtils:
 
     @staticmethod
     def parse_is_dir_response(content, path, hostname):
-        """Parses of response content XML from WebDAV server and extract an information about resource.
+        """Parses of response content XML from WebDAV server and extract an
+        information about resource.
 
         :param content: the XML content of HTTP response from WebDAV server.
         :param path: the path to resource.
         :param hostname: the server hostname.
         :return: True in case the remote resource is directory and False otherwise.
         """
-        response = WebDavXmlUtils.extract_response_for_path(content=content, path=path, hostname=hostname)
+        response = WebDavXmlUtils.extract_response_for_path(
+            content=content, path=path, hostname=hostname
+        )
         resource_type = response.find(".//{DAV:}resourcetype")
         if resource_type is None:
             raise MethodNotSupported(name="is_dir", server=hostname)
@@ -898,7 +1052,8 @@ class WebDavXmlUtils:
 
     @staticmethod
     def create_get_property_request_content(option):
-        """Creates an XML for requesting of getting a property value of remote WebDAV resource.
+        """Creates an XML for requesting of getting a property value of remote
+        WebDAV resource.
 
         :param option: the property attributes as dictionary with following keys:
                        `namespace`: (optional) the namespace for XML property which will be get,
@@ -907,24 +1062,28 @@ class WebDavXmlUtils:
         """
         root = etree.Element("propfind", xmlns="DAV:")
         prop = etree.SubElement(root, "prop")
-        etree.SubElement(prop, option.get('name', ""), xmlns=option.get('namespace', ""))
+        etree.SubElement(
+            prop, option.get("name", ""), xmlns=option.get("namespace", "")
+        )
         tree = etree.ElementTree(root)
         return WebDavXmlUtils.etree_to_string(tree)
 
     @staticmethod
     def parse_get_property_response(content, name):
-        """Parses of response content XML from WebDAV server for getting metadata property value for some resource.
+        """Parses of response content XML from WebDAV server for getting
+        metadata property value for some resource.
 
         :param content: the XML content of response as string.
         :param name: the name of property for finding a value in response
         :return: the value of property if it has been found or None otherwise.
         """
         tree = etree.fromstring(content)
-        return tree.xpath('//*[local-name() = $name]', name=name)[0].text
+        return tree.xpath("//*[local-name() = $name]", name=name)[0].text
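The xpath call above relies on lxml's XPath variables and local-name() so the property is found regardless of its namespace; a self-contained sketch with made-up content:

```python
from lxml import etree

content = b"""<?xml version="1.0"?>
<multistatus xmlns="DAV:">
  <response><propstat><prop>
    <myprop xmlns="urn:example:meta">42</myprop>
  </prop></propstat></response>
</multistatus>"""
tree = etree.fromstring(content)
assert tree.xpath("//*[local-name() = $name]", name="myprop")[0].text == "42"
```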
 
     @staticmethod
     def create_set_property_batch_request_content(options):
-        """Creates an XML for requesting of setting a property values for remote WebDAV resource in batch.
+        """Creates an XML for requesting of setting a property values for
+        remote WebDAV resource in batch.
 
         :param options: the property attributes as list of dictionaries with following keys:
                        `namespace`: (optional) the namespace for XML property which will be set,
@@ -932,24 +1091,27 @@ class WebDavXmlUtils:
                        `value`: (optional) the value of property which will be set. Defaults is empty string.
         :return: the XML string of request content.
         """
-        root_node = etree.Element('propertyupdate', xmlns='DAV:')
-        set_node = etree.SubElement(root_node, 'set')
-        prop_node = etree.SubElement(set_node, 'prop')
+        root_node = etree.Element("propertyupdate", xmlns="DAV:")
+        set_node = etree.SubElement(root_node, "set")
+        prop_node = etree.SubElement(set_node, "prop")
         for option in options:
-            opt_node = etree.SubElement(prop_node, option['name'], xmlns=option.get('namespace', ''))
-            opt_node.text = option.get('value', '')
+            opt_node = etree.SubElement(
+                prop_node, option["name"], xmlns=option.get("namespace", "")
+            )
+            opt_node.text = option.get("value", "")
         tree = etree.ElementTree(root_node)
         return WebDavXmlUtils.etree_to_string(tree)
 
     @staticmethod
     def etree_to_string(tree):
-        """Creates string from lxml.etree.ElementTree with XML declaration and UTF-8 encoding.
+        """Creates string from lxml.etree.ElementTree with XML declaration and
+        UTF-8 encoding.
 
         :param tree: the instance of ElementTree
         :return: the XML as a byte string.
         """
         buff = BytesIO()
-        tree.write(buff, xml_declaration=True, encoding='UTF-8')
+        tree.write(buff, xml_declaration=True, encoding="UTF-8")
         return buff.getvalue()
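Despite its name, etree_to_string() returns bytes (BytesIO plus tree.write() with an explicit encoding); a minimal round trip:

```python
from io import BytesIO
from lxml import etree

tree = etree.ElementTree(etree.Element("prop"))
buff = BytesIO()
tree.write(buff, xml_declaration=True, encoding="UTF-8")
out = buff.getvalue()
assert isinstance(out, bytes) and out.startswith(b"<?xml")
```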
 
     @staticmethod
diff --git a/bob/devtools/webdav3/connection.py b/bob/devtools/webdav3/connection.py
index 392e5d9d35b5f075d334f723daafe17428f71697..989fb04b1355e0ceae02823bb99167f1b8e1dbac 100644
--- a/bob/devtools/webdav3/connection.py
+++ b/bob/devtools/webdav3/connection.py
@@ -21,8 +21,18 @@ class ConnectionSettings:
 class WebDAVSettings(ConnectionSettings):
     ns = "webdav:"
     prefix = "webdav_"
-    keys = {'hostname', 'login', 'password', 'token', 'root', 'cert_path', 'key_path', 'recv_speed', 'send_speed',
-            'verbose'}
+    keys = {
+        "hostname",
+        "login",
+        "password",
+        "token",
+        "root",
+        "cert_path",
+        "key_path",
+        "recv_speed",
+        "send_speed",
+        "verbose",
+    }
 
     hostname = None
     login = None
@@ -40,26 +50,34 @@ class WebDAVSettings(ConnectionSettings):
         self.options = dict()
 
         for key in self.keys:
-            value = options.get(key, '')
+            value = options.get(key, "")
             self.options[key] = value
             self.__dict__[key] = value
 
-        self.root = Urn(self.root).quote() if self.root else ''
+        self.root = Urn(self.root).quote() if self.root else ""
         self.root = self.root.rstrip(Urn.separate)
 
     def is_valid(self):
 
         if not self.hostname:
-            raise OptionNotValid(name="hostname", value=self.hostname, ns=self.ns)
+            raise OptionNotValid(
+                name="hostname", value=self.hostname, ns=self.ns
+            )
 
         if self.cert_path and not exists(self.cert_path):
-            raise OptionNotValid(name="cert_path", value=self.cert_path, ns=self.ns)
+            raise OptionNotValid(
+                name="cert_path", value=self.cert_path, ns=self.ns
+            )
 
         if self.key_path and not exists(self.key_path):
-            raise OptionNotValid(name="key_path", value=self.key_path, ns=self.ns)
+            raise OptionNotValid(
+                name="key_path", value=self.key_path, ns=self.ns
+            )
 
         if self.key_path and not self.cert_path:
-            raise OptionNotValid(name="cert_path", value=self.cert_path, ns=self.ns)
+            raise OptionNotValid(
+                name="cert_path", value=self.cert_path, ns=self.ns
+            )
 
         if self.password and not self.login:
             raise OptionNotValid(name="login", value=self.login, ns=self.ns)
@@ -71,7 +89,7 @@ class WebDAVSettings(ConnectionSettings):
 class ProxySettings(ConnectionSettings):
     ns = "proxy:"
     prefix = "proxy_"
-    keys = {'hostname', 'login', 'password'}
+    keys = {"hostname", "login", "password"}
 
     hostname = None
     login = None
@@ -82,7 +100,7 @@ class ProxySettings(ConnectionSettings):
         self.options = dict()
 
         for key in self.keys:
-            value = options.get(key, '')
+            value = options.get(key, "")
             self.options[key] = value
             self.__dict__[key] = value
 
@@ -93,4 +111,6 @@ class ProxySettings(ConnectionSettings):
 
         if self.login or self.password:
             if not self.hostname:
-                raise OptionNotValid(name="hostname", value=self.hostname, ns=self.ns)
+                raise OptionNotValid(
+                    name="hostname", value=self.hostname, ns=self.ns
+                )
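To illustrate the validation behaviour above, a hedged sketch, assuming the settings object is constructed from a plain options dict as the vendored webdav3 client does (the option values are made up): is_valid() raises OptionNotValid as soon as a mandatory or inconsistent option is found, with hostname checked first.

from bob.devtools.webdav3.connection import WebDAVSettings
from bob.devtools.webdav3.exceptions import OptionNotValid

try:
    # No hostname at all: the first check in is_valid() fires.  A key_path
    # without a matching cert_path would be rejected the same way.
    WebDAVSettings({"login": "user", "password": "secret"}).is_valid()
except OptionNotValid as error:
    print(error)
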
diff --git a/bob/devtools/webdav3/exceptions.py b/bob/devtools/webdav3/exceptions.py
index b30abbc62339fc292c393f27124bc4f00bbd2c83..4d546bbec053ae6fd09a5e79f962b6562a23a239 100644
--- a/bob/devtools/webdav3/exceptions.py
+++ b/bob/devtools/webdav3/exceptions.py
@@ -13,8 +13,9 @@ class OptionNotValid(NotValid):
         self.ns = ns
 
     def __str__(self):
-        return "Option ({ns}{name}={value}) have invalid name or value".format(ns=self.ns, name=self.name,
-                                                                               value=self.value)
+        return "Option ({ns}{name}={value}) have invalid name or value".format(
+            ns=self.ns, name=self.name, value=self.value
+        )
 
 
 class CertificateNotValid(NotValid):
@@ -57,9 +58,8 @@ class ResourceTooBig(WebDavException):
 
     def __str__(self):
         return "Resource {path} is too big, it should be less then {max_size} but actually: {size}".format(
-            path=self.path,
-            max_size=self.max_size,
-            size=self.size)
+            path=self.path, max_size=self.max_size, size=self.size
+        )
 
 
 class MethodNotSupported(WebDavException):
@@ -68,7 +68,9 @@ class MethodNotSupported(WebDavException):
         self.server = server
 
     def __str__(self):
-        return "Method {name} not supported for {server}".format(name=self.name, server=self.server)
+        return "Method {name} not supported for {server}".format(
+            name=self.name, server=self.server
+        )
 
 
 class ConnectionException(WebDavException):
@@ -103,8 +105,9 @@ class ResponseErrorCode(WebDavException):
         self.message = message
 
     def __str__(self):
-        return "Request to {url} failed with code {code} and message: {message}".format(url=self.url, code=self.code,
-                                                                                        message=self.message)
+        return "Request to {url} failed with code {code} and message: {message}".format(
+            url=self.url, code=self.code, message=self.message
+        )
 
 
 class NotEnoughSpace(WebDavException):
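A quick hedged check of one of the reformatted messages; the constructor arguments are illustrative and assume MethodNotSupported is built from a name and a server, as the partial hunk above suggests:

from bob.devtools.webdav3.exceptions import MethodNotSupported

error = MethodNotSupported(name="PROPFIND", server="https://example.com")
print(error)  # expected: Method PROPFIND not supported for https://example.com
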
diff --git a/bob/devtools/webdav3/urn.py b/bob/devtools/webdav3/urn.py
index 6279de26c5c26de20f0c51252d6d70d755a3419b..97c1ed68260f52c8d9f774bee5192a68f0b6bc50 100644
--- a/bob/devtools/webdav3/urn.py
+++ b/bob/devtools/webdav3/urn.py
@@ -19,10 +19,14 @@ class Urn(object):
             self._path = sub(expression, Urn.separate, self._path)
 
         if not self._path.startswith(Urn.separate):
-            self._path = "{begin}{end}".format(begin=Urn.separate, end=self._path)
+            self._path = "{begin}{end}".format(
+                begin=Urn.separate, end=self._path
+            )
 
         if directory and not self._path.endswith(Urn.separate):
-            self._path = "{begin}{end}".format(begin=self._path, end=Urn.separate)
+            self._path = "{begin}{end}".format(
+                begin=self._path, end=Urn.separate
+            )
 
     def __str__(self):
         return self.path()
@@ -36,7 +40,11 @@ class Urn(object):
     def filename(self):
 
         path_split = self._path.split(Urn.separate)
-        name = path_split[-2] + Urn.separate if path_split[-1] == '' else path_split[-1]
+        name = (
+            path_split[-2] + Urn.separate
+            if path_split[-1] == ""
+            else path_split[-1]
+        )
         return unquote(name)
 
     def parent(self):
@@ -44,7 +52,11 @@ class Urn(object):
         path_split = self._path.split(Urn.separate)
         nesting_level = self.nesting_level()
         parent_path_split = path_split[:nesting_level]
-        parent = self.separate.join(parent_path_split) if nesting_level != 1 else Urn.separate
+        parent = (
+            self.separate.join(parent_path_split)
+            if nesting_level != 1
+            else Urn.separate
+        )
         if not parent.endswith(Urn.separate):
             return unquote(parent + Urn.separate)
         else:
@@ -58,8 +70,12 @@ class Urn(object):
 
     @staticmethod
     def normalize_path(path):
-        result = sub('/{2,}', '/', path)
-        return result if len(result) < 1 or result[-1] != Urn.separate else result[:-1]
+        result = sub("/{2,}", "/", path)
+        return (
+            result
+            if len(result) < 1 or result[-1] != Urn.separate
+            else result[:-1]
+        )
 
     @staticmethod
     def compare_path(path_a, href):
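The ternary expressions above were only re-wrapped, not changed, so a short sketch of the path handling may help; expected outputs assume the vendored implementation (nesting_level is not shown in this hunk):

from bob.devtools.webdav3.urn import Urn

# Collapse repeated slashes and strip a trailing separator.
print(Urn.normalize_path("/dir//sub///file.txt/"))  # expected: /dir/sub/file.txt

urn = Urn("dir/sub/file.txt")
print(urn.filename())  # expected: file.txt
print(urn.parent())    # expected: /dir/sub/
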
diff --git a/conda/meta.yaml b/conda/meta.yaml
index 0043867e57a867ae40425823a9426a3436a87b74..ab99512be00d7ed3a0960eb5dc494d168638e6af 100644
--- a/conda/meta.yaml
+++ b/conda/meta.yaml
@@ -35,6 +35,7 @@ requirements:
     - conda-build=3.16
     - conda-verify=3
     - certifi
+    - docformatter
     - pytz
     - python-dateutil
     - gitpython
diff --git a/deps/docformatter/meta.yaml b/deps/docformatter/meta.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..388342efc48c8b44db7810b57d94522d35ad1740
--- /dev/null
+++ b/deps/docformatter/meta.yaml
@@ -0,0 +1,45 @@
+{% set version = "1.3" %}
+
+package:
+  name: docformatter
+  version: {{ version }}
+
+source:
+  git_rev: v{{ version }}
+  git_url: https://github.com/myint/docformatter
+
+build:
+  noarch: python
+  number: 0
+  script: "{{ PYTHON }} -m pip install --no-deps --ignore-installed ."
+
+requirements:
+  build:
+    - python
+    - pip
+
+  run:
+    - python
+    - untokenize
+
+test:
+
+  source_files:
+    - test_docformatter.py
+    - docformatter.py
+
+  requires:
+    - nose
+
+  commands:
+    - nosetests -sv test_docformatter.py
+
+about:
+  home: https://github.com/myint/docformatter
+  license: MIT License
+  license_family: MIT
+  summary: 'Formats docstrings to follow PEP 257'
+
+extra:
+  recipe-maintainers:
+    - anjos
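The recipe above packages docformatter so the conda build environment can pull it in. A hedged sketch of the library-level entry point the 1.x series exposes (format_code is the function its own command line drives):

import docformatter

# format_code returns the source with docstrings reflowed toward PEP 257;
# the snippet below is purely illustrative.
source = "def f():\n    '''do the thing'''\n"
print(docformatter.format_code(source))
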
diff --git a/deps/order.txt b/deps/order.txt
new file mode 100644
index 0000000000000000000000000000000000000000..47b9318ff231679f95dbdf85f5c2054a55322cf0
--- /dev/null
+++ b/deps/order.txt
@@ -0,0 +1,3 @@
+python-gitlab
+untokenize
+docformatter
diff --git a/deps/untokenize/meta.yaml b/deps/untokenize/meta.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..6c0768618d0e77a051086f80b9efa4e082c69937
--- /dev/null
+++ b/deps/untokenize/meta.yaml
@@ -0,0 +1,44 @@
+{% set version = "0.1.1" %}
+
+package:
+  name: untokenize
+  version: {{ version }}
+
+source:
+  git_rev: v{{ version }}
+  git_url: https://github.com/myint/untokenize
+
+build:
+  noarch: python
+  number: 0
+  script: "{{ PYTHON }} -m pip install --no-deps --ignore-installed ."
+
+requirements:
+  build:
+    - python
+    - pip
+
+  run:
+    - python
+
+test:
+
+  source_files:
+    - test_acid.py
+    - test_untokenize.py
+
+  requires:
+    - nose
+
+  commands:
+    - nosetests -sv test_acid.py test_untokenize.py
+
+about:
+  home: https://github.com/myint/untokenize
+  license: MIT License
+  license_family: MIT
+  summary: 'Transforms tokens into original source code (while preserving whitespace)'
+
+extra:
+  recipe-maintainers:
+    - anjos
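untokenize is docformatter's only runtime dependency beyond Python itself, per the recipe above. A hedged round-trip sketch of what the package provides, presumably the property its bundled test_acid.py exercises (standard-library tokenizer assumed):

import tokenize
from io import StringIO

import untokenize

# Tokenize a snippet and reassemble it; untokenize should preserve the
# original whitespace and comments exactly.
source = "x = 1  # keep this spacing\n"
tokens = tokenize.generate_tokens(StringIO(source).readline)
assert untokenize.untokenize(tokens) == source
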