Commit 4c1b1058 authored by Amir MOHAMMADI

[pre-commit] Add pre-commit to bob.devtools and its template

parent 6d6d0c5e
Pipeline #41402 failed with stage in 2 minutes and 50 seconds
[flake8]
max-line-length = 88
select = B,C,E,F,W,T4,B9,B950
ignore = E501, W503, E203
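
These three ignores are the usual companions to black: E501 defers line length to the 88-column limit above, while E203 and W503 would otherwise flag code exactly as black formats it. A minimal sketch of both cases (names made up):

data = list(range(10))
lower, upper, offset = 1, 6, 2

# black keeps spaces around ":" when slice bounds are expressions; plain
# flake8 would flag this as E203 (whitespace before ':'):
chunk = data[lower + offset : upper + offset]

# when an expression must wrap, black breaks before binary operators; plain
# flake8 would flag this as W503 (line break before binary operator):
total = (
    lower
    + upper
    + offset
)
print(chunk, total)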
[settings]
line_length=88
order_by_type=true
lines_between_types=1
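
The [settings] block is isort's configuration: the line length matches black, and combined with the -sl flag passed to the isort hook below it forces one import per line, which is the rewrite visible across every file in this diff. For instance:

# grouped form (removed throughout this commit):
#   from os.path import exists, join
# single-line form that "isort -sl" produces (added throughout this commit):
from os.path import exists
from os.path import join

print(join("a", "b"), exists("."))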
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/timothycrosley/isort
rev: 4.3.21-2
hooks:
- id: isort
args: [-sl]
- repo: https://github.com/psf/black
rev: stable
hooks:
- id: black
exclude: bob/devtools/templates/setup.py
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.0.0
hooks:
- id: check-ast
exclude: bob/devtools/templates/setup.py
- id: check-case-conflict
- id: trailing-whitespace
- id: end-of-file-fixer
- id: debug-statements
exclude: bob/devtools/templates/setup.py
- id: check-added-large-files
- id: flake8
exclude: bob/devtools/templates/setup.py
- repo: local
hooks:
- id: sphinx-build
name: sphinx build
entry: python -m sphinx.cmd.build
args: [-a, -E, -W, doc, sphinx]
language: system
files: ^doc/
types: [file]
pass_filenames: false
- id: sphinx-doctest
name: sphinx doctest
entry: python -m sphinx.cmd.build
args: [-a, -E, -b, doctest, doc, sphinx]
language: system
files: ^doc/
types: [file]
pass_filenames: false
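
With this file in place, the hooks are activated per clone and can be run over the whole tree once; a sketch using the standard pre-commit CLI (wrapped in subprocess purely for illustration):

import subprocess

# install the git pre-commit hook into .git/hooks for this clone:
subprocess.run(["pre-commit", "install"], check=True)

# run every configured hook once against all tracked files:
subprocess.run(["pre-commit", "run", "--all-files"], check=True)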
@@ -4,6 +4,14 @@
"""Bootstraps a new miniconda installation and prepares it for development."""
import glob
import logging
import os
import platform
import shutil
import subprocess
import sys
import time
_BASE_CONDARC = """\
add_pip_as_python_dependency: false #!final
@@ -34,16 +42,6 @@ _INTERVALS = (
"""Time intervals that make up human readable time slots"""
import os
import sys
import glob
import time
import shutil
import platform
import subprocess
import logging
logger = logging.getLogger(__name__)
@@ -171,7 +169,6 @@ def merge_conda_cache(cache, prefix, name):
# merge urls.txt files
logger.info("Merging urls.txt files from cache...")
urls = []
cached_pkgs_urls_txt = os.path.join(cached_pkgs_dir, "urls.txt")
if not os.path.exists(cached_pkgs_urls_txt):
...
@@ -4,22 +4,21 @@
"""Tools for self-building and other utilities."""
import os
import re
import sys
import distutils.version
import glob
import json
import shutil
import logging
import os
import platform
import re
import subprocess
import sys
import logging
import conda_build.api
import yaml
logger = logging.getLogger(__name__)
import yaml
import distutils.version
def remove_conda_loggers():
"""Cleans-up conda API logger handlers to avoid logging repetition"""
@@ -31,8 +30,6 @@ def remove_conda_loggers():
logger.debug("Removed conda logger handler at %s", handler)
import conda_build.api
remove_conda_loggers()
@@ -102,9 +99,7 @@ def next_build_number(channel_url, basename):
remove_conda_loggers()
# get the channel index
channel_urls = calculate_channel_urls(
[channel_url], prepend=False, use_local=False
)
channel_urls = calculate_channel_urls([channel_url], prepend=False, use_local=False)
logger.debug("Downloading channel index from %s", channel_urls)
index = fetch_index(channel_urls=channel_urls)
@@ -115,8 +110,7 @@ def next_build_number(channel_url, basename):
name, version, build = basename[:-6].rsplit("-", 2)
else:
raise RuntimeError(
"Package name %s does not end in either "
".tar.bz2 or .conda" % (basename,)
"Package name %s does not end in either " ".tar.bz2 or .conda" % (basename,)
)
# remove the build number as we're looking for the next value
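
Conda package filenames follow the <name>-<version>-<build> convention, so stripping the extension (6 characters for ".conda", 8 for ".tar.bz2") and right-splitting on the last two dashes recovers the three fields. A worked example (filename made up):

basename = "bob.devtools-3.0.0-py38h4321_0.tar.bz2"
name, version, build = basename[:-8].rsplit("-", 2)
assert (name, version, build) == ("bob.devtools", "3.0.0", "py38h4321_0")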
@@ -152,11 +146,7 @@ def next_build_number(channel_url, basename):
): # match!
url = index[dist].url
logger.debug(
"Found match at %s for %s-%s-%s",
url,
name,
version,
build_variant,
"Found match at %s for %s-%s-%s", url, name, version, build_variant,
)
build_number = max(build_number, dist.build_number + 1)
urls[index[dist].timestamp] = url.replace(channel_url, "")
@@ -205,9 +195,7 @@ def make_conda_config(config, python, append_file, condarc_options):
# appropriate platform-specific subdir (e.g. win-64)
if os.path.isdir(url):
if not os.path.isabs(url):
url = os.path.normpath(
os.path.abspath(os.path.join(os.getcwd(), url))
)
url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
url = url_path(url)
retval.channel_urls.append(url)
@@ -261,8 +249,7 @@ def exists_on_channel(channel_url, basename):
name, version, build = name[:-8].rsplit("-", 2)
else:
raise RuntimeError(
"Package name %s does not end in either "
".tar.bz2 or .conda" % (name,)
"Package name %s does not end in either " ".tar.bz2 or .conda" % (name,)
)
# remove the build number as we're looking for the next value
@@ -289,7 +276,7 @@ def exists_on_channel(channel_url, basename):
def remove_pins(deps):
return [l.split()[0] for l in deps]
return [ll.split()[0] for ll in deps]
def parse_dependencies(recipe_dir, config):
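
remove_pins keeps only the first token of each dependency specification, dropping any version pin; a quick illustration (specs made up):

deps = ["python >=3.6", "click 7.*", "numpy"]
assert [ll.split()[0] for ll in deps] == ["python", "click", "numpy"]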
@@ -436,8 +423,7 @@ def get_docserver_setup(public, stable, server, intranet, group):
if (not public) and (not intranet):
raise RuntimeError(
"You cannot request for private channels and set"
" intranet=False (server=%s) - these are conflicting options"
% server
" intranet=False (server=%s) - these are conflicting options" % server
)
entries = []
@@ -461,9 +447,7 @@ def get_docserver_setup(public, stable, server, intranet, group):
server + prefix + "/docs/" + group + "/%(name)s/stable/",
]
else:
entries += [
server + prefix + "/docs/" + group + "/%(name)s/master/"
]
entries += [server + prefix + "/docs/" + group + "/%(name)s/master/"]
return "|".join(entries)
@@ -500,8 +484,7 @@ def check_version(workdir, envtag):
'"version.txt" indicates version is a '
'pre-release (v%s) - but environment provided tag "%s", '
"which indicates this is a **stable** build. "
"Have you created the tag using ``bdt release``?"
% (version, envtag)
"Have you created the tag using ``bdt release``?" % (version, envtag)
)
else: # it is a stable build
if envtag is None:
@@ -554,20 +537,11 @@ def git_clean_build(runner, verbose):
if not verbose:
flags += "q"
runner(
["git", "clean", flags]
+ ["--exclude=%s" % k for k in exclude_from_cleanup]
)
runner(["git", "clean", flags] + ["--exclude=%s" % k for k in exclude_from_cleanup])
def base_build(
bootstrap,
server,
intranet,
group,
recipe_dir,
conda_build_config,
condarc_options,
bootstrap, server, intranet, group, recipe_dir, conda_build_config, condarc_options,
):
"""Builds a non-beat/non-bob software dependence that doesn't exist on
defaults.
@@ -614,18 +588,14 @@ def base_build(
"\n - ".join(condarc_options["channels"]),
)
logger.info("Merging conda configuration files...")
conda_config = make_conda_config(
conda_build_config, None, None, condarc_options
)
conda_config = make_conda_config(conda_build_config, None, None, condarc_options)
metadata = get_rendered_metadata(recipe_dir, conda_config)
arch = conda_arch()
# checks we should actually build this recipe
if should_skip_build(metadata):
logger.warn(
'Skipping UNSUPPORTED build of "%s" on %s', recipe_dir, arch
)
logger.warn('Skipping UNSUPPORTED build of "%s" on %s', recipe_dir, arch)
return
paths = get_output_path(metadata, conda_config)
@@ -643,8 +613,8 @@ def base_build(
if any(urls):
raise RuntimeError(
"One or more packages for recipe at '%s' already exist (%s). "
"Change the package build number to trigger a build." % \
(recipe_dir, ", ".join(urls)),
"Change the package build number to trigger a build."
% (recipe_dir, ", ".join(urls)),
)
# if you get to this point, just builds the package(s)
@@ -656,9 +626,7 @@ if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(
description="Builds bob.devtools on the CI"
)
parser = argparse.ArgumentParser(description="Builds bob.devtools on the CI")
parser.add_argument(
"-g",
"--group",
@@ -677,8 +645,7 @@ if __name__ == "__main__":
default=os.environ.get(
"CONDA_ROOT", os.path.realpath(os.path.join(os.curdir, "miniconda"))
),
help="The location where we should install miniconda "
"[default: %(default)s]",
help="The location where we should install miniconda " "[default: %(default)s]",
)
parser.add_argument(
"-V",
@@ -756,8 +723,7 @@ if __name__ == "__main__":
bootstrap.set_environment("BOB_PACKAGE_VERSION", version)
# create the build configuration
conda_build_config = os.path.join(args.work_dir, "conda",
"conda_build_config.yaml")
conda_build_config = os.path.join(args.work_dir, "conda", "conda_build_config.yaml")
recipe_append = os.path.join(args.work_dir, "data", "recipe_append.yaml")
condarc = os.path.join(args.conda_root, "condarc")
@@ -821,8 +787,7 @@ if __name__ == "__main__":
"typically means this build is running on a shared builder and "
"the file ~/.conda/environments.txt is polluted with other "
"environment paths. To fix, empty that file and set its mode "
"to read-only for all."
% (path, os.path.join(args.conda_root, "conda-bld"))
"to read-only for all." % (path, os.path.join(args.conda_root, "conda-bld"))
)
# retrieve the current build number(s) for this build
...
@@ -3,11 +3,11 @@
"""Utilities for retrieving, parsing and auto-generating changelogs."""
import io
import datetime
import io
import pytz
import dateutil.parser
import pytz
from .log import get_logger
@@ -25,18 +25,14 @@ def parse_date(d):
def _sort_commits(commits, reverse):
"""Sorts gitlab commit objects using their ``committed_date`` attribute."""
return sorted(
commits, key=lambda x: parse_date(x.committed_date), reverse=reverse
)
return sorted(commits, key=lambda x: parse_date(x.committed_date), reverse=reverse)
def _sort_tags(tags, reverse):
"""Sorts gitlab tag objects using their ``committed_date`` attribute."""
return sorted(
tags,
key=lambda x: parse_date(x.commit["committed_date"]),
reverse=reverse,
tags, key=lambda x: parse_date(x.commit["committed_date"]), reverse=reverse,
)
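
Sorting on the parsed date rather than the raw string matters because GitLab timestamps arrive with mixed UTC offsets; e.g. (timestamps made up):

import dateutil.parser

dates = ["2020-05-01T10:00:00+02:00", "2020-05-01T09:30:00+00:00"]
# 10:00+02:00 is 08:00 UTC and therefore sorts first, the opposite of a
# naive lexicographic sort:
print(sorted(dates, key=dateutil.parser.parse))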
@@ -153,9 +149,7 @@ def _write_one_tag(f, pkg_name, tag):
if line.startswith("* ") or line.startswith("- "):
line = line[2:]
line = line.replace("!", pkg_name + "!").replace(
pkg_name + pkg_name, pkg_name
)
line = line.replace("!", pkg_name + "!").replace(pkg_name + pkg_name, pkg_name)
line = line.replace("#", pkg_name + "#")
if not line:
continue
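
The replace chain qualifies bare GitLab references (!12, #34) with the package path so they resolve outside the originating project, and the second replace undoes the double prefix created when a reference was already qualified. A worked example:

pkg_name = "bob.devtools"
line = "Fixed the CI (bob.devtools!12, closes #34)"
line = line.replace("!", pkg_name + "!").replace(pkg_name + pkg_name, pkg_name)
line = line.replace("#", pkg_name + "#")
print(line)  # Fixed the CI (bob.devtools!12, closes bob.devtools#34)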
@@ -208,9 +202,7 @@ def _write_mergerequests_range(f, pkg_name, mrs):
title = title.replace(" !", " " + pkg_name + "!")
title = title.replace(" #", " " + pkg_name + "#")
if mr.description is not None:
description = (
mr.description.strip().replace("\r", "").replace("\n", " ")
)
description = mr.description.strip().replace("\r", "").replace("\n", " ")
description = description.replace(" !", " " + pkg_name + "!")
description = description.replace(" #", " " + pkg_name + "#")
else:
@@ -261,15 +253,13 @@ def get_changes_since(gitpkg, since):
mrs = list(
reversed(
gitpkg.mergerequests.list(
state="merged",
updated_after=since,
order_by="updated_at",
all=True,
state="merged", updated_after=since, order_by="updated_at", all=True,
)
)
)
return mrs, tags, commits
def write_tags_with_commits(f, gitpkg, since, mode):
"""Writes all tags and commits of a given package to the output file.
@@ -310,9 +300,7 @@ def write_tags_with_commits(f, gitpkg, since, mode):
# the attribute 'merged_at' is not available in GitLab API as of 27
# June 2018
mrs4tag = [
k
for k in mrs
if (start_date < parse_date(k.updated_at) <= end_date)
k for k in mrs if (start_date < parse_date(k.updated_at) <= end_date)
]
_write_mergerequests_range(
f, gitpkg.attributes["path_with_namespace"], mrs4tag
@@ -329,9 +317,7 @@ def write_tags_with_commits(f, gitpkg, since, mode):
# write leftover merge requests
# the attribute 'merged_at' is not available in GitLab API as of 27
# June 2018
leftover_mrs = [
k for k in mrs if parse_date(k.updated_at) > start_date
]
leftover_mrs = [k for k in mrs if parse_date(k.updated_at) > start_date]
_write_mergerequests_range(
f, gitpkg.attributes["path_with_namespace"], leftover_mrs
)
...
@@ -4,11 +4,13 @@
"""Tools to help CI-based builds and artifact deployment."""
import git
import distutils.version
from .log import get_logger, echo_info
import git
from .build import load_order_file
from .log import echo_info
from .log import get_logger
logger = get_logger(__name__)
@@ -58,15 +60,11 @@ def is_stable(package, refname, tag, repodir):
if tag is not None:
logger.info('Project %s tag is "%s"', package, tag)
parsed_tag = distutils.version.LooseVersion(
tag[1:]
).version # remove 'v'
parsed_tag = distutils.version.LooseVersion(tag[1:]).version # remove 'v'
is_prerelease = any([isinstance(k, str) for k in parsed_tag])
if is_prerelease:
logger.warn(
"Pre-release detected - not publishing to stable channels"
)
logger.warn("Pre-release detected - not publishing to stable channels")
return False
if is_master(refname, tag, repodir):
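
LooseVersion(...).version splits a tag into numeric and string components, and any string component marks a pre-release; for example (tags illustrative):

import distutils.version

parsed = distutils.version.LooseVersion("3.0.0b1").version
print(parsed)  # [3, 0, 0, 'b', 1]
print(any(isinstance(k, str) for k in parsed))  # True: pre-release

parsed = distutils.version.LooseVersion("3.0.0").version
print(any(isinstance(k, str) for k in parsed))  # False: stable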
@@ -163,9 +161,7 @@ def select_build_file(basename, paths, branch):
specific_basename = "%s-%s" % (basename, branch)
for path in paths:
path = os.path.realpath(path)
candidate = os.path.join(
path, "%s%s" % (specific_basename, extension)
)
candidate = os.path.join(path, "%s%s" % (specific_basename, extension))
if os.path.exists(candidate):
return candidate
@@ -247,29 +243,34 @@ def cleanup(dry_run, username, password, includes):
# go through all possible variants:
archs = [
'linux-64',
'linux-32',
'linux-armv6l',
'linux-armv7l',
'linux-ppc64le',
'osx-64',
'osx-32',
'win-64',
'win-32',
'noarch',
]
"linux-64",
"linux-32",
"linux-armv6l",
"linux-armv7l",
"linux-ppc64le",
"osx-64",
"osx-32",
"win-64",
"win-32",
"noarch",
]
path = server_info["conda"]
for arch in archs:
arch_path = '/'.join((path, arch))
arch_path = "/".join((path, arch))
if not (davclient.check(arch_path) and davclient.is_dir(arch_path)):
# it is normal if the directory does not exist
continue
server_path = davclient.get_url(arch_path)
echo_info('Cleaning beta packages from %s' % server_path)
remove_old_beta_packages(client=davclient, path=arch_path,
dry_run=dry_run, pyver=True, includes=includes)
echo_info("Cleaning beta packages from %s" % server_path)
remove_old_beta_packages(
client=davclient,
path=arch_path,
dry_run=dry_run,
pyver=True,
includes=includes,
)
@@ -4,10 +4,10 @@
"""Constants used for building and more."""
import os
import pkg_resources
from . import bootstrap
from .log import get_logger
logger = get_logger(__name__)
@@ -112,9 +112,7 @@ CACERT_URL = "https://curl.haxx.se/ca/cacert.pem"
"""Location of the most up-to-date CA certificate bundle"""
CACERT = pkg_resources.resource_filename(
__name__, os.path.join("data", "cacert.pem")
)
CACERT = pkg_resources.resource_filename(__name__, os.path.join("data", "cacert.pem"))
"""We keep a copy of the CA certificates we trust here
To update this file use: ``curl --remote-name --time-cond cacert.pem https://curl.haxx.se/ca/cacert.pem``
...
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import configparser
import os
import re
import configparser
import dateutil.parser
from distutils.version import StrictVersion
from .log import get_logger, echo_warning, echo_info, echo_normal
import dateutil.parser
from .deploy import _setup_webdav_client
from .log import echo_normal
from .log import echo_warning
from .log import get_logger
logger = get_logger(__name__)
@@ -141,14 +144,14 @@ def remove_old_beta_packages(client, path, dry_run, pyver=True, includes=None):
if result is not None:
name += "/" + result.string[:4]
target = '/'.join((path, f))
target = "/".join((path, f))
info = client.info(target)
betas.setdefault(name, []).append(
(
StrictVersion(version),
int(build), # build number
dateutil.parser.parse(info['modified']).timestamp(),
dateutil.parser.parse(info["modified"]).timestamp(),
target,
)
)
...
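
Each entry is a (version, build number, upload timestamp, path) tuple, so per package the list sorts naturally and everything except the newest beta can be dropped; a sketch with made-up values:

from distutils.version import StrictVersion

betas = sorted(
    [
        (StrictVersion("3.0.0b1"), 1, 1588100000.0, "linux-64/pkg-3.0.0b1-1.tar.bz2"),
        (StrictVersion("3.0.0b1"), 0, 1588000000.0, "linux-64/pkg-3.0.0b1-0.tar.bz2"),
    ]
)
print(betas[-1][-1])  # the newest build is the one a cleanup would keep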
@@ -6,7 +6,8 @@
import os
from .constants import WEBDAV_PATHS, SERVER
from .constants import SERVER
from .constants import WEBDAV_PATHS
from .log import get_logger
logger = get_logger(__name__)
@@ -57,9 +58,7 @@ def deploy_conda_package(
"""
server_info = WEBDAV_PATHS[stable][public]
davclient = _setup_webdav_client(
SERVER, server_info["root"], username, password
)
davclient = _setup_webdav_client(SERVER, server_info["root"], username, password)
basename = os.path.basename(package)
arch = arch or os.path.basename(os.path.dirname(package))
@@ -75,30 +74,17 @@
)
else:
logger.info(
"[dav] rm -f %s%s%s", SERVER, server_info["root"], remote_path
)
logger.info("[dav] rm -f %s%s%s", SERVER, server_info["root"], remote_path)
if not dry_run:
davclient.clean(remote_path)
logger.info(
"[dav] %s -> %s%s%s", package, SERVER, server_info["root"], remote_path
)
logger.info("[dav] %s -> %s%s%s", package, SERVER, server_info["root"], remote_path)
if not dry_run:
davclient.upload(local_path=package, remote_path=remote_path)
def deploy_documentation(
path,
package,
stable,
latest,
public,
branch,
tag,
username,
password,
dry_run,
path, package, stable, latest, public, branch, tag, username, password, dry_run,
):
"""Deploys sphinx documentation to the appropriate webdav locations.
@@ -133,9 +119,7 @@ def deploy_documentation(
)
server_info = WEBDAV_PATHS[stable][public]
davclient = _setup_webdav_client(
SERVER, server_info["root"], username, password
)
davclient = _setup_webdav_client(SERVER, server_info["root"], username, password)
remote_path_prefix = "%s/%s" % (server_info["docs"], package)
...
@@ -3,22 +3,21 @@
"""Utilities for calculating package dependencies and drawing graphs"""
import glob
import os
import re
import glob
import fnmatch
import tempfile
import tarfile
import tempfile
from io import BytesIO
from .bootstrap import set_environment
from .build import (
next_build_number,
get_rendered_metadata,
get_parsed_recipe,
get_output_path,
)
from .log import get_logger, echo_info
from .build import get_output_path
from .build import get_parsed_recipe
from .build import get_rendered_metadata
from .build import next_build_number
from .log import echo_info
from .log import get_logger
logger = get_logger(__name__)
@@ -28,7 +27,7 @@ def compute_adjencence_matrix(
package,
conda_config,
main_channel,
recurse_regexp="^(bob|beat|batl|gridtk)(\.)?(?!-).*$",
recurse_regexp=r"^(bob|beat|batl|gridtk)(\.)?(?!-).*$",
current={},
ref="master",
deptypes=[],
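
The added r prefix makes the \. escape literal (avoiding the invalid-escape DeprecationWarning newer Pythons emit) without changing the pattern, which recurses into dotted bob/beat/batl/gridtk packages but skips hyphenated names; for instance:

import re

recurse_compiled = re.compile(r"^(bob|beat|batl|gridtk)(\.)?(?!-).*$")
print(bool(recurse_compiled.match("bob.devtools")))  # True
print(bool(recurse_compiled.match("gridtk")))        # True
print(bool(recurse_compiled.match("bob-foo")))       # False: lookahead rejects "-"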
@@ -127,9 +126,7 @@ def compute_adjencence_matrix(
path = get_output_path(metadata, conda_config)[0]
# gets the next build number
build_number, _ = next_build_number(
main_channel, os.path.basename(path)
)
build_number, _ = next_build_number(main_channel, os.path.basename(path))
# at this point, all elements are parsed, I know the package version,
# build number and all dependencies
@@ -163,8 +160,8 @@ def compute_adjencence_matrix(
# if dependencies match a target set of globs
recurse_compiled = re.compile(recurse_regexp)
def _re_filter(l):
return [k for k in l if recurse_compiled.match(k)]
def _re_filter(ll):
return [k for k in ll if recurse_compiled.match(k)]
all_recurse = set()
all_recurse |= set([z.split()[0] for z in _re_filter(host)])
@@ -194,9 +191,7 @@ def compute_adjencence_matrix(
# do not recurse for packages we already know
all_recurse -= set(current.keys())
logger.info(
"Recursing over the following packages: %s", ", ".join(all_recurse)
)
logger.info("Recursing over the following packages: %s", ", ".join(all_recurse))
for dep in all_recurse:
dep_adjmtx = compute_adjencence_matrix(
@@ -264,17 +259,10 @@ def generate_graph(adjacence_matrix, deptypes, whitelist):
for package, values in adjacence_matrix.items():
if not whitelist_compiled.match(values["name"]):
logger.debug(
"Skipping main package %s (did not match whitelist)",
values["name"],
"Skipping main package %s (did not match whitelist)", values["name"],
)
continue
name = (
values["name"]
+ "\n"