Commit 346172e0 authored by André Anjos

Merge branch 'remove-bob-devel' into 'master'

Add bob-devel here which is generated dynamically

See merge request !242
parents 6523b29c 491551c6
Pipeline #54020 passed
@@ -23,7 +23,7 @@ stages:
- ${PRE_COMMIT_HOME}
build_linux:
.build_linux_template:
extends: .build_template
variables:
BUILD_EGG: "true"
@@ -33,13 +33,9 @@ build_linux:
before_script:
- rm -f /root/.condarc
- rm -rf /root/.conda
script:
- python3 ./bob/devtools/bootstrap.py -vv build
- source ${CONDA_ROOT}/etc/profile.d/conda.sh
- conda activate base
- pip install pre-commit
- pre-commit run --all-files --show-diff-on-failure
- python3 ./bob/devtools/build.py -vv --twine-check
artifacts:
paths:
- dist/*.zip
@@ -52,18 +48,15 @@ build_linux:
key: "linux-cache"
build_macos_intel:
.build_macos_intel_template:
extends: .build_template
tags:
- macos
- intel
script:
before_script:
- python3 ./bob/devtools/bootstrap.py -vv build
- source ${CONDA_ROOT}/etc/profile.d/conda.sh
- conda activate base
- pip install pre-commit
- pre-commit run --all-files --show-diff-on-failure
- python3 ./bob/devtools/build.py -vv
artifacts:
paths:
- ${CONDA_ROOT}/conda-bld/osx-64/*.conda
@@ -72,6 +65,41 @@ build_macos_intel:
key: "macos-intel-cache"
build_linux_bob_devel:
extends: .build_linux_template
script:
- python3 ./bob/devtools/build.py -vv build-bob-devel
build_linux_deps:
extends: .build_linux_template
script:
- python3 ./bob/devtools/build.py -vv build-deps
build_linux_bob_devtools:
extends: .build_linux_template
script:
- pip install pre-commit
- pre-commit run --all-files --show-diff-on-failure
- python3 ./bob/devtools/build.py -vv build-devtools --twine-check
build_macos_intel_bob_devel:
extends: .build_macos_intel_template
script:
- python3 ./bob/devtools/build.py -vv build-bob-devel
build_macos_intel_deps:
extends: .build_macos_intel_template
script:
- python3 ./bob/devtools/build.py -vv build-deps
build_macos_intel_bob_devtools:
extends: .build_macos_intel_template
script:
- pip install pre-commit
- pre-commit run --all-files --show-diff-on-failure
- python3 ./bob/devtools/build.py -vv build-devtools
# Deploy targets
.deploy_template:
stage: deploy
@@ -86,8 +114,12 @@ build_macos_intel:
- bdt ci deploy -vv
- bdt ci clean -vv
dependencies:
- build_linux
- build_macos_intel
- build_linux_bob_devel
- build_linux_deps
- build_linux_bob_devtools
- build_macos_intel_bob_devel
- build_macos_intel_deps
- build_macos_intel_bob_devtools
tags:
- docker
cache:
@@ -128,8 +160,12 @@ pypi:
- bdt ci pypi -vv dist/*.zip
- bdt ci clean -vv
dependencies:
- build_linux
- build_macos_intel
- build_linux_bob_devel
- build_linux_deps
- build_linux_bob_devtools
- build_macos_intel_bob_devel
- build_macos_intel_deps
- build_macos_intel_bob_devtools
tags:
- docker
cache:
@@ -15,7 +15,11 @@ repos:
rev: 3.9.2
hooks:
- id: flake8
exclude: bob/devtools/templates/setup.py
exclude: |
  (?x)^(
      bob/devtools/templates/setup.py|
      deps/bob-devel/run_test.py
  )$
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
hooks:
@@ -29,4 +33,4 @@ repos:
- id: check-added-large-files
exclude: bob/devtools/templates/setup.py
- id: check-yaml
exclude: .*/meta.yaml
exclude: .*/meta.*.yaml
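The multi-line flake8 exclude added above (this hunk appears to modify .pre-commit-config.yaml) is a verbose Python regular expression: the (?x) flag makes whitespace and line breaks insignificant, and the ^(...)$ anchors restrict the match to exactly the listed paths. A minimal, self-contained sketch of how such a pattern behaves (illustration only, not part of the commit; pre-commit treats exclude as a Python regex matched against the file path):

import re

# Hedged illustration: with (?x), whitespace inside the pattern is ignored,
# so the two alternatives can be listed one per line; ^(...)$ means only an
# exact path match is excluded.
pattern = re.compile(
    r"""(?x)^(
        bob/devtools/templates/setup.py|
        deps/bob-devel/run_test.py
    )$"""
)

assert pattern.search("deps/bob-devel/run_test.py")
assert pattern.search("bob/devtools/templates/setup.py")
assert not pattern.search("bob/devtools/build.py")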
@@ -509,6 +509,9 @@ if __name__ == "__main__":
if args.command == "build":
# clean conda cache and packages before building
run_cmdline([conda_bin, "clean", "--all"])
# simple - just use the defaults channels when self building
run_cmdline(
[conda_bin, "install", "--yes"]
@@ -520,6 +523,7 @@ if __name__ == "__main__":
"conda=%s" % conda_version,
"conda-build=%s" % conda_build_version,
"conda-verify=%s" % conda_verify_version,
"click",
"twine", # required for checking readme of python (zip) distro
]
)
@@ -16,6 +16,7 @@ import re
import subprocess
import sys
import click
import conda_build.api
import yaml
@@ -703,88 +704,142 @@ def base_build(
return conda_build.api.build(recipe_dir, config=conda_config)
if __name__ == "__main__":
def bob_devel(
bootstrap,
server,
intranet,
group,
conda_build_config,
condarc_options,
work_dir,
):
"""
Tests that all packages listed in bob/devtools/data/conda_build_config.yaml
can be installed in one environment.
"""
# Load the packages and their pins in bob/devtools/data/conda_build_config.yaml
# Create a temporary conda-build recipe to test if all packages can be installed
import argparse
with open(conda_build_config, "r") as f:
content = f.read()
parser = argparse.ArgumentParser(
description="Builds bob.devtools on the CI"
)
parser.add_argument(
"-g",
"--group",
default=os.environ.get("CI_PROJECT_NAMESPACE", "bob"),
help="The namespace of the project being built [default: %(default)s]",
)
parser.add_argument(
"-n",
"--name",
default=os.environ.get("CI_PROJECT_NAME", "bob.devtools"),
help="The name of the project being built [default: %(default)s]",
)
parser.add_argument(
"-c",
"--conda-root",
default=os.environ.get(
"CONDA_ROOT", os.path.realpath(os.path.join(os.curdir, "miniconda"))
),
help="The location where we should install miniconda "
"[default: %(default)s]",
)
parser.add_argument(
"-V",
"--visibility",
choices=["public", "internal", "private"],
default=os.environ.get("CI_PROJECT_VISIBILITY", "public"),
help="The visibility level for this project [default: %(default)s]",
)
parser.add_argument(
"-t",
"--tag",
default=os.environ.get("CI_COMMIT_TAG", None),
help="If building a tag, pass it with this flag [default: %(default)s]",
)
parser.add_argument(
"-w",
"--work-dir",
default=os.environ.get("CI_PROJECT_DIR", os.path.realpath(os.curdir)),
help="The directory where the repo was cloned [default: %(default)s]",
)
parser.add_argument(
"-T",
"--twine-check",
action="store_true",
default=False,
help="If set, then performs the equivalent of a "
'"twine check" on the generated python package (zip file)',
)
parser.add_argument(
"--internet",
"-i",
default=False,
action="store_true",
help="If executing on an internet-connected server, unset this flag",
)
parser.add_argument(
"--verbose",
"-v",
action="count",
default=0,
help="Increases the verbosity level. We always prints error and "
"critical messages. Use a single ``-v`` to enable warnings, "
"two ``-vv`` to enable information messages and three ``-vvv`` "
"to enable debug messages [default: %(default)s]",
idx1 = content.find("# AUTOMATIC PARSING START")
idx2 = content.find("# AUTOMATIC PARSING END")
content = content[idx1:idx2]
# filter out using conda-build specific markers
from conda_build.metadata import ns_cfg, select_lines
config = make_conda_config(conda_build_config, None, None, condarc_options)
content = select_lines(content, ns_cfg(config), variants_in_place=False)
package_pins = yaml.safe_load(content)
package_names_map = package_pins.pop("package_names_map")
packages = [package_names_map.get(p, p) for p in package_pins.keys()]
recipe_dir = os.path.join(work_dir, "deps", "bob-devel")
template_yaml = os.path.join(recipe_dir, "meta.template.yaml")
final_yaml = os.path.join(recipe_dir, "meta.yaml")
package_list = "\n".join(
[
" - {p1} {{{{ {p2} }}}}".format(
p1=p, p2=p.replace("-", "_").replace(".", "_")
)
for p in packages
]
)
parser.add_argument(
"--test-mark-expr",
"-A",
default="",
help="Use this flag to avoid running certain tests during the build. "
"It forwards all settings to ``nosetests`` via --eval-attr=<settings>``"
" and ``pytest`` via -m=<settings>.",
with open(template_yaml) as fr, open(final_yaml, "w") as fw:
content = fr.read()
content = content.replace("# PACKAGE_LIST", package_list)
logger.info(
"Writing a conda build recipe with the following content:\n%s",
content,
)
fw.write(content)
# run conda build
packages = base_build(
bootstrap=bootstrap,
server=server,
intranet=intranet,
group=group,
recipe_dir=recipe_dir,
conda_build_config=conda_build_config,
condarc_options=condarc_options,
)
args = parser.parse_args()
print(f"The following packages were built: {packages}")
@click.group()
@click.option(
"-g",
"--group",
default=os.environ.get("CI_PROJECT_NAMESPACE", "bob"),
help="The namespace of the project being built",
)
@click.option(
"-c",
"--conda-root",
default=os.environ.get(
"CONDA_ROOT", os.path.realpath(os.path.join(os.curdir, "miniconda"))
),
help="The location where we should install miniconda",
)
@click.option(
"-w",
"--work-dir",
default=os.environ.get("CI_PROJECT_DIR", os.path.realpath(os.curdir)),
help="The directory where the repo was cloned",
)
@click.option(
"--internet",
"-i",
is_flag=True,
help="If executing on an internet-connected server, unset this flag",
)
@click.option(
"-V",
"--visibility",
type=click.Choice(["public", "internal", "private"]),
default=os.environ.get("CI_PROJECT_VISIBILITY", "public"),
help="The visibility level for this project",
)
@click.option(
"-t",
"--tag",
default=os.environ.get("CI_COMMIT_TAG"),
help="If building a tag, pass it with this flag",
)
@click.option(
"--verbose",
"-v",
count=True,
help="Increases the verbosity level. We always prints error and critical messages. Use a single '-v' to enable warnings, two '-vv' to enable information messages and three '-vvv' to enable debug messages [default: %(default)s]",
)
@click.option(
"--test-mark-expr",
"-A",
default="",
help="Use this flag to avoid running certain tests during the build. It forwards all settings to 'nosetests' via --eval-attr=<settings> and 'pytest' via -m=<settings>.",
)
@click.pass_context
def cli(
ctx,
group,
conda_root,
work_dir,
internet,
visibility,
tag,
verbose,
test_mark_expr,
):
"Builds bob.devtools on the CI"
ctx.ensure_object(dict)
# loads the "adjacent" bootstrap module
import importlib.util
@@ -796,25 +851,20 @@ if __name__ == "__main__":
spec.loader.exec_module(bootstrap)
server = bootstrap._SERVER
bootstrap.setup_logger(logger, args.verbose)
bootstrap.setup_logger(logger, verbose)
bootstrap.set_environment("DOCSERVER", server)
bootstrap.set_environment("LANG", "en_US.UTF-8")
bootstrap.set_environment("LC_ALL", os.environ["LANG"])
bootstrap.set_environment("NOSE_EVAL_ATTR", args.test_mark_expr)
bootstrap.set_environment("PYTEST_ADDOPTS", f"-m '{args.test_mark_expr}'")
# get information about the version of the package being built
version, is_prerelease = check_version(args.work_dir, args.tag)
bootstrap.set_environment("BOB_PACKAGE_VERSION", version)
bootstrap.set_environment("NOSE_EVAL_ATTR", test_mark_expr)
bootstrap.set_environment("PYTEST_ADDOPTS", f"-m '{test_mark_expr}'")
# create the build configuration
conda_build_config = os.path.join(
args.work_dir, "conda", "conda_build_config.yaml"
work_dir, "conda", "conda_build_config.yaml"
)
recipe_append = os.path.join(args.work_dir, "data", "recipe_append.yaml")
condarc = os.path.join(args.conda_root, "condarc")
condarc = os.path.join(conda_root, "condarc")
logger.info("Loading (this build's) CONDARC file from %s...", condarc)
with open(condarc, "rb") as f:
condarc_options = yaml.load(f, Loader=yaml.FullLoader)
@@ -824,9 +874,64 @@ if __name__ == "__main__":
if condarc_options.get("conda-build", {}).get("root-dir") is None:
condarc_options["croot"] = os.path.join(prefix, "conda-bld")
# builds all dependencies in the 'deps' subdirectory - or at least checks
# these dependencies are already available; these dependencies go directly
# to the public channel once built
# get information about the version of the package being built
version, is_prerelease = check_version(work_dir, tag)
bootstrap.set_environment("BOB_PACKAGE_VERSION", version)
public = visibility == "public"
channels, upload_channel = bootstrap.get_channels(
public=public,
stable=(not is_prerelease),
server=server,
intranet=(not internet),
group=group,
)
if "channels" not in condarc_options:
condarc_options["channels"] = channels + ["defaults"]
# populate ctx.obj
ctx.obj["verbose"] = verbose
ctx.obj["conda_root"] = conda_root
ctx.obj["group"] = group
ctx.obj["bootstrap"] = bootstrap
ctx.obj["server"] = server
ctx.obj["work_dir"] = work_dir
ctx.obj["internet"] = internet
ctx.obj["condarc_options"] = condarc_options
ctx.obj["conda_build_config"] = conda_build_config
ctx.obj["upload_channel"] = upload_channel
@cli.command()
@click.pass_obj
def build_bob_devel(obj):
bob_devel(
bootstrap=obj["bootstrap"],
server=obj["server"],
intranet=not obj["internet"],
group=obj["group"],
conda_build_config=os.path.join(
obj["work_dir"],
"bob",
"devtools",
"data",
"conda_build_config.yaml",
),
condarc_options=obj["condarc_options"],
work_dir=obj["work_dir"],
)
git_clean_build(obj["bootstrap"].run_cmdline, verbose=(obj["verbose"] >= 3))
@cli.command()
@click.pass_obj
def build_deps(obj):
"""builds all dependencies in the 'deps' subdirectory - or at least checks
these dependencies are already available; these dependencies go directly
to the public channel once built
"""
recipes = load_order_file(os.path.join("deps", "order.txt"))
for k, recipe in enumerate([os.path.join("deps", k) for k in recipes]):
@@ -834,26 +939,33 @@ if __name__ == "__main__":
# ignore - not a conda package
continue
base_build(
bootstrap,
server,
not args.internet,
args.group,
obj["bootstrap"],
obj["server"],
not obj["internet"],
obj["group"],
recipe,
conda_build_config,
condarc_options,
obj["conda_build_config"],
obj["condarc_options"],
)
public = args.visibility == "public"
channels, upload_channel = bootstrap.get_channels(
public=public,
stable=(not is_prerelease),
server=server,
intranet=(not args.internet),
group=args.group,
)
if "channels" not in condarc_options:
condarc_options["channels"] = channels + ["defaults"]
git_clean_build(obj["bootstrap"].run_cmdline, verbose=(obj["verbose"] >= 3))
@cli.command()
@click.option(
"-T",
"--twine-check",
is_flag=True,
help="If set, then performs the equivalent of a 'twine check' on the generated python package (zip file)",
)
@click.pass_obj
def build_devtools(obj, twine_check):
bootstrap = obj["bootstrap"]
condarc_options = obj["condarc_options"]
conda_build_config = obj["conda_build_config"]
work_dir = obj["work_dir"]
upload_channel = obj["upload_channel"]
recipe_append = os.path.join(work_dir, "data", "recipe_append.yaml")
logger.info(
"Using the following channels during build:\n - %s",
@@ -864,19 +976,19 @@ if __name__ == "__main__":
conda_build_config, None, recipe_append, condarc_options
)
recipe_dir = os.path.join(args.work_dir, "conda")
recipe_dir = os.path.join(obj["work_dir"], "conda")
metadata = get_rendered_metadata(recipe_dir, conda_config)
paths = get_output_path(metadata, conda_config)
# asserts we're building at the right location
for path in paths:
assert path.startswith(os.path.join(args.conda_root, "conda-bld")), (
assert path.startswith(os.path.join(obj["conda_root"], "conda-bld")), (
'Output path for build (%s) does not start with "%s" - this '
"typically means this build is running on a shared builder and "
"the file ~/.conda/environments.txt is polluted with other "
"environment paths. To fix, empty that file and set its mode "
"to read-only for all."
% (path, os.path.join(args.conda_root, "conda-bld"))
% (path, os.path.join(obj["conda_root"], "conda-bld"))
)
# retrieve the current build number(s) for this build
@@ -888,8 +1000,6 @@ if __name__ == "__main__":
build_number = max([int(k) for k in build_numbers])
# runs the build using the conda-build API
arch = conda_arch()
# notice we cannot build from the pre-parsed metadata because it has already
# resolved the "wrong" build number. We'll have to reparse after setting the
# environment variable BOB_BUILD_NUMBER.
@@ -898,7 +1008,7 @@ if __name__ == "__main__":
conda_build.api.build(recipe_dir, config=conda_config)
# checks if long_description of python package renders fine
if args.twine_check:
if twine_check:
from twine.commands.check import check
package = glob.glob("dist/*.zip")
@@ -911,4 +1021,8 @@ if __name__ == "__main__":
else:
logger.info("twine check (a.k.a. readme check) %s: OK", package[0])
git_clean_build(bootstrap.run_cmdline, verbose=(args.verbose >= 3))
git_clean_build(bootstrap.run_cmdline, verbose=(obj["verbose"] >= 3))
if __name__ == "__main__":
cli(obj={})
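The hunks above convert build.py from a single argparse __main__ block into a click group with three subcommands. The shared options are parsed once by the group, stored on ctx.obj, and each subcommand reads them back through @click.pass_obj. A minimal, self-contained sketch of that pattern (illustrative names only, not the project's code; with click >= 7 the function name build_bob_devel is exposed on the command line as build-bob-devel):

import click


@click.group()
@click.option("-g", "--group", default="bob", help="Project namespace (illustrative).")
@click.option("-v", "--verbose", count=True, help="Verbosity level.")
@click.pass_context
def cli(ctx, group, verbose):
    """Sketch of the shared-option group pattern used by build.py."""
    ctx.ensure_object(dict)  # ctx.obj becomes a plain dict shared with subcommands
    ctx.obj["group"] = group
    ctx.obj["verbose"] = verbose


@cli.command()  # exposed as "build-bob-devel" (underscores become dashes)
@click.pass_obj
def build_bob_devel(obj):
    click.echo(f"would build bob-devel for group={obj['group']} (verbosity={obj['verbose']})")


if __name__ == "__main__":
    cli(obj={})

The CI jobs earlier in this commit invoke the real script in exactly that style, e.g. python3 ./bob/devtools/build.py -vv build-bob-devel, ... build-deps, and ... build-devtools --twine-check.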
@@ -69,44 +69,44 @@ zip_keys:
# Here is the version of dependencies are used when building packages (build
# and host requirements). We keep a list of **all of them** here to make sure
# everything goes as expected in our conda build process. For the version of
# packages that are used for testing packages, see the recipe of bob-devel.
# The version here do not necessarily match the versions in bob-devel.
# This version of bob-devel will be used at test time of packages:
bob_devel:
- 2021.06.17
# This version of beat-devel will be used at test time of packages. Notice it
# uses bob-devel and should have a version that is greater or equal its value
beat_devel:
- 2021.06.17
# The build time only dependencies (build requirements).
# Updating these to the latest version all the time is OK and a good idea.
# These versions should match the versions inside bob-devel as well (if they
# overlap) so update them in both places.
cmake:
- 3.14.0
make:
- 4.2.1
pkg_config:
- 0.29.2
# The host requirements. Ideally we want to build against the oldest possible version of
# packages so packages can be installed with a wide range of versions. But the versions
# here should also be compatible with the pinned versions in bob-devel. For most
# dependencies, you want to put the exact version of bob-devel in here as well. It is
# everything goes as expected in our conda build process.
# Ideally we want to build against the oldest possible version of
# packages so packages can be installed with a wide range of versions. It is
# best to keep this in sync with:
# https://github.com/AnacondaRecipes/aggregate/blob/master/conda_build_config.yaml The
# names here should not contain dots or dashes. You should replace dots and dashes with
# underlines.
# AUTOMATIC PARSING START
# DO NOT MODIFY THIS COMMENT
# list all packages with dashes or dots in their names, here:
package_names_map:
click_plugins: click-plugins
dask_jobqueue: dask-jobqueue
dask_ml: dask-ml
docker_py: docker-py
font_ttf_dejavu_sans_mono: font-ttf-dejavu-sans-mono
pkg_config: pkg-config
pytest_cov: pytest-cov
python_graphviz: python-graphviz
scikit_image: scikit-image
scikit_learn: scikit-learn
sphinxcontrib_httpdomain: sphinxcontrib-httpdomain
sphinxcontrib_mermaid: sphinxcontrib-mermaid
sphinxcontrib_programoutput: sphinxcontrib-programoutput
zc_buildout: zc.buildout
zc_recipe_egg: zc.recipe.egg
boost:
- 1.73.0
click:
- 8.0.1
click_plugins:
- 1.1.1
cmake:
- 3.14.0
coverage:
- 5.5
dask:
@@ -147,6 +147,8 @@ libpng:
- 1.6.37
libtiff:
- 4.2.0
make:
- 4.2.1
matplotlib:
- 3.3.4
mkl:
@@ -157,7 +159,8 @@ nose:
- 1.3.7
numba:
- 0.53.1
numpy: # we build against numpy 1.17 but test against newer versions.
# we build against numpy 1.17 but test against newer versions.
numpy:
- 1.17
opencv:
- 4.5.0
@@ -165,6 +168,8 @@ pandas:
- 1.2.4
pillow:
- 8.2.0
pkg_config:
- 0.29.2
psutil:
- 5.8.0
psycopg2:
@@ -217,16 +222,16 @@ sphinx_rtd_theme:
- 0.4.3
sphinxcontrib_httpdomain:
- 1.7.0
sphinxcontrib_programoutput:
- 0.16
sphinxcontrib_mermaid:
- 0.6.1
sphinxcontrib_programoutput:
- 0.16
sqlalchemy:
- 1.4.15
tabulate:
- 0.8.9
tensorflow:
- 2.4.1
tensorflow: # [linux]
- 2.4.1 # [linux]
termcolor:
- 1.1.0
torchvision:
@@ -241,3 +246,5 @@ zc_buildout:
- 2.13.3
zc_recipe_egg:
- 2.0.7
# AUTOMATIC PARSING END
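The # AUTOMATIC PARSING START / # AUTOMATIC PARSING END markers delimit the region that the new build-bob-devel subcommand reads back programmatically. A rough, standalone illustration of that parsing, assuming the file lives at bob/devtools/data/conda_build_config.yaml as referenced in build.py (the real code additionally filters lines with conda-build selectors such as # [linux] via select_lines):

# Illustration only, not the project's code: slice out the marked region,
# load it as YAML, and map underscore keys back to real conda package names
# through package_names_map.
import yaml

with open("bob/devtools/data/conda_build_config.yaml") as f:
    content = f.read()

start = content.find("# AUTOMATIC PARSING START")
end = content.find("# AUTOMATIC PARSING END")
pins = yaml.safe_load(content[start:end])

names_map = pins.pop("package_names_map")
packages = sorted(names_map.get(name, name) for name in pins)
print(packages)  # e.g. [..., 'scikit-learn', 'sphinxcontrib-httpdomain', ...]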
package:
  name: bob-devel
  version: 2021.09.14

build:
  number: 0

requirements:
  host:
    - python {{ python }}
    - {{ compiler('c') }}
    - {{ compiler('cxx') }}
    # PACKAGE_LIST
  run:
    - python
  run_constrained:
    {% for package in resolved_packages('host') %}
    - {{ package }}
    {% endfor %}

test:
  requires:
    - numpy
    - ffmpeg
    - pytorch
    - torchvision
    - setuptools
  commands:
    # we expect these features from ffmpeg:
    - ffmpeg -codecs | grep "DEVI.S zlib"  # [unix]
    - ffmpeg -codecs | grep "DEV.LS h264"  # [unix]
import sys

# couple of imports to see if packages are working
import numpy
import pkg_resources


def test_pytorch():
    import torch
    from torchvision.models import DenseNet

    model = DenseNet()
    t = torch.randn(1, 3, 224, 224)
    out = model(t)
    assert out.shape[1] == 1000


def _check_package(name, pyname=None):
    "Checks if a Python package can be `require()`'d"
    pyname = pyname or name
    print(f"Checking Python setuptools integrity for {name} (pyname: {pyname})")
    pkg_resources.require(pyname)


def test_setuptools_integrity():
    _check_package("pytorch", "torch")
    _check_package("torchvision")


# test if pytorch installation is sane
test_pytorch()
test_setuptools_integrity()
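The script above appears to be the new deps/bob-devel/run_test.py. conda-build automatically picks up a run_test.py found next to a recipe and executes it in the freshly created test environment, so the module-level imports and the two calls at the bottom act as the package's test suite; this is presumably also why the pre-commit change earlier excludes the file from flake8, since its imports are intentionally left "unused".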