Commit 81494fca authored by André Anjos

Merge branch 'conda-ci' into '1.6.x'

New Conda-based CI/CD Pipelines

See merge request !19
parents 24f99700 1f60a7e7
Pipeline #19469 passed with stages
in 48 minutes and 28 seconds
@@ -21,3 +21,10 @@ opsnr.stt
.coverage
.DS_Store
html/
record.txt
_ci/
miniconda.sh
miniconda/
miniconda.cached/
conda/recipe_append.yaml
conda-bld/
# This build file uses template features from YAML so it is generic enough for
# any Bob project. Don't modify it unless you know what you're doing.
# Definition of global variables (all stages)
variables:
CONDA_ROOT: "${CI_PROJECT_DIR}/miniconda"
# Definition of our build pipeline order
stages:
- build
- docker
- deploy
- pypi
# Build targets
.build_template: &build_job
stage: build
before_script:
- mkdir _ci
- curl --silent "https://gitlab.idiap.ch/bob/bob.admin/raw/master/gitlab/install.sh" > _ci/install.sh
- chmod 755 _ci/install.sh
- ./_ci/install.sh _ci master #installs ci support scripts
- ./_ci/before_build.sh
script:
- ./_ci/build.sh
after_script:
- ./_ci/after_build.sh
cache: &build_caches
paths:
- miniconda.sh
- ${CONDA_ROOT}/pkgs/*.tar.bz2
- ${CONDA_ROOT}/pkgs/urls.txt
.build_linux_template: &linux_build_job
<<: *build_job
tags:
- docker
image: continuumio/conda-concourse-ci
artifacts:
expire_in: 1 week
paths:
- _ci/
- ${CONDA_ROOT}/conda-bld/linux-64/*.tar.bz2
cache:
<<: *build_caches
key: "linux-cache"
.build_macosx_template: &macosx_build_job
<<: *build_job
tags:
- macosx
artifacts:
expire_in: 1 week
paths:
- _ci/
- ${CONDA_ROOT}/conda-bld/osx-64/*.tar.bz2
cache:
<<: *build_caches
key: "macosx-cache"
# Docker-host-based testing (must run inside dind or a docker-enabled host)
.docker_test_linux_template: &linux_docker_job
stage: docker
before_script:
# set the artifacts aside, as before_build.sh would erase them
- mv ${CONDA_ROOT}/conda-bld .
- ./_ci/install.sh _ci master #updates ci support scripts
- ./_ci/before_build.sh
- mv conda-bld ${CONDA_ROOT}
- ./scripts/before_test.sh
script:
- export BEAT_DOCKER_TESTS=true
- BOB_TEST_ONLY=true ./_ci/build.sh
after_script:
- ./_ci/after_build.sh
build_linux_27:
<<: *linux_build_job
variables:
PYTHON_VERSION: "2.7"
build_linux_36:
<<: *linux_build_job
variables:
PYTHON_VERSION: "3.6"
BUILD_EGG: "true"
artifacts:
expire_in: 1 week
paths:
- _ci/
- dist/*.zip
- sphinx
- ${CONDA_ROOT}/conda-bld/linux-64/*.tar.bz2
build_macosx_27:
<<: *macosx_build_job
variables:
PYTHON_VERSION: "2.7"
build_macosx_36:
<<: *macosx_build_job
variables:
PYTHON_VERSION: "3.6"
# Docker-host-based testing
docker_linux_27:
<<: *linux_docker_job
variables:
PYTHON_VERSION: "2.7"
dependencies:
- build_linux_27
tags:
- docker-build
docker_linux_36:
<<: *linux_docker_job
variables:
PYTHON_VERSION: "3.6"
dependencies:
- build_linux_36
tags:
- docker-build
# Deploy targets
.deploy_template: &deploy_job
stage: deploy
before_script:
- ./_ci/install.sh _ci master #updates ci support scripts
script:
- ./_ci/deploy.sh
dependencies:
- build_linux_27
- build_linux_36
- build_macosx_27
- build_macosx_36
tags:
- deployer
deploy_beta:
<<: *deploy_job
environment: beta
only:
- 1.6.x
deploy_stable:
<<: *deploy_job
environment: stable
only:
- /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
except:
- branches
pypi:
stage: pypi
environment: pypi
only:
- /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
except:
- branches
before_script:
- ./_ci/install.sh _ci master #updates ci support scripts
script:
- ./_ci/pypi.sh
dependencies:
- build_linux_36
tags:
- docker-build
- deployer
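The ``only`` filters above gate deployment on a tag pattern. As a quick
illustration (plain Python, not part of the pipeline), the regular expression
accepts exactly the PEP-440 style tags mentioned in the comments::

   import re

   # the same pattern used in the `only` filters, minus the // delimiters
   tag = re.compile(r'^v\d+\.\d+\.\d+([abc]\d*)?$')

   assert tag.match('v1.6.0')      # final release tag
   assert tag.match('v1.6.0b3')    # beta pre-release, PEP-440 style
   assert not tag.match('1.6.x')   # branch names never match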
include LICENSE.AGPL README.rst version.txt requirements.txt
include buildout.cfg develop.cfg
recursive-include scripts *.sh
recursive-include doc conf.py *.rst *.png *.svg *.ico *.odg *.pdf *.dot
recursive-include beat/core/schema *.json
recursive-include beat/core/prototypes *.json *.py
recursive-include beat/core/test/prefix *.json *.py *.r *.m *.rst *.h *.cpp
@@ -20,159 +20,45 @@
.. You should have received a copy of the GNU Affero Public License along ..
.. with the BEAT platform. If not, see http://www.gnu.org/licenses/. ..
.. image:: https://img.shields.io/badge/docs-stable-yellow.svg
:target: https://www.idiap.ch/software/beat/docs/beat/beat.core/stable/index.html
.. image:: https://img.shields.io/badge/docs-latest-orange.svg
:target: https://www.idiap.ch/software/beat/docs/beat/beat.core/master/index.html
.. image:: https://gitlab.idiap.ch/beat/beat.core/badges/master/build.svg
:target: https://gitlab.idiap.ch/beat/beat.core/commits/master
.. image:: https://gitlab.idiap.ch/beat/beat.core/badges/master/coverage.svg
:target: https://gitlab.idiap.ch/beat/beat.core/commits/master
.. image:: https://img.shields.io/badge/gitlab-project-0000c0.svg
:target: https://gitlab.idiap.ch/beat/beat.core
.. image:: https://img.shields.io/pypi/v/beat.core.svg
:target: https://pypi.python.org/pypi/beat.core
==========================
Core Components for BEAT
==========================
This package is part of BEAT_, an open-source evaluation platform for data
science algorithms and workflows. It contains the source code for its core
components.
Installation
------------
Really easy, with ``zc.buildout``::
$ python bootstrap-buildout.py
$ ./bin/buildout
These two commands should download and install all missing dependencies,
giving you a fully operational test and development environment.
.. note::
The Python interpreter used in the first line of the previous command set
determines the interpreter that will be used for all scripts developed
inside this package.
If you are on the Idiap filesystem, you may use
``/idiap/project/beat/beat.env.deploy/usr/bin/python`` to bootstrap this
package instead. It contains the same setup deployed at the final BEAT
machinery.
Docker
======
This package depends on Docker_ and uses it to run user algorithms in a
container with the required software stack. You must install the Docker_ engine
and make sure the user running tests has access to it.
In particular, this package controls memory and CPU utilisation of the
containers it launches. You must make sure to enable those functionalities on
your installation.
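For illustration only (this is a hypothetical sketch, not this package's
actual API), the kind of hard caps mentioned above map onto ``docker run``
flags such as ``--memory`` and ``--cpu-quota``::

   import subprocess

   def run_limited(image, command, memory_mb=512, max_cpu_percent=100):
       """Hypothetical helper: run `command` in `image` under hard limits."""
       period = 100000  # microseconds; the default CFS scheduling period
       quota = int(period * max_cpu_percent / 100.0)  # 100 == one full core
       return subprocess.check_output(
           ['docker', 'run', '--rm',
            '--memory', '%dm' % memory_mb,  # hard memory cap, in megabytes
            '--cpu-period', str(period),
            '--cpu-quota', str(quota),
            image] + list(command))

   # e.g.: run_limited('ubuntu:16.04', ['echo', 'hello'], memory_mb=256)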
Docker Setup
============
Complete BEAT's `installation`_ instructions. Then, to install this package,
run::

   $ conda install beat.backend.python

Make sure you have the ``docker`` command available on your system. For certain
operating systems, it is necessary to install ``docker`` via an external
virtual machine (a.k.a. the *docker machine*). Follow the instructions at `the
docker website <https://docs.docker.com/engine/installation/>`_ before trying
to execute algorithms or experiments.
We use specific docker images to run user algorithms. Download the following
base images before you try to run tests or experiments on your computer::
$ docker pull docker.idiap.ch/beat/beat.env.system.python:1.1.2
$ docker pull docker.idiap.ch/beat/beat.env.db.examples:1.1.1
$ docker pull docker.idiap.ch/beat/beat.env.client:1.2.0
$ docker pull docker.idiap.ch/beat/beat.env.cxx:1.0.2
Optionally, also download the following images to be able to re-run experiments
downloaded from the BEAT platform (not required for unit testing)::
$ docker pull docker.idiap.ch/beat/beat.env.python:0.0.4
$ docker pull docker.idiap.ch/beat/beat.env.python:1.0.0
$ docker pull docker.idiap.ch/beat/beat.env.db:1.2.2
Documentation
-------------
To build the documentation, just do::
$ ./bin/sphinx-apidoc --separate -d 2 --output=doc/api beat beat/core/test beat/core/scripts
$ ./bin/sphinx-build doc sphinx
Testing
-------
After installation, it is possible to run our suite of unit tests. To do so,
use ``nose``::
$ ./bin/nosetests -sv
.. note::
Some of the tests for our command-line toolkit require a running BEAT
platform web-server, with a compatible ``beat.core`` installed (preferably
the same). By default, these tests will be skipped. If you want to run
them, you must set up a development web server and set the environment
variable ``BEAT_CORE_TEST_PLATFORM`` to point to that address. For example::
$ export BEAT_CORE_TEST_PLATFORM="http://example.com/platform/"
$ ./bin/nosetests -sv
It is **not** advisable to run tests against a production web server.
If you want to skip slow tests (at least those pulling data from our servers)
or those executing lengthy operations, just do::
$ ./bin/nosetests -sv -a '!slow'
To measure the test coverage, do the following::
$ ./bin/nosetests -sv --with-coverage --cover-package=beat.core
To produce an HTML test coverage report in the directory ``./htmlcov``, do
the following::
$ ./bin/nosetests -sv --with-coverage --cover-package=beat.core --cover-html --cover-html-dir=htmlcov
Our documentation is also interspersed with test units. You can run them
using Sphinx::

   $ ./bin/sphinx-build -b doctest doc sphinx
Development
-----------
Indentation
===========
You can enforce PEP8_ compliance using the application ``autopep8``. For
example, to enforce compliance on a single file and edit it in place, do::
$ ./bin/autopep8 --indent-size=2 --in-place beat/core/utils.py
We normally use 2-space indentation. If needed, you can easily change the
indentation to 4 spaces like this::
$ ./bin/autopep8 --indent-size=4 --in-place beat/core/utils.py
Profiling
=========
In order to profile the test code, try the following::
$ ./bin/python -m cProfile -o prof.data ./bin/nosetests -sv ...
This will dump the profiling data to ``prof.data``. You can inspect its
contents using another command::

   $ ./bin/python -m pstats prof.data

This lets you display and sort the profiling statistics as you see fit.
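Alternatively, a small sketch of doing the same programmatically with the
standard ``pstats`` module::

   import pstats

   stats = pstats.Stats('prof.data')
   # print the 20 entries with the highest cumulative time
   stats.sort_stats('cumulative').print_stats(20)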
Contact
-------

For questions or to report issues with this software package, contact our
development `mailing list`_.
.. References go here
.. _pep8: https://www.python.org/dev/peps/pep-0008/
.. _docker: https://www.docker.com/
.. _beat: https://www.idiap.ch/software/beat
.. _installation: https://www.idiap.ch/software/beat/install
.. _mailing list: https://www.idiap.ch/software/beat/discuss
@@ -25,5 +25,6 @@
# #
###############################################################################
# see https://docs.python.org/3/library/pkgutil.html
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
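For context, :py:func:`pkgutil.extend_path` turns ``beat`` into a namespace
package: every ``beat/`` directory found on ``sys.path`` is appended to
``__path__``, so separately installed distributions (for example ``beat.core``
and ``beat.backend.python``) can share it. A tiny sketch of the observable
effect::

   import beat

   # one entry per installed distribution shipping a `beat/` directory
   # with the same extend_path boilerplate in its __init__.py
   print(list(beat.__path__))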
@@ -54,31 +54,31 @@ class Algorithm(BackendAlgorithm):
and output declaration, grouping, synchronization details, parameters and
splittability). The actual algorithm is not directly treated by this class -
it can, however, provide you with a loader for actually running the
algorithmic code (see :py:meth:`.runner`).
Parameters:
prefix (str): Establishes the prefix of your installation.
data (:py:class:`object`, Optional): The piece of data representing the
algorithm. It must validate against the schema defined for algorithms.
If a string is passed, it is supposed to be a valid path to an
algorithm in the designated prefix area. If a tuple is passed (or a
list), then we consider that the first element represents the algorithm
declaration, while the second, the code for the algorithm (either in
its source format or as a binary blob). If ``None`` is passed, loads
our default prototype for algorithms (source code will be in Python).
dataformat_cache (:py:class:`dict`, Optional): A dictionary mapping
dataformat names to loaded dataformats. This parameter is optional and,
if passed, may greatly speed-up algorithm loading times as dataformats
that are already loaded may be re-used.
library_cache (:py:class:`dict`, Optional): A dictionary mapping library
names to loaded libraries. This parameter is optional and, if passed,
may greatly speed-up library loading times as libraries that are
already loaded may be re-used.
Attributes:
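For reference, a minimal usage sketch of the constructor documented above;
the prefix path and algorithm name are hypothetical, and the caches are plain
dictionaries shared across loads::

   from beat.core.algorithm import Algorithm

   dataformat_cache = {}  # reused across loads to avoid re-parsing formats
   library_cache = {}

   algorithm = Algorithm('/path/to/prefix', 'user/algorithm/1',
                         dataformat_cache, library_cache)
   if algorithm.valid:  # any validation errors are in `algorithm.errors`
       runner = algorithm.runner()  # loads the code for actual execution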
@@ -62,12 +62,12 @@ class Database(BackendDatabase):
it is supposed to be a valid path to a database in the designated prefix
area.
dataformat_cache (:py:class:`dict`, Optional): A dictionary mapping
dataformat names to loaded dataformats. This parameter is optional and,
if passed, may greatly speed-up database loading times as dataformats
that are already loaded may be re-used. If you use this parameter, you
must guarantee that the cache is refreshed as appropriate in case the
underlying dataformats change.
Attributes:
@@ -51,25 +51,25 @@ class DataFormat(BackendDataFormat):
prefix (str): Establishes the prefix of your installation.
data (:py:class:`object`, Optional): The piece of data representing the
data format. It must validate against the schema defined for data
formats. If a string is passed, it is supposed to be a valid path to a
data format in the designated prefix area. If ``None`` is passed, loads
our default prototype for data formats.
parent (:py:class:`tuple`, Optional): The parent DataFormat for this
format. If set to ``None``, this means this dataformat is the first one
on the hierarchy tree. If set to a tuple, the contents are
``(format-instance, field-name)``, which indicates the originating
object that is this object's parent and the name of the field on that
object that points to this one.
dataformat_cache (:py:class:`dict`, Optional): A dictionary mapping
dataformat names to loaded dataformats. This parameter is optional and,
if passed, may greatly speed-up data format loading times as
dataformats that are already loaded may be re-used. If you use this
parameter, you must guarantee that the cache is refreshed as
appropriate in case the underlying dataformats change.
Attributes:
@@ -135,9 +135,9 @@ class Host(object):
Parameters:
raise_on_errors (:py:class:`bool`, Optional): If we should raise an
exception (:py:exc:`RuntimeError`) in case installed environments
override each other and we can't know which to use.
Raises:
@@ -317,14 +317,14 @@ class Host(object):
container (:py:class:`Container`): The container.
virtual_memory_in_megabytes (:py:class:`int`, Optional): The maximum
amount of memory the user process can consume on the host. If not
specified, a memory limit is not set.
max_cpu_percent (:py:class:`float`, Optional): The maximum amount of
CPU the user process may consume on the host. The value ``100``
equals to using 100% of a single core. If not specified, then a CPU
limitation is not put in place.
"""
@@ -420,8 +420,9 @@ class Host(object):
Parameters:
timeout (:py:class:`float`, Optional): A timeout in seconds to wait
for the user process to finish. If a timeout value is not given,
waits forever.
'''
(status, stdout, stderr) = self._exec(['docker', 'wait', container.id],
timeout=timeout)
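A small, hypothetical call-site sketch for the timeout behaviour documented
above (``host`` and ``container`` are assumed to exist)::

   # wait at most 60 seconds for the user process to finish
   status = host.wait(container, timeout=60.0)

   # without a timeout, the call blocks until the container exits
   status = host.wait(container)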
@@ -110,13 +110,13 @@ def make_label(inputs, name, outputs, color):
Parameters:
inputs (:py:class:`list`): A list of input names which represent all
inputs for this block
name (str): The name of the block
outputs (:py:class:`list`): A list of output names which represent all
outputs for this block
color (str): A color definition in the format ``#rrggbb``, in which each
color channel is represented by 2-digit hexadecimal number ranging from
@@ -59,36 +59,37 @@ class BaseExecutor(object):
string is passed, it is supposed to be a fully qualified absolute path to
a JSON file containing the block execution information.
cache (:py:class:`str`, Optional): If your cache is not located under
``<prefix>/cache``, then specify a full path here. It will be used
instead.
dataformat_cache (dict, optional): A dictionary mapping dataformat names to
loaded dataformats. This parameter is optional and, if passed, may
greatly speed-up database loading times as dataformats that are already
loaded may be re-used. If you use this parameter, you must guarantee that
the cache is refreshed as appropriate in case the underlying dataformats
change.
database_cache (dict, optional): A dictionary mapping database names to
loaded databases. This parameter is optional and, if passed, may
greatly speed-up database loading times as databases that are already
loaded may be re-used. If you use this parameter, you must guarantee that
the cache is refreshed as appropriate in case the underlying databases
change.
algorithm_cache (dict, optional): A dictionary mapping algorithm names to
loaded algorithms. This parameter is optional and, if passed, may