diff --git a/gitlab/after_build.sh b/gitlab/after_build.sh
deleted file mode 100755
index ad30ed8ac054c4f85a1d60ad341c033298380534..0000000000000000000000000000000000000000
--- a/gitlab/after_build.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-# Tue 13 Jun 2017 17:32:07 CEST
-
-source $(dirname ${0})/functions.sh
-
-run_cmd rm -rf ${PREFIX}
diff --git a/gitlab/after_deploy.sh b/gitlab/after_deploy.sh
deleted file mode 100755
index 445b812c564edded72c2d153a262ed03853fe111..0000000000000000000000000000000000000000
--- a/gitlab/after_deploy.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-# Tue 13 Jun 2017 17:32:29 CEST
-
-source $(dirname ${0})/functions.sh
-
-run_cmd rm -rf ${PREFIX}
diff --git a/gitlab/after_docs.sh b/gitlab/after_docs.sh
deleted file mode 100755
index 73a037624bb88e0aed6b9d09f5c6aa0291b38784..0000000000000000000000000000000000000000
--- a/gitlab/after_docs.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env bash
-# Thu 22 Sep 2016 18:23:57 CEST
diff --git a/gitlab/after_test.sh b/gitlab/after_test.sh
deleted file mode 100755
index 61999878fd4ced404d8912a06dc05f867bbbc814..0000000000000000000000000000000000000000
--- a/gitlab/after_test.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-# Tue 13 Jun 2017 17:32:16 CEST
-
-source $(dirname ${0})/functions.sh
-
-run_cmd rm -rf ${PREFIX}
diff --git a/gitlab/after_wheels.sh b/gitlab/after_wheels.sh
deleted file mode 100755
index 73a037624bb88e0aed6b9d09f5c6aa0291b38784..0000000000000000000000000000000000000000
--- a/gitlab/after_wheels.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env bash
-# Thu 22 Sep 2016 18:23:57 CEST
diff --git a/gitlab/before_build.sh b/gitlab/before_build.sh
deleted file mode 100755
index 54520c3fb433df5795af0773e33e10af6d34903f..0000000000000000000000000000000000000000
--- a/gitlab/before_build.sh
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env bash
-# Mon 8 Aug 17:40:24 2016 CEST
-
-source $(dirname ${0})/functions.sh
-
-if [ -z "${WHEELS_REPOSITORY}" ]; then
-  WHEELS_REPOSITORY="${DOCSERVER}/private/wheels/gitlab/"
-  WHEELS_SERVER=`echo ${DOCSERVER} | sed 's;http.*://;;'`
-  check_env WHEELS_SERVER
-fi
-check_env WHEELS_REPOSITORY
-
-prepare_build_env ${CONDA_FOLDER} ${PREFIX} ${CONDA_ENV}
-
-# Verify where pip is installed
-use_pip=`which pip`
-if [ -z "${use_pip}" ]; then
-  log_error "Cannot find pip, aborting..."
-  exit 1
-else
-  log_info "Using pip: ${use_pip}"
-fi
-
-use_python=`which python`
-if [ -z "${use_python}" ]; then
-  log_error "Cannot find python, aborting..."
-  exit 1
-else
-  log_info "Using python: ${use_python}"
-fi
-
-# Install this package's build dependencies
-PIPOPTS="--find-links ${WHEELS_REPOSITORY}"
-if [ ! -z "${WHEELS_SERVER}" ]; then
-  PIPOPTS="${PIPOPTS} --trusted-host ${WHEELS_SERVER}"
-fi
-
-# When building a tag, do not use beta wheels
-PIPOPTS="${PIPOPTS} --use-wheel --no-index"
-if [ -z "${CI_COMMIT_TAG}" ]; then
-  PIPOPTS="${PIPOPTS} --pre"
-fi
-
-if [ -e requirements.txt ]; then
-  run_cmd ${use_pip} install ${PIPOPTS} --requirement requirements.txt
-else
-  log_info "No requirements.txt file found, skipping 'pip install <build-deps>'..."
-fi
-
-# Install this package's test dependencies
-if [ -e test-requirements.txt ]; then
-  run_cmd ${use_pip} install ${PIPOPTS} --requirement test-requirements.txt
-else
-  log_info "No test-requirements.txt file found, skipping 'pip install <test-deps>'..."
-fi
diff --git a/gitlab/before_deploy.sh b/gitlab/before_deploy.sh
deleted file mode 100755
index 9c68a4f2b0157fbd6e01c34ac61b45c3b75794fc..0000000000000000000000000000000000000000
--- a/gitlab/before_deploy.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-# Wed 21 Sep 2016 13:08:05 CEST
-
-source $(dirname ${0})/functions.sh
-
-run_cmd $(dirname ${0})/before_test.sh
-
-prepare_build_env ${CONDA_FOLDER} ${PREFIX} ${CONDA_ENV}
-
-# setup database locally and run `bob_dbmanage.py all download`
-# if this is a database package - need auxiliary file for package
-if [[ ${CI_PROJECT_NAME} == bob.db.* ]]; then
-  use_buildout=`which buildout`
-  if [ -z "${use_buildout}" ]; then
-    log_error "Cannot find buildout, aborting..."
-    exit 1
-  else
-    log_info "Using buildout: ${use_buildout}"
-  fi
-  run_cmd ${use_buildout}
-  if [ -x ./bin/bob_dbmanage.py ]; then
-    run_cmd ./bin/bob_dbmanage.py all download --force;
-  fi
-fi
diff --git a/gitlab/before_docs.sh b/gitlab/before_docs.sh
deleted file mode 100755
index 71679c5abad81486fdafb2d008f047444bfb42d4..0000000000000000000000000000000000000000
--- a/gitlab/before_docs.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env bash
-# Thu 22 Sep 2016 18:23:57 CEST
-
-source $(dirname ${0})/functions.sh
-
-# if the docs will be uploaded to at least one place,
-# make sure that the project folder will be available
-if [[ ${#DOC_UPLOADS[@]} -gt 0 ]]; then
-  dav_recursive_mkdir "${DOC_SERVER_PREFIX}"
-fi
-
-# Deletes all existing dav folders that will be overwritten
-for k in "${DOC_UPLOADS[@]}"; do
-  dav_delete "${k}"
-done
diff --git a/gitlab/before_test.sh b/gitlab/before_test.sh
deleted file mode 100755
index b588c13da438799818672c514bc4cf903abf1249..0000000000000000000000000000000000000000
--- a/gitlab/before_test.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-# Wed 21 Sep 2016 13:08:05 CEST
-
-source $(dirname ${0})/functions.sh
-
-run_cmd $(dirname ${0})/before_build.sh
-
-prepare_build_env ${CONDA_FOLDER} ${PREFIX} ${CONDA_ENV}
-
-# Verify where pip is installed
-use_pip=`which pip`
-if [ -z "${use_pip}" ]; then
-  log_error "Cannot find pip, aborting..."
-  exit 1
-else
-  log_info "Using pip: ${use_pip}"
-fi
-
-# zc.recipe.egg needs some special installation instructions
-if [ "${CI_PROJECT_NAME}" == "bob.buildout" ]; then
-  run_cmd ${use_pip} install --no-binary ":all:" zc.recipe.egg
-fi
-
-run_cmd ${use_pip} install --use-wheel --no-index --pre dist/*.whl
-
-# Downloads databases that may be missing (not shipped with python pkg)
-if [ -x ${PREFIX}/bin/bob_dbmanage.py ]; then
-  run_cmd ${PREFIX}/bin/bob_dbmanage.py all download --missing;
-fi
diff --git a/gitlab/before_wheels.sh b/gitlab/before_wheels.sh
deleted file mode 100755
index 73a037624bb88e0aed6b9d09f5c6aa0291b38784..0000000000000000000000000000000000000000
--- a/gitlab/before_wheels.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env bash
-# Thu 22 Sep 2016 18:23:57 CEST
diff --git a/gitlab/build.sh b/gitlab/build.sh
index 3af02af73d064d7edaae42422ad6374f3c2d2bee..377af3bf3517df8b0a55861089aa1c7712604228 100755
--- a/gitlab/build.sh
+++ b/gitlab/build.sh
@@ -3,30 +3,31 @@
 
 source $(dirname ${0})/functions.sh
 
-prepare_build_env ${CONDA_FOLDER} ${PREFIX}
-
-use_buildout=`which buildout`
-if [ -z "${use_buildout}" ]; then
-  log_error "Cannot find buildout, aborting..."
-  exit 1
-else
-  log_info "Using buildout: ${use_buildout}"
+run_cmd ${CONDA_FOLDER}/bin/conda install -n root --yes --quiet conda=4 conda-build=3
+run_cmd ${CONDA_FOLDER}/bin/conda config --set always_yes true
+run_cmd ${CONDA_FOLDER}/bin/conda config --set show_channel_urls true
+run_cmd ${CONDA_FOLDER}/bin/conda config --add channels defaults
+run_cmd ${CONDA_FOLDER}/bin/conda config --add channels ${CONDA_CHANNEL}
+if [ -z "${CI_COMMIT_TAG}" ]; then
+  run_cmd ${CONDA_FOLDER}/bin/conda config --add channels ${CONDA_BETA_CHANNEL}
 fi
+run_cmd ${CONDA_FOLDER}/bin/conda config --set ssl_verify false
+run_cmd ${CONDA_FOLDER}/bin/conda install --quiet --yes curl
+run_cmd ${CONDA_FOLDER}/bin/conda clean --lock
+run_cmd cp _ci/conda_build_config.yaml conda/
+run_cmd ${CONDA_FOLDER}/bin/conda info
 
-run_cmd ${use_buildout}
-
-if [ -x ./bin/bob_dbmanage.py ]; then
-  run_cmd ./bin/bob_dbmanage.py all download --missing;
-fi
+run_cmd mkdir -p ./_ci/${OS_SLUG}/${PYTHON_VERSION}
 
-if [ -d ./doc ]; then
-  run_cmd ./bin/sphinx-build -W doc sphinx
-fi
+if [ -z "${CI_COMMIT_TAG}" ]; then
+  run_cmd ${CONDA_FOLDER}/bin/python _ci/channel_support.py ${CONDA_BETA_CHANNEL} ${CI_PROJECT_NAME} ${BOB_PACKAGE_VERSION} ${PYTHON_VERSION} -u --log ./_ci/${OS_SLUG}/${PYTHON_VERSION}/build_number.txt
-if [ -z "${WHEEL_TAG}" ]; then
-  # C/C++ extensions
-  run_cmd ./bin/python setup.py bdist_wheel
 else
-  # Python-only packages
-  run_cmd ./bin/python setup.py bdist_wheel --python-tag ${WHEEL_TAG}
+  run_cmd ${CONDA_FOLDER}/bin/python _ci/channel_support.py ${CONDA_CHANNEL} ${CI_PROJECT_NAME} ${BOB_PACKAGE_VERSION} ${PYTHON_VERSION} -u --log ./_ci/${OS_SLUG}/${PYTHON_VERSION}/build_number.txt
 fi
+
+BOB_BUILD_NUMBER=`head -n 1 build_number.txt | tr -d '\n'`
+export_env BOB_BUILD_NUMBER
+
+run_cmd ${CONDA_FOLDER}/bin/conda build --python=${PYTHON_VERSION} conda
diff --git a/gitlab/channel_support.py b/gitlab/channel_support.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e32924e7bc60982b30b4798cb59e3fa3727183d
--- /dev/null
+++ b/gitlab/channel_support.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+
+import re
+from conda.exports import get_index
+
+
+def main(channel_url, name, version, py_ver, get_urls=False):
+    # no dot in py_ver
+    py_ver = py_ver.replace('.', '')
+    # get the channel index
+    index = get_index(channel_urls=[channel_url], prepend=False)
+    # search if package with the same version exists
+    build_number = 0
+    urls = []
+    for dist in index:
+        if dist.name == name and dist.version == version:
+            match = re.match('py[2-9][0-9]+', dist.build_string)
+            if match and match.group() == 'py{}'.format(py_ver):
+                build_number = max(build_number, dist.build_number + 1)
+                urls.append(index[dist].url)
+    return build_number, urls
+
+
+if __name__ == '__main__':
+    import sys
+    import argparse
+    parser = argparse.ArgumentParser(
+        description='Return the next build number for a package. Or get a list'
+        ' of urls to the existing package.')
+    parser.add_argument(
+        'channel_url', help='The url or name of the channel.')
+    parser.add_argument(
+        'package_name', help='Name of the package.')
+    parser.add_argument(
+        'package_version', help='Version of the package.')
+    parser.add_argument(
+        'python_version', help='Version of the python.')
+    parser.add_argument(
+        '-u', '--package-urls', action='store_true',
+        help='Optionally output a list of existing packages after the build'
+        ' number.')
+    parser.add_argument(
+        '--log', default=sys.stdout, type=argparse.FileType('w'),
+        help='the file where the build number should be written.')
+    args = parser.parse_args()
+    build_number, urls = main(args.channel_url,
+                              args.package_name, args.package_version,
+                              args.python_version, args.package_urls)
+    args.log.write('{}\n'.format(build_number))
+    if args.package_urls:
+        args.log.write('\n'.join(urls))
+    args.log.close()
diff --git a/gitlab/conda_build_config.yaml b/gitlab/conda_build_config.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..fcb105ff97bec7b7ef5c409c45f0af996f515861
--- /dev/null
+++ b/gitlab/conda_build_config.yaml
@@ -0,0 +1,246 @@
+python:
+  - 2.7
+  - 3.5
+  - 3.6
+numpy:
+  - 1.12
+  - 1.13
+gcc:
+  - 4.8.5
+libgcc:
+  - 4.8.5
+toolchain:
+  - 2.3.2
+boost:
+  - 1.61
+libblitz:
+  - 0.10
+libpng:
+  - 1.6.27
+giflib:
+  - 5.1.4
+jpeg:
+  - 9b
+sox:
+  - 14.4.2
+hdf5:
+  - 1.8.17
+libmatio:
+  - 1.5.6
+ffmpeg:
+  - 2.8.10
+vlfeat:
+  - 0.9.20
+libsvm:
+  - 3.21
+mkl:
+  - 2017.0.1
+sqlite:
+  - 3.13.0
+zlib:
+  - 1.2.8
+xz:
+  - 5.2.2
+x264:
+  - 20131217
+libogg:
+  - 1.3.2
+
+pin_run_as_build:
+  libgcc:
+    min_pin: x.x.x
+    max_pin: ''
+  boost:
+    max_pin: x.x
+  libblitz:
+    max_pin: x.x
+  libpng:
+    max_pin: x.x
+  giflib:
+    max_pin: x
+  jpeg:
+    max_pin: x
+  sox:
+    max_pin: x.x.x
+  hdf5:
+    max_pin: x.x.x
+  libmatio:
+    max_pin: x.x
+  ffmpeg:
+    max_pin: x.x
+  vlfeat:
+    max_pin: x.x.x
+  libsvm:
+    max_pin: x.x
+  zlib:
+    max_pin: x.x.x
+  xz:
+    max_pin: x
+  x264:
+    max_pin: x.x.x
+  libogg:
+    max_pin: x
+  bob.buildout:
+    max_pin: x
+  bob.extension:
+    max_pin: x
+  bob.blitz:
+    max_pin: x
+  bob.core:
+    max_pin: x
+  bob.io.base:
+    max_pin: x
+  bob.io.image:
+    max_pin: x
+  bob.io.matlab:
+    max_pin: x
+  bob.io.video:
+    max_pin: x
+  bob.ip.color:
+    max_pin: x
+  bob.ip.draw:
+    max_pin: x
+  bob.ip.optflow.liu:
+    max_pin: x
+  bob.learn.activation:
+    max_pin: x
+  bob.learn.boosting:
+    max_pin: x
+  bob.learn.boosting:
+    max_pin: x
+  bob.learn.libsvm:
+    max_pin: x
+  bob.math:
+    max_pin: x
+  bob.measure:
+    max_pin: x
+  bob.sp:
+    max_pin: x
+  bob.ap:
+    max_pin: x
+  bob.db.base:
+    max_pin: x
+  bob.db.mnist:
+    max_pin: x
+  bob.db.wine:
+    max_pin: x
+  bob.io.audio:
+    max_pin: x
+  bob.ip.base:
+    max_pin: x
+  bob.ip.facedetect:
+    max_pin: x
+  bob.ip.flandmark:
+    max_pin: x
+  bob.ip.gabor:
+    max_pin: x
+  bob.ip.optflow.hornschunck:
+    max_pin: x
+  bob.ip.qualitymeasure:
+    max_pin: x
+  bob.ip.dlib:
+    max_pin: x
+  bob.learn.linear:
+    max_pin: x
+  bob.learn.mlp:
+    max_pin: x
+  bob.db.atnt:
+    max_pin: x
+  bob.db.iris:
+    max_pin: x
+  bob.learn.em:
+    max_pin: x
+  bob:
+    max_pin: x
+  bob.db.arface:
+    max_pin: x
+  bob.db.asvspoof:
+    max_pin: x
+  bob.db.asvspoof2017:
+    max_pin: x
+  bob.db.atvskeystroke:
+    max_pin: x
+  bob.db.avspoof:
+    max_pin: x
+  bob.db.banca:
+    max_pin: x
+  bob.db.biosecure:
+    max_pin: x
+  bob.db.biosecurid.face:
+    max_pin: x
+  bob.db.casme2:
+    max_pin: x
+  bob.db.caspeal:
+    max_pin: x
+  bob.db.cohface:
+    max_pin: x
+  bob.db.frgc:
+    max_pin: x
+  bob.db.gbu:
+    max_pin: x
+  bob.db.hci_tagging:
+    max_pin: x
+  bob.db.ijba:
+    max_pin: x
+  bob.db.kboc16:
+    max_pin: x
+  bob.db.lfw:
+    max_pin: x
+  bob.db.livdet2013:
+    max_pin: x
+  bob.db.mobio:
+    max_pin: x
+  bob.db.msu_mfsd_mod:
+    max_pin: x
+  bob.db.multipie:
+    max_pin: x
+  bob.db.nist_sre12:
+    max_pin: x
+  bob.db.pericrosseye:
+    max_pin: x
+  bob.db.putvein:
+    max_pin: x
+  bob.db.replay:
+    max_pin: x
+  bob.db.replaymobile:
+    max_pin: x
+  bob.db.scface:
+    max_pin: x
+  bob.db.utfvp:
+    max_pin: x
+  bob.db.voicepa:
+    max_pin: x
+  bob.db.xm2vts:
+    max_pin: x
+  bob.db.youtube:
+    max_pin: x
+  bob.ip.caffe_extractor:
+    max_pin: x
+  bob.ip.facelandmarks:
+    max_pin: x
+  bob.ip.skincolorfilter:
+    max_pin: x
+  bob.kaldi:
+    max_pin: x
+  gridtk:
+    max_pin: x
+  bob.bio.base:
+    max_pin: x
+  bob.bio.face:
+    max_pin: x
+  bob.bio.gmm:
+    max_pin: x
+  bob.bio.spear:
+    max_pin: x
+  bob.bio.video:
+    max_pin: x
+  bob.db.cuhk_cufs:
+    max_pin: x
+  bob.db.voxforge:
+    max_pin: x
+  bob.pad.base:
+    max_pin: x
+  bob.pad.voice:
+    max_pin: x
+  bob.bio.caffe_face:
+    max_pin: x
diff --git a/gitlab/deploy.sh b/gitlab/deploy.sh
index 7be644b3cb937c3eedbf169048d9c4e202bd993c..b2407a5083e78391ff3b3a4c7345af40f785f65b 100755
--- a/gitlab/deploy.sh
+++ b/gitlab/deploy.sh
@@ -1,64 +1,21 @@
 #!/usr/bin/env bash
-# Thu 22 Sep 2016 13:59:03 CEST
+# Wed 21 Sep 2016 13:08:05 CEST
 
 source $(dirname ${0})/functions.sh
 
-# upload documentation on our internal server
-run_cmd $(dirname ${0})/before_docs.sh
-run_cmd $(dirname ${0})/docs.sh
-run_cmd $(dirname ${0})/after_docs.sh
-
-if [ "${VISIBILITY}" != "public" ]; then
-  log_warn "WARNING: You cannot publish a PRIVATE to PyPI"
-  log_warn "WARNING: Make this package public if you wish to do so next time"
-  log_warn "WARNING: Stopping deployment procedure before PyPI/Conda pushes"
-  exit 0
-fi
-
-lock_pypirc
-
-log_info "Uploading package to ${PYPISERVER} on behalf of ${PYPIUSER}..."
-setup_deploy check sdist --formats zip upload --repository production
-
-unlock_pypirc
-
-condaforge_packages=("bob" \
-"bob.buildout" \
-"bob.extension" \
-"bob.blitz" \
-"bob.core" \
-"bob.ip.draw" \
-"bob.io.base" \
-"bob.sp" \
-"bob.math" \
-"bob.ap" \
-"bob.measure" \
-"bob.db.base" \
-"bob.io.image" \
-"bob.io.video" \
-"bob.io.matlab" \
-"bob.ip.base" \
-"bob.ip.color" \
-"bob.ip.gabor" \
-"bob.learn.activation" \
-"bob.learn.libsvm" \
-"bob.learn.boosting" \
-"bob.io.audio" \
-"bob.learn.linear" \
-"bob.learn.mlp" \
-"bob.db.wine" \
-"bob.db.mnist" \
-"bob.db.atnt" \
-"bob.ip.flandmark" \
-"bob.ip.facedetect" \
-"bob.ip.optflow.hornschunck" \
-"bob.ip.optflow.liu" \
-"bob.learn.em" \
-"bob.db.iris" \
-"bob.ip.qualitymeasure")
-
-if contains_element ${CI_PROJECT_NAME} "${condaforge_packages[@]}"; then
-  run_cmd ${CONDA_FOLDER}/bin/python _ci/update_feedstock.py ${CI_PROJECT_NAME} recipes
-else
-  run_cmd ${CONDA_FOLDER}/bin/python _ci/update_feedstock.py ${CI_PROJECT_NAME} skeleton
-fi
+# Deletes all existing dav folders that will be overwritten
+for os in "linux-64" "osx-64" "noarch"; do
+  for k in "conda-env/${os}/*.tar.bz2"; do
+    if [ -z "${CI_COMMIT_TAG}" ]; then #beta
+      dav_upload "${k}" "${CONDA_BETA_CHANNEL/private/private-upload}/${os}/"
+    else
+      dav_upload "${k}" "${CONDA_CHANNEL/public/public-upload}/${os}/"
+    fi
+  done
+done
+
+# Uploads docs for the last treated package
+run_cmd tar xfj "${k}" docs/${CI_PROJECT_NAME}
+for folder in "${DOC_UPLOADS[@]}"; do
+  dav_upload_folder docs/${CI_PROJECT_NAME} "${folder}"
+done
diff --git a/gitlab/docs.sh b/gitlab/docs.sh
deleted file mode 100755
index d7b915b6a94778f680a5e96c530ded97b89f48dc..0000000000000000000000000000000000000000
--- a/gitlab/docs.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env bash
-# Wed 21 Sep 2016 13:08:05 CEST
-
-source $(dirname ${0})/functions.sh
-
-# Deletes all existing dav folders that will be overwritten
-for k in "${DOC_UPLOADS[@]}"; do
-  dav_upload_folder sphinx "${k}"
-done
diff --git a/gitlab/functions.sh b/gitlab/functions.sh
index 99c31df2efe6e118191115b0bf987484e0757ec1..937b06949c77e0641bf22e577921d503e776685a 100644
--- a/gitlab/functions.sh
+++ b/gitlab/functions.sh
@@ -119,89 +119,6 @@ run_cmd() {
 }
 
 
-# Prepares ~/.pypirc
-lock_pypirc() {
-  local lockfile=/var/tmp/pypirc_lock
-  local rc=${HOME}/.pypirc
-  local maxtries=10
-  local try=0
-  local sleeptime=30 #seconds
-
-  while true; do
-    if [[ ${try} -lt ${maxtries} ]]; then
-      ((try++))
-      if ( set -o noclobber; echo "$$" > "${lockfile}") 2> /dev/null; then
-        log_info "Successfully acquired ${lockfile}"
-        echo $$ > ${lockfile}
-        log_info "trapping on ${lockfile}..."
-        trap 'rm -f "${lockfile}"; exit $?' INT TERM EXIT
-
-        # start: protected code
-        log_info "Creating ${rc}..."
-        if [ -e ${rc} ]; then
-          run_cmd rm -f ${rc}
-        fi
-        cat <<EOT >> ${rc}
-[distutils]
-index-servers =
-  production
-  staging
-
-[production]
-username: ${PYPIUSER}
-password: ${PYPIPASS}
-
-[staging]
-repository: ${TESTSERVER}
-username: ${PYPIUSER}
-password: ${PYPIPASS}
-EOT
-        run_cmd chmod 600 ${rc}
-        # end: protected code
-        break
-      else
-        log_warn "${lockfile} exists, owned by process $(cat $lockfile)"
-        log_info "Will retry after a ${sleeptime} seconds sleep (${try}/${maxtries})"
-        run_cmd sleep ${sleeptime}
-      fi
-    else
-      log_error "I already retried deploying ${try} times. Aborting..."
-      log_error "You can retry this step when less packages are building."
-      exit 1
-    fi
-  done
-}
-
-
-# Cleans ~/.pypirc, if the lock file belongs to us
-unlock_pypirc() {
-  local lockfile=/var/tmp/pypirc_lock
-  local rc=${HOME}/.pypirc
-
-  # untrap if lock belongs to the running process
-  if [[ $(cat ${lockfile}) == $$ ]]; then
-    run_cmd rm -r ${lockfile}
-    run_cmd rm -rf ${rc}
-    log_info "$ trap - INT TERM EXIT"
-    trap - INT TERM EXIT
-  fi
-}
-
-
-# Runs setup.py in a deployment context. If fails, tries to unlock
-# the ${HOME}/.pypirc file lock
-setup_deploy() {
-  log_info "$ ${@}"
-  ${PREFIX}/bin/python setup.py ${@}
-  local status=$?
-  if [ ${status} != 0 ]; then
-    log_error "Command Failed \"${@}\""
-    unlock_pypirc #just tries
-    exit ${status}
-  fi
-}
-
-
 # Uploads a file to our intranet location via curl
 # $1: Path to the file to upload (e.g. dist/myfile.whl)
 # $2: Path on the server to upload to (e.g. private-upload/wheels/gitlab/)
@@ -324,46 +241,6 @@ dav_upload_folder() {
 }
 
 
-# Creates (clones), Activates environment and sets up compilation
-# $1: root of the conda installation
-# $2: your current build prefix
-# $3: the name of the conda environment to clone
-prepare_build_env() {
-
-  # Readies a conda environment to use for installation
-  if [ ! -d $2 ]; then
-    log_info "Creating conda installation at $2..."
-    run_cmd $1/bin/conda create --clone $3 --prefix $2 --yes
-  else
-    log_info "Prefix directory $2 exists, not re-installing..."
-  fi
-
-  # Activates conda environment for the build
-  log_info "$ source $1/bin/activate $2"
-  source $1/bin/activate $2
-
-  # Configures CCACHE
-  # use_ccache=`which ccache`
-  # if [ -z "${use_ccache}" ]; then
-  #   log_warn "Cannot find ccache, compiling from scratch..."
-  # else
-  #   local ccache_bin=$2/lib/ccache
-  #   if [ ! -d ${ccache_bin} ]; then
-  #     run_cmd mkdir -pv ${ccache_bin}
-  #     ln -sf ${use_ccache} ${ccache_bin}/gcc
-  #     ln -sf ${use_ccache} ${ccache_bin}/g++
-  #     ln -sf ${use_ccache} ${ccache_bin}/cc
-  #     ln -sf ${use_ccache} ${ccache_bin}/c++
-  #   fi
-  #   use_gcc=`which gcc`
-  #   PATH=${ccache_bin}:${PATH}
-  #   export_env PATH
-  #   log_info "ccache installed at ${use_ccache}, caching compilations..."
-  #   log_info "gcc installed at ${use_gcc}..."
-  # fi
-}
-
-
 # Checks if an array contains a value
 # taken from here: https://stackoverflow.com/questions/3685970/check-if-an-array-contains-a-value
 # Parameters: <value-to-check> <array-variable>
@@ -375,12 +252,21 @@ contains_element () {
 }
 
 
+if [ -z "${BOB_PACKAGE_VERSION}" ]; then
+  if [ ! -r "version.txt" ]; then
+    log_error "./version.txt does not exist - cannot figure out version number"
+    exit 1
+  fi
+  BOB_PACKAGE_VERSION=`cat version.txt | tr -d '\n'`;
+fi
+
 check_env PYTHON_VERSION
 check_env CI_PROJECT_URL
 check_env CI_PROJECT_DIR
 check_env CI_PROJECT_PATH
 check_env CI_PROJECT_NAME
 check_env CI_COMMIT_REF_NAME
+check_env BOB_PACKAGE_VERSION
 check_pass PYPIUSER
 check_pass PYPIPASS
 check_pass DOCUSER
@@ -392,31 +278,27 @@ VISIBILITY=`visibility`
 IS_MASTER=`is_master`
 
 if [ -z "${CONDA_FOLDER}" ]; then
-  CONDA_FOLDER=/opt/conda
+  CONDA_FOLDER=/opt/miniconda
 fi
 
 PYVER=py$(echo ${PYTHON_VERSION} | tr -d '.')
-if [ -z "${CONDA_ENV}" ]; then
-  if [ -d "${CONDA_FOLDER}/envs/bob-devel-${PYVER}-${CI_COMMIT_REF_NAME}" ]; then
-    CONDA_ENV=bob-devel-${PYVER}-${CI_COMMIT_REF_NAME}
-  else
-    CONDA_ENV=bob-devel-${PYVER}
-  fi
-fi
-BOB_PREFIX_PATH=${CONDA_FOLDER}/envs/${CONDA_ENV}
 
 if [ -z "${DOCSERVER}" ]; then
   DOCSERVER=http://www.idiap.ch
   export_env DOCSERVER
 fi
 
-if [ -z "${PREFIX}" ]; then
-  PREFIX=${CI_PROJECT_DIR}/build-prefix
+CONDA_CHANNEL="${DOCSERVER}/public/conda"
+CONDA_BETA_CHANNEL="${DOCSERVER}/private/conda"
+
+if [ -z "${OS_SLUG}" ]; then
+  OS_SLUG="${OSNAME}-64"
 fi
 
 TESTSERVER=https://testpypi.python.org/legacy/
 
 check_env OSNAME
+export_env OS_SLUG
 check_env VISIBILITY
 check_env IS_MASTER
 check_env PYVER
@@ -425,8 +307,8 @@ export_env PREFIX
 check_env DOCSERVER
 check_env TESTSERVER
 check_env CONDA_FOLDER
-check_env CONDA_ENV
-export_env BOB_PREFIX_PATH
+check_env CONDA_CHANNEL
+check_env CONDA_BETA_CHANNEL
 
 # Setup default documentation server
 if [ -z "${CI_COMMIT_TAG}" ]; then
diff --git a/gitlab/pypi.sh b/gitlab/pypi.sh
new file mode 100755
index 0000000000000000000000000000000000000000..02011474a23ed4ba5b9d131bdfe9f691b3033287
--- /dev/null
+++ b/gitlab/pypi.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+# Thu 22 Sep 2016 13:59:03 CEST
+
+source $(dirname ${0})/functions.sh
+
+if [ "${VISIBILITY}" != "public" ]; then
+  log_warn "WARNING: You cannot publish a PRIVATE to PyPI"
+  log_warn "WARNING: Make this package public if you wish to do so next time"
+  log_warn "WARNING: Stopping deployment procedure before PyPI/Conda pushes"
+  exit 0
+fi
+
+log_info "Uploading package to ${PYPISERVER} on behalf of ${PYPIUSER}..."
+twine register --username ${PYPIUSER} --password ${PYPIPASS} dist/*.zip
+twine upload --username ${PYPIUSER} --password ${PYPIPASS} dist/*.zip
diff --git a/gitlab/test.sh b/gitlab/test.sh
deleted file mode 100755
index 4a76151d483f3d59f19945ac802d55b65ed2a990..0000000000000000000000000000000000000000
--- a/gitlab/test.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env bash
-# Wed 21 Sep 2016 13:08:05 CEST
-
-source $(dirname ${0})/functions.sh
-
-prepare_build_env ${CONDA_FOLDER} ${PREFIX}
-
-run_cmd cd ${PREFIX}
-
-# Checks some programs
-
-use_python=`which python`
-check_env use_python
-
-use_coverage=`which coverage`
-check_env use_coverage
-
-use_nosetests=`which nosetests`
-check_env use_nosetests
-
-use_sphinx=`which sphinx-build`
-check_env use_sphinx
-
-# The tests:
-
-run_cmd ${use_coverage} run --source=${CI_PROJECT_NAME} ${use_nosetests} -sv ${CI_PROJECT_NAME}
-run_cmd ${use_coverage} report
-run_cmd ${use_sphinx} -b doctest ${CI_PROJECT_DIR}/doc ${CI_PROJECT_NAME}/sphinx
-
-run_cmd cd ${CI_PROJECT_DIR}
diff --git a/templates/ci-for-python-only.yml b/templates/ci-for-python-only.yml
index 6489c109f75951127ce5701f50389680dea3b440..6a79b6fdd2d2b07781899e43620384dcc32013ff 100644
--- a/templates/ci-for-python-only.yml
+++ b/templates/ci-for-python-only.yml
@@ -1,249 +1,122 @@
-# This build file heavily uses template features from YAML so it is generic
-# enough for any Bob project. Don't modify it unless you know what you're
-# doing.
+# This build file uses template features from YAML so it is generic enough for
+# any Bob project. Don't modify it unless you know what you're doing.
 
-# Definition of our build pipeline
+# Definition of our build pipeline order
 stages:
   - build
-  - test
-  - docs
-  - wheels
   - deploy
+  - pypi
 
 
-# ---------
-# Templates
-# ---------
-
-# Template for the build stage
-# Needs to run on all supported architectures, platforms and python versions
+# Build targets
 .build_template: &build_job
   stage: build
   before_script:
    - git clean -ffdx
    - mkdir _ci
-   - curl --silent "https://gitlab.idiap.ch/bob/bob.admin/raw/master/gitlab/install.sh" > _ci/install.sh
+   - curl --silent "https://gitlab.idiap.ch/bob/bob.admin/raw/condapackage/gitlab/install.sh" > _ci/install.sh
   - chmod 755 _ci/install.sh
   - ./_ci/install.sh _ci #updates
-    - ./_ci/before_build.sh
  script:
    - ./_ci/build.sh
-  after_script:
-    - ./_ci/after_build.sh
+  cache:
+    key: "$CI_JOB_NAME"
+    paths:
+      - conda-env/.pkgs/*.tar.bz2
+      - conda-env/.pkgs/urls.txt
+      - conda-env/src_cache
  artifacts:
    expire_in: 1 week
    paths:
      - _ci/
      - dist/
-      - sphinx/
-
-
-# Template for the test stage - re-installs from uploaded wheels
-# Needs to run on all supported architectures, platforms and python versions
-.test_template: &test_job
-  stage: test
-  before_script:
-    - ./_ci/install.sh _ci #updates
-    - ./_ci/before_test.sh
-  script:
-    - ./_ci/test.sh
-  after_script:
-    - ./_ci/after_test.sh
-
-
-# Template for the wheel uploading stage
-# Needs to run against one supported architecture, platform and python version
-.wheels_template: &wheels_job
-  stage: wheels
-  environment: intranet
-  only:
-    - master
-    - /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
-  before_script:
-    - ./_ci/install.sh _ci #updates
-    - ./_ci/before_wheels.sh
-  script:
-    - ./_ci/wheels.sh
-  after_script:
-    - ./_ci/after_wheels.sh
-
-
-# Template for (latest) documentation upload stage
-# Only one real job needs to do this
-.docs_template: &docs_job
-  stage: docs
-  environment: intranet
-  only:
-    - master
-  before_script:
-    - ./_ci/install.sh _ci #updates
-    - ./_ci/before_docs.sh
-  script:
-    - ./_ci/docs.sh
-  after_script:
-    - ./_ci/after_docs.sh
-
-
-# Template for the deployment stage - re-installs from uploaded wheels
-# Needs to run on a single architecture only
-# Will deploy your package to PyPI and other required services
-# Only runs for tags
-.deploy_template: &deploy_job
-  stage: deploy
-  environment: internet
-  only:
-    - /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
-  except:
-    - branches
-  before_script:
-    - ./_ci/install.sh _ci #updates
-    - ./_ci/before_deploy.sh
-  script:
-    - ./_ci/deploy.sh
-  after_script:
-    - ./_ci/after_deploy.sh
-
+      - conda-env/$OS_SLUG/
 
 
-# -------------
-# Build Targets
-# -------------
-
-# Linux + Python 2.7: Builds, tests, uploads wheel and deploys (if needed)
 build_linux_27:
   <<: *build_job
-  variables: &linux_27_build_variables
+  variables:
    PYTHON_VERSION: "2.7"
-    WHEEL_TAG: "py27"
  tags:
-    - conda-linux
-
-test_linux_27:
-  <<: *test_job
-  variables: *linux_27_build_variables
-  dependencies:
-    - build_linux_27
-  tags:
-    - conda-linux
-
-wheels_linux_27:
-  <<: *wheels_job
-  variables: *linux_27_build_variables
-  dependencies:
-    - build_linux_27
-  tags:
-    - conda-linux
-
-deploy_linux_27:
-  <<: *deploy_job
-  variables: *linux_27_build_variables
-  dependencies:
-    - build_linux_27
-  tags:
-    - conda-linux
+    - docker
 
-
-# Linux + Python 3.5: Builds, tests and uploads wheel
 build_linux_35:
   <<: *build_job
-  variables: &linux_35_build_variables
+  variables:
    PYTHON_VERSION: "3.5"
-    WHEEL_TAG: "py3"
  tags:
-    - conda-linux
-
-test_linux_35:
-  <<: *test_job
-  variables: *linux_35_build_variables
-  dependencies:
-    - build_linux_35
-  tags:
-    - conda-linux
-
-wheels_linux_35:
-  <<: *wheels_job
-  variables: *linux_35_build_variables
-  dependencies:
-    - build_linux_35
-  tags:
-    - conda-linux
-
-docs_linux_35:
-  <<: *docs_job
-  variables: *linux_35_build_variables
-  dependencies:
-    - build_linux_35
-  tags:
-    - conda-linux
+    - docker
 
-
-# Linux + Python 3.6: Builds and tests
 build_linux_36:
   <<: *build_job
-  variables: &linux_36_build_variables
+  variables:
    PYTHON_VERSION: "3.6"
-    WHEEL_TAG: "py3"
+    BUILD_EGG: "true"
  tags:
-    - conda-linux
-
-test_linux_36:
-  <<: *test_job
-  variables: *linux_36_build_variables
-  dependencies:
-    - build_linux_36
-  tags:
-    - conda-linux
+    - docker
 
-
-# Mac OSX + Python 2.7: Builds and tests
 build_macosx_27:
   <<: *build_job
-  variables: &macosx_27_build_variables
+  variables:
    PYTHON_VERSION: "2.7"
-    WHEEL_TAG: "py27"
  tags:
    - conda-macosx
 
-test_macosx_27:
-  <<: *test_job
-  variables: *macosx_27_build_variables
-  dependencies:
-    - build_macosx_27
-  tags:
-    - conda-macosx
-
-
-# Mac OSX + Python 3.5: Builds and tests
 build_macosx_35:
   <<: *build_job
-  variables: &macosx_35_build_variables
+  variables:
    PYTHON_VERSION: "3.5"
-    WHEEL_TAG: "py3"
  tags:
    - conda-macosx
 
-test_macosx_35:
-  <<: *test_job
-  variables: *macosx_35_build_variables
-  dependencies:
-    - build_macosx_35
-  tags:
-    - conda-macosx
-
-
-# Mac OSX + Python 3.6: Builds and tests
 build_macosx_36:
   <<: *build_job
-  variables: &macosx_36_build_variables
+  variables:
    PYTHON_VERSION: "3.6"
-    WHEEL_TAG: "py3"
  tags:
    - conda-macosx
 
-test_macosx_36:
-  <<: *test_job
-  variables: *macosx_36_build_variables
-  dependencies:
-    - build_macosx_36
-  tags:
-    - conda-macosx
+
+# Deploy targets
+deploy_beta:
+  stage: deploy
+  environment: beta
+  only:
+    - master
+    - /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
+  before_script:
+    - ./_ci/install.sh _ci #updates
+  script:
+    - ./_ci/deploy_beta.sh
+  dependencies:
+    - build_linux_27
+    - build_linux_35
+    - build_linux_36
build_macosx_27 + - build_macosx_35 - build_macosx_36 tags: - - conda-macosx + - deployer + + +pypi: + stage: pypi + environment: pypi + only: + - /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags) + except: + - branches + before_script: + - ./_ci/install.sh _ci #updates + script: + - ./_ci/pypi.sh + dependencies: + - build_linux_27 + - build_linux_35 + - build_linux_36 + - build_macosx_27 + - build_macosx_35 + - build_macosx_36 + tags: + - deployer