#!/usr/bin/env bash
# Thu 22 Sep 2016 13:05:54 CEST

# Build utilities

SCRIPTS_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)

# Determines the operating system we're using
osname() {
  [[ "$(uname -s)" == "Darwin" ]] && echo "osx" || echo "linux"
}

# Determines the visibility of the current package
visibility() {
  local code=$(curl --output /dev/null --silent --fail --write-out "%{http_code}" "${CI_PROJECT_URL}")
  [[ ${code} == *200 ]] && echo "public" || echo "private"
}

# Determines if we're on the master branch or not
is_master() {
  local retval="${CI_COMMIT_REF_NAME}"
  if [[ -n "${CI_COMMIT_TAG}" ]]; then
    # we're building a tag, check it is available on the master branch
    local count=`git branch -a --contains "tags/${CI_COMMIT_TAG}" | grep master | wc -l`
    if [[ "${count}" -gt "0" ]]; then
      # the tag is on master
      retval="master"
    else
      retval="it-is-not-master"
    fi
  fi
  if [[ "${retval}" == "master" ]]; then
    echo "true"
  else
    echo "false"
  fi
}

# Checks whether to use "date" or "gdate"
gnudate() {
  if hash gdate 2>/dev/null; then
    echo gdate
  else
    echo date
  fi
}
DATE=`gnudate`

# datetime prefix for logging
log_datetime() {
  echo "($(${DATE} +%T.%3N))"
}

# Functions for coloring echo commands
log_debug() {
  echo -e "$(log_datetime) \033[1;32m${@}\033[0m"
}

log_info() {
  echo -e "$(log_datetime) \033[1;34m${@}\033[0m"
}

log_warn() {
  echo -e "$(log_datetime) \033[1;35mWarning: ${@}\033[0m" >&2
}

log_error() {
  echo -e "$(log_datetime) \033[1;31mError: ${@}\033[0m" >&2
}

# Checks a given environment variable is set (non-zero size)
check_env() {
  # ${!1} expands to the value of the variable named by ${1}
  if [ -z "${!1}" ]; then
    log_error "Variable ${1} is undefined - aborting..."
    exit 1
  else
    log_info "${1}=${!1}"
  fi
}

# Exports a given environment variable, verbosely
export_env() {
  if [ -z "${!1}" ]; then
    log_error "Variable ${1} is undefined - aborting..."
    exit 1
  else
    export ${1}
    log_info "export ${1}=${!1}"
  fi
}

# Checks a given environment variable is set (non-zero size), without printing its value
check_pass() {
  if [ -z "${!1}" ]; then
    log_error "Variable ${1} is undefined - aborting..."
    exit 1
  else
    log_info "${1}=********"
  fi
}

# Function for running a command and echoing its results
run_cmd() {
  log_info "$ ${@}"
  ${@}
  local status=$?
  if [ ${status} != 0 ]; then
    log_error "Command failed \"${@}\""
    exit ${status}
  fi
}
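
# Usage sketch for the helpers above (the "conda info" command is only an
# illustration; the variables are the ones checked further down):
#
#   check_env PYTHON_VERSION        # logs PYTHON_VERSION=<value> or aborts if unset
#   check_pass DOCPASS              # logs DOCPASS=******** instead of the value
#   export_env BOB_PACKAGE_VERSION  # exports the variable and logs the assignment
#   run_cmd conda info              # logs the command line, runs it, aborts on failure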

# Uploads a file to our intranet location via curl
# $1: Path to the file to upload (e.g. dist/myfile.whl)
# $2: Path on the server to upload to (e.g. private-upload/wheels/gitlab/)
dav_upload() {
  if [ ! -e "$1" ]; then
    log_error "File \`${1}' does not exist on the local disk"
    exit 1
  fi
  local code=`curl --location --silent --fail --write-out "%{http_code}" --user "${DOCUSER}:${DOCPASS}" --upload-file ${1} ${DOCSERVER}/${2}`
  if [[ ${code} == *204 || ${code} == *201 ]]; then
    log_info "curl: cp ${1} -> ${DOCSERVER}/${2}"
  else
    log_error "Curl command finished with an error condition (code=${code}):"
    curl --location --silent --user "${DOCUSER}:${DOCPASS}" --upload-file ${1} ${DOCSERVER}/${2}
    exit ${code}
  fi
}

# Creates a folder at our intranet location via curl
# $1: Path of the folder to create (e.g. private-upload/docs/test-folder)
# $2: which HTTP response code it should return instead of exit on (e.g. 405 means the folder already exists)
dav_mkdir() {
  local code=$(curl --location --silent --fail --write-out "%{http_code}" --user "${DOCUSER}:${DOCPASS}" -X MKCOL "${DOCSERVER}/${1}")
  if [[ ${code} == *204 || ${code} == *201 ]]; then
    log_info "curl: mkdir ${DOCSERVER}/${1}"
  # If the return code was not a success, the function should usually treat it
  # as an error. However, sometimes the codes must be treated more flexibly,
  # e.g.: dav_recursive_mkdir wants the directory created *or* already
  # existing, which means that a 405 (directory already exists) should not be
  # treated as an error. Other codes may also need similar consideration in
  # the future.
  elif [[ "${code}" == "$2" ]]; then
    return "${code}"
  else
    log_error "Curl command finished with an error condition (code=${code}):"
    curl --location --silent --user "${DOCUSER}:${DOCPASS}" -X MKCOL "${DOCSERVER}/${1}"
    exit "${code}"
  fi
}

# Creates a folder and all parent folders at an intranet location via curl (mkdir -p)
# $1: Path of a folder to guarantee to be writeable (e.g. private-upload/docs/bob/bob.admin)
dav_recursive_mkdir() {
  log_info "curl: mkdir -p ${DOCSERVER}/${1}"
  # split the path into an array of path segments
  # the IFS assignment only applies to the read command, so it doesn't mess up *this* shell
  IFS=/ read -a path_segments <<< "$1"
  local current_subpath=''
  # loop through segments
  for seg in "${path_segments[@]}"; do
    # append each segment to the current subpath
    current_subpath="${current_subpath}${seg}/"
    log_info "mkdir ${DOCSERVER}/${current_subpath}"
    # make sure the current subpath folder is created
    # a 405 response code is returned when the folder already exists
    dav_mkdir "$current_subpath" 405
    log_info "Directory ${current_subpath} (now) exists."
  done
}

# Deletes a file/folder from our intranet location via curl
# $1: Path to the file/folder to delete (e.g. dist/myfile.whl)
dav_delete() {
  log_info "curl: exists ${DOCSERVER}/${1}?"
  # checks if the directory exists before trying to remove it (use --head)
  local code=$(curl --location --silent --fail --write-out "%{http_code}" --head --user "${DOCUSER}:${DOCPASS}" "${DOCSERVER}/$1")
  if [[ ${code} == *404 ]]; then
    log_info "Directory ${DOCSERVER}/$1 does not exist. Skipping deletion"
    return 0
  fi
  code=$(curl --location --silent --fail --write-out "%{http_code}" --user "${DOCUSER}:${DOCPASS}" -X DELETE "${DOCSERVER}/$1")
  if [[ ${code} == *204 || ${code} == *201 ]]; then
    log_info "curl: rm -rf ${DOCSERVER}/${1}"
  else
    log_error "Curl command finished with an error condition (code=${code}):"
    curl --location --silent --user "${DOCUSER}:${DOCPASS}" -X DELETE "${DOCSERVER}/$1"
    exit "${code}"
  fi
}

# Uploads a folder and all contents recursively to our intranet location via curl
# $1: Path to the folder to upload (e.g. test-folder/)
# $2: Path on the server to upload to (e.g. private-upload/docs/test/ to put contents of test-folder/ in test/)
dav_upload_folder() {
  log_info "curl: cp -r ${1} -> ${DOCSERVER}/${2}..."
  if [ ! -e "$1" ]; then
    log_error "Directory \`${1}' does not exist on the local disk"
    exit 1
  fi
  dav_delete "${2}"
  find "$1" | while read -r fname; do
    # replace the local path prefix ('../folder1/folder2/folder-to-upload/')
    # with the server path prefix ('private-upload/docs/test/')
    # to make something like '../folder1/folder2/folder-to-upload/test.txt'
    # into 'private-upload/docs/test/test.txt'
    local server_prefix="${2%?}" # without the ending slash
    local server_path="${fname/$1/$server_prefix}"
    # if it's a file...
    if [[ -f "${fname}" ]]; then
      # upload the file
      dav_upload "${fname}" "${server_path}"
    else
      # if it's a directory, create it
      dav_mkdir "${server_path}"
    fi
  done
}
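
# Usage sketch for the dav_* helpers above (paths are hypothetical; the real
# upload locations are computed further down in this script):
#
#   dav_recursive_mkdir private-upload/docs/bob/bob.example
#   # -> issues one MKCOL per path segment (private-upload/, private-upload/docs/,
#   #    ...), tolerating a 405 response for folders that already exist
#
#   dav_upload_folder sphinx/html private-upload/docs/bob/bob.example/master/
#   # -> removes the remote folder if present, then re-creates directories and
#   #    uploads every file found under sphinx/html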

# Checks if an array contains a value
# taken from here: https://stackoverflow.com/questions/3685970/check-if-an-array-contains-a-value
# Parameters: <value-to-check> <array-elements...>
# Usage: contains_element "a string" "${array[@]}"
contains_element () {
  local e
  for e in "${@:2}"; do [[ "$e" == "$1" ]] && return 0; done
  return 1
}

if [ -z "${BOB_PACKAGE_VERSION}" ]; then
  if [ ! -r "version.txt" ]; then
    log_error "./version.txt does not exist - cannot figure out version number"
    exit 1
  fi
  BOB_PACKAGE_VERSION=`cat version.txt | tr -d '\n'`
fi

# Merges conda cache folders
# $1: Path to the main cache to keep. The directory must exist.
# $2: Path to the extra cache to be merged into the main cache
merge_conda_cache() {
  if [ -e ${1} ]; then
    _cached_urls="${2}/urls"
    _urls="${1}/urls"
    _cached_urlstxt="${2}/urls.txt"
    _urlstxt="${1}/urls.txt"
    if [ -e ${2} ]; then
      log_info "Merging urls.txt and packages with cached files..."
      mv ${2}/*.tar.bz2 ${1}/
      # append the cached entries, then de-duplicate in place ("cat a b > a"
      # would truncate "a" before it is read)
      cat ${_cached_urls} >> ${_urls}
      sort -u -o ${_urls} ${_urls}
      cat ${_cached_urlstxt} >> ${_urlstxt}
      sort -u -o ${_urlstxt} ${_urlstxt}
    fi
  fi
}

# Installs a miniconda installation.
# $1: Path to where to install miniconda.
install_miniconda() {
  log_info "Installing miniconda in ${1} ..."

  # Download the latest conda installation script
  if [ "${OSNAME}" == "linux" ]; then
    object=https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh
  else
    object=https://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
  fi

  # check if miniconda.sh exists
  if [ ! -e miniconda.sh ]; then
    log_info "Downloading latest miniconda3 installer..."
    run_cmd curl --silent --output miniconda.sh ${object}
  else
    log_info "Re-using cached miniconda3 installer..."
    ls -l miniconda.sh
  fi

  # move the cache to a different folder if it exists
  if [ -e ${1} ]; then
    run_cmd mv ${1} ${1}.cached
  fi

  # install miniconda
  bash miniconda.sh -b -p ${1}

  # put back the cache and merge urls/urls.txt
  merge_conda_cache ${1}/pkgs ${1}.cached/pkgs

  # remove the backup cache folder
  rm -rf ${1}.cached

  # List currently available packages on the cache
  # run_cmd ls -l ${1}/pkgs/
  # run_cmd cat ${1}/pkgs/urls.txt

  hash -r
}
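
# Usage sketch for the conda helpers above (the path mirrors the /opt/miniconda
# default used below, but is otherwise illustrative):
#
#   install_miniconda /opt/miniconda
#   # -> moves any existing /opt/miniconda to /opt/miniconda.cached, installs
#   #    a fresh miniconda3, then calls
#   #    merge_conda_cache /opt/miniconda/pkgs /opt/miniconda.cached/pkgs
#   #    to recover cached package tarballs and the urls/urls.txt files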

check_env PYTHON_VERSION
check_env CI_PROJECT_URL
check_env CI_PROJECT_DIR
check_env CI_PROJECT_PATH
check_env CI_PROJECT_NAME
check_env CI_COMMIT_REF_NAME
export_env BOB_PACKAGE_VERSION
check_pass PYPIUSER
check_pass PYPIPASS
check_pass DOCUSER
check_pass DOCPASS

# Sets up variables
OSNAME=`osname`
VISIBILITY=`visibility`
IS_MASTER=`is_master`

check_env OSNAME
check_env VISIBILITY
check_env IS_MASTER

if [ -z "${CONDA_FOLDER}" ]; then
  CONDA_FOLDER=/opt/miniconda
fi

# check if a conda installation exists. Otherwise, install one (only in build
# stage):
if [ ! -e ${CONDA_FOLDER}/bin/conda ]; then
  if [ "${CI_JOB_STAGE}" == "build" ]; then
    install_miniconda ${CONDA_FOLDER}
  fi
fi

PYVER=py$(echo ${PYTHON_VERSION} | tr -d '.')

if [ -z "${DOCSERVER}" ]; then
  DOCSERVER=http://www.idiap.ch
  export_env DOCSERVER
fi

CONDA_CHANNEL="${DOCSERVER}/public/conda/label/main"
CONDA_BETA_CHANNEL="${DOCSERVER}/private/conda"

if [ -z "${OS_SLUG}" ]; then
  OS_SLUG="${OSNAME}-64"
fi

TESTSERVER=https://testpypi.python.org/legacy/

export_env OS_SLUG
check_env PYVER
check_env PREFIX
export_env PREFIX
check_env DOCSERVER
check_env TESTSERVER
check_env CONDA_FOLDER
check_env CONDA_CHANNEL
check_env CONDA_BETA_CHANNEL

# Setup default documentation server
if [ -z "${CI_COMMIT_TAG}" ]; then
  DEFSRV="${DOCSERVER}/software/bob/docs/bob/%(name)s/master/"
else
  DEFSRV="${DOCSERVER}/software/bob/docs/bob/%(name)s/%(version)s/|${DOCSERVER}/software/bob/docs/bob/%(name)s/stable/|http://pythonhosted.org/%(name)s/"
fi

if [ -z "${BOB_DOCUMENTATION_SERVER}" ]; then
  BOB_DOCUMENTATION_SERVER="${DEFSRV}"
else
  BOB_DOCUMENTATION_SERVER="${BOB_DOCUMENTATION_SERVER}|${DEFSRV}"
fi

if [ "${VISIBILITY}" != "public" ]; then
  # If private or internal, allow it to depend on other internal documents
  if [ -z "${CI_COMMIT_TAG}" ]; then
    BOB_DOCUMENTATION_SERVER="${BOB_DOCUMENTATION_SERVER}|${DOCSERVER}/private/docs/bob/%(name)s/master/"
  else
    BOB_DOCUMENTATION_SERVER="${BOB_DOCUMENTATION_SERVER}|${DOCSERVER}/private/docs/bob/%(name)s/%(version)s/|${DOCSERVER}/private/docs/bob/%(name)s/stable/"
  fi
fi
unset DEFSRV
export_env BOB_DOCUMENTATION_SERVER

# Sets up certificates for curl and openssl
CURL_CA_BUNDLE="${SCRIPTS_DIR}/cacert.pem"
export_env CURL_CA_BUNDLE
SSL_CERT_FILE="${CURL_CA_BUNDLE}"
export_env SSL_CERT_FILE

# Sets up upload folders for documentation (just in case we need them)
# See: https://gitlab.idiap.ch/bob/bob.admin/issues/2
# Prefix differs between private & public repos
DOC_SERVER_PREFIX="-upload/docs/${CI_PROJECT_PATH}"
if [[ "${VISIBILITY}" == "public" ]]; then
  DOC_SERVER_PREFIX="public${DOC_SERVER_PREFIX}"
else
  DOC_SERVER_PREFIX="private${DOC_SERVER_PREFIX}"
fi

DOC_UPLOADS=()

# always upload documentation for all branches. Ideally this is only called
# when in master.
DOC_UPLOADS+=("${DOC_SERVER_PREFIX}/${CI_COMMIT_REF_NAME}/")
if [[ -n "${CI_COMMIT_TAG}" ]]; then
  DOC_UPLOADS+=("${DOC_SERVER_PREFIX}/${CI_COMMIT_TAG}/")
fi

# upload documentation to a "stable" url if that is the latest tag on the
# master branch
if [[ -n "${CI_COMMIT_TAG}" && "${IS_MASTER}" == "true" ]]; then
  DOC_UPLOADS+=("${DOC_SERVER_PREFIX}/stable/")
fi

export_env DOC_SERVER_PREFIX
check_env DOC_UPLOADS

# Sets up the language so Unicode filenames are considered correctly
LANG="en_US.UTF-8"
LC_ALL="${LANG}"
export_env LANG
export_env LC_ALL
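
# Example (hypothetical project): for a public repository with CI_PROJECT_PATH
# "bob/bob.example" building the "master" branch without a tag, the setup above
# yields DOC_SERVER_PREFIX="public-upload/docs/bob/bob.example" and
# DOC_UPLOADS=("public-upload/docs/bob/bob.example/master/").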