#!/usr/bin/env bash
# Thu 22 Sep 2016 13:05:54 CEST

# Build utilities: absolute path to the directory holding this script
SCRIPTS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Determines the operating system we're using ("osx" or "linux")
osname() {
  if [[ "$(uname -s)" == "Darwin" ]]; then
    echo "osx"
  else
    echo "linux"
  fi
}

# Determines the visibility of the current package by probing the project URL
# anonymously: a final 200 response means the repository is publicly readable.
# Reads: CI_PROJECT_URL
# Outputs: "public" or "private" on stdout
visibility() {
  local code
  # split declaration and assignment so curl's exit status is not masked by
  # "local"; quote the URL to survive any special characters
  code=$(curl --output /dev/null --silent --fail --write-out "%{http_code}" "${CI_PROJECT_URL}")
  # suffix glob tolerates anything captured before the final status code
  [[ ${code} == *200 ]] && echo "public" || echo "private"
}


# Determines if we're on the master branch or not.
# Reads: CI_COMMIT_REF_NAME, CI_COMMIT_TAG
# Outputs: "true" when building master (or a tag reachable from master),
# "false" otherwise
is_master() {
  local retval="${CI_COMMIT_REF_NAME}"
  if [[ -n "${CI_COMMIT_TAG}" ]]; then
    # we're building a tag, check it is available on the master branch
    local count
    # grep -c replaces the old "grep | wc -l" pipeline; like the original it
    # matches any branch whose name contains "master"
    count=$(git branch -a --contains "tags/${CI_COMMIT_TAG}" | grep -c master)
    if [[ "${count}" -gt "0" ]]; then # tag is on master
      retval="master"
    else
      retval="it-is-not-master"
    fi
  fi
  if [[ "${retval}" == "master" ]]; then
    echo "true"
  else
    echo "false"
  fi
}


# Prints the name of the date binary to use: "gdate" when available (GNU date
# on macOS via coreutils), otherwise the system "date".
gnudate() {
  hash gdate 2>/dev/null && echo gdate || echo date
}
# Cache the proper date binary name once for the logging helpers below
DATE=$(gnudate)


# Prints a "(HH:MM:SS.mmm)" datetime prefix for the logging helpers.
log_datetime() {
  printf '(%s)\n' "$(${DATE} +%T.%3N)"
}

# Functions for coloring echo commands
# Logs a timestamped debug message in green to stdout.
log_debug() {
  local green='\033[1;32m' reset='\033[0m'
  echo -e "$(log_datetime) ${green}${@}${reset}"
}


# Logs a timestamped informational message in blue to stdout.
log_info() {
  local blue='\033[1;34m' reset='\033[0m'
  echo -e "$(log_datetime) ${blue}${@}${reset}"
}


# Logs a timestamped "Warning:" message in magenta to stderr.
log_warn() {
  local magenta='\033[1;35m' reset='\033[0m'
  echo -e "$(log_datetime) ${magenta}Warning: ${@}${reset}" >&2
}


# Logs a timestamped "Error:" message in red to stderr.
log_error() {
  local red='\033[1;31m' reset='\033[0m'
  echo -e "$(log_datetime) ${red}Error: ${@}${reset}" >&2
}


# Aborts the script unless the variable named by $1 is both defined and
# non-empty.
# $1: name of the variable to check (accessed through ${!1} indirection)
check_defined() {
  # "${!1+abc}" expands to "abc" only when the variable is set at all
  if [[ -z "${!1+abc}" ]]; then
    log_error "Variable ${1} is undefined - aborting..."
    exit 1
  elif [[ -z "${!1}" ]]; then
    log_error "Variable ${1} is zero-length - aborting..."
    exit 1
  fi
}


# Logs the name and current value of the environment variable named by $1.
log_env() {
  local varname=$1
  log_info "${varname}=${!varname}"
}


# Asserts the variable named by $1 is set and non-empty, then logs its value.
check_env() {
  local varname=$1
  check_defined "${varname}"
  log_env "${varname}"
}


# Checks a given environment variable array is set (non-zero size)
# Then prints all of its components
# $1: name of the array variable to inspect
check_array_env() {
  check_defined "${1}"
  # quote the expansion inside eval so elements containing whitespace or glob
  # characters are preserved (the old unquoted eval word-split them); keep the
  # scratch variables local instead of leaking globals
  local array i
  eval "array=(\"\${${1}[@]}\")"
  for i in "${!array[@]}"; do
    log_info "${1}[${i}]=${array[${i}]}"
  done
}


# Exports the variable named by $1 to the environment, logging the result.
export_env() {
  local varname=$1
  check_defined "${varname}"
  export "${varname}"
  log_info "export ${varname}=${!varname}"
}


# Asserts the secret variable named by $1 is set, logging a masked value so
# the secret never appears in build logs.
check_pass() {
  local secret_name=$1
  check_defined "${secret_name}"
  log_info "${secret_name}=********"
}


# Runs a command, echoing it first; aborts the whole script if it fails.
# $@: command and its arguments
run_cmd() {
  log_info "$ ${@}"
  # quote "$@" so arguments containing whitespace survive word splitting
  # (the old unquoted ${@} re-split them)
  "${@}"
  local status=$?
  if [ "${status}" != 0 ]; then
    log_error "Command Failed \"${@}\""
    exit ${status}
  fi
}


# Checks if a file exists on the remote location
# $1: Path to the file to check on the server
# Returns: 0 when the final HTTP status line contains 200, non-zero otherwise
dav_exists() {
  # quote the URL so paths with special characters survive; tail -n1 keeps
  # only the last status line when --location follows redirects
  curl --head --silent --user "${DOCUSER}:${DOCPASS}" --location "${DOCSERVER}/${1}" | grep HTTP | tail -n 1 | grep -q 200
  return $?
}


# Uploads a file to our intranet location via curl
# $1: Path to the file to upload (e.g. dist/myfile.whl)
# $2: Path on the server to upload to (e.g. private-upload/wheels/gitlab/)
dav_upload() {
  if [ ! -e "${1}" ]; then
    # message style matches dav_check_upload (the old "\'"" printed a literal
    # backslash before the closing quote)
    log_error "File \`${1}' does not exist on the local disk"
    exit 1
  fi

  local code
  # split declaration/assignment so curl's exit status is not masked; quote
  # every path so whitespace/special characters survive
  code=$(curl --location --silent --fail --write-out "%{http_code}" --user "${DOCUSER}:${DOCPASS}" --upload-file "${1}" "${DOCSERVER}/${2}")
  if [[ ${code} == *204 || ${code} == *201 ]]; then
    log_info "curl: cp ${1} -> ${DOCSERVER}/${2}"
  else
    log_error "Curl command finished with an error condition (code=${code}):"
    # repeat without --fail so the server's error body is shown to the user
    curl --location --silent --user "${DOCUSER}:${DOCPASS}" --upload-file "${1}" "${DOCSERVER}/${2}"
    exit ${code}
  fi
}


# Uploads a file to our intranet location via curl, if (and only if) it does
# not exist on the remote server
# $1: Path to the file to upload (e.g. dist/myfile.whl)
# $2: Path on the server to upload to (e.g. private-upload/wheels/gitlab/)
dav_check_upload() {
  # quote every expansion so filenames with special characters survive
  local remote_name="${2}/$(basename "${1}")"
  if dav_exists "${remote_name}"; then
    log_info "File \`${remote_name}' already exists on the remote server (not uploading again)"
  else
    dav_upload "${1}" "${2}"
  fi
}


# Creates a folder at our intranet location via curl
# $1: Path of the folder to create (e.g. private-upload/docs/test-folder)
# $2: which HTTP response code it should return instead of exit on (e.g. 405 means the folder already exists)
# NOTE(review): "return ${code}" pushes a 3-digit HTTP code through the 8-bit
# shell status (405 is reported as 149); the only caller in this file,
# dav_recursive_mkdir, ignores the status, so this is currently harmless -
# confirm before relying on the exact return value.
dav_mkdir() {
  # no --output is given, so any response body is captured together with the
  # trailing %{http_code}; hence the suffix-style *204/*201 matches below
  local code=$(curl --location --silent --fail --write-out "%{http_code}" --user "${DOCUSER}:${DOCPASS}" -X MKCOL "${DOCSERVER}/${1}")

  if [[ ${code} == *204 || ${code} == *201 ]]; then
    log_info "curl: mkdir ${DOCSERVER}/${1}"
  # if the return code was not a success, the function should usually treat it
  # as an error.  however, sometimes the codes must be treated more flexibly,
  # e.g.: dav_recursive_mkdir wants the directory created *or* already
  # existing, which means that a 405 (directory already exists) should not be
  # treated as an error.  other codes may also have similar consideration in
  # the future.
  elif [[ "${code}" == "$2" ]]; then
    return "${code}"
  else
    log_error "Curl command finished with an error condition (code=${code}):"
    # repeat without --fail so the server's error body is shown to the user
    curl --location --silent --user "${DOCUSER}:${DOCPASS}" -X MKCOL "${DOCSERVER}/${1}"
    exit "${code}"
  fi
}


# Creates a folder and all parent folders at a intranet location via curl (mkdir -p)
# $1: Path of a folder to guarantee to be writeable (e.g. private-upload/docs/bob/bob.admin)
dav_recursive_mkdir() {
  log_info "curl: mkdir -p ${DOCSERVER}/${1}"

  # split path into an array of path segments.  The IFS assignment is a
  # per-command prefix, so it only affects this "read" and not the rest of
  # the shell; -r keeps backslashes in path segments literal.
  local path_segments current_subpath seg
  IFS=/ read -r -a path_segments <<< "$1"
  current_subpath=''

  # loop through segments, creating each intermediate directory in turn
  for seg in "${path_segments[@]}"; do
    # append each segment to the current subpath
    current_subpath="${current_subpath}${seg}/"
    log_info "mkdir $DOCSERVER/$current_subpath"

    # make sure the current subpath folder is created
    # a 405 exit code is returned when the folder already exists
    dav_mkdir "$current_subpath" 405
    log_info "Directory ${current_subpath} (now) exists."
  done
}


# Deletes a file/folder from our intranet location via curl
# $1: Path to the file/folder to delete (e.g. dist/myfile.whl)
dav_delete() {
  local http_code

  log_info "curl: exists ${DOCSERVER}/${1}?"

  # probe with a HEAD request first and skip deletion when the target is
  # absent (a 404 in the captured status)
  http_code=$(curl --location --silent --fail --write-out "%{http_code}" --head --user "${DOCUSER}:${DOCPASS}" "${DOCSERVER}/$1")
  if [[ ${http_code} == *404 ]]; then
    log_info "Directory ${DOCSERVER}/$1 does not exist. Skipping deletion"
    return 0
  fi

  # actually remove the remote file/folder
  http_code=$(curl --location --silent --fail --write-out "%{http_code}" --user "${DOCUSER}:${DOCPASS}" -X DELETE "${DOCSERVER}/$1")
  if [[ ${http_code} == *204 || ${http_code} == *201 ]]; then
    log_info "curl: rm -rf ${DOCSERVER}/${1}"
  else
    log_error "Curl command finished with an error condition (code=${http_code}):"
    curl --location --silent --user "${DOCUSER}:${DOCPASS}" -X DELETE "${DOCSERVER}/$1"
    exit "${http_code}"
  fi
}


# Uploads a folder and all contents recursively to our intranet location via curl
# $1: Path to the folder to upload (e.g. test-folder/)
# $2: Path on the server to upload to (e.g. private-upload/docs/test/ to put contents of test-folder/ in test/)
# NOTE(review): the prefix substitution below drops the slash between prefix
# and filename when $1 carries a trailing slash - callers appear to need to
# pass $1 without one; confirm against actual call sites.
dav_upload_folder() {
  log_info "curl: cp -r ${1} -> ${DOCSERVER}/${2}..."

  if [ ! -e "${1}" ]; then
    log_error "Directory \`${1}' does not exist on the local disk"
    exit 1
  fi

  # wipe any previous remote copy so deleted files do not linger
  dav_delete "${2}"

  # the server-side prefix does not depend on the current file, so compute it
  # once outside the loop
  local server_prefix="${2%?}" #without ending slash

  # note: the while body runs in a pipeline subshell; it only performs
  # uploads, so no variables need to survive the loop
  find "$1" | while read -r fname; do
    # replace the local path prefix ('../folder1/folder2/folder-to-upload/')
    # with the server path prefix ('private-upload/docs/test/')
    # to make something like '../folder1/folder2/folder-to-upload/test.txt'
    # into 'private-upload/docs/test.txt'
    local server_path="${fname/$1/$server_prefix}"

    # if its a file...
    if [[ -f "${fname}" ]]; then
      # upload the file ...
      dav_upload "${fname}" "${server_path}"
    else
      # if its a dir, create the dir
      dav_recursive_mkdir "${server_path}"
    fi
  done
}


# Fall back to ./version.txt for the package version when the environment
# does not already provide one
if [ -z "${BOB_PACKAGE_VERSION}" ]; then
  if [ ! -r "version.txt" ]; then
    log_error "./version.txt does not exist - cannot figure out version number"
    exit 1
  fi
  # read the file directly instead of "cat | tr" (useless use of cat)
  BOB_PACKAGE_VERSION=$(tr -d '\n' < version.txt)
fi


# merges conda cache folders
# $1: Path to the main cache to keep. The directory must exist.
# $2: Path to the extra cache to be merged into main cache
merge_conda_cache() {
  if [ -e "${1}" ]; then
    local _cached_urlstxt="${2}/urls.txt"
    local _urlstxt="${1}/urls.txt"
    if [ -e "${2}" ]; then
      log_info "Merging urls.txt and packages with cached files..."
      mv "${2}"/*.tar.bz2 "${1}/"
      # "sort -u -o FILE FILE OTHER" is safe when the output is also an
      # input: sort reads all inputs before writing.  The previous
      # "cat a b | sort | uniq > a" truncated the main urls.txt before cat
      # could read it, silently losing its contents.
      touch "${_urlstxt}"
      if [ -e "${_cached_urlstxt}" ]; then
        sort -u -o "${_urlstxt}" "${_urlstxt}" "${_cached_urlstxt}"
      fi
    fi
  fi
}


# installs a miniconda installation.
# $1: Path to where to install miniconda.
# Reads: OSNAME ("linux"/"osx", set at the bottom of this file)
# Side effects: may download ./miniconda.sh (re-used when cached), replaces
# any existing installation at ${1} while preserving its package cache via
# merge_conda_cache, and clears bash's hashed command locations.
install_miniconda() {
  log_info "Installing miniconda in ${1} ..."

  # downloads the latest conda installation script
  # NOTE(review): "object" is not declared local and leaks into the caller's
  # environment
  if [ "${OSNAME}" == "linux" ]; then
    object=https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh
  else
    object=https://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
  fi

  # checks if miniconda.sh exists (e.g. restored from a previous CI cache)
  if [ ! -e miniconda.sh ]; then
    log_info "Downloading latest miniconda3 installer..."
    run_cmd curl --silent --output miniconda.sh ${object}
  else
    log_info "Re-using cached miniconda3 installer..."
    ls -l miniconda.sh
  fi

  # move cache to a different folder if it exists
  if [ -e ${1} ]; then
    run_cmd mv ${1} ${1}.cached
  fi

  # install miniconda (-b: batch mode, no prompts; -p: installation prefix)
  run_cmd bash miniconda.sh -b -p ${1}

  # Put back cache and merge urls.txt
  merge_conda_cache ${1}/pkgs ${1}.cached/pkgs
  # remove the backup cache folder
  rm -rf ${1}.cached

  # List currently available packages on cache
  # run_cmd ls -l ${1}/pkgs/
  # run_cmd cat ${1}/pkgs/urls.txt

  # forget previously hashed command paths so the fresh binaries are found
  hash -r
}


# sets CONDA_CHANNELS to the list of conda channels that should be considered
# $1: visibility (maybe either "public" or "private")
# $2: typically, the value of ${CI_COMMIT_TAG} or empty
# given the current visibility/tagging conditions of the package.
set_conda_channels() {
  local pkg_visibility="${1}"
  local pkg_tag="${2}"

  CONDA_CHANNELS=() #resets bash array

  if [ "${pkg_visibility}" != "public" ]; then
    # private packages also search the private channels, first
    if [ -z "${pkg_tag}" ]; then #private beta build
      CONDA_CHANNELS+=('private/conda/label/beta' 'public/conda/label/beta')
    fi
    CONDA_CHANNELS+=('private/conda')
  else
    if [ -z "${pkg_tag}" ]; then #public beta build
      CONDA_CHANNELS+=('public/conda/label/beta')
    fi
  fi

  # every configuration ends with the public stable channel
  CONDA_CHANNELS+=('public/conda')

  check_array_env CONDA_CHANNELS
}


# deploys all conda packages built up to now
# $1: path on the remote server corresponding to one of the conda channels to
#     upload to
# $2: prefix of the package name to upload (may be empty to upload all
#     available)
deploy_conda_packages() {
  local upload_path platform package

  # map the channel path to its corresponding upload area on the server
  upload_path=$(echo "${1}" | sed -e 's/public/public-upload/;s/private/private-upload/')

  for platform in "osx-64" "noarch" "linux-64"; do
    # when nothing matches, the glob stays literal - hence the -f guard
    for package in ${CONDA_ROOT}/conda-bld/${platform}/${2}*.tar.bz2; do
      if [[ -f "${package}" ]]; then
        dav_check_upload "${package}" "${upload_path}/${platform}/"
      fi
    done
  done
}


# sets BOB_BUILD_NUMBER to the value of the next build number for the package
# with the given specifications
# $1: the channel to lookup
set_next_bob_build_number() {
  log_info "$ ${CONDA_ROOT}/bin/python ${SCRIPTS_DIR}/channel_support.py ${DOCSERVER}/${1} ${CI_PROJECT_NAME} ${BOB_PACKAGE_VERSION} ${PYTHON_VERSION}"
  # quote every argument so values containing whitespace are passed intact
  BOB_BUILD_NUMBER=$("${CONDA_ROOT}/bin/python" "${SCRIPTS_DIR}/channel_support.py" "${DOCSERVER}/${1}" "${CI_PROJECT_NAME}" "${BOB_PACKAGE_VERSION}" "${PYTHON_VERSION}")
  export_env BOB_BUILD_NUMBER
}


# Sanity-check and log the CI-provided environment
log_env PYTHON_VERSION
check_env CI_PROJECT_URL
check_env CI_PROJECT_DIR
check_env CI_PROJECT_PATH
check_env CI_PROJECT_NAME
check_env CI_COMMIT_REF_NAME
export_env BOB_PACKAGE_VERSION
check_pass PYPIUSER
check_pass PYPIPASS
check_pass DOCUSER
check_pass DOCPASS

# Sets up derived variables ($(...) instead of legacy backticks)
OSNAME=$(osname)
VISIBILITY=$(visibility)
IS_MASTER=$(is_master)

check_env OSNAME
check_env VISIBILITY
check_env IS_MASTER

# Default documentation/upload server
if [ -z "${DOCSERVER}" ]; then
  DOCSERVER=http://www.idiap.ch
fi

# Sets up the location of our rc file for conda
CONDARC="${CONDA_ROOT}/condarc"

# Platform slug (e.g. "linux-64"); keep a pre-set value when provided
OS_SLUG="${OS_SLUG:-${OSNAME}-64}"

TESTSERVER=https://testpypi.python.org/legacy/

export_env OS_SLUG
export_env DOCSERVER
check_env TESTSERVER
check_env CONDA_ROOT
export_env CONDARC

# Setup default database server
BOB_DATABASE_SERVER="${DOCSERVER}/public/databases/latest"
export_env BOB_DATABASE_SERVER

# Setup default documentation server: beta builds point at the master docs
# only; tagged builds also try versioned, stable and pythonhosted locations
if [ -z "${CI_COMMIT_TAG}" ]; then
  DEFSRV="${DOCSERVER}/software/bob/docs/bob/%(name)s/master/"
else
  DEFSRV="${DOCSERVER}/software/bob/docs/bob/%(name)s/%(version)s/|${DOCSERVER}/software/bob/docs/bob/%(name)s/stable/|http://pythonhosted.org/%(name)s/"
fi

# a user-provided server list takes precedence over the defaults
if [ -z "${BOB_DOCUMENTATION_SERVER}" ]; then
  BOB_DOCUMENTATION_SERVER="${DEFSRV}"
else
  BOB_DOCUMENTATION_SERVER="${BOB_DOCUMENTATION_SERVER}|${DEFSRV}"
fi

if [ "${VISIBILITY}" != "public" ]; then
  # If private or internal, allow it to depend on other internal documents
  if [ -z "${CI_COMMIT_TAG}" ]; then
    BOB_DOCUMENTATION_SERVER="${BOB_DOCUMENTATION_SERVER}|${DOCSERVER}/private/docs/bob/%(name)s/master/"
  else
    BOB_DOCUMENTATION_SERVER="${BOB_DOCUMENTATION_SERVER}|${DOCSERVER}/private/docs/bob/%(name)s/%(version)s/|${DOCSERVER}/private/docs/bob/%(name)s/stable/"
  fi
fi
unset DEFSRV
export_env BOB_DOCUMENTATION_SERVER

# Sets up certificates for curl, openssl and git
CURL_CA_BUNDLE="${SCRIPTS_DIR}/cacert.pem"
SSL_CERT_FILE="${CURL_CA_BUNDLE}"
GIT_SSL_CAINFO="${CURL_CA_BUNDLE}"
export_env CURL_CA_BUNDLE
export_env SSL_CERT_FILE
export_env GIT_SSL_CAINFO

# Sets up upload folders for documentation (just in case we need them)
# See: https://gitlab.idiap.ch/bob/bob.admin/issues/2

# Prefix differs between private & public repos
if [[ "${VISIBILITY}" == "public" ]]; then
  DOC_SERVER_PREFIX="public-upload/docs/${CI_PROJECT_PATH}"
else
  DOC_SERVER_PREFIX="private-upload/docs/${CI_PROJECT_PATH}"
fi

# always upload documentation for all branches. Ideally this is only called
# when in master.
DOC_UPLOADS=("${DOC_SERVER_PREFIX}/${CI_COMMIT_REF_NAME}/")

if [[ -n "${CI_COMMIT_TAG}" ]]; then
  DOC_UPLOADS+=("${DOC_SERVER_PREFIX}/${CI_COMMIT_TAG}/")

  # upload documentation to a "stable" url if that is the latest tagged on
  # the master branch
  if [[ "${IS_MASTER}" == "true" ]]; then
    DOC_UPLOADS+=("${DOC_SERVER_PREFIX}/stable/")
  fi
fi

export_env DOC_SERVER_PREFIX
check_env DOC_UPLOADS

# Sets up the language so Unicode filenames are considered correctly
# (LC_ALL is set to the same value so it overrides any other LC_* category)
LANG="en_US.UTF-8"
LC_ALL="${LANG}"
export_env LANG
export_env LC_ALL

# Set up the location of matplotlibrc:
# https://matplotlib.org/users/customizing.html#the-matplotlibrc-file
# NOTE(review): MATPLOTLIBRC points at the scripts *directory*; matplotlib is
# then expected to find a "matplotlibrc" file inside it - confirm that file
# ships next to this script.
MATPLOTLIBRC="${SCRIPTS_DIR}"
export_env MATPLOTLIBRC