diff --git a/gitlab/build.sh b/gitlab/build.sh
index 377af3bf3517df8b0a55861089aa1c7712604228..b04d33fb8980a8f4c7cf0677801d543d7f49b7b3 100755
--- a/gitlab/build.sh
+++ b/gitlab/build.sh
@@ -6,13 +6,6 @@ source $(dirname ${0})/functions.sh
 run_cmd ${CONDA_FOLDER}/bin/conda install -n root --yes --quiet conda=4 conda-build=3
 run_cmd ${CONDA_FOLDER}/bin/conda config --set always_yes true
 run_cmd ${CONDA_FOLDER}/bin/conda config --set show_channel_urls true
-run_cmd ${CONDA_FOLDER}/bin/conda config --add channels defaults
-run_cmd ${CONDA_FOLDER}/bin/conda config --add channels ${CONDA_CHANNEL}
-if [ -z "${CI_COMMIT_TAG}" ]; then
-  run_cmd ${CONDA_FOLDER}/bin/conda config --add channels ${CONDA_BETA_CHANNEL}
-fi
-run_cmd ${CONDA_FOLDER}/bin/conda config --set ssl_verify false
-run_cmd ${CONDA_FOLDER}/bin/conda install --quiet --yes curl
 run_cmd ${CONDA_FOLDER}/bin/conda clean --lock
 run_cmd cp _ci/conda_build_config.yaml conda/
 run_cmd ${CONDA_FOLDER}/bin/conda info
@@ -21,7 +14,6 @@ run_cmd mkdir -p ./_ci/${OS_SLUG}/${PYTHON_VERSION}
 
 if [ -z "${CI_COMMIT_TAG}" ]; then
   run_cmd ${CONDA_FOLDER}/bin/python _ci/channel_support.py ${CONDA_BETA_CHANNEL} ${CI_PROJECT_NAME} ${BOB_PACKAGE_VERSION} ${PYTHON_VERSION} -u --log ./_ci/${OS_SLUG}/${PYTHON_VERSION}/build_number.txt
-
 else
   run_cmd ${CONDA_FOLDER}/bin/python _ci/channel_support.py ${CONDA_CHANNEL} ${CI_PROJECT_NAME} ${BOB_PACKAGE_VERSION} ${PYTHON_VERSION} -u --log ./_ci/${OS_SLUG}/${PYTHON_VERSION}/build_number.txt
 
@@ -30,4 +22,8 @@ fi
 BOB_BUILD_NUMBER=`head -n 1 build_number.txt | tr -d '\n'`
 export_env BOB_BUILD_NUMBER
 
-run_cmd ${CONDA_FOLDER}/bin/conda build --python=${PYTHON_VERSION} conda
+if [ -z "${CI_COMMIT_TAG}" ]; then
+  run_cmd ${CONDA_FOLDER}/bin/conda build --override-channels -c ${CONDA_BETA_CHANNEL} -c ${CONDA_CHANNEL} -c defaults --python=${PYTHON_VERSION} conda
+else
+  run_cmd ${CONDA_FOLDER}/bin/conda build --override-channels -c ${CONDA_CHANNEL} -c defaults --python=${PYTHON_VERSION} conda
+fi
diff --git a/gitlab/deploy.sh b/gitlab/deploy.sh
index b2407a5083e78391ff3b3a4c7345af40f785f65b..90f340dcaf76f014f9ce8bdb280cb7b125f6942e 100755
--- a/gitlab/deploy.sh
+++ b/gitlab/deploy.sh
@@ -3,7 +3,7 @@
 
 source $(dirname ${0})/functions.sh
 
-# Deletes all existing dav folders that will be overwritten
+# Uploads all the built packages
 for os in "linux-64" "osx-64" "noarch"; do
   for k in "conda-env/${os}/*.tar.bz2"; do
     if [ -z "${CI_COMMIT_TAG}" ]; then #beta
@@ -19,3 +19,12 @@ run_cmd tar xfj "${k}" docs/${CI_PROJECT_NAME}
 for folder in "${DOC_UPLOADS[@]}"; do
   dav_upload_folder docs/${CI_PROJECT_NAME} "${folder}"
 done
+
+# Deletes the old packages listed in the build_number.txt files if in beta
+if [ -z "${CI_COMMIT_TAG}" ]; then #beta
+  for urls in _ci/*/*/build_number.txt; do
+    for url in `tail -n +2 ${urls}`; do
+      dav_delete ${url/$DOCSERVER/}
+    done
+  done
+fi
diff --git a/gitlab/functions.sh b/gitlab/functions.sh
index 937b06949c77e0641bf22e577921d503e776685a..ab3a0c12a01271eecea09c27ed8de364cd7e7b7f 100644
--- a/gitlab/functions.sh
+++ b/gitlab/functions.sh
@@ -6,7 +6,7 @@ SCRIPTS_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 
 # Determines the operating system we're using
 osname() {
-  [[ "$(uname -s)" == "Darwin" ]] && echo "macosx" || echo "linux"
+  [[ "$(uname -s)" == "Darwin" ]] && echo "osx" || echo "linux"
 }
 
 # Determines the visibility of the current package
diff --git a/gitlab/install.sh b/gitlab/install.sh
index 6217441888739055e34e3b053f05f664b08bef6f..798bc786f32412e0c3890f397d6c4f65c6c65dbf 100755
--- a/gitlab/install.sh
+++ b/gitlab/install.sh
@@ -59,10 +59,8 @@ get_exec() {
 run_cmd mkdir -pv ${1}
 get_script ${1} cacert.pem
 get_script ${1} functions.sh
-get_exec ${1} install.sh
-for stage in "build" "test" "docs" "wheels" "deploy"; do
-  get_exec ${1} before_${stage}.sh
+for stage in "build" "deploy" "pypi"; do
   get_exec ${1} ${stage}.sh
-  get_exec ${1} after_${stage}.sh
 done
-get_exec ${1} update_feedstock.py
+get_exec ${1} channel_support.py
+get_exec ${1} conda_build_config.yaml
diff --git a/gitlab/update_feedstock.py b/gitlab/update_feedstock.py
deleted file mode 100755
index 42680c57fc94da3daf1313c7d5b67c46b8109518..0000000000000000000000000000000000000000
--- a/gitlab/update_feedstock.py
+++ /dev/null
@@ -1,262 +0,0 @@
-#!/usr/bin/env python
-
-import hashlib
-try:
-  from urllib2 import urlopen
-except ImportError:
-  from urllib.request import urlopen
-import requests
-import json
-try:
-  from packaging.version import parse
-except ImportError:
-  from pip._vendor.packaging.version import parse
-import re
-import tempfile
-import shutil
-import os
-import subprocess
-
-URL_PATTERN = 'https://pypi.python.org/pypi/{package}/json'
-
-
-def run_commands(*calls):
-  """runs the given commands."""
-  # get all calls
-  for call in calls:
-    print(' - ' + ' '.join(call))
-    # execute call
-    if subprocess.call(call):
-      # call failed (has non-zero exit status)
-      raise ValueError("Command '%s' failed; stopping" % ' '.join(call))
-
-
-def get_version(package, url_pattern=URL_PATTERN):
-  """Return version of package on pypi.python.org using json."""
-  req = requests.get(url_pattern.format(package=package))
-  version = parse('0')
-  if req.status_code == requests.codes.ok:
-    j = json.loads(req.text)
-    if 'releases' in j:
-      releases = j['releases']
-      for release in releases:
-        ver = parse(release)
-        if not ver.is_prerelease:
-          version = max(version, ver)
-  return str(version)
-
-
-def get_remote_md5_sum(url, max_file_size=100 * 1024 * 1024):
-  remote = urlopen(url)
-  hash = hashlib.md5()
-
-  total_read = 0
-  while True:
-    data = remote.read(4096)
-    total_read += 4096
-
-    if not data or total_read > max_file_size:
-      break
-    hash.update(data)
-
-  return hash.hexdigest()
-
-
-class Gitlab(object):
-  """A class that wraps Gitlab API using curl"""
-
-  def __init__(self, token):
-    super(Gitlab, self).__init__()
-    self.token = token
-    self.base_url = 'https://gitlab.idiap.ch/api/v3/'
-    self.projects_url = self.base_url + 'projects/'
-
-  def get_project(self, project_name, namespace='bob'):
-    cmd = ["curl", "--header",
-           "PRIVATE-TOKEN: {}".format(self.token),
-           self.base_url + "projects/{}%2F{}".format(
-               namespace, project_name)]
-    pipeline = subprocess.check_output(cmd)
-    return json.loads(pipeline.decode())
-
-  def create_pipeline(self, project_id):
-    cmd = ["curl", "--request", "POST", "--header",
-           "PRIVATE-TOKEN: {}".format(self.token),
-           self.base_url + "projects/{}/pipeline?ref=master".format(
-               project_id)]
-    pipeline = subprocess.check_output(cmd)
-    return json.loads(pipeline.decode())
-
-  def get_pipeline(self, project_id, pipeline_id):
-    cmd = ["curl", "--header",
-           "PRIVATE-TOKEN: {}".format(self.token),
-           self.base_url + "projects/{}/pipelines/{}".format(
-               project_id, pipeline_id)]
-    pipeline = subprocess.check_output(cmd)
-    return json.loads(pipeline.decode())
-
-  def create_merge_request(self, project_id, source_branch, target_branch,
-                           title, assignee_id='', description='',
-                           target_project_id='', labels='', milestone_id='',
-                           remove_source_branch=''):
-    url = "projects/{}/merge_requests?"
-    url += "&".join(['source_branch={}', 'target_branch={}', 'title={}',
-                     'assignee_id={}', 'description={}',
-                     'target_project_id={}', 'labels={}',
-                     'milestone_id={}', 'remove_source_branch={}'])
-    url = url.format(project_id, source_branch, target_branch, title,
-                     assignee_id, description, target_project_id, labels,
-                     milestone_id, remove_source_branch)
-    cmd = ["curl", "--request", "POST", "--header",
-           "PRIVATE-TOKEN: {}".format(self.token),
-           self.base_url + url]
-    pipeline = subprocess.check_output(cmd)
-    return json.loads(pipeline.decode())
-
-  def accept_merge_request(self, project_id, mergerequest_id,
-                           merge_commit_message='',
-                           should_remove_source_branch='',
-                           merge_when_pipeline_succeeds='', sha=''):
-    """
-    Update an existing merge request.
-    """
-
-    url = "projects/{}/merge_request/{}/merge?"
-    url += "&".join([
-        'merge_commit_message={}',
-        'should_remove_source_branch={}',
-        'merge_when_pipeline_succeeds={}',
-        'sha={}',
-    ])
-    url = url.format(project_id, mergerequest_id, merge_commit_message,
-                     should_remove_source_branch,
-                     merge_when_pipeline_succeeds,
-                     sha)
-    cmd = ["curl", "--request", "PUT", "--header",
-           "PRIVATE-TOKEN: {}".format(self.token),
-           self.base_url + url]
-    pipeline = subprocess.check_output(cmd)
-    try:
-      return json.loads(pipeline.decode())
-    except Exception:
-      return False
-
-
-def update_meta(meta_path, package):
-  stable_version = get_version(package)
-  print('latest stable version for {} is {}'.format(package, stable_version))
-  url = 'https://pypi.io/packages/source/{0}/{1}/{1}-{2}.zip'.format(
-      package[0], package, stable_version)
-  md5 = get_remote_md5_sum(url)
-  with open(meta_path) as f:
-    doc = f.read()
-  build_number = '0'
-  doc = re.sub(r'\{\s?%\s?set\s?version\s?=\s?".*"\s?%\s?\}',
-               '{% set version = "' + str(stable_version) + '" %}',
-               doc, count=1)
-  doc = re.sub(r'\s+number\:\s?[0-9]+', '\n  number: ' + build_number, doc,
-               count=1)
-  doc = re.sub(r'\{\s?%\s?set\s?build_number\s?=\s?"[0-9]+"\s?%\s?\}',
-               '{% set build_number = "' + build_number + '" %}',
-               doc, count=1)
-  doc = re.sub(r'\s+md5\:.*', '\n  md5: {}'.format(md5), doc, count=1)
-  doc = re.sub(r'\s+url\:.*',
-               '\n  url: {}'.format(
-                   url.replace(stable_version, '{{ version }}')),
-               doc, count=1)
-  doc = re.sub(r'\s+home\:.*',
-               '\n  home: https://www.idiap.ch/software/bob/',
-               doc, count=1)
-  doc = doc.replace('Modified BSD License (3-clause)', 'BSD 3-Clause')
-
-  if package == 'bob':
-    requrl = 'https://gitlab.idiap.ch/bob/bob/raw/master/requirements.txt'
-    remote = requests.get(requrl)
-    req = remote.content.decode()
-    req = '\n    - '.join(req.replace('== ', '==').strip().split('\n'))
-    be_id = doc.find('bob.extension')
-    te_id = doc.find('test:\n', be_id)
-    template = '''{req}
-
-run:
-  - python
-  - {req}
-
-'''.format(req=req)
-    doc = doc[:be_id] + template + doc[te_id:]
-
-  with open(meta_path, 'w') as f:
-    f.write(doc)
-
-  return stable_version
-
-
-def main(package, subfolder='recipes', direct_push=False):
-  temp_dir = tempfile.mkdtemp()
-  try:
-    print("\nClonning bob.conda")
-    root = os.path.join(temp_dir, 'bob.conda')
-    feedstock = os.path.join(root, subfolder, package)
-    try:
-      run_commands(
-          ['git', 'clone',
-           'git@gitlab.idiap.ch:bob/bob.conda.git',
-           root])
-    except ValueError:
-      print("\nFailed to clone `bob.conda`, Exiting ...")
-      raise
-    os.chdir(feedstock)
-    # update meta.yaml
-    meta_path = 'meta.yaml'
-    stable_version = update_meta(meta_path, package)
-
-    branch_name = '{}-{}'.format(package, stable_version)
-    if not direct_push:
-      run_commands(
-          ['git', 'checkout', '-b', branch_name])
-
-    run_commands(['git', '--no-pager', 'diff'],
-                 ['git', 'config', 'user.email',
-                  os.environ.get('GITLAB_USER_EMAIL')],
-                 ['git', 'config', 'user.name',
-                  os.environ.get('GITLAB_USER_ID')],
-                 ['git', 'add', '-A'])
-    try:
-      run_commands(['git', 'commit', '-am',
-                    '[{}] Update to version {}'.format(package,
-                                                       stable_version)])
-    except ValueError:
-      print('Feedstock is already uptodate, skipping.')
-      return
-    if direct_push:
-      print(feedstock)
-      try:
-        answer = raw_input(
-            'Would you like to push directly to master?').lower()
-      except Exception:
-        answer = input('Would you like to push directly to master?').lower()
-      if answer.startswith('y') or answer == '':
-        run_commands(['git', 'push'])
-        print('See the changes at:\n'
-              'https://github.com/conda-forge/'
-              '{}-feedstock/commits/master\n\n'.format(package))
-    else:
-      origin_url = 'https://idiapbbb:{}@gitlab.idiap.ch/bob/bob.conda.git'
-      origin_url = origin_url.format(os.environ.get('IDIAPBBB_PASS'))
-      subprocess.call(['git', 'remote', 'set-url', 'origin', origin_url])
-      run_commands(['git', 'push', '--quiet', '--force', '--set-upstream',
-                    'origin', branch_name])
-      gitlab = Gitlab(os.environ.get('GITLAB_API_TOKEN'))
-      project_id = gitlab.get_project('bob.conda')['id']
-      title = 'Update-to-{}'.format(branch_name)
-      mr = gitlab.create_merge_request(project_id, branch_name, 'master',
-                                       title, remove_source_branch='true')
-    #   gitlab.accept_merge_request(
-        #   project_id, mr['id'], merge_when_pipeline_succeeds='true')
-  finally:
-    shutil.rmtree(temp_dir)
-
-
-if __name__ == '__main__':
-  import sys
-  main(*sys.argv[1:])
diff --git a/gitlab/wheels.sh b/gitlab/wheels.sh
deleted file mode 100755
index 25b13d242a43cac50e8b261d65320afc025aacc0..0000000000000000000000000000000000000000
--- a/gitlab/wheels.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-# Thu 22 Sep 2016 13:58:56 CEST
-
-source $(dirname ${0})/functions.sh
-
-for file in dist/*.whl; do
-  dav_upload ${file} private-upload/wheels/gitlab/
-done
diff --git a/templates/ci-for-cxx-extensions.yml b/templates/ci-for-cxx-extensions.yml
deleted file mode 100644
index b0fba4df69a69e9a48722c5e27d9192f452f815c..0000000000000000000000000000000000000000
--- a/templates/ci-for-cxx-extensions.yml
+++ /dev/null
@@ -1,275 +0,0 @@
-# This build file heavily uses template features from YAML so it is generic
-# enough for any Bob project. Don't modify it unless you know what you're
-# doing.
-
-
-# Definition of our build pipeline
-stages:
-  - build
-  - test
-  - docs
-  - wheels
-  - deploy
-
-
-# ---------
-# Templates
-# ---------
-
-# Template for the build stage
-# Needs to run on all supported architectures, platforms and python versions
-.build_template: &build_job
-  stage: build
-  before_script:
-    - git clean -ffdx
-    - mkdir _ci
-    - curl --silent "https://gitlab.idiap.ch/bob/bob.admin/raw/master/gitlab/install.sh" > _ci/install.sh
-    - chmod 755 _ci/install.sh
-    - ./_ci/install.sh _ci #updates
-    - ./_ci/before_build.sh
-  script:
-    - ./_ci/build.sh
-  after_script:
-    - ./_ci/after_build.sh
-  artifacts:
-    expire_in: 1 week
-    paths:
-      - _ci/
-      - dist/
-      - sphinx/
-
-
-# Template for the test stage - re-installs from uploaded wheels
-# Needs to run on all supported architectures, platforms and python versions
-.test_template: &test_job
-  stage: test
-  before_script:
-    - ./_ci/install.sh _ci #updates
-    - ./_ci/before_test.sh
-  script:
-    - ./_ci/test.sh
-  after_script:
-    - ./_ci/after_test.sh
-
-
-# Template for the wheel uploading stage
-# Needs to run against all combinations of python and operating systems
-.wheels_template: &wheels_job
-  stage: wheels
-  environment: intranet
-  only:
-    - master
-    - /^v\d+\.\d+\.\d+([abc]\d*)?$/  # PEP-440 compliant version (tags)
-  before_script:
-    - ./_ci/install.sh _ci #updates
-    - ./_ci/before_wheels.sh
-  script:
-    - ./_ci/wheels.sh
-  after_script:
-    - ./_ci/after_wheels.sh
-
-
-# Template for (latest) documentation upload stage
-# Only one real job needs to do this
-.docs_template: &docs_job
-  stage: docs
-  environment: intranet
-  only:
-    - master
-  before_script:
-    - ./_ci/install.sh _ci #updates
-    - ./_ci/before_docs.sh
-  script:
-    - ./_ci/docs.sh
-  after_script:
-    - ./_ci/after_docs.sh
-
-
-# Template for the deployment stage - re-installs from uploaded wheels
-# Needs to run on a single architecture only
-# Will deploy your package to PyPI and other required services
-# Only runs for tags
-.deploy_template: &deploy_job
-  stage: deploy
-  environment: internet
-  only:
-    - /^v\d+\.\d+\.\d+([abc]\d*)?$/  # PEP-440 compliant version (tags)
-  except:
-    - branches
-  before_script:
-    - ./_ci/install.sh _ci #updates
-    - ./_ci/before_deploy.sh
-  script:
-    - ./_ci/deploy.sh
-  after_script:
-    - ./_ci/after_deploy.sh
-
-
-# -------------
-# Build Targets
-# -------------
-
-# Linux + Python 2.7: Builds, tests, uploads wheel and deploys (if needed)
-build_linux_27:
-  <<: *build_job
-  variables: &linux_27_build_variables
-    PYTHON_VERSION: "2.7"
-  tags:
-    - conda-linux
-
-test_linux_27:
-  <<: *test_job
-  variables: *linux_27_build_variables
-  dependencies:
-    - build_linux_27
-  tags:
-    - conda-linux
-
-wheels_linux_27:
-  <<: *wheels_job
-  variables: *linux_27_build_variables
-  dependencies:
-    - build_linux_27
-  tags:
-    - conda-linux
-
-deploy_linux_27:
-  <<: *deploy_job
-  variables: *linux_27_build_variables
-  dependencies:
-    - build_linux_27
-  tags:
-    - conda-linux
-
-
-# Linux + Python 3.5: Builds, tests and uploads wheel
-build_linux_35:
-  <<: *build_job
-  variables: &linux_35_build_variables
-    PYTHON_VERSION: "3.5"
-  tags:
-    - conda-linux
-
-test_linux_35:
-  <<: *test_job
-  variables: *linux_35_build_variables
-  dependencies:
-    - build_linux_35
-  tags:
-    - conda-linux
-
-wheels_linux_35:
-  <<: *wheels_job
-  variables: *linux_35_build_variables
-  dependencies:
-    - build_linux_35
-  tags:
-    - conda-linux
-
-docs_linux_35:
-  <<: *docs_job
-  variables: *linux_35_build_variables
-  dependencies:
-    - build_linux_35
-  tags:
-    - conda-linux
-
-
-# Linux + Python 3.6: Builds and tests
-build_linux_36:
-  <<: *build_job
-  variables: &linux_36_build_variables
-    PYTHON_VERSION: "3.6"
-  tags:
-    - conda-linux
-
-test_linux_36:
-  <<: *test_job
-  variables: *linux_36_build_variables
-  dependencies:
-    - build_linux_36
-  tags:
-    - conda-linux
-
-wheels_linux_36:
-  <<: *wheels_job
-  variables: *linux_36_build_variables
-  dependencies:
-    - build_linux_36
-  tags:
-    - conda-linux
-
-
-# Mac OSX + Python 2.7: Builds and tests
-build_macosx_27:
-  <<: *build_job
-  variables: &macosx_27_build_variables
-    PYTHON_VERSION: "2.7"
-  tags:
-    - conda-macosx
-
-test_macosx_27:
-  <<: *test_job
-  variables: *macosx_27_build_variables
-  dependencies:
-    - build_macosx_27
-  tags:
-    - conda-macosx
-
-wheels_macosx_27:
-  <<: *wheels_job
-  variables: *macosx_27_build_variables
-  dependencies:
-    - build_macosx_27
-  tags:
-    - conda-macosx
-
-
-# Mac OSX + Python 3.5: Builds and tests
-build_macosx_35:
-  <<: *build_job
-  variables: &macosx_35_build_variables
-    PYTHON_VERSION: "3.5"
-  tags:
-    - conda-macosx
-
-test_macosx_35:
-  <<: *test_job
-  variables: *macosx_35_build_variables
-  dependencies:
-    - build_macosx_35
-  tags:
-    - conda-macosx
-
-wheels_macosx_35:
-  <<: *wheels_job
-  variables: *macosx_35_build_variables
-  dependencies:
-    - build_macosx_35
-  tags:
-    - conda-macosx
-
-
-# Mac OSX + Python 3.6: Builds and tests
-build_macosx_36:
-  <<: *build_job
-  variables: &macosx_36_build_variables
-    PYTHON_VERSION: "3.6"
-  tags:
-    - conda-macosx
-
-test_macosx_36:
-  <<: *test_job
-  variables: *macosx_36_build_variables
-  dependencies:
-    - build_macosx_36
-  tags:
-    - conda-macosx
-
-wheels_macosx_36:
-  <<: *wheels_job
-  variables: *macosx_36_build_variables
-  dependencies:
-    - build_macosx_36
-  tags:
-    - conda-macosx