Commit a2c4937a authored by André Anjos

[ci] Re-use bob.devtools central CI file; Move deployment script to bob.devtools

parent da963627
Pipeline #55376 passed
New .gitlab-ci.yml:

include:
  - project: 'bob/bob.devtools'
    ref: master
    file: '/bob/devtools/data/gitlab-ci/python-package.yaml'

Removed .gitlab-ci.yml:

# This YAML file contains descriptions for the CI of python-only packages
# - do **not** modify it unless you know what you're doing (and up to!)

# Definition of global variables (all stages)
variables:
  PYTHONUNBUFFERED: "1"
  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
  PRE_COMMIT_HOME: "${CI_PROJECT_DIR}/.cache/pre-commit"

# Definition of our build pipeline order
stages:
  - build
  - test
  - deploy
  - pypi

# Build targets
build:
  image: python:latest
  tags:
    - docker
  stage: build
  before_script:
    - pip install twine pre-commit sphinx sphinx-rtd-theme
  script:
    - "[ -r .pre-commit-config.yaml ] && pre-commit run --all-files --show-diff-on-failure --verbose"
    - python setup.py sdist --formats=zip
    - twine check dist/*.zip
    - pip install -e .
    - "[ -e doc ] && sphinx-build doc html"
  artifacts:
    paths:
      - dist/*.zip
      - html
    expire_in: 1 week
  cache:
    key: "build-py"
    paths:
      - ${PRE_COMMIT_HOME}
      - ${PIP_CACHE_DIR}

# Test targets
.test_template:
  tags:
    - docker
  stage: test
  before_script:
    - pip install tox
  cache:
    key: "test-py"
    paths:
      - ${PIP_CACHE_DIR}

test_py38:
  extends: .test_template
  image: python:3.8
  script:
    - tox -e py38

test_py39:
  extends: .test_template
  image: python:3.9
  script:
    - tox -e py39

test_py310:
  extends: .test_template
  image: python:3.10
  script:
    - tox -e py310

.deploy_template:
  image: python:latest
  tags:
    - docker
  stage: deploy
  dependencies:
    - test_py38
    - test_py39
    - test_py310
    - build
  before_script:
    - pip install webdavclient3
  script:
    - python ./deploydocs.py -v html

deploy_beta:
  extends: .deploy_template
  environment: beta
  only:
    - master

deploy_stable:
  extends: .deploy_template
  environment: stable
  only:
    - /^v\d+\.\d+\.\d+([abc]\d*)?$/  # PEP-440 compliant version (tags)
  except:
    - branches

pypi:
  image: python:latest
  tags:
    - docker
  stage: pypi
  environment: pypi
  only:
    refs:
      - /^v\d+\.\d+\.\d+([abc]\d*)?$/  # PEP-440 compliant version (tags)
    variables:
      - $CI_PROJECT_VISIBILITY == "public"
  except:
    - branches
  dependencies:
    - test_py38
    - test_py39
    - test_py310
    - build
  before_script:
    - pip install twine
  script:
    - twine upload --skip-existing --username=${PYPIUSER} --password=${PYPIPASS} dist/*.zip
  cache:
    paths:
      - ${PIP_CACHE_DIR}
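
A quick way to check what the tag filter used by the deploy_stable and pypi jobs accepts; this sketch is not part of either file, and the tag names below are hypothetical examples:

import re

# Same pattern as in the "only:" rules above.
TAG_RE = re.compile(r"^v\d+\.\d+\.\d+([abc]\d*)?$")

assert TAG_RE.match("v1.2.3")         # final release tag
assert TAG_RE.match("v1.2.3b1")       # beta pre-release tag
assert not TAG_RE.match("1.2.3")      # missing the leading "v"
assert not TAG_RE.match("v1.2.3rc1")  # "rc" suffixes are not matched by this pattern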

Removed deploydocs.py (moved to bob.devtools):

#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Deployment utilities for sphinx documentation via webDAV.

Requires you to ``pip install webdavclient3``.
"""

import os
import sys
import logging

logger = logging.getLogger("deploy")

_SERVER = "http://www.idiap.ch"

_WEBDAV_PATHS = {
    True: {  # stable?
        False: {  # visible?
            "root": "/private-upload",
            "conda": "/conda",
            "docs": "/docs",
        },
        True: {  # visible?
            "root": "/public-upload",
            "conda": "/conda",
            "docs": "/docs",
        },
    },
    False: {  # stable?
        False: {  # visible?
            "root": "/private-upload",
            "conda": "/conda/label/beta",
            "docs": "/docs",
        },
        True: {  # visible?
            "root": "/public-upload",
            "conda": "/conda/label/beta",
            "docs": "/docs",
        },
    },
}
"""Default locations of our webdav upload paths"""


def _setup_webdav_client(server, root, username, password):
    """Configures and checks the webdav client."""

    # setup webdav connection
    webdav_options = dict(
        webdav_hostname=server,
        webdav_root=root,
        webdav_login=username,
        webdav_password=password,
    )

    from webdav3.client import Client

    retval = Client(webdav_options)
    assert retval.valid()
    return retval


def deploy_documentation(
    path,
    package,
    stable,
    latest,
    public,
    branch,
    tag,
    username,
    password,
    dry_run,
):
"""Deploys sphinx documentation to the appropriate webdav locations.
Parameters
==========
path : str
Path leading to the root of the documentation to be deployed
package : str
Full name (with namespace) of the package being treated
stable : bool
Indicates if the documentation corresponds to the latest stable build
latest : bool
Indicates if the documentation being deployed correspond to the latest
stable for the package or not. In case the documentation comes from a
patch release which is not on the master branch, please set this flag
to ``False``, which will make us avoid deployment of the documentation
to ``master`` and ``stable`` sub-directories.
public : bool
Indicates if the documentation is supposed to be distributed publicly
or privatly (within Idiap network)
branch : str
The name of the branch for the current build
tag : str
The name of the tag currently built (may be ``None``)
username : str
The name of the user on the webDAV server to use for uploading the
package
password : str
The password of the user on the webDAV server to use for uploading the
package
dry_run : bool
If we're supposed to really do the actions, or just log messages.
"""

    # uploads documentation artifacts
    if not os.path.exists(path):
        raise RuntimeError(
            "Documentation is not available at %s - "
            "ensure documentation is being produced for your project!" % path
        )

    server_info = _WEBDAV_PATHS[stable][public]
    davclient = _setup_webdav_client(
        _SERVER, server_info["root"], username, password
    )

    remote_path_prefix = "%s/%s" % (server_info["docs"], package)

    # finds out the correct mixture of sub-directories we should deploy to.
    # 1. if ref-name is a tag, don't forget to publish to 'master' as well -
    #    all tags are checked to come from that branch
    # 2. if ref-name is a branch name, deploy to it
    # 3. in case a tag is being published, make sure to deploy to the special
    #    "stable" subdir as well
    deploy_docs_to = set([branch])
    if stable:
        if tag is not None:
            deploy_docs_to.add(tag)
        if latest:
            deploy_docs_to.add("master")
            deploy_docs_to.add("stable")

    # creates package directory, and then uploads directory there
    for k in deploy_docs_to:
        if not davclient.check(remote_path_prefix):  # base package directory
            logger.info("[dav] mkdir %s", remote_path_prefix)
            if not dry_run:
                davclient.mkdir(remote_path_prefix)

        remote_path = "%s/%s" % (remote_path_prefix, k)
        logger.info(
            "[dav] %s -> %s%s%s",
            path,
            _SERVER,
            server_info["root"],
            remote_path,
        )
        if not dry_run:
            davclient.upload_directory(local_path=path, remote_path=remote_path)


if __name__ == "__main__":

    import argparse

    parser = argparse.ArgumentParser(
        description="Deploys documentation from python-only packages"
    )
    parser.add_argument(
        "directory",
        help="Directory containing the sphinx build to deploy",
    )
    parser.add_argument(
        "-p",
        "--package",
        default=os.environ.get("CI_PROJECT_PATH", None),
        help="The package being built [default: %(default)s]",
    )
    parser.add_argument(
        "-x",
        "--visibility",
        default=os.environ.get("CI_PROJECT_VISIBILITY", "private"),
        help="The visibility of the package being built [default: %(default)s]",
    )
    parser.add_argument(
        "-b",
        "--branch",
        default=os.environ.get("CI_COMMIT_REF_NAME", None),
        help="Name of the branch being built [default: %(default)s]",
    )
    parser.add_argument(
        "-t",
        "--tag",
        default=os.environ.get("CI_COMMIT_TAG", None),
        help="If building a tag, pass it with this flag [default: %(default)s]",
    )
    parser.add_argument(
        "-u",
        "--username",
        default=os.environ.get("DOCUSER", None),
        help="Username for webdav deployment [default: %(default)s]",
    )
    parser.add_argument(
        "-P",
        "--password",
        default=os.environ.get("DOCPASS", None),
        help="Password for webdav deployment [default: %(default)s]",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        help="Be verbose (enables INFO logging)",
        action="store_const",
        dest="loglevel",
        default=logging.WARNING,
        const=logging.INFO,
    )

    args = parser.parse_args()

    logging.basicConfig(level=args.loglevel)

    deploy_documentation(
        args.directory,
        package=args.package,
        stable=(args.tag is not None),
        latest=True,
        public=(args.visibility == "public"),
        branch=args.branch,
        tag=args.tag,
        username=args.username,
        password=args.password,
        dry_run=False,
    )
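
For orientation, this is how the two files fit together in CI. The .deploy_template job invokes the script with only the documentation directory:

    python ./deploydocs.py -v html

Every other input is resolved from the job environment through the argparse defaults above: CI_PROJECT_PATH feeds --package, CI_PROJECT_VISIBILITY feeds --visibility, CI_COMMIT_REF_NAME feeds --branch, CI_COMMIT_TAG feeds --tag, and DOCUSER/DOCPASS provide the WebDAV credentials. A tag build (CI_COMMIT_TAG set) is treated as stable, and public project visibility selects the "/public-upload" WebDAV root.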