Commit c6f0f339 authored by André Anjos

[scripts/ci] Clean-up extra sphinx file generation

parent 63c60735
Pipeline #30426 passed
@@ -74,6 +74,13 @@ def is_stable(package, refname, tag, repodir):
   return False
 
 
+def comment_cleanup(lines):
+  """Cleans-up comments and empty lines from textual data read from files"""
+
+  no_comments = [k.partition('#')[0].strip() for k in lines]
+  return [k for k in no_comments if k]
+
+
 def read_packages(filename):
   """
   Return a python list of tuples (repository, branch), given a file containing
@@ -81,15 +88,34 @@ def read_packages(filename):
   """
 
   # loads dirnames from order file (accepts # comments and empty lines)
-  packages = []
   with open(filename, 'rt') as f:
-    for line in f:
-      line = line.partition('#')[0].strip()
-      if line:
-        if ',' in line:  #user specified a branch
-          path, branch = [k.strip() for k in line.split(',', 1)]
-          packages.append((path, branch))
-        else:
-          packages.append((line, 'master'))
+    lines = comment_cleanup(f.readlines())
+
+  packages = []
+  for line in lines:
+    if ',' in line:  #user specified a branch
+      path, branch = [k.strip() for k in line.split(',', 1)]
+      packages.append((path, branch))
+    else:
+      packages.append((line, 'master'))
 
   return packages
+
+
+def uniq(seq, idfun=None):
+  """Very fast, order preserving uniq function"""
+
+  # order preserving
+  if idfun is None:
+    def idfun(x): return x
+  seen = {}
+  result = []
+  for item in seq:
+    marker = idfun(item)
+    # in old Python versions:
+    # if seen.has_key(marker)
+    # but in new ones:
+    if marker in seen: continue
+    seen[marker] = 1
+    result.append(item)
+  return result
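For reference, a minimal sketch of how the two helpers introduced above behave, assuming they are imported from the ci module touched by this commit. The sample file contents are hypothetical and not part of the commit:

# hypothetical contents of a package-order file, as returned by f.readlines()
lines = [
    "# build order\n",
    "bob/bob.extension\n",
    "bob/bob.blitz, 1.x  # branch override\n",
    "\n",
    "bob/bob.extension\n",
]

cleaned = comment_cleanup(lines)
# -> ['bob/bob.extension', 'bob/bob.blitz, 1.x', 'bob/bob.extension']

uniq(cleaned)
# -> ['bob/bob.extension', 'bob/bob.blitz, 1.x']  (order preserved, first occurrence kept)

The next hunks wire these helpers into the documentation-generation script.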
@@ -14,7 +14,7 @@ from . import bdt
 from ..constants import SERVER, CONDA_BUILD_CONFIG, CONDA_RECIPE_APPEND, \
     WEBDAV_PATHS, BASE_CONDARC
 from ..deploy import deploy_conda_package, deploy_documentation
-from ..ci import read_packages
+from ..ci import read_packages, comment_cleanup, uniq
 from ..log import verbosity_option, get_logger, echo_normal
 
 logger = get_logger(__name__)
@@ -697,24 +697,27 @@ def docs(ctx, requirement, dry_run):
       # Copying the content from extra_intersphinx
       extra_intersphinx_path = os.path.join(clone_to, "doc",
           "extra-intersphinx.txt")
-      test_requirements_path = os.path.join(clone_to, "doc",
-          "test-requirements.txt")
-      requirements_path = os.path.join(clone_to, "requirements.txt")
       if os.path.exists(extra_intersphinx_path):
-        extra_intersphinx += open(extra_intersphinx_path).readlines()
+        with open(extra_intersphinx_path) as f:
+          extra_intersphinx += comment_cleanup(f.readlines())
 
+      test_requirements_path = os.path.join(clone_to, "doc",
+          "test-requirements.txt")
       if os.path.exists(test_requirements_path):
-        extra_intersphinx += open(test_requirements_path).readlines()
+        with open(test_requirements_path) as f:
+          extra_intersphinx += comment_cleanup(f.readlines())
 
+      requirements_path = os.path.join(clone_to, "requirements.txt")
       if os.path.exists(requirements_path):
-        extra_intersphinx += open(requirements_path).readlines()
+        with open(requirements_path) as f:
+          extra_intersphinx += comment_cleanup(f.readlines())
 
       nitpick_path = os.path.join(clone_to, "doc", "nitpick-exceptions.txt")
       if os.path.exists(nitpick_path):
-        nitpick += open(nitpick_path).readlines()
+        with open(nitpick_path) as f:
+          nitpick += comment_cleanup(f.readlines())
 
-    logger.info('Generating sphinx files...')
+    logger.info('Generating (extra) sphinx files...')
 
     # Making unique lists and removing all bob/beat references
     if not dry_run:
@@ -723,16 +726,16 @@ def docs(ctx, requirement, dry_run):
       group = os.environ['CI_PROJECT_NAMESPACE']
      extra_intersphinx = set([k.strip() for k in extra_intersphinx \
           if not k.strip().startswith(group)])
-      logger.info('Contents of "doc/extra-intersphinx.txt":\n%s',
-          ''.join(extra_intersphinx))
+      data = '\n'.join(uniq(sorted(extra_intersphinx)))
+      logger.info('Contents of "doc/extra-intersphinx.txt":\n%s', data)
       with open(os.path.join(doc_path, 'extra-intersphinx.txt'), 'w') as f:
-        f.writelines(extra_intersphinx)
+        f.write(data)
 
       # nitpick exceptions
-      logger.info('Contents of "doc/nitpick-exceptions.txt":\n%s',
-          ''.join(nitpick))
-      with open(os.path.join(doc_path, "nitpick-exceptions.txt"), "w") as f:
-        f.writelines(set([k.strip() for k in nitpick]))
+      data = '\n'.join(uniq(sorted(nitpick)))
+      logger.info('Contents of "doc/nitpick-exceptions.txt":\n%s', data)
+      with open(os.path.join(doc_path, 'nitpick-exceptions.txt'), 'w') as f:
+        f.write(data)
 
     logger.info('Building documentation...')
     ctx.invoke(build, dry_run=dry_run)
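The practical effect on the generated files, sketched below under the same assumptions (sample entries and the output path are illustrative only): entries are comment-stripped at read time, then sorted, de-duplicated and written one per line, instead of being dumped verbatim with writelines().

# entries gathered from the various *.txt sources, already passed
# through comment_cleanup() at read time (sample values only)
extra_intersphinx = ['sphinx', 'numpy', 'sphinx', 'scipy']

data = '\n'.join(uniq(sorted(extra_intersphinx)))
# -> 'numpy\nscipy\nsphinx'

with open('extra-intersphinx.txt', 'w') as f:
    f.write(data)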