Commit 5886b5e5 authored by André Anjos

Standardise

parent cee2bdfe
Pipeline #3174 failed in 4 minutes and 27 seconds
# This build file is defined in two parts: 1) a generic set of instructions you
# probably **don't** need to change and 2) a part you may have to tune to your
# project. It makes heavy use of YAML templates so that you only have to change
# a minimal part of it and code duplication is avoided as much as possible,
# while still providing a nice pipeline display for your package.
# 1) Generic instructions (only change if you know what you're doing)
# -------------------------------------------------------------------
# Definition of our build pipeline
stages:
- build
- test
- docs
- wheels
# Global variables
variables:
CONDA_PREFIX: env
# Template for the build stage
# Needs to run on all supported architectures, platforms and python versions
.build_template: &build_job
stage: build
before_script:
- git clean -ffdx
- curl --silent https://gitlab.idiap.ch/bob/bob/snippets/7/raw | tr -d '\r' > bootstrap-conda.sh
- chmod 755 ./bootstrap-conda.sh
- ./bootstrap-conda.sh ${CONDA_FOLDER} ${PYTHON_VER} ${CONDA_PREFIX}
variables: &build_variables
BOB_DOCUMENTATION_SERVER: "http://www.idiap.ch/software/bob/docs/latest/bob/%s/master/"
script:
- ./bin/buildout
- if [ -x ./bin/bob_dbmanage.py ]; then ./bin/bob_dbmanage.py all download --force; fi
- ./bin/sphinx-build doc sphinx
- ./bin/python setup.py bdist_wheel --python-tag ${WHEEL_TAG}
after_script:
- rm -rf ${CONDA_PREFIX}
artifacts:
expire_in: 1 day
paths:
- bootstrap-conda.sh
- dist/
- sphinx/
# Template for building on a Linux machine
.build_linux_template: &linux_build_job
<<: *build_job
variables: &linux_build_variables
<<: *build_variables
CONDA_FOLDER: "/local/conda"
CFLAGS: "-D_GLIBCXX_USE_CXX11_ABI=0 -coverage"
CXXFLAGS: "-D_GLIBCXX_USE_CXX11_ABI=0 -coverage"
# Template for building on a Mac OSX machine
.build_mac_template: &macosx_build_job
<<: *build_job
variables: &macosx_build_variables
<<: *build_variables
CONDA_FOLDER: "/opt/conda"
MACOSX_DEPLOYMENT_TARGET: "10.9"
CFLAGS: "-pthread -coverage"
CXXFLAGS: "-pthread -coverage"
LDFLAGS: "-lpthread"
# Template for the test stage - re-install from uploaded wheels
# Needs to run on all supported architectures, platforms and python versions
.test_template: &test_job
stage: test
before_script:
- ./bootstrap-conda.sh ${CONDA_FOLDER} ${PYTHON_VER} ${CONDA_PREFIX}
- source ${CONDA_FOLDER}/bin/activate ${CONDA_PREFIX}
- pip install --use-wheel --no-index --pre dist/*.whl
script:
- cd ${CONDA_PREFIX}
- python -c "from ${CI_PROJECT_NAME} import get_config; print(get_config())"
- coverage run --source=${CI_PROJECT_NAME} ./bin/nosetests -sv ${CI_PROJECT_NAME}
- coverage report
- sphinx-build -b doctest ../doc ../sphinx
after_script:
- rm -rf ${CONDA_PREFIX}
# Template for the wheel uploading stage
# Needs to run against only one combination of Python 2.x and 3.x if it is a
# Python-only package; otherwise, it needs to run with both Python versions on
# all supported architectures (Linux and Mac OS X 64-bit)
.wheels_template: &wheels_job
stage: wheels
only:
- master
- tags
before_script:
- curl --silent https://gitlab.idiap.ch/bob/bob/snippets/8/raw | tr -d '\r' > upload-wheel.sh
- chmod 755 upload-wheel.sh
script:
- ./upload-wheel.sh
# Template for (latest) documentation upload stage
# Only one real job needs to do this
.docs_template: &docs_job
stage: docs
only:
- master
before_script:
- curl --silent https://gitlab.idiap.ch/bob/bob/snippets/9/raw | tr -d '\r' > upload-sphinx.sh
- chmod 755 upload-sphinx.sh
script:
- ./upload-sphinx.sh
# 2) Package specific instructions (you may tune this if needed)
# --------------------------------------------------------------
# Linux + Python 2.7: Builds, tests, uploads wheel
build_linux_27:
<<: *linux_build_job
variables: &linux_27_build_variables
<<: *linux_build_variables
PYTHON_VER: "2.7"
WHEEL_TAG: "py27"
tags:
- conda-linux
test_linux_27:
<<: *test_job
variables: *linux_27_build_variables
dependencies:
- build_linux_27
tags:
- conda-linux
wheels_linux_27:
<<: *wheels_job
dependencies:
- build_linux_27
tags:
- conda-linux
# Linux + Python 3.4: Builds and tests
build_linux_34:
<<: *linux_build_job
variables: &linux_34_build_variables
<<: *linux_build_variables
PYTHON_VER: "3.4"
WHEEL_TAG: "py3"
tags:
- conda-linux
test_linux_34:
<<: *test_job
variables: *linux_34_build_variables
dependencies:
- build_linux_34
tags:
- conda-linux
# Linux + Python 3.5: Builds, tests, uploads wheel
build_linux_35:
<<: *linux_build_job
variables: &linux_35_build_variables
<<: *linux_build_variables
PYTHON_VER: "3.5"
WHEEL_TAG: "py3"
tags:
- conda-linux
test_linux_35:
<<: *test_job
variables: *linux_35_build_variables
dependencies:
- build_linux_35
tags:
- conda-linux
wheels_linux_35:
<<: *wheels_job
dependencies:
- build_linux_35
tags:
- conda-linux
docs_linux_35:
<<: *docs_job
dependencies:
- build_linux_35
tags:
- conda-linux
# Mac OSX + Python 2.7: Builds and tests
build_macosx_27:
<<: *macosx_build_job
variables: &macosx_27_build_variables
<<: *macosx_build_variables
PYTHON_VER: "2.7"
WHEEL_TAG: "py27"
tags:
- conda-macosx
test_macosx_27:
<<: *test_job
variables: *macosx_27_build_variables
dependencies:
- build_macosx_27
tags:
- conda-macosx
# Mac OSX + Python 3.4: Builds and tests
build_macosx_34:
<<: *macosx_build_job
variables: &macosx_34_build_variables
<<: *macosx_build_variables
PYTHON_VER: "3.4"
WHEEL_TAG: "py3"
tags:
- conda-macosx
test_macosx_34:
<<: *test_job
variables: *macosx_34_build_variables
dependencies:
- build_macosx_34
tags:
- conda-macosx
# Mac OSX + Python 3.5: Builds and tests
build_macosx_35:
<<: *macosx_build_job
variables: &macosx_35_build_variables
<<: *macosx_build_variables
PYTHON_VER: "3.5"
WHEEL_TAG: "py3"
tags:
- conda-macosx
test_macosx_35:
<<: *test_job
variables: *macosx_35_build_variables
dependencies:
- build_macosx_35
tags:
- conda-macosx
language: python
matrix:
include:
- python: 2.7
env:
- secure: CchM+63gNGQp44wkG/vzx52uHg6R2XrFaV9P5Anid88h2wiqQi8N6UYD7oF7cNMc+e/wSaa4sv802blsk+8HUv6e6RUIWHqXsDQM9iXWemAAwrT8zvL+Nc/CtTKWGBhVn+rtwr7eEZRcAUii+obDSxc5R7lJGMJzqxO5x6tl77LDEHNcSguF58RJQOifcabThYT7VnZkV4DqGzfaG5ngZwcIGF34zlkOTvtVwnmfM7EdPjzQitKlurM1w2ViyiClby9OO/nAk7T7HgqT6ZkzW0G9exdgeztTzqjtS9DL4FIDdm17cCDWEdLeCgXBO3DKJ9J/XNS0ND7WQooMSFSf8dLQTYMfSDP1AZxPeyb260hIVwMSyJxeS6l8HnAphuBuOo/f8gNteWNxShxcAC8uiVm/EcCLmrwLWSmfPjGyje0QRaJzQ45kQXEf25lm1uPlH3cyYCIn52Y2yP7vwDk1JIbk9G6E6oIZjRq4myZUJCfOzyATZUlZ/EzzZS4fnSxGc9YGOERxAc1E8VcEAq+WbGuNmYJidOGRm5NKzL7rvXWAkfvbSDdNZPZPETAPSKFAqLyYZbCAKzefAQRTyA6CQE8d9HBkEr5iNevlxbuVBF70cUcnsTyLaFpA3t+XvbiR3FhOGjLrfJ/yiZjJihl7bgYimIlntI8vAasgjUxBZ3Q=
- secure: j4yg2mFK9jAzgB+aGJj9ieSGYq0iJBTopUSznd9HwT0xAwXGFkOhV2D8t+a0Ww1tuD8eYHKVfLBCMXV9ahelFbIBOM4ajMRsSJf8YcxdqaGFU/4BoKJd6G6chpz3HtScB/VGGO5uRLIqDQWgii6+pEAk3uj5csLp0amL0LCWdhCYHrh94NbeVsJvxPN4LTOTChKZjbo3yhUCPWuKTvbhPteE0NGuf1Ko6NqxaCzT9Iq11YbdNSmJmZnNbOqLkvFFQLdtzV3NXHFstep1YCRcDHLz/wNxkvh9iROUmrdT1b0D8IxexmwFMfVt8GsnrTRAe9jS7mQq7ccE90cJU2nHwh+pzAKyg/wmDdRBkDP08flEZprCUvYWGwoVv5E7GUGxG0voVdLJd2CD2NUHsvyvLZqY1R0EcRvc61aeVY9p/S/r2JzlzZjJTrdh3TUw0OcQ4cymM0VPXKU7N+x89KOhJMbGNwxibd4BtILY5A52nuTy+u1uiiUDFh96rFd3bl3ZwwmnY/CaOmclNILTpa9USsZGngn3Q+juoA8HPKZ7murNl+/mZpopb6tBqQNys5y9AHpX/wdWb36cupmysccmO54cmvfVIuVnOgxFMxlZpBhI4B/TGEeioED6gwJs092TaXBKs2eR5fVuwevZISQOpoH07cmp2bN7fwXQpgdVU6I=
- BOB_DOCUMENTATION_SERVER=https://www.idiap.ch/software/bob/docs/latest/bioidiap/%s/master
- BOB_UPLOAD_WHEEL="--universal"
- python: 3.3
- python: 3.4
- python: 3.5
before_install:
# - sudo add-apt-repository -y ppa:biometrics/bob
- sudo apt-get update -qq
- sudo apt-get install -qq --force-yes dvipng texlive-latex-base texlive-latex-extra texlive-math-extra texlive-latex-recommended texlive-fonts-recommended
- pip install --upgrade pip
- pip install --find-links https://www.idiap.ch/software/bob/wheels/travis/ --use-wheel sphinx nose numpy scipy matplotlib coverage
- pip install --find-links https://www.idiap.ch/software/bob/wheels/travis/ --use-wheel --pre -r requirements.txt coveralls
install:
- python bootstrap-buildout.py
- ./bin/buildout buildout:develop=. buildout:extensions=bob.buildout buildout:auto-checkout=
script:
# - ./bin/python -c 'import pkg_resources; from bob.bio.base import get_config; print(get_config())'
- ./bin/coverage run --source=gridtk ./bin/nosetests -sv
- ./bin/sphinx-build -b doctest doc sphinx
- ./bin/sphinx-build -b html doc sphinx
after_success:
- coveralls
- wget https://raw.githubusercontent.com/bioidiap/bob.extension/master/scripts/upload-{sphinx,wheel}.sh
- chmod a+x upload-sphinx.sh upload-wheel.sh
- ./upload-sphinx.sh
- ./upload-wheel.sh
include README.rst bootstrap-buildout.py buildout.cfg version.txt
include README.rst bootstrap-buildout.py buildout.cfg develop.cfg version.txt
recursive-include doc *.rst
recursive-include gridtk *.sh
.. vim: set fileencoding=utf-8 :
.. Tue 16 Aug 16:07:37 CEST 2016
.. image:: http://img.shields.io/badge/docs-stable-yellow.png
:target: http://pythonhosted.org/gridtk/index.html
.. image:: http://img.shields.io/badge/docs-latest-orange.png
:target: https://www.idiap.ch/software/bob/docs/latest/bioidiap/gridtk/master/index.html
.. image:: http://travis-ci.org/bioidiap/gridtk.svg?branch=master
:target: https://travis-ci.org/bioidiap/gridtk?branch=master
.. image:: https://coveralls.io/repos/github/bioidiap/gridtk/badge.svg?branch=master
:target: https://coveralls.io/github/bioidiap/gridtk?branch=master
.. image:: https://img.shields.io/badge/github-master-0000c0.png
:target: https://github.com/bioidiap/gridtk/tree/master
.. image:: https://gitlab.idiap.ch/bob/gridtk/badges/master/build.svg
:target: https://gitlab.idiap.ch/bob/gridtk/commits/master
.. image:: https://img.shields.io/badge/gitlab-project-0000c0.svg
:target: https://gitlab.idiap.ch/bob/gridtk
.. image:: http://img.shields.io/pypi/v/gridtk.png
:target: https://pypi.python.org/pypi/gridtk
.. image:: http://img.shields.io/pypi/dm/gridtk.png
:target: https://pypi.python.org/pypi/gridtk
======================
Parallel Job Manager
======================
This package is part of the signal-processing and machine learning toolbox
Bob_. It provides a set of python wrappers around SGE utilities like ``qsub``,
``qstat`` and ``qdel``. It interacts with these tools to submit and manage
grid jobs, making up a complete workflow ecosystem. Currently, it is set up to
work with the SGE grid at Idiap, but it can also be modified to be used in
other SGE grids.
.. warning::
The new version of gridtk was completely rewritten and is no longer compatible with older versions.
In particular, the database type has changed.
If you still have old ``submitted.db``, ``success.db`` or ``failure.db`` databases, please use an older version of gridtk to handle them.
Since version 1.0.x, a local submission system is also included. Instead of
sending jobs to the SGE grid, it executes them in parallel processes on the
local machine, using a simple scheduling system.
.. warning::
Though tested thoroughly, this version might still be unstable and the reported statuses of the grid jobs might be incorrect.
If you are in doubt that a status is correct, please double-check with other grid utilities (like ``bin/grid qmon``).
In case you find any problems, please report them using the `bug reporting system <http://github.com/bioidiap/gridtk/issues>`_.
.. note::
In the current version, gridtk is compatible with Python 3.
However, due to limitations of the working environment, the grid functionality is not tested with Python 3.
With Python 2.7, everything should work fine.
Installation
------------
Follow our `installation`_ instructions. Then, using the Python interpreter
provided by the distribution, bootstrap and buildout this package::
$ python bootstrap-buildout.py
$ bin/buildout
$ bin/sphinx-build doc sphinx
$ firefox sphinx/index.html
to create and open the documentation, which includes even more information than given in this README.
Submitting jobs to the SGE grid
+++++++++++++++++++++++++++++++
Every time you interact with the Job Manager, a local database file (normally named ``submitted.sql3``) is read or written, so that its state is preserved across separate calls.
The database contains all the information about jobs that the Job Manager requires to:
* submit jobs of any kind
* probe for submitted jobs
* query SGE for submitted jobs
* identify problems with submitted jobs
* cleanup logs from submitted jobs
* easily re-submit jobs if problems occur
* support parametric (array) jobs
* submit jobs with dependencies, which automatically get killed on failures
Many of these features are also achievable using the stock SGE utilities; the Job Manager just makes them dead simple.
If you really want to use the stock SGE utilities, gridtk defines wrapper scripts that allow you to use ``qsub``, ``qstat`` and ``qdel`` without needing the SETSHELL command.
For example, you can easily use ``qstat.py`` to query the list of your jobs running in the SGE grid, as shown below.
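A minimal example, assuming the buildout ``bin`` directory (where these wrappers are created) is on your ``PATH``; the exact output depends on your SGE setup::

$ qstat.py

This prints the same job listing that SGE's ``qstat`` would, without requiring the SETSHELL command; the ``qsub`` and ``qdel`` wrappers behave analogously.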
Submitting a simple job
-----------------------
To interact with the Job Manager, we use the ``jman`` utility.
Make sure your shell environment is set up so that it can be reached without typing the full path.
The first task you may need to pursue is to submit jobs.
Here is how::
$ jman -vv submit myscript.py --help
... Added job '<Job: 1> : submitted -- /usr/bin/python myscript.py --help' to the database
... Submitted job '<Job: 6151645> : queued -- /usr/bin/python myscript.py --help' to the SGE grid.
.. note::
The ``submit`` command of the Job Manager submits a job that will run in a Python environment.
This is not the only way to submit a job, though: you can also use ``submit`` to run a command as a self-sufficient application (see the sketch right after this note).
Read the full help message of ``jman`` for details and instructions.
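As mentioned in the note above, ``submit`` can also run a command as a self-sufficient application. A sketch of such a call (the exact behavior may vary; check ``jman submit --help``), using ``--`` to separate the command from ``jman``'s own options, as in the dependency example further below::

$ jman -vv submit -- /bin/ls -l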
Submitting a parametric job
---------------------------
Parametric or array jobs are jobs that execute in the same way, except for the environment variable ``SGE_TASK_ID``, which changes for every job.
This way, your program controls which part of the full job has to be executed in each (parallel) instance (see the sketch at the end of this section).
It is great for forking thousands of jobs into the grid.
The next example sends 10 copies of the ``myscript.py`` job to the grid with the same parameters.
Only the variable ``SGE_TASK_ID`` changes between them::
$ jman -vv submit -t 10 myscript.py --help
... Added job '<Job: 2> : submitted -- /usr/bin/python myscript.py --help' to the database
... Submitted job '<Job: 6151646> : queued -- /usr/bin/python myscript.py --help' to the SGE grid.
The ``-t`` option in ``jman`` accepts different kinds of job array descriptions.
Have a look at the help documentation for details with ``jman --help``.
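As promised, here is a hypothetical sketch (not part of gridtk) of how a submitted script could split its work based on ``SGE_TASK_ID``::

#!/bin/bash
# each of the 10 copies receives a different SGE_TASK_ID (here: 1..10)
# and processes only its own slice of the work
echo "processing chunk ${SGE_TASK_ID} of 10"

SGE sets this variable for each task of the array job; reading it from the environment is all the script needs to do.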
Probing for jobs
----------------
Once the job has been submitted, you will notice that a database file (by default called ``submitted.sql3``) has been created in the current working directory.
It contains the information about the job you just submitted::
$ jman list
job-id               queue     status         job-name             dependencies                   submitted command line
==================== ========= ============== ==================== ============================== ===========================================
6151645              all.q     queued         None                 []                             /usr/bin/python myscript.py --help
6151646 [1-10:1]     all.q     queued         None                 []                             /usr/bin/python myscript.py --help
From this dump you can see the SGE job identifier including the number of array jobs, the queue the job has been submitted to, the current status of the job in the SGE grid, the dependencies of the job and the command that was executed in the SGE grid.
The ``list`` command from ``jman`` will show the current status of the job, which is updated automatically as soon as the grid job finishes.
Several calls to ``list`` might therefore show different statuses of your jobs as they progress through the grid.
.. note::
This feature is new since version 1.0.0. There is no need to refresh the
database any more.
Submitting dependent jobs
-------------------------
Sometimes, the execution of one job might depend on the execution of another job.
The JobManager can take care of this, simply by adding the id of the job that we have to wait for::
$ jman -vv submit --dependencies 6151645 -- /usr/bin/python myscript.py --help
... Added job '<Job: 3> : submitted -- /usr/bin/python myscript.py --help' to the database
... Submitted job '<Job: 6151647> : queued -- /usr/bin/python myscript.py --help' to the SGE grid.
Now, the new job will only be run after the first one finished.
.. note::
Please note the ``--`` between the list of dependencies and the command.
Inspecting log files
--------------------
When jobs finish, the result of the executed job will be shown in the ``list`` output.
In case it is non-zero, you might want to inspect the log files as follows::
$ jman report --errors-only
...
<Job: 6151646 - 'jman'> : failure (2) -- /usr/bin/python myscript.py --help
/usr/bin/python: can't open file 'myscript.py': [Errno 2] No such file or directory
Hopefully, that helps in debugging the problem!
Re-submitting the job
---------------------
If you are convinced the job did not work because of external conditions (e.g. temporary network outage), you may re-submit it, *exactly* like it was submitted the first time::
$ jman -vv resubmit --job-id 6151645
... Deleting job '6151645'
... Submitted job '<Job: 6151673> : queued -- /usr/bin/python myscript.py --help' to the SGE grid.
By default, the log files of the old job are deleted during re-submission.
If for any reason you want to keep the old log files, use the ``--keep-logs`` option.
Notice the new job identifier has changed as expected.
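If, instead, you want to keep the old log files, the same documented options combine as follows (the output is analogous to the one above)::

$ jman -vv resubmit --job-id 6151645 --keep-logs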
Stopping a grid job
-------------------
In case you find an error in the code of a grid job that is currently executing, you might want to kill the job in the grid.
For this purpose, you can use the command::
$ jman stop
The job is removed from the grid, but all log files are still available.
A common use case is to stop the grid job, fix the bugs, and re-submit it.
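A sketch of that workflow, using only commands shown in this README::

$ jman -vv stop
# ... fix the bug in your script ...
$ jman -vv resubmit --job-id 6151645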
Cleaning-up
-----------
If the job in question will not work no matter how many times we re-submit it, you may just want to clean it up and do something else.
The Job Manager is here for you again::
$ jman -vvv delete
... Deleting job '8258327' from the database.
In case jobs are still running or queued in the grid, they will be stopped before they are removed from the database.
By default, all logs will be deleted together with the job.
Inspecting the current directory will now show that everything concerning the jobs is gone.
New from version 1.0
++++++++++++++++++++
If you know gridtk from versions below 1.0, you will notice some differences.
The main advantages of the new version are:
* When run in the grid, the jobs now register themselves in the database.
There is no need to refresh the database by hand any more.
This also means that the result (an integer exit value) of the job execution is available once the job has finished, so there is no need to rely on the error log any more.
.. note::
In case the job died in the grid, e.g., because of a timeout, this mechanism unfortunately still doesn't work.
Please try to use ``jman -vv communicate`` to see if these kinds of errors happened.
* Jobs are now stored in a proper ``.sql3`` database.
In addition to the jobs, each array job now has its own SQL model, which allows the status and result of each array job to be stored.
To ``list`` the array jobs as well, please use the ``--print-array-jobs`` option.
* In case you have submitted a long list of commands with inter-dependencies, the Job Manager can now kill waiting jobs in case a dependent job failed.
Simply use the ``--stop-on-failure`` option during the submission of the jobs.
* The verbosity of gridtk can now be selected in finer detail.
Simply use the ``-v`` option several times to get 0: ERROR, 1: WARNING, 2: INFO, 3: DEBUG output.
A good choice is probably ``-vv``, which enables INFO output.
Please note that this setting is not propagated to the jobs that run in the grid.
.. note::
The ``-v`` options must directly follow the ``jman`` command, i.e., they have to be given before the action (like ``submit`` or ``list``) is chosen.
The ``--database`` option (which defaults to ``submitted.sql3``) is also such a global option and has to be given at the same position (see the combined example after this list).
* One important improvement is that you now have the possibility to execute the jobs **in parallel** on the **local machine**.
Please see next section for details.
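Putting several of the new options together (a sketch; ``myjobs.sql3`` is just a hypothetical alternative to the default ``submitted.sql3`` database file)::

$ jman -vv --database myjobs.sql3 list --print-array-jobs
$ jman -vv --database myjobs.sql3 submit --stop-on-failure --dependencies 6151645 -- /usr/bin/python myscript.py --help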
Running jobs on the local machine
---------------------------------
The JobManager is designed to provide essentially the same interface whether you submit jobs locally or to the SGE grid.
To submit jobs locally, just add the ``--local`` option to the ``jman`` command::
$ jman --local -vv submit /usr/bin/python myscript.py --help
One important difference to the grid submission is that the jobs that are submitted to the local machine **do not run immediately**, but are only collected in the ``submitted.sql3`` database.
To run the collected jobs using 4 parallel processes, simply use::
$ jman --local -vv run-scheduler --parallel 4
and all jobs that have not run yet are executed, keeping an eye on the dependencies.
.. note::
The scheduler will run until it is stopped using Ctrl-C.
Hence, as soon as you submit new (local) jobs to the database, it will continue running these jobs.
If you want the scheduler to stop after all scheduled jobs ran, please use the ``--die-when-finished`` option.
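For example, to process everything currently in the database with four parallel processes and then exit, the documented options combine like this (a sketch)::

$ jman --local -vv run-scheduler --parallel 4 --die-when-finished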
Another difference is that, by default, the jobs write their output to the console and not into log files.
If you want the log file behavior back, specify the log directory during submission::
$ jman --local -vv submit --log-dir logs myscript.py --help
Of course, you can choose a different log directory (also for the SGE submission).
Furthermore, the job identifiers during local submission usually start from 1 and increase.
Also, during local re-submission, the job ID does not change.
Using the local machine for debugging
-------------------------------------
One possible use case for the local job submission is the re-submission of jobs to the local machine for debugging.
In this case, you might re-submit the grid job locally::

$ jman --local -vv resubmit --job-id 6151646 --keep-logs

(as mentioned above, no new ID is assigned) and run the local scheduler::

$ jman --local -vv run-scheduler --no-log-files --job-ids 6151646

to print the output and the error to the console instead of to log files.

Contact
-------

For questions or reporting issues to this software package, contact our
development `mailing list`_.
.. Place your references here:
.. _bob: https://www.idiap.ch/software/bob
.. _installation: https://gitlab.idiap.ch/bob/bob/wikis/Installation
.. _mailing list: https://groups.google.com/forum/?fromgroups#!forum/bob-devel
@@ -25,7 +25,10 @@ import tempfile
from optparse import OptionParser
tmpeggs = tempfile.mkdtemp()
__version__ = '2015-07-01'
# See zc.buildout's changelog if this version is up to date.
tmpeggs = tempfile.mkdtemp(prefix='bootstrap-')
usage = '''\
[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
@@ -40,8 +43,9 @@ this script from going over the network.
'''
parser = OptionParser(usage=usage)
parser.add_option("-v", "--version", help="use a specific zc.buildout version")
parser.add_option("--version",
action="store_true", default=False,
help=("Return bootstrap.py version."))
parser.add_option("-t", "--accept-buildout-test-releases",
dest='accept_buildout_test_releases',
action="store_true", default=False,
@@ -59,25 +63,33 @@ parser.add_option("-f", "--find-links",
parser.add_option("--allow-site-packages",
action="store_true", default=False,
help=("Let bootstrap.py use existing site packages"))
parser.add_option("--buildout-version",
help="Use a specific zc.buildout version")
parser.add_option("--setuptools-version",
help="use a specific setuptools version")
help="Use a specific setuptools version")
parser.add_option("--setuptools-to-dir",
help=("Allow for re-use of existing directory of "
"setuptools versions"))
options, args = parser.parse_args()
if options.version:
print("bootstrap.py version %s" % __version__)
sys.exit(0)
######################################################################
# load/install setuptools
try:
if options.allow_site_packages:
import setuptools
import pkg_resources
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
ez = {}
exec(urlopen('https://bootstrap.pypa.io/ez_setup.py').read(), ez)
if os.path.exists('ez_setup.py'):
exec(open('ez_setup.py').read(), ez)
else:
exec(urlopen('https://bootstrap.pypa.io/ez_setup.py').read(), ez)
if not options.allow_site_packages:
# ez_setup imports site, which adds site packages
@@ -88,12 +100,19 @@ if not options.allow_site_packages:
# We can't remove these reliably
if hasattr(site, 'getsitepackages'):
for sitepackage_path in site.getsitepackages():
sys.path[:] = [x for x in sys.path if sitepackage_path not in x]
# Strip all site-packages directories from sys.path that
# are not sys.prefix; this is because on Windows
# sys.prefix is a site-package directory.
if sitepackage_path != sys.prefix:
sys.path[:] = [x for x in sys.path
if sitepackage_path not in x]
setup_args = dict(to_dir=tmpeggs, download_delay=0)
if options.setuptools_version is not None:
setup_args['version'] = options.setuptools_version
if options.setuptools_to_dir is not None:
setup_args['to_dir'] = options.setuptools_to_dir
ez['use_setuptools'](**setup_args)
import setuptools
@@ -110,7 +129,12 @@ for path in sys.path:
ws = pkg_resources.working_set
setuptools_path = ws.find(
pkg_resources.Requirement.parse('setuptools')).location
# Fix sys.path here as easy_install.pth added before PYTHONPATH
cmd = [sys.executable, '-c',
'import sys; sys.path[0:0] = [%r]; ' % setuptools_path +
'from setuptools.command.easy_install import main; main()',
'-mZqNxd', tmpeggs]
@@ -123,11 +147,8 @@ find_links = os.environ.get(
if find_links:
cmd.extend(['-f', find_links])
setuptools_path = ws.find(
pkg_resources.Requirement.parse('setuptools')).location
requirement = 'zc.buildout'
version = options.version
version = options.buildout_version
if version is None and not options.accept_buildout_test_releases:
# Figure out the most recent final version of zc.buildout.
import setuptools.package_index
@@ -167,7 +188,7 @@ if version:
cmd.append(requirement)
import subprocess
if subprocess.call(cmd, env=dict(os.environ, PYTHONPATH=setuptools_path)) != 0:
if subprocess.call(cmd) != 0:
raise Exception(
"Failed to execute command:\n%s" % repr(cmd)[1:-1])
; vim: set fileencoding=utf-8 :
; Tue 16 Aug 16:07:37 CEST 2016
[buildout]
parts = scripts
develop = .
; required packages: our package
; optional package: ipdb (for debugging)
eggs = gridtk
ipdb
eggs = gridtk
extensions = bob.buildout
newest = false
verbose = true
[scripts]
recipe = bob.buildout:scripts
dependent-scripts = true
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Manuel Guenther <manuel.guenther@idiap.ch>
# Tue Nov 4 18:34:42 CET 2014