Commit 920e11b2 authored by Tiago de Freitas Pereira

Merge branch 'pre-release' into 'master'

Pre release



See merge request !5
parents 760f5fd8 7f04c0a7
Pipeline #4259 passed with stages in 46 minutes and 30 seconds
# This build file is defined in two parts: 1) a generic set of instructions you
# probably **don't** need to change and 2) a part you may have to tune to your
# project. It heavily uses template features from YAML to help you in only
# changing a minimal part of it and avoid code duplication to a maximum while
# still providing a nice pipeline display on your package.
# This build file heavily uses template features from YAML so it is generic
# enough for any Bob project. Don't modify it unless you know what you're
# doing.
# 1) Generic instructions (only change if you know what you're doing)
# -------------------------------------------------------------------
# Definition of our build pipeline
stages:
- build
- test
- docs
- wheels
- deploy
# Global variables
variables:
CONDA_PREFIX: env
# ---------
# Templates
# ---------
# Template for the build stage
# Needs to run on all supported architectures, platforms and python versions
@@ -27,104 +22,98 @@ variables:
stage: build
before_script:
- git clean -ffdx
- curl --silent https://gitlab.idiap.ch/bob/bob/snippets/7/raw | tr -d '\r' > bootstrap-conda.sh
- chmod 755 ./bootstrap-conda.sh
- ./bootstrap-conda.sh ${CONDA_FOLDER} ${PYTHON_VER} ${CONDA_PREFIX}
variables: &build_variables
BOB_DOCUMENTATION_SERVER: "http://www.idiap.ch/software/bob/docs/latest/bob/%s/master/"
- mkdir _ci
- curl --silent "https://gitlab.idiap.ch/bob/bob.admin/raw/master/gitlab/install.sh" > _ci/install.sh
- chmod 755 _ci/install.sh
- ./_ci/install.sh _ci #updates
- ./_ci/before_build.sh
script:
- ./bin/buildout
- if [ -x ./bin/bob_dbmanage.py ]; then ./bin/bob_dbmanage.py all download --force; fi
- ./bin/sphinx-build doc sphinx
- ./bin/python setup.py bdist_wheel --python-tag ${WHEEL_TAG}
- ./_ci/build.sh
after_script:
- rm -rf ${CONDA_PREFIX}
- ./_ci/after_build.sh
artifacts:
expire_in: 1 day
expire_in: 1 week
paths:
- bootstrap-conda.sh
- _ci/
- dist/
- sphinx/
# Template for building on a Linux machine
.build_linux_template: &linux_build_job
<<: *build_job
variables: &linux_build_variables
<<: *build_variables
CONDA_FOLDER: "/local/conda"
CFLAGS: "-D_GLIBCXX_USE_CXX11_ABI=0 -coverage"
CXXFLAGS: "-D_GLIBCXX_USE_CXX11_ABI=0 -coverage"
# Template for building on a Mac OSX machine
.build_mac_template: &macosx_build_job
<<: *build_job
variables: &macosx_build_variables
<<: *build_variables
CONDA_FOLDER: "/opt/conda"
MACOSX_DEPLOYMENT_TARGET: "10.9"
CFLAGS: "-pthread -coverage"
CXXFLAGS: "-pthread -coverage"
LDFLAGS: "-lpthread"
# Template for the test stage - re-install from uploaded wheels
# Template for the test stage - re-installs from uploaded wheels
# Needs to run on all supported architectures, platforms and python versions
.test_template: &test_job
stage: test
before_script:
- ./bootstrap-conda.sh ${CONDA_FOLDER} ${PYTHON_VER} ${CONDA_PREFIX}
- source ${CONDA_FOLDER}/bin/activate ${CONDA_PREFIX}
- pip install --use-wheel --no-index --pre dist/*.whl
- ./_ci/install.sh _ci #updates
- ./_ci/before_test.sh
script:
- cd ${CONDA_PREFIX}
- python -c "from ${CI_PROJECT_NAME} import get_config; print(get_config())"
- coverage run --source=${CI_PROJECT_NAME} ./bin/nosetests -sv ${CI_PROJECT_NAME}
- coverage report
- sphinx-build -b doctest ../doc ../sphinx
- ./_ci/test.sh
after_script:
- rm -rf ${CONDA_PREFIX}
- ./_ci/after_test.sh
# Template for the wheel uploading stage
# Needs to run against one combination of python 2.x and 3.x if it is a python
# only package, otherwise, needs to run in both pythons to all supported
# architectures (Linux and Mac OSX 64-bit)
# Needs to run against one supported architecture, platform and python version
.wheels_template: &wheels_job
stage: wheels
environment: intranet
only:
- master
- tags
- /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
before_script:
- curl --silent https://gitlab.idiap.ch/bob/bob/snippets/8/raw | tr -d '\r' > upload-wheel.sh
- chmod 755 upload-wheel.sh
- ./_ci/install.sh _ci #updates
- ./_ci/before_wheels.sh
script:
- ./upload-wheel.sh
- ./_ci/wheels.sh
after_script:
- ./_ci/after_wheels.sh
# Template for (latest) documentation upload stage
# Only one real job needs to do this
.docs_template: &docs_job
stage: docs
environment: intranet
only:
- master
before_script:
- curl --silent https://gitlab.idiap.ch/bob/bob/snippets/9/raw | tr -d '\r' > upload-sphinx.sh
- chmod 755 upload-sphinx.sh
- ./_ci/install.sh _ci #updates
- ./_ci/before_docs.sh
script:
- ./_ci/docs.sh
after_script:
- ./_ci/after_docs.sh
# Template for the deployment stage - re-installs from uploaded wheels
# Needs to run on a single architecture only
# Will deploy your package to PyPI and other required services
# Only runs for tags
.deploy_template: &deploy_job
stage: deploy
environment: internet
only:
- /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
except:
- branches
before_script:
- ./_ci/install.sh _ci #updates
- ./_ci/before_deploy.sh
script:
- ./upload-sphinx.sh
- ./_ci/deploy.sh
after_script:
- ./_ci/after_deploy.sh
# 2) Package specific instructions (you may tune this if needed)
# --------------------------------------------------------------
# -------------
# Build Targets
# -------------
# Linux + Python 2.7: Builds, tests, uploads wheel
# Linux + Python 2.7: Builds, tests, uploads wheel and deploys (if needed)
build_linux_27:
<<: *linux_build_job
<<: *build_job
variables: &linux_27_build_variables
<<: *linux_build_variables
PYTHON_VER: "2.7"
PYTHON_VERSION: "2.7"
WHEEL_TAG: "py27"
tags:
- conda-linux
@@ -139,6 +128,15 @@ test_linux_27:
wheels_linux_27:
<<: *wheels_job
variables: *linux_27_build_variables
dependencies:
- build_linux_27
tags:
- conda-linux
deploy_linux_27:
<<: *deploy_job
variables: *linux_27_build_variables
dependencies:
- build_linux_27
tags:
@@ -147,10 +145,9 @@ wheels_linux_27:
# Linux + Python 3.4: Builds and tests
build_linux_34:
<<: *linux_build_job
<<: *build_job
variables: &linux_34_build_variables
<<: *linux_build_variables
PYTHON_VER: "3.4"
PYTHON_VERSION: "3.4"
WHEEL_TAG: "py3"
tags:
- conda-linux
@@ -164,12 +161,11 @@ test_linux_34:
- conda-linux
# Linux + Python 3.5: Builds, tests, uploads wheel
# Linux + Python 3.5: Builds, tests and uploads wheel
build_linux_35:
<<: *linux_build_job
<<: *build_job
variables: &linux_35_build_variables
<<: *linux_build_variables
PYTHON_VER: "3.5"
PYTHON_VERSION: "3.5"
WHEEL_TAG: "py3"
tags:
- conda-linux
@@ -184,6 +180,7 @@ test_linux_35:
wheels_linux_35:
<<: *wheels_job
variables: *linux_35_build_variables
dependencies:
- build_linux_35
tags:
@@ -191,6 +188,7 @@ wheels_linux_35:
docs_linux_35:
<<: *docs_job
variables: *linux_35_build_variables
dependencies:
- build_linux_35
tags:
@@ -199,10 +197,9 @@ docs_linux_35:
# Mac OSX + Python 2.7: Builds and tests
build_macosx_27:
<<: *macosx_build_job
<<: *build_job
variables: &macosx_27_build_variables
<<: *macosx_build_variables
PYTHON_VER: "2.7"
PYTHON_VERSION: "2.7"
WHEEL_TAG: "py27"
tags:
- conda-macosx
@@ -218,10 +215,9 @@ test_macosx_27:
# Mac OSX + Python 3.4: Builds and tests
build_macosx_34:
<<: *macosx_build_job
<<: *build_job
variables: &macosx_34_build_variables
<<: *macosx_build_variables
PYTHON_VER: "3.4"
PYTHON_VERSION: "3.4"
WHEEL_TAG: "py3"
tags:
- conda-macosx
@@ -237,10 +233,9 @@ test_macosx_34:
# Mac OSX + Python 3.5: Builds and tests
build_macosx_35:
<<: *macosx_build_job
<<: *build_job
variables: &macosx_35_build_variables
<<: *macosx_build_variables
PYTHON_VER: "3.5"
PYTHON_VERSION: "3.5"
WHEEL_TAG: "py3"
tags:
- conda-macosx
@@ -251,4 +246,4 @@ test_macosx_35:
dependencies:
- build_macosx_35
tags:
- conda-macosx
\ No newline at end of file
- conda-macosx
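The comments at the top of this build file refer to "template features from YAML": hidden template jobs are defined once with anchors and then merged into the concrete jobs. The following is a minimal sketch of that mechanism only, with made-up job, variable and tag names that are not part of this configuration:

# Hidden template (the leading dot keeps GitLab CI from scheduling it as a job)
.example_build: &example_build_job        # anchor on the whole mapping
  stage: build
  variables: &example_build_variables     # anchor on the shared variables
    EXAMPLE_FLAG: "1"
  script:
    - echo "building"
# Concrete job: merge the template, then add or override what differs
example_build_py27:
  <<: *example_build_job                  # pulls in stage, variables and script
  variables:
    <<: *example_build_variables          # keeps the shared variables...
    PYTHON_VERSION: "2.7"                 # ...and sets the job-specific ones
  tags:
    - example-runner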
@@ -3,12 +3,8 @@
# Tiago de Freitas Pereira <tiago.pereira@idiap.ch>
# Wed 20 July 14:43:22 CEST 2016
"""
Verification API for bob.db.voxforge
"""
from bob.bio.base.database.file import BioFile
from bob.bio.video.utils.FrameSelector import FrameSelector
from ..utils import FrameSelector
class VideoBioFile(BioFile):
@@ -19,8 +15,5 @@ class VideoBioFile(BioFile):
"""
super(VideoBioFile, self).__init__(client_id=client_id, path=path, file_id=file_id)
def load(self, directory=None, extension='.avi'):
return FrameSelector()(self.make_path(directory, extension))
def load(self, directory=None, extension='.avi', frame_selector=FrameSelector()):
return frame_selector(self.make_path(directory, extension))
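A minimal usage sketch of the new ``load`` signature above; the directory path below is a placeholder, while the constructor and selector arguments are taken from the tests further down:

from bob.bio.video import FrameSelector
from bob.bio.video.database import VideoBioFile

# File object pointing at a video somewhere under a (placeholder) root directory
biofile = VideoBioFile(client_id=1, file_id=1, path="data/testvideo")

# Default behaviour: the FrameSelector() from the signature above is used
all_frames = biofile.load(directory="/path/to/videos", extension=".avi")

# Explicit selector: only a spread of three frames ends up in the returned container
spread = FrameSelector(max_number_of_frames=3, selection_style="spread")
few_frames = biofile.load(directory="/path/to/videos", extension=".avi",
                          frame_selector=spread)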
@@ -42,9 +42,9 @@ class Wrapper(bob.bio.base.preprocessor.Preprocessor):
This is experimental and might cause trouble.
Use this flag with care.
read_original_data:
read_original_data: callable or ``None``
Function that loads the raw data.
If not explicitly defined the raw data will be loaded by :py:method:`bob.bio.base.database.VideoBioFile.load`
If not explicitly defined, the raw data will be loaded by :py:meth:`bob.bio.video.database.VideoBioFile.load`, using the specified ``frame_selector``.
"""
@@ -53,9 +53,16 @@ class Wrapper(bob.bio.base.preprocessor.Preprocessor):
frame_selector=utils.FrameSelector(),
quality_function=None,
compressed_io=False,
read_original_data=lambda biofile, directory, extension: biofile.load(directory, extension)
read_original_data=None
):
def _read_video_data(biofile, directory, extension):
"""Read video data using the frame_selector of this object"""
return biofile.load(directory, extension, frame_selector)
if read_original_data is None:
read_original_data = _read_video_data
# load preprocessor configuration
if isinstance(preprocessor, str):
self.preprocessor = bob.bio.base.load_resource(preprocessor, "preprocessor")
@@ -64,6 +71,7 @@ class Wrapper(bob.bio.base.preprocessor.Preprocessor):
else:
raise ValueError("The given preprocessor could not be interpreted")
bob.bio.base.preprocessor.Preprocessor.__init__(
self,
preprocessor=preprocessor,
@@ -72,7 +80,6 @@ class Wrapper(bob.bio.base.preprocessor.Preprocessor):
read_original_data=read_original_data
)
self.frame_selector = frame_selector
self.quality_function = quality_function
self.compressed_io = compressed_io
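Because ``read_original_data`` now defaults to the internal ``_read_video_data`` closure shown above, a wrapper can be constructed without passing a reader explicitly. This sketch mirrors the simplified tests further down; the directory is a placeholder:

import bob.bio.video

# The wrapper's frame selector is forwarded to VideoBioFile.load() by the
# default read_original_data closure
frame_selector = bob.bio.video.FrameSelector(max_number_of_frames=3,
                                             selection_style="spread")
preprocessor = bob.bio.video.preprocessor.Wrapper('face-detect', frame_selector,
                                                  compressed_io=False)

video_object = bob.bio.video.database.VideoBioFile(client_id=1, file_id=1,
                                                   path="data/testvideo")
video = preprocessor.read_original_data(video_object, "/path/to/videos", ".avi")
assert isinstance(video, bob.bio.video.FrameContainer)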
@@ -107,9 +114,6 @@ class Wrapper(bob.bio.base.preprocessor.Preprocessor):
preprocessed : :py:class:`bob.bio.video.FrameContainer`
A frame container that contains the preprocessed frames.
"""
if not isinstance(frames, utils.FrameContainer):
frames = self.frame_selector(frames)
annots = None
fc = utils.FrameContainer()
@@ -162,7 +166,7 @@ class Wrapper(bob.bio.base.preprocessor.Preprocessor):
**Parameters:**
frames : :py:class:`bob.bio.video.FrameContainer`
The preprocessed frames, as returned by the :py:meth:`__call__` function.
The preprocessed frames, as returned by the `__call__` function.
filename : str
The name of the preprocessed data file to write.
......
@@ -8,7 +8,7 @@ import os
class DummyBioFile(VideoBioFile):
def load(self, directory=None, extension='.pgm'):
def load(self, directory=None, extension='.pgm', frame_selector=None):
file_name = self.make_path(directory, extension)
fc = FrameContainer()
fc.add(os.path.basename(file_name), bob.io.base.load(file_name))
......
@@ -44,16 +44,12 @@ def test_annotations():
def test_detect():
def read_original_data(video_object, original_path, original_extension):
return frame_selector(video_object.make_path(original_path, original_extension))
# load test video
original_path = pkg_resources.resource_filename("bob.bio.video.test", "")
video_object = bob.bio.video.database.VideoBioFile(client_id=1, file_id=1, path="data/testvideo")
frame_selector = bob.bio.video.FrameSelector(max_number_of_frames=3, selection_style="spread")
preprocessor = bob.bio.video.preprocessor.Wrapper('face-detect', frame_selector, compressed_io=False,
read_original_data=read_original_data)
preprocessor = bob.bio.video.preprocessor.Wrapper('face-detect', frame_selector, compressed_io=False)
video = preprocessor.read_original_data(video_object, original_path,".avi")
assert isinstance(video, bob.bio.video.FrameContainer)
@@ -71,15 +67,11 @@ def test_detect():
def test_flandmark():
def read_original_data(video_object, original_path, original_extension):
return frame_selector(video_object.make_path(original_path, original_extension))
original_path = pkg_resources.resource_filename("bob.bio.video.test", "")
video_object = bob.bio.video.database.VideoBioFile(client_id=1, file_id=1, path="data/testvideo")
frame_selector = bob.bio.video.FrameSelector(max_number_of_frames=3, selection_style="spread")
preprocessor = bob.bio.video.preprocessor.Wrapper('landmark-detect', frame_selector, compressed_io=False,
read_original_data=read_original_data)
preprocessor = bob.bio.video.preprocessor.Wrapper('landmark-detect', frame_selector, compressed_io=False)
video = preprocessor.read_original_data(video_object, original_path, ".avi")
assert isinstance(video, bob.bio.video.FrameContainer)
......
@@ -28,7 +28,6 @@ develop = src/bob.extension
src/bob.learn.em
src/bob.measure
src/bob.db.base
src/bob.bio.db
src/bob.db.atnt
src/bob.bio.base
src/bob.learn.boosting
@@ -60,7 +59,6 @@ bob.learn.linear = git https://gitlab.idiap.ch/bob/bob.learn.linear
bob.learn.em = git https://gitlab.idiap.ch/bob/bob.learn.em
bob.measure = git https://gitlab.idiap.ch/bob/bob.measure
bob.db.base = git https://gitlab.idiap.ch/bob/bob.db.base
bob.bio.db = git https://gitlab.idiap.ch/bob/bob.bio.db
bob.db.atnt = git https://gitlab.idiap.ch/bob/bob.db.atnt
bob.bio.base = git https://gitlab.idiap.ch/bob/bob.bio.base
bob.bio.face = git https://gitlab.idiap.ch/bob/bob.bio.face
......
@@ -111,7 +111,7 @@ pygments_style = 'sphinx'
# Some variables which are useful for generated material
project_variable = project.replace('.', '_')
short_description = u'Run video face recognition algorithms'
short_description = u'Tools for running biometric recognition experiments'
owner = [u'Idiap Research Institute']
@@ -216,8 +216,13 @@ autodoc_default_flags = [
]
# For inter-documentation mapping:
from bob.extension.utils import link_documentation
intersphinx_mapping = link_documentation()
from bob.extension.utils import link_documentation, load_requirements
sphinx_requirements = "./extra-intersphinx.txt"
if os.path.exists(sphinx_requirements):
intersphinx_mapping = link_documentation(additional_packages=load_requirements(sphinx_requirements))
else:
intersphinx_mapping = link_documentation()
# We want to remove all private (i.e. _. or __.__) members
# that are not in the list of accepted functions
@@ -237,4 +242,4 @@ def member_function_test(app, what, name, obj, skip, options):
return False
def setup(app):
app.connect('autodoc-skip-member', member_function_test)
\ No newline at end of file
app.connect('autodoc-skip-member', member_function_test)
python
numpy
bob.bio.gmm
\ No newline at end of file
@@ -53,7 +53,7 @@ Hence, when you want to run an experiment using the video wrapper classes, you m
Databases
~~~~~~~~~
All video databases defined here rely on the :py:class:`bob.bio.db.BioDatabase` interface, which in turn uses the :ref:`verification_databases`.
All video databases defined here rely on the :py:class:`bob.bio.db.BioDatabase` interface, which in turn uses the `verification_databases <https://github.com/idiap/bob/wiki/Packages>`_.
After downloading and extracting the original data of the data sets, it is necessary that the scripts know where the data was installed.
For this purpose, the ``./bin/verify.py`` script can read a special file in which those directories are stored; see :ref:`bob.bio.base.installation`.
......