Commit 8bf52edf authored by André Anjos

Merge branch 'update' into 'master'

Update

See merge request !1
parents 05346f9a 2d2968ec
Pipeline #5832 passed with stages
in 16 minutes and 58 seconds
@@ -19,3 +19,4 @@ build
src/
logs/
*.sql3
bob/db/hci_tagging/data/
# This build file heavily uses template features from YAML so it is generic
# enough for any Bob project. Don't modify it unless you know what you're
# doing.
# Definition of our build pipeline
stages:
- build
- test
- docs
- wheels
- deploy
# ---------
# Templates
# ---------
# Template for the build stage
# Needs to run on all supported architectures, platforms and python versions
.build_template: &build_job
stage: build
before_script:
- git clean -ffdx
- mkdir _ci
- curl --silent "https://gitlab.idiap.ch/bob/bob.admin/raw/master/gitlab/install.sh" > _ci/install.sh
- chmod 755 _ci/install.sh
- ./_ci/install.sh _ci #updates
- ./_ci/before_build.sh
script:
- ./_ci/build.sh hci_tagging
after_script:
- ./_ci/after_build.sh
artifacts:
expire_in: 1 week
paths:
- _ci/
- dist/
- sphinx/
# Template for the test stage - re-installs from uploaded wheels
# Needs to run on all supported architectures, platforms and python versions
.test_template: &test_job
stage: test
before_script:
- ./_ci/install.sh _ci #updates
- ./_ci/before_test.sh
script:
- ./_ci/test.sh
after_script:
- ./_ci/after_test.sh
# Template for the wheel uploading stage
# Needs to run against one supported architecture, platform and python version
.wheels_template: &wheels_job
stage: wheels
environment: intranet
only:
- master
- /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
before_script:
- ./_ci/install.sh _ci #updates
- ./_ci/before_wheels.sh
script:
- ./_ci/wheels.sh
after_script:
- ./_ci/after_wheels.sh
# Template for (latest) documentation upload stage
# Only one real job needs to do this
.docs_template: &docs_job
stage: docs
environment: intranet
only:
- master
before_script:
- ./_ci/install.sh _ci #updates
- ./_ci/before_docs.sh
script:
- ./_ci/docs.sh
after_script:
- ./_ci/after_docs.sh
# Template for the deployment stage - re-installs from uploaded wheels
# Needs to run on a single architecture only
# Will deploy your package to PyPI and other required services
# Only runs for tags
.deploy_template: &deploy_job
stage: deploy
environment: internet
only:
- /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
except:
- branches
before_script:
- ./_ci/install.sh _ci #updates
- ./_ci/before_deploy.sh
script:
- ./_ci/deploy.sh
after_script:
- ./_ci/after_deploy.sh
# -------------
# Build Targets
# -------------
# Linux + Python 2.7: Builds, tests, uploads wheel and deploys (if needed)
build_linux_27:
<<: *build_job
variables: &linux_27_build_variables
PYTHON_VERSION: "2.7"
WHEEL_TAG: "py27"
tags:
- conda-linux
test_linux_27:
<<: *test_job
variables: *linux_27_build_variables
dependencies:
- build_linux_27
tags:
- conda-linux
wheels_linux_27:
<<: *wheels_job
variables: *linux_27_build_variables
dependencies:
- build_linux_27
tags:
- conda-linux
deploy_linux_27:
<<: *deploy_job
variables: *linux_27_build_variables
dependencies:
- build_linux_27
tags:
- conda-linux
# Linux + Python 3.4: Builds and tests
build_linux_34:
<<: *build_job
variables: &linux_34_build_variables
PYTHON_VERSION: "3.4"
WHEEL_TAG: "py3"
tags:
- conda-linux
test_linux_34:
<<: *test_job
variables: *linux_34_build_variables
dependencies:
- build_linux_34
tags:
- conda-linux
# Linux + Python 3.5: Builds, tests and uploads wheel
build_linux_35:
<<: *build_job
variables: &linux_35_build_variables
PYTHON_VERSION: "3.5"
WHEEL_TAG: "py3"
tags:
- conda-linux
test_linux_35:
<<: *test_job
variables: *linux_35_build_variables
dependencies:
- build_linux_35
tags:
- conda-linux
wheels_linux_35:
<<: *wheels_job
variables: *linux_35_build_variables
dependencies:
- build_linux_35
tags:
- conda-linux
docs_linux_35:
<<: *docs_job
variables: *linux_35_build_variables
dependencies:
- build_linux_35
tags:
- conda-linux
# Mac OSX + Python 2.7: Builds and tests
build_macosx_27:
<<: *build_job
variables: &macosx_27_build_variables
PYTHON_VERSION: "2.7"
WHEEL_TAG: "py27"
tags:
- conda-macosx
test_macosx_27:
<<: *test_job
variables: *macosx_27_build_variables
dependencies:
- build_macosx_27
tags:
- conda-macosx
# Mac OSX + Python 3.4: Builds and tests
build_macosx_34:
<<: *build_job
variables: &macosx_34_build_variables
PYTHON_VERSION: "3.4"
WHEEL_TAG: "py3"
tags:
- conda-macosx
test_macosx_34:
<<: *test_job
variables: *macosx_34_build_variables
dependencies:
- build_macosx_34
tags:
- conda-macosx
# Mac OSX + Python 3.5: Builds and tests
build_macosx_35:
<<: *build_job
variables: &macosx_35_build_variables
PYTHON_VERSION: "3.5"
WHEEL_TAG: "py3"
tags:
- conda-macosx
test_macosx_35:
<<: *test_job
variables: *macosx_35_build_variables
dependencies:
- build_macosx_35
tags:
- conda-macosx
Copyright (c) 2013, Andre Anjos - Idiap Research Institute
All rights reserved.
Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
Written by Andre Anjos <andre.anjos@idiap.ch>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
@@ -21,4 +24,4 @@ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
include README.rst bootstrap-buildout.py buildout.cfg develop.cfg LICENSE version.txt requirements.txt
recursive-include bob/db/hci_tagging *.csv *.txt *.hdf5 *.face
.. vim: set fileencoding=utf-8 :
.. Andre Anjos <andre.anjos@idiap.ch>
.. Wed 30 Sep 2015 11:03:49 CEST
.. Tue 13 Dec 18:31:43 CET 2016
================================================
 Mahnob HCI-Tagging Database Access API for Bob
================================================
.. image:: http://img.shields.io/badge/docs-stable-yellow.png
:target: http://pythonhosted.org/bob.db.hci_tagging/index.html
.. image:: http://img.shields.io/badge/docs-latest-orange.png
:target: https://www.idiap.ch/software/bob/docs/latest/bob/bob.db.hci_tagging/master/index.html
.. image:: https://gitlab.idiap.ch/bob/bob.db.hci_tagging/badges/master/build.svg
:target: https://gitlab.idiap.ch/bob/bob.db.hci_tagging/commits/master
.. image:: https://img.shields.io/badge/gitlab-project-0000c0.svg
:target: https://gitlab.idiap.ch/bob/bob.db.hci_tagging
.. image:: http://img.shields.io/pypi/v/bob.db.hci_tagging.png
:target: https://pypi.python.org/pypi/bob.db.hci_tagging
.. image:: http://img.shields.io/pypi/dm/bob.db.hci_tagging.png
:target: https://pypi.python.org/pypi/bob.db.hci_tagging
This package provides an interface to the `Mahnob HCI-Tagging dataset`_. It
is presently used to benchmark and test remote photo-plethysmography
algorithms at Idiap. This package only uses the colored videos (from Camera 1,
in AVI format) and the biological signals saved in BDF_ format.

If you decide to use this package, please consider citing `Bob`_, as a
software development environment, and the authors of the dataset::
@article{soleymani-2012,
author={Soleymani, M. and Lichtenauer, J. and Pun, T. and Pantic, M.},
journal={Affective Computing, IEEE Transactions on},
title={A Multimodal Database for Affect Recognition and Implicit Tagging},
year={2012},
volume={3},
number={1},
pages={42-55},
doi={10.1109/T-AFFC.2011.25},
month=Jan,
}
This package is part of the signal-processing and machine learning toolbox
Bob_. It contains an interface for the evaluation protocols of the `Mahnob
HCI-Tagging Dataset`_. Notice this package does not contain the raw data files
from this dataset, which need to be obtained through the link above.
Installation
------------
Follow our `installation`_ instructions. Then, using the Python interpreter
provided by the distribution, bootstrap and buildout this package::
$ python bootstrap-buildout.py
$ ./bin/buildout
Dependencies
============

This package makes use of the following important external dependencies:

* bob.ip.facedetect_: For automatically detecting faces using a boosted
  classifier based on LBPs
* mne_: For estimating the heart-rate in beats-per-minute using the
  Pan-Tompkins algorithm
* Python-EDF_ tools: to read physiological sensor information out of BDF
  files

Contact
-------

For questions or reporting issues to this software package, contact our
development `mailing list`_.

Usage
-----

You can read videos and sensor information out of the database using the
provided API.
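
A minimal sketch of such a use is shown below. It assumes the raw dataset was
downloaded to a hypothetical directory ``/path/to/mahnob-hci`` and that files
are listed through an ``objects()`` method, as in other Bob database packages
(check this package's documentation for the exact entry point)::

  import bob.db.hci_tagging

  db = bob.db.hci_tagging.Database()

  # iterate over the samples selected by Li et al. (the 'cvpr14' protocol)
  for f in db.objects(protocol='cvpr14'):
    # estimate the average heart-rate from the BDF sensor file of this session
    hr = f.estimate_heartrate_in_bpm('/path/to/mahnob-hci')
    print("%s: %.1f bpm" % (f.stem, hr))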
Annotations
===========
This package can, optionally, *automatically* annotate the following key
aspects of the Mahnob HCI-Tagging dataset:
* Average heart-rate in beats-per-minute (BPM), using the Pan-Tompkins
  algorithm as implemented by `mne`_.
* Face bounding boxes, as detected by the default detector on
`bob.ip.facedetect`_.
The annotation procedure can be launched with the following command::
$ ./bin/bob_dbmanage.py hci_tagging mkmeta
Each video, which is composed of a significant number of frames (hundreds),
takes about 5 minutes to be completely processed. If you are at Idiap, you can
launch the job on the SGE queue using the following command-line::
$ ./bin/jman sub -q q1d --io-big -t 3490 `pwd`/bin/bob_dbmanage.py hci_tagging mkmeta
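
Once the metadata has been generated, the stored values can be read back with
``bob.io.base``. The snippet below is only a sketch (the file path is
hypothetical); the dataset names and the ``units`` attribute mirror what
``mkmeta`` writes into each HDF5 file::

  import bob.io.base

  # one of the generated metadata files (hypothetical location)
  h5 = bob.io.base.HDF5File('bob/db/hci_tagging/data/session/video.hdf5')

  # average heart-rate, stored in beats-per-minute
  heartrate = h5.read('heartrate')

  # face bounding box detected on the first frame of the video
  h5.cd('face_detector')
  topleft = (h5.read('topleft_y'), h5.read('topleft_x'))
  size = (h5.read('height'), h5.read('width'))
  h5.cd('..')

  print("heart-rate: %.1f bpm, face at %s, size %s" % (heartrate, topleft, size))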
.. Place your references here:
.. _bob: https://www.idiap.ch/software/bob
.. _installation: https://www.idiap.ch/software/bob/install
.. _mailing list: https://www.idiap.ch/software/bob/discuss
.. _mahnob hci-tagging dataset: http://mahnob-db.eu/hci-tagging/
.. _bdf: http://www.biosemi.com/faq/file_format.htm
.. _bob.ip.facedetect: https://pypi.python.org/pypi/bob.ip.facedetect
.. _mne: https://pypi.python.org/pypi/mne
.. _python-edf: https://bitbucket.org/cleemesser/python-edf/
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Andre Anjos <andre.anjos@idiap.ch>
# Wed 30 Sep 2015 12:14:50 CEST
import os
from .models import *
@@ -29,25 +27,32 @@ class Database(object):
Parameters:

  protocol (:py:class:`str`, optional): If set, can take the value of either
    ``cvpr14`` or ``all``. ``cvpr14`` subselects samples used by Li et al. in
    their CVPR'14 paper for heart-rate estimation. If ``all`` is set, the
    complete database is selected.

  subset (:py:class:`str`, optional): If set, it can be either ``train``,
    ``dev`` or ``test``, or a combination of them (i.e. a list). If not set
    (default), the files from all these sets are retrieved for the ``all``
    protocol. Note that for the ``cvpr14`` protocol this has no effect, since
    no training, development and test sets have been defined in that case.


Returns: A list of :py:class:`File` objects.
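
Example (a sketch only; ``objects`` is the assumed name of this method)::

  db = Database()
  # 'train' and 'dev' partitions of the 'all' protocol
  files = db.objects(protocol='all', subset=['train', 'dev'])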
"""
proto_basedir = os.path.join('data', 'protocols')
if protocol in ('cvpr14',):
d = resource_filename(__name__, os.path.join(proto_basedir, 'cvpr14', 'li_samples_cvpr14.txt'))
with open(d, 'rt') as f: sessions = f.read().split()
return [File(**k) for k in self.metadata if k['basedir'] in sessions]
if protocol in ('all',):
if not subset:
@@ -55,16 +60,37 @@ class Database(object):
else:
files = []
if 'train' in subset:
d = resource_filename(__name__, os.path.join(proto_basedir, 'all', 'train.txt'))
with open(d, 'rt') as f: sessions = f.read().split()
files += [File(**k) for k in self.metadata if k['basedir'] in sessions]
if 'dev' in subset:
d = resource_filename(__name__, os.path.join(proto_basedir, 'all', 'dev.txt'))
with open(d, 'rt') as f: sessions = f.read().split()
files += [File(**k) for k in self.metadata if k['basedir'] in sessions]
if 'test' in subset:
d = resource_filename(__name__, os.path.join(proto_basedir, 'all', 'test.txt'))
with open(d, 'rt') as f: sessions = f.read().split()
files += [File(**k) for k in self.metadata if k['basedir'] in sessions]
return files
# gets sphinx autodoc done right - don't remove it
def __appropriate__(*args):
"""Says object was actually declared here, an not on the import module.
Parameters:
*args: An iterable of objects to modify
Resolves `Sphinx referencing issues
<https://github.com/sphinx-doc/sphinx/issues/3048>`
"""
for obj in args: obj.__module__ = __name__
__appropriate__(
File,
)
__all__ = [_ for _ in dir() if not _.startswith('_')]
@@ -46,6 +46,12 @@ def dumplist(args):
def create_meta(args):
"""Runs the face detection, heart-rate estimation, save outputs at package"""
if not args.force:
raise RuntimeError("This method will re-write the internal HDF5 files, " \
"which contain vital metadata used for generating results." \
" Make sure this is what you want to do reading the API for this " \
"package first (special attention to the method " \
":py:meth:`File.run_face_detector`).")
from . import Database
db = Database()
@@ -57,15 +63,15 @@ def create_meta(args):
objects = objects[:args.limit]
if args.grid_count:
print(len(objects))
sys.exit(0)
# if we are on a grid environment, just find what I have to process.
if 'SGE_TASK_ID' in os.environ:
pos = int(os.environ['SGE_TASK_ID']) - 1
if pos >= len(objects):
raise RuntimeError, "Grid request for job %d on a setup with %d jobs" % \
(pos, len(objects))
raise RuntimeError("Grid request for job %d on a setup with %d jobs" % \
(pos, len(objects)))
objects = [objects[pos]]
if args.selftest:
......@@ -76,34 +82,33 @@ def create_meta(args):
for obj in objects:
output = obj.make_path(basedir, '.hdf5')
if os.path.exists(output) and not args.force:
print "Skipping `%s' (meta file exists)" % obj.make_path()
print("Skipping `%s' (meta file exists)" % obj.make_path())
continue
try:
print "Creating meta data for `%s'..." % obj.make_path()
print("Creating meta data for `%s'..." % obj.make_path())
bb = obj.run_face_detector(args.directory, max_frames=1)[0]
hr = obj.estimate_heartrate_in_bpm(args.directory)
if bb and hr:
outdir = os.path.dirname(output)
if not os.path.exists(outdir): os.makedirs(outdir)
h5 = bob.io.base.HDF5File(output, 'a')
h5.create_group('face_detector')
h5.cd('face_detector')
h5.set('topleft_x', bb.topleft[1])
h5.set('topleft_y', bb.topleft[0])
h5.set('width', bb.size[1])
h5.set('height', bb.size[0])
h5.set_attribute('quality', bb.quality)
h5.cd('..')
h5.set('heartrate', hr)
h5.set_attribute('units', 'beats-per-minute', 'heartrate')
h5.close()
else:
print "Skipping `%s': Missing Bounding box and/or Heart-rate" % (obj.stem,)
print " -> Bounding box: %s" % bb
print " -> Heart-rate : %s" % hr
print("Skipping `%s': Missing Bounding box and/or Heart-rate" % (obj.stem,))
print(" -> Bounding box: %s" % bb)
print(" -> Heart-rate : %s" % hr)
except IOError as e:
print "Skipping `%s': %s" % (obj.stem, str(e))
print("Skipping `%s': %s" % (obj.stem, str(e)))
continue
finally:
@@ -129,35 +134,35 @@ def debug(args):
objects = objects[:args.limit]
if args.grid_count:
print(len(objects))
sys.exit(0)
# if we are on a grid environment, just find what I have to process.
if 'SGE_TASK_ID' in os.environ:
pos = int(os.environ['SGE_TASK_ID']) - 1
if pos >= len(objects):
raise RuntimeError, "Grid request for job %d on a setup with %d jobs" % \
(pos, len(objects))
raise RuntimeError("Grid request for job %d on a setup with %d jobs" % \
(pos, len(objects)))
objects = [objects[pos]]
basedir = 'debug'
for obj in objects:
print "Creating debug data for `%s'..." % obj.make_path()
print("Creating debug data for `%s'..." % obj.make_path())
try:
detections = obj.run_face_detector(args.directory)
# save annotated video file
output = obj.make_path(args.output_directory, '.avi')
print "Annotating video `%s'" % output
print("Annotating video `%s'" % output)
utils.annotate_video(obj.load_video(args.directory), detections, output)
print "Annotating heart-rate `%s'" % output
print("Annotating heart-rate `%s'" % output)
output = obj.make_path(args.output_directory, '.pdf')
utils.explain_heartrate(obj, args.directory, output)
except IOError as e:
print "Skipping `%s': %s" % (obj.stem, str(e))
print("Skipping `%s': %s" % (obj.stem, str(e)))
continue