Commit a4010cbb authored by Amir MOHAMMADI's avatar Amir MOHAMMADI

Initial commit

parents
Pipeline #14382 passed with stages
in 9 minutes and 3 seconds
# Editor backups and Python byte-code
*~
*.swp
*.pyc
# zc.buildout generated folders and state files
bin
eggs
parts
.installed.cfg
.mr.developer.cfg
*.egg-info
src
develop-eggs
# Generated documentation and distribution artifacts
sphinx
dist
# This build file heavily uses template features from YAML so it is generic
# enough for any Bob project. Don't modify it unless you know what you're
# doing.

# Definition of our build pipeline
# Jobs in the same stage run in parallel; stages run in this order.
stages:
  - build
  - test
  - docs
  - wheels
  - deploy
# ---------
# Templates
# ---------

# Template for the build stage
# Needs to run on all supported architectures, platforms and python versions
.build_template: &build_job
  stage: build
  before_script:
    # Start from a pristine checkout, then fetch the shared CI helper scripts.
    - git clean -ffdx
    - mkdir _ci
    - curl --silent "https://gitlab.idiap.ch/bob/bob.admin/raw/master/gitlab/install.sh" > _ci/install.sh
    - chmod 755 _ci/install.sh
    - ./_ci/install.sh _ci #updates
    - ./_ci/before_build.sh
  script:
    - ./_ci/build.sh
  after_script:
    - ./_ci/after_build.sh
  artifacts:
    # Build products consumed by later stages (test/wheels/docs/deploy).
    expire_in: 1 week
    paths:
      - _ci/
      - dist/
      - sphinx/
# Template for the test stage - re-installs from uploaded wheels
# Needs to run on all supported architectures, platforms and python versions
.test_template: &test_job
  stage: test
  before_script:
    # _ci/ comes from the build job's artifacts (see `dependencies` at usage).
    - ./_ci/install.sh _ci #updates
    - ./_ci/before_test.sh
  script:
    - ./_ci/test.sh
  after_script:
    - ./_ci/after_test.sh
# Template for the wheel uploading stage
# Needs to run against one supported architecture, platform and python version
.wheels_template: &wheels_job
  stage: wheels
  environment: intranet
  # Only upload wheels from master or from PEP-440 release tags.
  only:
    - master
    - /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
  before_script:
    - ./_ci/install.sh _ci #updates
    - ./_ci/before_wheels.sh
  script:
    - ./_ci/wheels.sh
  after_script:
    - ./_ci/after_wheels.sh
# Template for (latest) documentation upload stage
# Only one real job needs to do this
.docs_template: &docs_job
  stage: docs
  environment: intranet
  only:
    - master
  before_script:
    - ./_ci/install.sh _ci #updates
    - ./_ci/before_docs.sh
  script:
    - ./_ci/docs.sh
  after_script:
    - ./_ci/after_docs.sh
# Template for the deployment stage - re-installs from uploaded wheels
# Needs to run on a single architecture only
# Will deploy your package to PyPI and other required services
# Only runs for tags
.deploy_template: &deploy_job
  stage: deploy
  environment: internet
  # Tag pipelines only: the tag regex plus `except: branches` excludes
  # every branch build.
  only:
    - /^v\d+\.\d+\.\d+([abc]\d*)?$/ # PEP-440 compliant version (tags)
  except:
    - branches
  before_script:
    - ./_ci/install.sh _ci #updates
    - ./_ci/before_deploy.sh
  script:
    - ./_ci/deploy.sh
  after_script:
    - ./_ci/after_deploy.sh
# -------------
# Build Targets
# -------------

# Linux + Python 2.7: Builds, tests, uploads wheel and deploys (if needed)
build_linux_27:
  <<: *build_job
  variables: &linux_27_build_variables
    PYTHON_VERSION: "2.7"
    WHEEL_TAG: "py27"
  tags:
    - conda-linux

test_linux_27:
  <<: *test_job
  variables: *linux_27_build_variables
  # Fetch artifacts only from the matching build job.
  dependencies:
    - build_linux_27
  tags:
    - conda-linux

wheels_linux_27:
  <<: *wheels_job
  variables: *linux_27_build_variables
  dependencies:
    - build_linux_27
  tags:
    - conda-linux

deploy_linux_27:
  <<: *deploy_job
  variables: *linux_27_build_variables
  dependencies:
    - build_linux_27
  tags:
    - conda-linux
# Linux + Python 3.5: Builds, tests and uploads wheel
build_linux_35:
  <<: *build_job
  variables: &linux_35_build_variables
    PYTHON_VERSION: "3.5"
    WHEEL_TAG: "py3"
  tags:
    - conda-linux

test_linux_35:
  <<: *test_job
  variables: *linux_35_build_variables
  dependencies:
    - build_linux_35
  tags:
    - conda-linux

wheels_linux_35:
  <<: *wheels_job
  variables: *linux_35_build_variables
  dependencies:
    - build_linux_35
  tags:
    - conda-linux

# This is the single variant that also publishes the documentation.
docs_linux_35:
  <<: *docs_job
  variables: *linux_35_build_variables
  dependencies:
    - build_linux_35
  tags:
    - conda-linux
# Linux + Python 3.6: Builds and tests
build_linux_36:
  <<: *build_job
  variables: &linux_36_build_variables
    PYTHON_VERSION: "3.6"
    WHEEL_TAG: "py3"
  tags:
    - conda-linux

test_linux_36:
  <<: *test_job
  variables: *linux_36_build_variables
  dependencies:
    - build_linux_36
  tags:
    - conda-linux
# Mac OSX + Python 2.7: Builds and tests
build_macosx_27:
  <<: *build_job
  variables: &macosx_27_build_variables
    PYTHON_VERSION: "2.7"
    WHEEL_TAG: "py27"
  tags:
    - conda-macosx

test_macosx_27:
  <<: *test_job
  variables: *macosx_27_build_variables
  dependencies:
    - build_macosx_27
  tags:
    - conda-macosx
# Mac OSX + Python 3.5: Builds and tests
build_macosx_35:
  <<: *build_job
  variables: &macosx_35_build_variables
    PYTHON_VERSION: "3.5"
    WHEEL_TAG: "py3"
  tags:
    - conda-macosx

test_macosx_35:
  <<: *test_job
  variables: *macosx_35_build_variables
  dependencies:
    - build_macosx_35
  tags:
    - conda-macosx
# Mac OSX + Python 3.6: Builds and tests
build_macosx_36:
  <<: *build_job
  variables: &macosx_36_build_variables
    PYTHON_VERSION: "3.6"
    WHEEL_TAG: "py3"
  tags:
    - conda-macosx

test_macosx_36:
  <<: *test_job
  variables: *macosx_36_build_variables
  dependencies:
    - build_macosx_36
  tags:
    - conda-macosx
This diff is collapsed.
# Packaging metadata and requirement files shipped with the sdist
include README.rst buildout.cfg COPYING version.txt requirements.txt
# Documentation sources and static assets
recursive-include doc *.py *.rst *.ico *.png
# Pre-generated protocol file lists used by the database at runtime
recursive-include bob/db/uvad/lists *.lst
.. vim: set fileencoding=utf-8 :
.. Tue Nov 7 16:30:33 CET 2017
.. image:: https://img.shields.io/badge/docs-stable-yellow.svg
:target: http://beatubulatest.lab.idiap.ch/private/docs/bob/bob.db.uvad/stable/index.html
.. image:: https://img.shields.io/badge/docs-latest-orange.svg
:target: http://beatubulatest.lab.idiap.ch/private/docs/bob/bob.db.uvad/master/index.html
.. image:: https://gitlab.idiap.ch/bob/bob.db.uvad/badges/master/build.svg
:target: https://gitlab.idiap.ch/bob/bob.db.uvad/commits/master
.. image:: https://gitlab.idiap.ch/bob/bob.db.uvad/badges/master/coverage.svg
:target: https://gitlab.idiap.ch/bob/bob.db.uvad/commits/master
.. image:: https://img.shields.io/badge/gitlab-project-0000c0.svg
:target: https://gitlab.idiap.ch/bob/bob.db.uvad
.. image:: https://img.shields.io/pypi/v/bob.db.uvad.svg
:target: https://pypi.python.org/pypi/bob.db.uvad
=================================
UVAD Database Access in Bob
=================================
This package is part of the signal-processing and machine learning toolbox
Bob_. It provides an interface to the Unicamp Video-Attack Database
(`UVAD`_). The original data files need to be downloaded separately.
If you use this database, please cite the following publication::
@ARTICLE{7017526,
author={Pinto, A. and Robson Schwartz, W. and Pedrini, H. and De Rezende Rocha, A.},
journal={Information Forensics and Security, IEEE Transactions on},
title={Using Visual Rhythms for Detecting Video-Based Facial Spoof Attacks},
year={2015},
month={May},
volume={10},
number={5},
pages={1025-1038},
keywords={Authentication;Biometrics (access control);Databases;Face;Feature extraction;Histograms;Noise;Unicamp Video-Attack Database;Video-based Face Spoofing;Video-based face spoofing;Visual Rhythm, Video-based Attacks;impersonation detection in facial biometric systems;unicamp video-attack database;video-based attacks;visual rhythm},
doi={10.1109/TIFS.2015.2395139},
ISSN={1556-6013},}
Installation
------------
Complete Bob's `installation`_ instructions. Then, to install this package,
run::
$ conda install bob.db.uvad
Contact
-------
For questions or reporting issues to this software package, contact our
development `mailing list`_.
.. Place your references here:
.. _bob: https://www.idiap.ch/software/bob
.. _installation: https://www.idiap.ch/software/bob/install
.. _mailing list: https://www.idiap.ch/software/bob/discuss
.. _uvad: http://ieeexplore.ieee.org/abstract/document/7017526/
# see https://docs.python.org/3/library/pkgutil.html
from pkgutil import extend_path

# Declare a pkgutil-style namespace package so that separately installed
# bob.* distributions can all contribute modules under this package.
__path__ = extend_path(__path__, __name__)
# see https://docs.python.org/3/library/pkgutil.html
from pkgutil import extend_path

# Declare a pkgutil-style namespace package so that separately installed
# bob.db.* distributions can all contribute modules under this package.
__path__ = extend_path(__path__, __name__)
from .query import Database, File
def __appropriate__(*args):
"""Says object was actually declared here, and not in the import module.
Fixing sphinx warnings of not being able to find classes, when path is
shortened. Parameters:
*args: An iterable of objects to modify
Resolves `Sphinx referencing issues
<https://github.com/sphinx-doc/sphinx/issues/3048>`
"""
for obj in args:
obj.__module__ = __name__
# Re-home the public classes so Sphinx documents them under this package
# instead of under the private ``.query`` submodule they come from.
__appropriate__(
    Database,
    File,
)
def get_config():
    """Returns a string containing the configuration information.

    Delegates to :py:func:`bob.extension.get_config` for this package.
    """
    from bob.extension import get_config as _get_config
    return _get_config(__name__)
# gets sphinx autodoc done right - don't remove it
# (exports every public name so autodoc picks up the re-homed classes)
__all__ = [_ for _ in dir() if not _.startswith('_')]
#!/usr/bin/env python
# Bob configuration file exposing a ready-made ``database`` object.
from bob.db.uvad import Database

# Root folder holding the original UVAD data files; replace the
# placeholder with the real path on your system.
uvad_directory = "[UVAD_DIRECTORY]"

# Database instance pointing at the original data location.
database = Database(
    original_directory=uvad_directory,
)
"""UVAD - a mobile face presentation attack database with real-world
variations
"""
import os
import sys
import pkg_resources
from bob.db.base.driver import Interface as BaseInterface
from bob.io.base import create_directories_safe
def dumplist(args):
    """Dumps lists of files based on your criteria"""
    from .query import Database
    db = Database()

    # Query the database using the user-provided filters.
    selection = db.objects(
        purposes=args.purpose,
        groups=args.group,
    )

    # During self-tests the listing is swallowed instead of printed.
    if args.selftest:
        from bob.db.base.utils import null
        output = null()
    else:
        output = sys.stdout

    for sample in selection:
        output.write('%s\n' % sample.make_path(
            directory=args.directory, extension=args.extension))

    return 0
def checkfiles(args):
    """Checks existence of files based on your criteria"""
    from .query import Database
    db = Database()
    r = db.objects()

    # go through all files, collect the ones missing from the filesystem
    # (the original also accumulated the existing files in an unused
    # ``good`` list; that dead code is removed here)
    bad = [f for f in r
           if not os.path.exists(f.make_path(args.directory, args.extension))]

    # report; self-tests discard the output instead of printing it
    output = sys.stdout
    if args.selftest:
        from bob.db.base.utils import null
        output = null()

    if bad:
        for f in bad:
            output.write('Cannot find file "%s"\n' %
                         f.make_path(args.directory, args.extension))
        output.write('%d files (out of %d) were not found at "%s"\n' %
                     (len(bad), len(r), args.directory))

    return 0
def convert_filelist(outfolder, group, real_files, attack_files,
                     prepend='release_1'):
    """Converts original UVAD list files into Bob file-list format.

    Reads every path listed in the input files, joins ``prepend`` in front
    of it and writes ``for_real.lst`` and ``for_attack.lst`` under
    ``outfolder/group``.  Attack entries additionally carry an attack-type
    column derived from the sample path.

    Parameters:

      outfolder (str): Root folder where the converted lists are written.
      group (str): Sub-folder name for the lists (e.g. ``train``, ``dev``).
      real_files (list): Paths of input lists with real-access samples.
      attack_files (list): Paths of input lists with attack samples.
      prepend (str): Prefix joined onto every sample path.
    """
    outpaths = [
        os.path.join(outfolder, group, 'for_real.lst'),
        os.path.join(outfolder, group, 'for_attack.lst'),
    ]
    create_directories_safe(os.path.dirname(outpaths[0]))

    def _load(paths):
        # Read and prefix all sample paths; ``with`` guarantees the input
        # files are closed (the original left them to the garbage collector).
        samples = []
        for path in paths:
            with open(path, 'rt') as listfile:
                samples.extend(os.path.join(prepend, sample)
                               for sample in listfile.read().split())
        return samples

    real_samples = _load(real_files)
    attack_samples = _load(attack_files)

    with open(outpaths[0], 'w') as wrf, \
            open(outpaths[1], 'w') as waf:
        for sample_path in real_samples:
            wrf.write('{} {}\n'.format(sample_path, 'NA'))
        for sample_path in attack_samples:
            # assumes path components 1-3 (after the ``prepend`` component)
            # encode the attack type -- TODO confirm against the data layout
            attack_type = '/'.join(sample_path.split('/')[1:4])
            waf.write('{} {} {}\n'.format(sample_path, 'NA', attack_type))
def create(args):
    """Creates the file-lists to be used in Bob based on original file lists.

    Reads the protocol definitions shipped with the original UVAD release
    under ``args.root_dir`` and writes Bob-style ``for_real.lst`` /
    ``for_attack.lst`` files below ``args.output_dir`` for every experiment.
    """
    root_dir = args.root_dir
    output_dir = args.output_dir

    def create_lists(files, outfolder, root_dir):
        # Absolutize the input list paths, then convert each group.
        # NOTE: mutates ``files`` in place -- callers must pass a fresh dict.
        for group in files:
            for real in files[group]:
                files[group][real] = [os.path.join(root_dir, f)
                                      for f in files[group][real]]
        for group in files:
            convert_filelist(outfolder, group, files[group]['real'],
                             files[group]['attack'])

    # experiment 1
    files = {
        'train': {
            'real': ['real_sony_canon_kodac_train.txt'],
            'attack': [
                'attack_sony_canon_kodac_allcameras_monitors123_train.txt'],
        },
        'dev': {
            'real': ['real_nikon_olympus_panasonic_test.txt'],
            'attack': [
                'attack_nikon_olympus_panasonic_allcameras_monitors4567_test'
                '.txt'],
        },
    }
    outfolder = os.path.join(output_dir, 'experiment_1')
    create_lists(
        files, outfolder,
        os.path.join(root_dir, 'release_1/protocols/experiment_1/'))

    # experiment 2
    for i in range(1, 10):
        # Bug fix: the dict is rebuilt inside the loop.  ``create_lists``
        # mutates it in place, so the original code (one shared dict) joined
        # the root folder onto already-joined paths from the 2nd iteration on.
        files = {
            'train': {
                'real': ['real_train.txt'],
                'attack': ['attack_train.txt'],
            },
            'dev': {
                'real': ['real_test.txt'],
                'attack': ['attack_test.txt'],
            },
        }
        outfolder = os.path.join(output_dir, 'experiment_2_{}'.format(i))
        create_lists(
            files, outfolder,
            os.path.join(root_dir,
                         'release_1/protocols/experiment_2/{}'.format(i)))

    # experiment 3
    # Bug fix: materialize the pairs.  On Python 3 ``zip`` returns a one-shot
    # iterator which supports neither ``reversed()`` nor re-iteration, so the
    # original crashed (and would have been exhausted after the first camera).
    mylist = list(zip(('1', '2'), ('123', '456')))
    for camera in ('canon', 'kodac', 'nikon', 'olympus', 'panasonic', 'sony'):
        for (ti1, ti2), (ei1, ei2) in zip(mylist, reversed(mylist)):
            files = {
                'train': {
                    'real': ['real_{}_{}.txt'.format(camera, ti1)],
                    'attack': ['attack_{}_allcameras_monitors{}.txt'.format(
                        camera, ti2)],
                },
                'dev': {
                    'real': ['real_{}_{}.txt'.format(camera, ei1)],
                    'attack': ['attack_{}_allcameras_monitors{}.txt'.format(
                        camera, ei2)],
                },
            }
            outfolder = os.path.join(
                output_dir, 'experiment_3_{}_{}'.format(camera, ti1))
            create_lists(
                files, outfolder,
                os.path.join(root_dir,
                             'release_1/protocols/experiment_3/'))
class Interface(BaseInterface):
    """Bob database-driver interface for the UVAD package.

    Registers the ``dumplist``, ``checkfiles`` and ``create`` sub-commands
    with the ``bob_dbmanage.py`` command-line tool.
    """

    def name(self):
        # Short database name; also used to derive the distribution name.
        return 'uvad'

    def version(self):
        # Version of the installed ``bob.db.uvad`` distribution.
        return pkg_resources.require('bob.db.%s' % self.name())[0].version

    def files(self):
        # This driver manages no standalone data files.
        return ()

    def type(self):
        # File-list ("text") based database, not SQLite-backed.
        return 'text'

    def add_commands(self, parser):
        # Wires the sub-commands of this driver into the given parser.
        from . import __doc__ as docs

        subparsers = self.setup_parser(parser,
                                       "UVAD database", docs)

        import argparse

        # the "dumplist" action
        parser = subparsers.add_parser('dumplist', help=dumplist.__doc__)
        parser.add_argument(
            '-d', '--directory', default='',
            help="if given, this path will be prepended to every entry "
            "returned.")
        parser.add_argument(
            '-e', '--extension', default='',
            help="if given, this extension will be appended to every entry "
            "returned.")
        # NOTE(review): 'enroll'/'probe' look copied from a verification
        # template; a PAD database usually exposes real/attack purposes --
        # confirm against the .query module.
        parser.add_argument(
            '-u', '--purpose', help="if given, this value will limit the "
            "output files to those designed for the given purposes.",
            choices=('enroll', 'probe', ''))
        parser.add_argument(
            '-g', '--group',
            help="if given, this value will limit the output files to those "
            "belonging to a particular protocolar group.",
            choices=('dev', 'eval', 'world', ''))
        parser.add_argument('--self-test', dest="selftest",
                            action='store_true', help=argparse.SUPPRESS)
        parser.set_defaults(func=dumplist)  # action

        # the "checkfiles" action
        parser = subparsers.add_parser('checkfiles', help=checkfiles.__doc__)
        # NOTE(review): --list-directory is required here but never read by
        # checkfiles() above -- verify whether it can be dropped or relaxed.
        parser.add_argument(
            '-l', '--list-directory', required=True,
            help="The directory which contains the file lists.")
        parser.add_argument(
            '-d', '--directory', dest="directory", default='',
            help="if given, this path will be prepended to every entry "
            "returned.")
        parser.add_argument(
            '-e', '--extension', dest="extension", default='',
            help="if given, this extension will be appended to every entry "
            "returned.")
        parser.add_argument('--self-test', dest="selftest",
                            action='store_true', help=argparse.SUPPRESS)
        parser.set_defaults(func=checkfiles)  # action

        # the "create" action
        parser = subparsers.add_parser('create', help=create.__doc__)
        parser.add_argument(
            '-d', '--root-dir',
            help='The directory where the original database is.')
        # Default output: the ``lists`` folder shipped inside this package.
        default_output = pkg_resources.resource_filename(__name__, 'lists')
        parser.add_argument(
            '-o', '--output-dir', default=default_output,
            help='The directory where the new list files will be saved into.')
        parser.set_defaults(func=create)  # action
This source diff could not be displayed because it is too large. You can view the blob instead.
release_1/real/nikon/DSCN0808.MOV NA
release_1/real/nikon/DSCN0809.MOV NA
release_1/real/nikon/DSCN0810.MOV NA
release_1/real/nikon/DSCN0811.MOV NA
release_1/real/nikon/DSCN0812.MOV NA
release_1/real/nikon/DSCN0813.MOV NA
release_1/real/nikon/DSCN0814.MOV NA
release_1/real/nikon/DSCN0815.MOV NA
release_1/real/nikon/DSCN0816.MOV NA
release_1/real/nikon/DSCN0817.MOV NA
release_1/real/nikon/DSCN0818.MOV NA
release_1/real/nikon/DSCN0819.MOV NA
release_1/real/nikon/DSCN0820.MOV NA
release_1/real/nikon/DSCN0821.MOV NA
release_1/real/nikon/DSCN0822.MOV NA
release_1/real/nikon/DSCN0823.MOV NA
release_1/real/nikon/DSCN0824.MOV NA
release_1/real/nikon/DSCN0825.MOV NA
release_1/real/nikon/DSCN0826.MOV NA
release_1/real/nikon/DSCN0827.MOV NA
release_1/real/olympus/P3200001.MP4 NA
release_1/real/olympus/P3200003.MP4 NA
release_1/real/olympus/P3200004.MP4 NA
release_1/real/olympus/P3200005.MP4 NA
release_1/real/olympus/P3200006.MP4 NA
release_1/real/olympus/P3200007.MP4 NA
release_1/real/olympus/P3200008.MP4 NA
release_1/real/olympus/P3200009.MP4 NA
release_1/real/olympus/P3200010.MP4 NA
release_1/real/olympus/P3200011.MP4 NA
release_1/real/olympus/P3200012.MP4 NA
release_1/real/olympus/P3200013.MP4 NA
release_1/real/olympus/P3210015.MP4 NA
release_1/real/olympus/P3210016.MP4 NA
release_1/real/olympus/P3210017.MP4 NA
release_1/real/olympus/P3210018.MP4 NA
release_1/real/olympus/P3210019.MP4 NA
release_1/real/olympus/P3210020.MP4 NA
release_1/real/olympus/P3210021.MP4 NA
release_1/real/olympus/P3210022.MP4 NA
release_1/real/panasonic/P1040576.MOV NA
release_1/real/panasonic/P1040577.MOV NA
release_1/real/panasonic/P1040578.MOV NA
release_1/real/panasonic/P1040579.MOV NA
release_1/real/panasonic/P1040580.MOV NA
release_1/real/panasonic/P1040582.MOV NA
release_1/real/panasonic/P1040583.MOV NA
release_1/real/panasonic/P1040584.MOV NA
release_1/real/panasonic/P1040585.MOV NA
release_1/real/panasonic/P1040586.MOV NA
release_1/real/panasonic/P1040587.MOV NA
release_1/real/panasonic/P1040588.MOV NA
release_1/real/panasonic/P1040590.MOV NA
release_1/real/panasonic/P1040591.MOV NA
release_1/real/panasonic/P1040619.MOV NA
release_1/real/panasonic/P1040621.MOV NA
release_1/real/panasonic/P1040622.MOV NA
release_1/real/panasonic/P1040624.MOV NA
release_1/real/panasonic/P1040625.MOV NA