Commit f5654c09 authored by Philip ABBET

Add casme2/3 (api change: beat.backend.python v1.4.2)

parent dcb91034
###############################################################################
# #
# Copyright (c) 2017 Idiap Research Institute, http://www.idiap.ch/ #
# Contact: beat.support@idiap.ch #
# #
# This file is part of the beat.examples module of the BEAT platform. #
# #
# Commercial License Usage #
# Licensees holding valid commercial BEAT licenses may use this file in #
# accordance with the terms contained in a written agreement between you #
# and Idiap. For further information contact tto@idiap.ch #
# #
# Alternatively, this file may be used under the terms of the GNU Affero #
# Public License version 3 as published by the Free Software and appearing #
# in the file LICENSE.AGPL included in the packaging of this file. #
# The BEAT platform is distributed in the hope that it will be useful, but #
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY #
# or FITNESS FOR A PARTICULAR PURPOSE. #
# #
# You should have received a copy of the GNU Affero Public License along #
# with the BEAT platform. If not, see http://www.gnu.org/licenses/. #
# #
###############################################################################
import os
import numpy as np

import bob.io.base
import bob.io.image
import bob.db.casme2


#----------------------------------------------------------

def get_client_end_index(objs, client_id, client_start_index,
                         start_index, end_index):
    """Returns the index of the last object of the consecutive run sharing
    the given ``client_id``, starting at ``client_start_index``."""
    client_end_index = client_start_index

    while client_end_index + 1 <= end_index:
        obj = objs[client_end_index + 1 - start_index]

        if obj.client_id != client_id:
            return client_end_index

        client_end_index += 1

    return end_index
#----------------------------------------------------------

def get_emotion_end_index(objs, emotion, emotion_start_index,
                          start_index, end_index):
    """Returns the index of the last object of the consecutive run sharing
    the given ``emotion``, starting at ``emotion_start_index``."""
    emotion_end_index = emotion_start_index

    while emotion_end_index + 1 <= end_index:
        obj = objs[emotion_end_index + 1 - start_index]

        if obj.emotion != emotion:
            return emotion_end_index

        emotion_end_index += 1

    return end_index
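
# Illustrative example (hypothetical data, not part of the module): given
# objects whose emotions are ['happy', 'happy', 'sad'], with start_index=0
# and end_index=2, get_emotion_end_index(objs, 'happy', 0, 0, 2) returns 1,
# the index of the last object of the consecutive 'happy' run.
# get_client_end_index() behaves the same way for client ids.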
#----------------------------------------------------------

class View:
    """Outputs:

    image:     "{{ system_user.username }}/array_4d_uint8/1"
    file_id:   "{{ system_user.username }}/uint64/1"
    client_id: "{{ system_user.username }}/uint64/1"
    emotion:   "{{ system_user.username }}/text/1"

    One "file_id" is associated with a given "image".
    Several "image" are associated with a given "client_id".
    Several "client_id" are associated with a given "emotion".

    --------------- --------------- --------------- --------------- --------------- ---------------
    |    image    | |    image    | |    image    | |    image    | |    image    | |    image    |
    --------------- --------------- --------------- --------------- --------------- ---------------

    --------------- --------------- --------------- --------------- --------------- ---------------
    |   file_id   | |   file_id   | |   file_id   | |   file_id   | |   file_id   | |   file_id   |
    --------------- --------------- --------------- --------------- --------------- ---------------

    ----------------------------------------------- -----------------------------------------------
    |                  client_id                  | |                  client_id                  |
    ----------------------------------------------- -----------------------------------------------

    -----------------------------------------------------------------------------------------------
    |                                           emotion                                           |
    -----------------------------------------------------------------------------------------------
    """

    def setup(self, root_folder, outputs, parameters, force_start_index=None,
              force_end_index=None):

        # Initialisations
        self.root_folder = root_folder
        self.outputs = outputs
        self.parameters = parameters

        # Open the database and load the objects to provide via the outputs
        self.db = bob.db.casme2.Database()

        self.objs = sorted(self.db.objects(protocol=parameters['protocol'],
                                           groups=parameters['group']),
                           key=lambda x: (x.emotion, x.client_id, x.id))

        # Determine the range of indices that must be provided
        self.start_index = force_start_index if force_start_index is not None else 0
        self.end_index = force_end_index if force_end_index is not None else len(self.objs) - 1

        self.objs = self.objs[self.start_index : self.end_index + 1]

        self.next_index = self.start_index

        return True
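
    # Illustrative example (hypothetical values): with force_start_index=10
    # and force_end_index=19, setup() keeps self.objs[10:20] (10 objects) and
    # the view provides data for indices 10 through 19, starting at
    # next_index = 10.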

    def done(self, last_data_index):
        return last_data_index >= self.end_index

    def next(self):
        obj = self.objs[self.next_index - self.start_index]

        # Output: emotion (only provide data when the emotion changes)
        if self.outputs['emotion'].isConnected() and \
           self.outputs['emotion'].last_written_data_index < self.next_index:

            emotion_end_index = get_emotion_end_index(self.objs, obj.emotion,
                                                      self.next_index,
                                                      self.start_index,
                                                      self.end_index)

            self.outputs['emotion'].write(
                {
                    'text': obj.emotion
                },
                emotion_end_index
            )

        # Output: client_id (only provide data when the client_id changes)
        if self.outputs['client_id'].isConnected() and \
           self.outputs['client_id'].last_written_data_index < self.next_index:

            client_end_index = get_client_end_index(self.objs, obj.client_id,
                                                    self.next_index,
                                                    self.start_index,
                                                    self.end_index)

            self.outputs['client_id'].write(
                {
                    'value': np.uint64(obj.client_id)
                },
                client_end_index
            )

        # Output: file_id (provide data at each iteration)
        if self.outputs['file_id'].isConnected():
            self.outputs['file_id'].write(
                {
                    'value': np.uint64(obj.id)
                },
                self.next_index
            )

        # Output: image (provide data at each iteration)
        if self.outputs['image'].isConnected():
            # Load all the frames of the video into one 4D uint8 array
            # (one entry per frame)
            frames = obj.frames

            filename = str(os.path.join(obj.make_path(self.root_folder),
                                        frames[0].filename))
            frame = bob.io.base.load(filename)

            data = np.zeros(shape=(len(frames), frame.shape[0], frame.shape[1],
                                   frame.shape[2]), dtype="uint8")
            data[0] = frame

            for i in range(1, len(frames)):
                filename = str(os.path.join(obj.make_path(self.root_folder),
                                            frames[i].filename))
                data[i] = bob.io.base.load(filename)

            self.outputs['image'].write(
                {
                    'value': data
                },
                self.next_index
            )

        # Determine the next data index that must be provided
        self.next_index = 1 + min([x.last_written_data_index
                                   for x in self.outputs if x.isConnected()])
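
# Illustrative example (hypothetical state): since 'emotion' and 'client_id'
# are written once per run of identical values, their last_written_data_index
# can be ahead of the per-sample outputs. If 'emotion' was last written up to
# index 5 while 'file_id' and 'image' were last written at index 2, then
# next_index becomes 3 and the emotion block skips writing until index 6.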
#----------------------------------------------------------

def setup_tests():
    # Install a mock load function for the images, so that the tests do not
    # require the actual image files
    def mock_load(filename):
        return np.ndarray((3, 10, 20), dtype=np.uint8)

    bob.io.base.load = mock_load
#----------------------------------------------------------
# Test the behavior of the views (on fake data)
if __name__ == '__main__':

    setup_tests()

    from beat.backend.python.database import DatabaseTester

    DatabaseTester('View', View,
        [
            'emotion',
            'client_id',
            'file_id',
            'image',
        ],
        parameters=dict(
            protocol='fold_1',
            group='train',
        ),
        irregular_outputs=[
            'emotion',
            'client_id',
        ]
    )
.. Copyright (c) 2017 Idiap Research Institute, http://www.idiap.ch/ ..
.. Contact: beat.support@idiap.ch ..
.. ..
.. This file is part of the beat.examples module of the BEAT platform. ..
.. ..
.. Commercial License Usage ..
.. Licensees holding valid commercial BEAT licenses may use this file in ..
.. accordance with the terms contained in a written agreement between you ..
.. and Idiap. For further information contact tto@idiap.ch ..
.. ..
.. Alternatively, this file may be used under the terms of the GNU Affero ..
.. Public License version 3 as published by the Free Software and appearing ..
.. in the file LICENSE.AGPL included in the packaging of this file. ..
.. The BEAT platform is distributed in the hope that it will be useful, but ..
.. WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY ..
.. or FITNESS FOR A PARTICULAR PURPOSE. ..
.. ..
.. You should have received a copy of the GNU Affero Public License along ..
.. with the BEAT platform. If not, see http://www.gnu.org/licenses/. ..

-----------------------------------------------
CASME 2 Spontaneous Subtle Expression Database
-----------------------------------------------

Changelog
=========

* **Version 3**, 30/Oct/2017:

  - Port to beat.backend.python v1.4.2

* **Version 2**, 26/Jan/2016:

  - Port to Bob v2

* **Version 1**, 13/May/2015:

  - Initial release

Description
===========

The CASME II database (http://fu.psych.ac.cn/CASME/casme2-en.php) has the
following characteristics:

- The samples are spontaneous and dynamic micro-expressions. Baseline
  (usually neutral) frames are kept before and after each micro-expression,
  making it possible to evaluate different detection algorithms.

- The recordings have a high temporal resolution (200 fps) and a relatively
  high face resolution (280x340 pixels).

- Micro-expression labeling is based on the FACS investigator's guide and on
  the findings of Yan et al. (Yan et al., 2013), which differ from the
  traditional 6 categories used for ordinary facial expressions.

- The recordings have proper illumination, without lighting flickers and with
  reduced highlight regions on the face.

- Some types of facial expressions are difficult to elicit in a laboratory
  setting, so the samples are distributed unequally across categories: for
  example, there are 60 disgust samples but only 7 sadness samples. CASME II
  provides 5 classes of micro-expressions.
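
The view above exposes each video as a 4D ``uint8`` array (one entry per
frame), grouped by client and by emotion. The following is a minimal sketch
of how samples can be enumerated with the ``bob.db.casme2`` API used by the
view (assuming the package is installed; the protocol and group names are the
ones used in the view parameters)::

    import bob.db.casme2

    db = bob.db.casme2.Database()

    # Samples of the 'train' group of protocol 'fold_1', sorted the same
    # way as in the view: by emotion, then client, then file id
    objs = sorted(db.objects(protocol='fold_1', groups='train'),
                  key=lambda x: (x.emotion, x.client_id, x.id))

    for obj in objs:
        print(obj.emotion, obj.client_id, obj.id)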