From e4b9354cbf0c4f2c7e0e69b886aea796c13b275f Mon Sep 17 00:00:00 2001 From: Samuel Gaist <samuel.gaist@idiap.ch> Date: Wed, 27 Jan 2021 17:47:02 +0100 Subject: [PATCH] [advanced][databases][casme2] Add new version following V2 implementation --- advanced/databases/casme2/5.json | 527 +++++++++++++++++++++++++++++++ advanced/databases/casme2/5.py | 148 +++++++++ advanced/databases/casme2/5.rst | 1 + 3 files changed, 676 insertions(+) create mode 100644 advanced/databases/casme2/5.json create mode 100644 advanced/databases/casme2/5.py create mode 100644 advanced/databases/casme2/5.rst diff --git a/advanced/databases/casme2/5.json b/advanced/databases/casme2/5.json new file mode 100644 index 0000000..346966e --- /dev/null +++ b/advanced/databases/casme2/5.json @@ -0,0 +1,527 @@ +{ + "description": "CASME 2 Spotaneous Subtle Expression Database", + "root_folder": "/idiap/resource/database/CASME2", + "protocols": [ + { + "name": "fold_1", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_1" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_1" + } + } + } + }, + { + "name": "fold_2", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_2" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_2" + } + } + } + }, + { + "name": "fold_3", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_3" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_3" + } + } + } + }, + { + "name": "fold_4", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_4" + } + }, + 
"test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_4" + } + } + } + }, + { + "name": "fold_5", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_5" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_5" + } + } + } + }, + { + "name": "fold_6", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_6" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_6" + } + } + } + }, + { + "name": "fold_7", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_7" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_7" + } + } + } + }, + { + "name": "fold_8", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_8" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_8" + } + } + } + }, + { + "name": "fold_9", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_9" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_9" + } + } + } + }, + { + "name": "fold_10", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_10" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_10" + } + } + } + }, + { + "name": "fold_11", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + 
"parameters": { + "group": "train", + "protocol": "fold_11" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_11" + } + } + } + }, + { + "name": "fold_12", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_12" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_12" + } + } + } + }, + { + "name": "fold_13", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_13" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_13" + } + } + } + }, + { + "name": "fold_14", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_14" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_14" + } + } + } + }, + { + "name": "fold_15", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_15" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_15" + } + } + } + }, + { + "name": "fold_16", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_16" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_16" + } + } + } + }, + { + "name": "fold_17", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_17" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_17" + } + } + } + }, + { + "name": "fold_18", + "template": 
"simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_18" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_18" + } + } + } + }, + { + "name": "fold_19", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_19" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_19" + } + } + } + }, + { + "name": "fold_20", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_20" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_20" + } + } + } + }, + { + "name": "fold_21", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_21" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_21" + } + } + } + }, + { + "name": "fold_22", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_22" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_22" + } + } + } + }, + { + "name": "fold_23", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_23" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_23" + } + } + } + }, + { + "name": "fold_24", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_24" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", 
+ "protocol": "fold_24" + } + } + } + }, + { + "name": "fold_25", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_25" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_25" + } + } + } + }, + { + "name": "fold_26", + "template": "simple_expression_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "group": "train", + "protocol": "fold_26" + } + }, + "test": { + "view": "View", + "parameters": { + "group": "test", + "protocol": "fold_26" + } + } + } + } + ], + "schema_version": 2 +} \ No newline at end of file diff --git a/advanced/databases/casme2/5.py b/advanced/databases/casme2/5.py new file mode 100644 index 0000000..8f2ef31 --- /dev/null +++ b/advanced/databases/casme2/5.py @@ -0,0 +1,148 @@ +############################################################################### +# # +# Copyright (c) 2018 Idiap Research Institute, http://www.idiap.ch/ # +# Contact: beat.support@idiap.ch # +# # +# This file is part of the beat.examples module of the BEAT platform. # +# # +# Commercial License Usage # +# Licensees holding valid commercial BEAT licenses may use this file in # +# accordance with the terms contained in a written agreement between you # +# and Idiap. For further information contact tto@idiap.ch # +# # +# Alternatively, this file may be used under the terms of the GNU Affero # +# Public License version 3 as published by the Free Software and appearing # +# in the file LICENSE.AGPL included in the packaging of this file. # +# The BEAT platform is distributed in the hope that it will be useful, but # +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY # +# or FITNESS FOR A PARTICULAR PURPOSE. # +# # +# You should have received a copy of the GNU Affero Public License along # +# with the BEAT platform. If not, see http://www.gnu.org/licenses/. 
#                                                                             #
###############################################################################

import os
from collections import namedtuple

import numpy as np

from beat.backend.python.database import View as BaseView

import bob.io.base
import bob.io.image
import bob.db.casme2


# ----------------------------------------------------------


class View(BaseView):
    """Outputs:
        - image: "{{ system_user.username }}/array_4d_uint8/1"
        - file_id: "{{ system_user.username }}/uint64/1"
        - client_id: "{{ system_user.username }}/uint64/1"
        - emotion: "{{ system_user.username }}/text/1"

    One "file_id" is associated with a given "image".
    Several "image" are associated with a given "client_id".
    Several "client_id" are associated with a given "emotion".

    --------------- --------------- --------------- ---------------
    |    image    | |    image    | |    image    | |    image    |
    --------------- --------------- --------------- ---------------
    --------------- --------------- --------------- ---------------
    |   file_id   | |   file_id   | |   file_id   | |   file_id   |
    --------------- --------------- --------------- ---------------
    ------------------------------- -------------------------------
    |          client_id          | |          client_id          |
    ------------------------------- -------------------------------
    ---------------------------------------------------------------
    |                           emotion                           |
    ---------------------------------------------------------------
    """

    def index(self, root_folder, parameters):
        """Build the list of entries for the requested protocol/group.

        Each entry describes one video: its emotion label, client id,
        file id and the list of per-frame image paths.  The list is
        sorted by (emotion, client_id, id) so the ordering is
        deterministic across runs.
        """
        Entry = namedtuple('Entry', ['emotion', 'client_id', 'file_id', 'frames'])

        # Open the database and load the objects to provide via the outputs
        db = bob.db.casme2.Database()

        objs = sorted(db.objects(protocol=str(parameters['protocol']),
                                 groups=parameters['group']),
                      key=lambda x: (x.emotion, x.client_id, x.id))

        entries = []

        for obj in objs:
            # Frame paths are stored relative to the original database
            # location; remap them onto the configured root folder.
            frames = [str(os.path.join(obj.make_path(), x.filename)).replace(
                          '/idiap/resource/database/CASME2/Cropped', root_folder)
                      for x in obj.frames]

            entries.append(Entry(obj.emotion, obj.client_id, obj.id, frames))

        return entries

    def get(self, output, index):
        """Return the value of ``output`` for the entry at ``index``.

        Raises:
            ValueError: if ``output`` is not one of the declared outputs
                (the original implementation silently returned ``None``,
                which hid typos in output names).
        """
        obj = self.objs[index]

        if output == 'emotion':
            return {
                'value': obj.emotion
            }

        elif output == 'client_id':
            return {
                'value': np.uint64(obj.client_id)
            }

        elif output == 'file_id':
            return {
                'value': np.uint64(obj.file_id)
            }

        elif output == 'image':
            # Load the first frame to discover the image geometry, then
            # stack every frame of the video into one 4D uint8 array.
            # Using `(len(frames),) + frame.shape` avoids hard-coding the
            # number of per-frame dimensions.
            frame = bob.io.base.load(obj.frames[0])

            data = np.zeros(shape=(len(obj.frames),) + frame.shape,
                            dtype="uint8")
            data[0] = frame

            for i in range(1, len(obj.frames)):
                data[i] = bob.io.base.load(obj.frames[i])

            return {
                'value': data
            }

        # Fail loudly on unknown output names instead of returning None
        raise ValueError("Unknown output: {}".format(output))


# ----------------------------------------------------------


def setup_tests():
    """Install a mock image loader so the self-test below runs without
    access to the actual image files."""
    # NOTE: the argument is a frame *filename* (whatever is passed to
    # bob.io.base.load), not a root folder.
    def mock_load(filename):
        # np.zeros instead of np.ndarray: the latter returns
        # uninitialized memory, making the mock data nondeterministic.
        return np.zeros((3, 10, 20), dtype=np.uint8)

    bob.io.base.load = mock_load


# ----------------------------------------------------------


# Test the behavior of the views (on fake data)
if __name__ == '__main__':

    setup_tests()

    view = View()
    view.objs = view.index(
        root_folder='',
        parameters=dict(
            protocol='fold_1',
            group='train',
        )
    )
    view.get('emotion', 0)
    view.get('client_id', 0)
    view.get('file_id', 0)
    view.get('image', 0)