diff --git a/advanced/databases/putvein/5.json b/advanced/databases/putvein/5.json new file mode 100644 index 0000000000000000000000000000000000000000..efcc0c79df8f074e6978a8ff90202c5b14923734 --- /dev/null +++ b/advanced/databases/putvein/5.json @@ -0,0 +1,1007 @@ +{ + "description": "The PUT Vein Database", + "root_folder": "/idiap/resource/database/PUT_Vein_Dataset", + "protocols": [ + { + "name": "palm-L_1", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "L_1", + "kind": "palm", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "L_1", + "kind": "palm", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "L_1", + "kind": "palm", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "L_1", + "kind": "palm", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "L_1", + "kind": "palm", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "palm-L_4", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "L_4", + "kind": "palm", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "L_4", + "kind": "palm", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "L_4", + "kind": "palm", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "L_4", + "kind": "palm", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "L_4", + "kind": "palm", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "palm-R_1", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "R_1", + "kind": "palm", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_1", + "kind": "palm", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "R_1", + "kind": "palm", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_1", + "kind": "palm", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "R_1", + "kind": "palm", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "palm-R_4", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "R_4", + "kind": "palm", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_4", + "kind": "palm", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "R_4", + "kind": "palm", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_4", + "kind": "palm", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "R_4", + "kind": "palm", + "group": "eval", + 
"purpose": "probe" + } + } + } + }, + { + "name": "palm-RL_1", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "RL_1", + "kind": "palm", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "RL_1", + "kind": "palm", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "RL_1", + "kind": "palm", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "RL_1", + "kind": "palm", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "RL_1", + "kind": "palm", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "palm-RL_4", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "RL_4", + "kind": "palm", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "RL_4", + "kind": "palm", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "RL_4", + "kind": "palm", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "RL_4", + "kind": "palm", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "RL_4", + "kind": "palm", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "palm-LR_1", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "LR_1", + "kind": "palm", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "LR_1", + "kind": "palm", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "LR_1", + "kind": "palm", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "LR_1", + "kind": "palm", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "LR_1", + "kind": "palm", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "palm-LR_4", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "LR_4", + "kind": "palm", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "LR_4", + "kind": "palm", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "LR_4", + "kind": "palm", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "LR_4", + "kind": "palm", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "LR_4", + "kind": "palm", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "palm-R_BEAT_1", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_1", + "kind": "palm", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": 
"R_BEAT_1", + "kind": "palm", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_1", + "kind": "palm", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_BEAT_1", + "kind": "palm", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_1", + "kind": "palm", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "palm-R_BEAT_4", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_4", + "kind": "palm", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_BEAT_4", + "kind": "palm", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_4", + "kind": "palm", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_BEAT_4", + "kind": "palm", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_4", + "kind": "palm", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "wrist-L_1", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "L_1", + "kind": "wrist", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "L_1", + "kind": "wrist", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "L_1", + "kind": "wrist", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "L_1", + "kind": "wrist", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "L_1", + "kind": "wrist", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "wrist-L_4", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "L_4", + "kind": "wrist", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "L_4", + "kind": "wrist", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "L_4", + "kind": "wrist", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "L_4", + "kind": "wrist", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "L_4", + "kind": "wrist", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "wrist-R_1", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "R_1", + "kind": "wrist", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_1", + "kind": "wrist", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "R_1", + "kind": "wrist", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + 
"protocol": "R_1", + "kind": "wrist", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "R_1", + "kind": "wrist", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "wrist-R_4", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "R_4", + "kind": "wrist", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_4", + "kind": "wrist", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "R_4", + "kind": "wrist", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_4", + "kind": "wrist", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "R_4", + "kind": "wrist", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "wrist-RL_1", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "RL_1", + "kind": "wrist", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "RL_1", + "kind": "wrist", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "RL_1", + "kind": "wrist", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "RL_1", + "kind": "wrist", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "RL_1", + "kind": "wrist", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "wrist-RL_4", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "RL_4", + "kind": "wrist", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "RL_4", + "kind": "wrist", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "RL_4", + "kind": "wrist", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "RL_4", + "kind": "wrist", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "RL_4", + "kind": "wrist", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "wrist-LR_1", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "LR_1", + "kind": "wrist", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "LR_1", + "kind": "wrist", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "LR_1", + "kind": "wrist", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "LR_1", + "kind": "wrist", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "LR_1", + "kind": "wrist", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "wrist-LR_4", + "template": "advanced_vein_recognition/1", 
+ "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "LR_4", + "kind": "wrist", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "LR_4", + "kind": "wrist", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "LR_4", + "kind": "wrist", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "LR_4", + "kind": "wrist", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "LR_4", + "kind": "wrist", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "wrist-R_BEAT_1", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_1", + "kind": "wrist", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_BEAT_1", + "kind": "wrist", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_1", + "kind": "wrist", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_BEAT_1", + "kind": "wrist", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_1", + "kind": "wrist", + "group": "eval", + "purpose": "probe" + } + } + } + }, + { + "name": "wrist-R_BEAT_4", + "template": "advanced_vein_recognition/1", + "views": { + "train": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_4", + "kind": "wrist", + "group": "train" + } + }, + "dev_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_BEAT_4", + "kind": "wrist", + "group": "dev", + "purpose": "enroll" + } + }, + "dev_probes": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_4", + "kind": "wrist", + "group": "dev", + "purpose": "probe" + } + }, + "eval_templates": { + "view": "TemplateView", + "parameters": { + "protocol": "R_BEAT_4", + "kind": "wrist", + "group": "eval", + "purpose": "enroll" + } + }, + "eval_probes": { + "view": "View", + "parameters": { + "protocol": "R_BEAT_4", + "kind": "wrist", + "group": "eval", + "purpose": "probe" + } + } + } + } + ], + "schema_version": 2 +} \ No newline at end of file diff --git a/advanced/databases/putvein/5.py b/advanced/databases/putvein/5.py new file mode 100644 index 0000000000000000000000000000000000000000..e7e81522b3a13794211a8814b9cc7b504123d0ed --- /dev/null +++ b/advanced/databases/putvein/5.py @@ -0,0 +1,222 @@ +############################################################################### +# # +# Copyright (c) 2018 Idiap Research Institute, http://www.idiap.ch/ # +# Contact: beat.support@idiap.ch # +# # +# This file is part of the beat.examples module of the BEAT platform. # +# # +# Commercial License Usage # +# Licensees holding valid commercial BEAT licenses may use this file in # +# accordance with the terms contained in a written agreement between you # +# and Idiap. For further information contact tto@idiap.ch # +# # +# Alternatively, this file may be used under the terms of the GNU Affero # +# Public License version 3 as published by the Free Software and appearing # +# in the file LICENSE.AGPL included in the packaging of this file. 
#
+# The BEAT platform is distributed in the hope that it will be useful, but    #
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY  #
+# or FITNESS FOR A PARTICULAR PURPOSE.                                        #
+#                                                                             #
+# You should have received a copy of the GNU Affero Public License along      #
+# with the BEAT platform. If not, see http://www.gnu.org/licenses/.           #
+#                                                                             #
+###############################################################################
+
+import os
+import numpy as np
+from collections import namedtuple
+
+from beat.backend.python.database import View as BaseView
+
+import bob.db.putvein
+import bob.io.base
+import bob.ip.color
+
+
+#----------------------------------------------------------
+
+
+class View(BaseView):
+    """Outputs:
+        - image: "{{ system_user.username }}/array_2d_uint8/1"
+        - client_id: "{{ system_user.username }}/uint64/1"
+
+    Several "image" are associated with a given "client_id".
+
+    --------- --------- --------- --------- --------- ---------
+    | image | | image | | image | | image | | image | | image |
+    --------- --------- --------- --------- --------- ---------
+    ----------------------------- -----------------------------
+    |         client_id         | |         client_id         |
+    ----------------------------- -----------------------------
+    """
+
+    def index(self, root_folder, parameters):
+        Entry = namedtuple('Entry', ['client_id', 'image'])
+
+        # Open the database and load the objects to provide via the outputs
+        db = bob.db.putvein.Database()
+
+        objs = sorted(db.objects(protocol=parameters['protocol'],
+                                 purposes=parameters.get('purpose', None),
+                                 groups=[parameters['group']],
+                                 kinds=[parameters['kind']]),
+                      key=lambda x: x.client_id)
+
+        return [ Entry(x.client_id, x.make_path(root_folder))
+                 for x in objs ]
+
+
+    def get(self, output, index):
+        obj = self.objs[index]
+
+        if output == 'client_id':
+            return {
+                'value': np.uint64(obj.client_id)
+            }
+
+        elif output == 'image':
+            """
+            The image returned by ``bob.db.putvein`` is RGB, with shape
+            (3, 768, 1024). This method converts the image to grayscale
+            (shape (768, 1024)) and then rotates it by 270 degrees, so that
+            the images can be used with ``bob.bio.vein`` algorithms designed
+            for the ``bob.db.biowave_v1`` database.
+            Output image dimensions: (1024, 768).
+            """
+            color_image = bob.io.base.load(obj.image)
+            grayscale_image = bob.ip.color.rgb_to_gray(color_image)
+            grayscale_image = np.rot90(grayscale_image, k=3)
+
+            return {
+                'value': grayscale_image
+            }
+
+
+#----------------------------------------------------------
+
+
+class TemplateView(BaseView):
+    """Outputs:
+        - image: "{{ system_user.username }}/array_2d_uint8/1"
+        - model_id: "{{ system_user.username }}/text/1"
+        - client_id: "{{ system_user.username }}/uint64/1"
+
+    Several "image" are associated with a given "model_id".
+    Several "model_id" are associated with a given "client_id".
+
+    --------------- --------------- --------------- --------------- --------------- ---------------
+    |    image    | |    image    | |    image    | |    image    | |    image    | |    image    |
+    --------------- --------------- --------------- --------------- --------------- ---------------
+    ----------------------------------------------- -----------------------------------------------
+    |                   model_id                  | |                   model_id                  |
+    ----------------------------------------------- -----------------------------------------------
+    -----------------------------------------------------------------------------------------------
+    |                                           client_id                                         |
+    -----------------------------------------------------------------------------------------------
+
+    Note: for this particular database, there is only one "image"
+    per "model_id".
+    """
+
+    def index(self, root_folder, parameters):
+        Entry = namedtuple('Entry', ['client_id', 'model_id', 'image'])
+
+        # Open the database and load the objects to provide via the outputs
+        db = bob.db.putvein.Database()
+
+        model_ids = db.model_ids(protocol=parameters['protocol'],
+                                 groups=[parameters['group']],
+                                 kinds=[parameters['kind']])
+
+        entries = []
+
+        for model_id in model_ids:
+            objs = db.objects(protocol=parameters['protocol'],
+                              purposes=parameters.get('purpose', None),
+                              groups=[parameters['group']],
+                              kinds=[parameters['kind']],
+                              model_ids=[model_id])
+
+            entries.extend([ Entry(x.client_id, model_id, x.make_path(root_folder))
+                             for x in objs ])
+
+        return sorted(entries, key=lambda x: (x.client_id, x.model_id))
+
+
+    def get(self, output, index):
+        obj = self.objs[index]
+
+        if output == 'client_id':
+            return {
+                'value': np.uint64(obj.client_id)
+            }
+
+        elif output == 'model_id':
+            return {
+                'text': str(obj.model_id)
+            }
+
+        elif output == 'image':
+            """
+            The image returned by ``bob.db.putvein`` is RGB, with shape
+            (3, 768, 1024). This method converts the image to grayscale
+            (shape (768, 1024)) and then rotates it by 270 degrees, so that
+            the images can be used with ``bob.bio.vein`` algorithms designed
+            for the ``bob.db.biowave_v1`` database.
+            Output image dimensions: (1024, 768).
+            """
+            color_image = bob.io.base.load(obj.image)
+            grayscale_image = bob.ip.color.rgb_to_gray(color_image)
+            grayscale_image = np.rot90(grayscale_image, k=3)
+
+            return {
+                'value': grayscale_image
+            }
+
+
+#----------------------------------------------------------
+
+
+def setup_tests():
+    # Install a mock for bob.io.base.load, so the tests below do not need
+    # the actual image files
+    def mock_load(filename):
+        return np.zeros((3, 10, 20), dtype=np.uint8)
+
+    bob.io.base.load = mock_load
+
+
+#----------------------------------------------------------
+
+
+# Test the behavior of the views (on fake data)
+if __name__ == '__main__':
+
+    setup_tests()
+
+    view = View()
+    view.objs = view.index(
+        root_folder='',
+        parameters=dict(
+            protocol = 'LR_4',
+            kind = 'wrist',
+            group = 'dev',
+            purpose = 'probe',
+        )
+    )
+    view.get('client_id', 0)
+    view.get('image', 0)
+
+
+    view = TemplateView()
+    view.objs = view.index(
+        root_folder='',
+        parameters=dict(
+            protocol = 'LR_4',
+            kind = 'wrist',
+            group = 'dev',
+            purpose = 'enroll',
+        )
+    )
+    view.get('client_id', 0)
+    view.get('model_id', 0)
+    view.get('image', 0)
diff --git a/advanced/databases/putvein/5.rst b/advanced/databases/putvein/5.rst
new file mode 100644
index 0000000000000000000000000000000000000000..76867d0f2412a972fdcfa3a673e231bfd09db4f8
--- /dev/null
+++ b/advanced/databases/putvein/5.rst
@@ -0,0 +1 @@
+The PUT Vein Database
\ No newline at end of file
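
Since the JSON declaration above repeats the same five-view structure (train, dev/eval templates, dev/eval probes) for each of its twenty protocol entries, a quick structural check can catch copy-and-paste mistakes before the declaration is used. The snippet below is a minimal sketch, assuming the JSON document has been saved locally as 5.json; the file name and the check itself are illustrative and not part of the BEAT tooling.

import json

# Load a local copy of the declaration (assumed to be saved as "5.json")
with open("5.json") as f:
    declaration = json.load(f)

for protocol in declaration["protocols"]:
    for set_name, definition in protocol["views"].items():
        # Enrollment sets are backed by TemplateView, all other sets by View
        expected = "TemplateView" if set_name.endswith("_templates") else "View"
        assert definition["view"] == expected, (protocol["name"], set_name)

        # Every view is parametrized by a protocol, a kind and a group
        parameters = definition["parameters"]
        assert {"protocol", "kind", "group"} <= set(parameters), (protocol["name"], set_name)

        # Dev/eval sets additionally declare a purpose (enroll or probe)
        if set_name != "train":
            assert parameters["purpose"] in ("enroll", "probe"), (protocol["name"], set_name)

print("checked %d protocols" % len(declaration["protocols"]))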