Commit cb2c4f2a authored by André Anjos

Separate docker-based tests

parent c612d7ee
@@ -192,71 +192,3 @@ def test_push_and_delete():
    # now push the new object and then delete it remotely
    nose.tools.eq_(call('push', obj, prefix=tmp_prefix), 0)
    nose.tools.eq_(call('rm', '--remote', obj, prefix=tmp_prefix), 0)


@slow
@nose.tools.with_setup(teardown=cleanup)
def test_execute_algorithm_using_database():
    instructions = os.path.join(os.path.dirname(__file__), 'instructions/algo_using_database.json')
    with open(instructions) as instruction_file:
        instructions_data = json.load(instruction_file)
    input_field = instructions_data['inputs']['in']
    index_db_from_instructions(input_field)
    nose.tools.eq_(call('execute', instructions, cache=tmp_prefix), 0)


@slow
@nose.tools.with_setup(teardown=cleanup)
def test_execute_algorithm_using_cached_files():
    instructions_dir = os.path.join(os.path.dirname(__file__), 'instructions')
    cache_dir = os.path.join(tmp_prefix, 'ab', 'cd', 'ef')
    os.makedirs(cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data.checksum'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index.checksum'), cache_dir)
    instructions = os.path.join(instructions_dir, 'algo_using_cached_files.json')
    nose.tools.eq_(call('execute', instructions, cache=tmp_prefix), 0)


@slow
@nose.tools.with_setup(teardown=cleanup)
def test_execute_algorithm_using_database_and_cached_files():
    instructions_dir = os.path.join(os.path.dirname(__file__), 'instructions')
    cache_dir = os.path.join(tmp_prefix, 'ab', 'cd', 'ef')
    os.makedirs(cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data.checksum'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index.checksum'), cache_dir)
    instructions = os.path.join(instructions_dir, 'algo_using_database_and_cached_files.json')
    with open(instructions) as instruction_file:
        instructions_data = json.load(instruction_file)
    input_field = instructions_data['inputs']['in1']
    index_db_from_instructions(input_field)
    nose.tools.eq_(call('execute', instructions, cache=tmp_prefix), 0)


@slow
@nose.tools.with_setup(teardown=cleanup)
def test_execute_analyzer():
    instructions_dir = os.path.join(os.path.dirname(__file__), 'instructions')
    cache_dir = os.path.join(tmp_prefix, 'ab', 'cd', 'ef')
    os.makedirs(cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data.checksum'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index.checksum'), cache_dir)
    instructions = os.path.join(instructions_dir, 'analyzer.json')
    nose.tools.eq_(call('execute', instructions, cache=tmp_prefix), 0)
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
###############################################################################
# #
# Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/ #
# Contact: beat.support@idiap.ch #
# #
# This file is part of the beat.cmdline module of the BEAT platform. #
# #
# Commercial License Usage #
# Licensees holding valid commercial BEAT licenses may use this file in #
# accordance with the terms contained in a written agreement between you #
# and Idiap. For further information contact tto@idiap.ch #
# #
# Alternatively, this file may be used under the terms of the GNU Affero #
# Public License version 3 as published by the Free Software and appearing #
# in the file LICENSE.AGPL included in the packaging of this file. #
# The BEAT platform is distributed in the hope that it will be useful, but #
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY #
# or FITNESS FOR A PARTICULAR PURPOSE. #
# #
# You should have received a copy of the GNU Affero Public License along #
# with the BEAT platform. If not, see http://www.gnu.org/licenses/. #
# #
###############################################################################
# Docker-based tests for algorithms and analyzers

import os
import json
import shutil

import nose.tools
import pkg_resources

from beat.core.test.utils import slow, cleanup, skipif

# `index_db_from_instructions` is assumed to live next to `call` in
# test_algorithms; adjust this import if the helper is defined elsewhere.
from .test_algorithms import call, index_db_from_instructions

from . import tmp_prefix

instructions_dir = pkg_resources.resource_filename(__name__, 'instructions')

@slow
@nose.tools.with_setup(teardown=cleanup)
def test_execute_algorithm_using_database():
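    # Index the database declared for the 'in' input, then run the algorithm
    # described by the instructions file against the temporary cache.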
    instructions = os.path.join(instructions_dir, 'algo_using_database.json')
    with open(instructions) as instruction_file:
        instructions_data = json.load(instruction_file)
    input_field = instructions_data['inputs']['in']
    index_db_from_instructions(input_field)
    nose.tools.eq_(call('execute', instructions, cache=tmp_prefix), 0)

@slow
@nose.tools.with_setup(teardown=cleanup)
def test_execute_algorithm_using_cached_files():
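    # Populate the temporary cache with the pre-generated data/index files
    # (and their checksums) that the algorithm expects to read.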
    cache_dir = os.path.join(tmp_prefix, 'ab', 'cd', 'ef')
    os.makedirs(cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data.checksum'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index.checksum'), cache_dir)
    instructions = os.path.join(instructions_dir, 'algo_using_cached_files.json')
    nose.tools.eq_(call('execute', instructions, cache=tmp_prefix), 0)

@slow
@nose.tools.with_setup(teardown=cleanup)
def test_execute_algorithm_using_database_and_cached_files():
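    # Combine both input types: copy the cached files into the temporary cache
    # and index the database declared for the 'in1' input before executing.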
    cache_dir = os.path.join(tmp_prefix, 'ab', 'cd', 'ef')
    os.makedirs(cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data.checksum'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index.checksum'), cache_dir)
    instructions = os.path.join(instructions_dir, 'algo_using_database_and_cached_files.json')
    with open(instructions) as instruction_file:
        instructions_data = json.load(instruction_file)
    input_field = instructions_data['inputs']['in1']
    index_db_from_instructions(input_field)
    nose.tools.eq_(call('execute', instructions, cache=tmp_prefix), 0)

@slow
@nose.tools.with_setup(teardown=cleanup)
def test_execute_analyzer():
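    # Run an analyzer over the pre-populated cache files.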
    cache_dir = os.path.join(tmp_prefix, 'ab', 'cd', 'ef')
    os.makedirs(cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.data.checksum'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index'), cache_dir)
    shutil.copy(os.path.join(instructions_dir, '0123456789AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.0.9.index.checksum'), cache_dir)
    instructions = os.path.join(instructions_dir, 'analyzer.json')
    nose.tools.eq_(call('execute', instructions, cache=tmp_prefix), 0)