#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

###################################################################################
#                                                                                 #
# Copyright (c) 2019 Idiap Research Institute, http://www.idiap.ch/               #
# Contact: beat.support@idiap.ch                                                  #
#                                                                                 #
# Redistribution and use in source and binary forms, with or without              #
# modification, are permitted provided that the following conditions are met:     #
#                                                                                 #
# 1. Redistributions of source code must retain the above copyright notice, this  #
# list of conditions and the following disclaimer.                                #
#                                                                                 #
# 2. Redistributions in binary form must reproduce the above copyright notice,    #
# this list of conditions and the following disclaimer in the documentation       #
# and/or other materials provided with the distribution.                          #
#                                                                                 #
# 3. Neither the name of the copyright holder nor the names of its contributors   #
# may be used to endorse or promote products derived from this software without   #
# specific prior written permission.                                              #
#                                                                                 #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND #
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED   #
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE          #
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE    #
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL      #
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR      #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER      #
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,   #
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE   #
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.            #
#                                                                                 #
###################################################################################

# Tests for experiment execution within Docker containers

import os
import shutil
import subprocess  # nosec

import nose.tools
import pkg_resources

from beat.core.database import Database

from ..dock import Host
from ..execution import DockerExecutor
from . import DOCKER_NETWORK_TEST_ENABLED
from . import network_name
from . import prefix_folder
from .test_execution import BaseExecutionMixIn
from .utils import DOCKER_TEST_IMAGES
from .utils import cleanup
from .utils import skipif
from .utils import slow

BUILDER_CONTAINER_NAME = "docker.idiap.ch/beat/beat.env.builder/beat.env.cxxdev"
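# Fully qualified builder image; the tag is looked up in the DOCKER_TEST_IMAGES map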
BUILDER_IMAGE = (
    BUILDER_CONTAINER_NAME + ":" + DOCKER_TEST_IMAGES[BUILDER_CONTAINER_NAME]
)

# ----------------------------------------------------------


class TestDockerExecution(BaseExecutionMixIn):
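    """Experiment execution tests run inside Docker containers.

    The common scenarios come from BaseExecutionMixIn and are executed through
    a DockerExecutor; the methods below add the Docker-specific cases
    (networking, port ranges, database sharing, C++ algorithms).
    """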
    @classmethod
    def setup_class(cls):
        cls.host = Host(raise_on_errors=False)

    @classmethod
    def teardown_class(cls):
        cls.host.teardown()
        cleanup()

    def teardown(self):
        self.host.teardown()

    def create_executor(
        self,
        prefix,
        configuration,
        tmp_prefix,
        dataformat_cache,
        database_cache,
        algorithm_cache,
    ):
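        """Return a DockerExecutor bound to the class-wide Docker host.

        Setting DOCKER_TEST_DEBUG=True in the environment enables the
        executor's debug mode.
        """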
        executor = DockerExecutor(
            self.host,
            prefix,
            configuration,
            tmp_prefix,
            dataformat_cache,
            database_cache,
            algorithm_cache,
        )

        executor.debug = os.environ.get("DOCKER_TEST_DEBUG", False) == "True"
        return executor

    def build_algorithm(self, algorithm):
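        """Compile the C++ sources of the given algorithm in the builder image.

        The build runs as the current user and the resulting shared libraries
        are synchronized back into the test prefix.
        """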
        test_folder = os.path.abspath(os.path.join(os.path.dirname(__file__)))
        scripts_folder = os.path.abspath(os.path.join(test_folder, "scripts"))
        sources_folder = os.path.abspath(os.path.join(test_folder, algorithm))
        cmd = ["/build.sh"]
        builder_container = self.host.create_container(BUILDER_IMAGE, cmd)
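        # Mount the build script and the algorithm sources into the container
        # and run the build as the current user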
        builder_container.add_volume("%s/build.sh" % scripts_folder, "/build.sh")
        builder_container.add_volume(sources_folder, "/sources", read_only=False)
        builder_container.uid = os.getuid()
        builder_container.set_workdir("/sources")
        builder_container.set_entrypoint("bash")

        self.host.start(builder_container)
        status = self.host.wait(builder_container)
        if status != 0:
            print(self.host.logs(builder_container))

        self.host.rm(builder_container)
        nose.tools.eq_(status, 0)

        # Update the tmp prefix with the latest content
        subprocess.check_call(  # nosec
            [
                "rsync",
                "-arz",
                '--exclude="*"',
                '--include="*.so"',
                os.path.join(test_folder, "prefix"),
                prefix_folder,
            ]
        )

    @slow
    @skipif(not DOCKER_NETWORK_TEST_ENABLED, "Network test disabled")
    def test_custom_network(self):
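        """The experiment must run on the Docker network given by network_name."""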
        result = self.execute(
            "user/user/integers_addition/1/integers_addition",
            [{"sum": 495, "nb": 9}],
            network_name=network_name,
        )

        nose.tools.assert_is_none(result)

    @slow
    def test_custom_port_range(self):
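        """The experiment must run when restricted to the port range 50000:50100."""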
        result = self.execute(
            "user/user/integers_addition/1/integers_addition",
            [{"sum": 495, "nb": 9}],
            port_range="50000:50100",
        )

        nose.tools.assert_is_none(result)

    @slow
    def test_databases_sharing(self):
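        """A file written in the database root folder on the host must be
        visible to the experiment when databases are shared."""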
        prefix = pkg_resources.resource_filename("beat.backend.python.test", "prefix")
        db = Database(prefix, "integers_db/1")
        nose.tools.assert_true(db.valid, db.errors)

        data_sharing_path = db.data["root_folder"]

        os.makedirs(data_sharing_path, exist_ok=True)

        offset = 12
        with open(os.path.join(data_sharing_path, "datafile.txt"), "wt") as data_file:
            data_file.write("{}".format(offset))

        result = self.execute(
            "user/user/integers_addition/1/shared_datasets",
            [{"sum": 495 + 9 * offset, "nb": 9}],
            share_databases=True,
        )

        shutil.rmtree(data_sharing_path)
        nose.tools.assert_is_none(result)

    @slow
    def test_single_1_prepare_error(self):
        result = self.execute("errors/user/single/1/prepare_error", [None])

        nose.tools.eq_(result["status"], 1)
        nose.tools.eq_(
            result["user_error"], "'Could not prepare algorithm (returned False)'"
        )

    @slow
    def test_single_1_setup_error(self):
        result = self.execute("errors/user/single/1/setup_error", [None])

        nose.tools.eq_(result["status"], 1)
        nose.tools.eq_(
            result["user_error"], "'Could not setup algorithm (returned False)'"
        )

    # NOT COMPATIBLE YET WITH THE NEW API
    # @slow
    # def test_cxx_double_1(self):
    #     assert self.execute('user/user/double/1/cxx_double', [{'out_data': 42}]) is None

    @slow
    def test_cxx_double_legacy(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_echo_legacy")

        result = self.execute(
            "user/user/double/1/cxx_double_legacy",
            [{"out_data": 42}],
            datasets_uid=datasets_uid,
        )
        nose.tools.assert_is_none(result)

    @slow
    def test_cxx_double_sequential(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_echo_sequential")

        nose.tools.assert_is_none(
            self.execute(
                "user/user/double/1/cxx_double_sequential",
                [{"out_data": 42}],
                datasets_uid=datasets_uid,
            )
        )

    @slow
    def test_cxx_double_offsetting_sequential(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_offsetter_sequential")

        nose.tools.assert_is_none(
            self.execute(
                "user/user/double/1/cxx_offsetting_sequential",
                [{"out_data": 77}],
                datasets_uid=datasets_uid,
            )
        )

    @slow
    def test_cxx_double_autonomous(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_echo_autonomous")

        nose.tools.assert_is_none(
            self.execute(
                "user/user/double/1/cxx_double_autonomous",
                [{"out_data": 42}],
                datasets_uid=datasets_uid,
            )
        )

    @slow
    def test_cxx_analyzer_error(self):
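        """Analyzers cannot be implemented in C++; running one must fail."""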
        datasets_uid = os.getuid()
        needed_algorithms = [
            "cxx_integers_echo_sequential",
            "cxx_integers_echo_analyzer",
        ]

        for algorithm in needed_algorithms:
            self.build_algorithm("prefix/algorithms/user/%s" % algorithm)

        result = self.execute(
            "errors/user/double/1/cxx_analyzer_error",
            [{"out_data": 42}],
            datasets_uid=datasets_uid,
        )

        nose.tools.eq_(result["status"], 255)
        nose.tools.assert_true(
            "[sys] C++ algorithm can't be analyzers" in result["stderr"]
        )

    @slow
    def test_read_only_error(self):
        result = self.execute("errors/user/single/1/write_error", [{"out_data": 42}])

        nose.tools.eq_(result["status"], 1)
        nose.tools.assert_true("Read-only" in result["user_error"])

    @slow
    def test_user_mismatch_error(self):
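        """Running with datasets_uid=0 must fail with a user creation error."""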
        result = self.execute(
            "errors/user/single/1/write_error", [{"out_data": 42}], datasets_uid=0
        )

        nose.tools.eq_(result["status"], 1)
        nose.tools.assert_true("Failed to create an user" in result["stderr"])

    @slow
    def test_loop_mix_db_env_error(self):
        with nose.tools.assert_raises(RuntimeError) as context:
            self.execute(
                "errors/user/loop/1/loop_mix_db_env", [None],
            )

        nose.tools.assert_true(
            "are not all providing an environment" in context.exception.args[0]
        )

    @slow
    def test_loop_two_db_env_error(self):
        with nose.tools.assert_raises(RuntimeError) as context:
            self.execute(
                "errors/user/loop/1/loop_two_db_environments", [None],
            )

        nose.tools.assert_true(
            "are requesting different environments" in context.exception.args[0]
        )

    @slow
    def test_single_not_existing_db_env_error(self):
        with nose.tools.assert_raises(RuntimeError) as context:
            self.execute(
                "errors/user/single/1/not_existing_db_env", [None],
            )

        nose.tools.assert_true(
            "not found - available environments are" in context.exception.args[0]
        )

    @slow
    def test_loop_1_two_db_env(self):
        nose.tools.assert_is_none(
            self.execute(
                "user/user/loop/1/loop_two_db_env",
                [{"sum": 135, "nb": 9}, {"sum": 9, "nb": 9}],
            )
        )