#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

###################################################################################
#                                                                                 #
# Copyright (c) 2019 Idiap Research Institute, http://www.idiap.ch/               #
# Contact: beat.support@idiap.ch                                                  #
#                                                                                 #
# Redistribution and use in source and binary forms, with or without              #
# modification, are permitted provided that the following conditions are met:     #
#                                                                                 #
# 1. Redistributions of source code must retain the above copyright notice, this  #
# list of conditions and the following disclaimer.                                #
#                                                                                 #
# 2. Redistributions in binary form must reproduce the above copyright notice,    #
# this list of conditions and the following disclaimer in the documentation       #
# and/or other materials provided with the distribution.                          #
#                                                                                 #
# 3. Neither the name of the copyright holder nor the names of its contributors   #
# may be used to endorse or promote products derived from this software without   #
# specific prior written permission.                                              #
#                                                                                 #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND #
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED   #
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE          #
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE    #
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL      #
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR      #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER      #
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,   #
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE   #
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.            #
#                                                                                 #
###################################################################################


# Tests for experiment execution within Docker containers

import os
import subprocess  # nosec

import nose.tools

from beat.core.database import Database

from ..dock import Host
from ..execution import DockerExecutor
from . import DOCKER_NETWORK_TEST_ENABLED
from . import network_name
from . import prefix as test_prefix
from . import prefix_folder
from .test_execution import BaseExecutionMixIn
from .utils import DOCKER_TEST_IMAGES
from .utils import cleanup
from .utils import skipif
from .utils import slow

BUILDER_CONTAINER_NAME = "docker.idiap.ch/beat/beat.env.builder/beat.env.cxxdev"
BUILDER_IMAGE = (
    BUILDER_CONTAINER_NAME + ":" + DOCKER_TEST_IMAGES[BUILDER_CONTAINER_NAME]
)

# ----------------------------------------------------------


class TestDockerExecution(BaseExecutionMixIn):
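    """Docker-based experiment execution tests, running on top of BaseExecutionMixIn."""
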
    @classmethod
    def setup_class(cls):
        cls.host = Host(raise_on_errors=False)

    @classmethod
    def teardown_class(cls):
        cls.host.teardown()
        cleanup()

    def teardown(self):
        self.host.teardown()

    def create_executor(
        self,
        prefix,
        configuration,
        tmp_prefix,
        dataformat_cache,
        database_cache,
        algorithm_cache,
    ):
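        """Instantiate a DockerExecutor tied to the class-level Docker host."""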
        executor = DockerExecutor(
            self.host,
            prefix,
            configuration,
            tmp_prefix,
            dataformat_cache,
            database_cache,
            algorithm_cache,
        )

        # Setting DOCKER_TEST_DEBUG=True switches the executor to debug mode
        executor.debug = os.environ.get("DOCKER_TEST_DEBUG", False) == "True"
        return executor

    def build_algorithm(self, algorithm):
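        """Compile the given C++ algorithm inside the builder image and sync
        the resulting shared libraries into the temporary prefix."""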
        test_folder = os.path.abspath(os.path.join(os.path.dirname(__file__)))
        scripts_folder = os.path.abspath(os.path.join(test_folder, "scripts"))
        sources_folder = os.path.abspath(os.path.join(test_folder, algorithm))
        cmd = ["/build.sh"]
        builder_container = self.host.create_container(BUILDER_IMAGE, cmd)
        builder_container.add_volume("%s/build.sh" % scripts_folder, "/build.sh")
        builder_container.add_volume(sources_folder, "/sources", read_only=False)
        builder_container.uid = os.getuid()
        builder_container.set_workdir("/sources")
        builder_container.set_entrypoint("bash")

        self.host.start(builder_container)
        status = self.host.wait(builder_container)
        if status != 0:
            print(self.host.logs(builder_container))

        self.host.rm(builder_container)
        nose.tools.eq_(status, 0)

        # Update the tmp prefix with the latest content
        subprocess.check_call(  # nosec
            [
                "rsync",
                "-arz",
                '--exclude="*"',
                '--include="*.so"',
                os.path.join(test_folder, "prefix"),
                prefix_folder,
            ]
        )

    @slow
    @skipif(not DOCKER_NETWORK_TEST_ENABLED, "Network test disabled")
    def test_custom_network(self):
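        """The experiment should run on the dedicated Docker test network."""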
        result = self.execute(
            "user/user/integers_addition/1/integers_addition",
            [{"sum": 495, "nb": 9}],
            network_name=network_name,
        )

        nose.tools.assert_is_none(result)

    @slow
    def test_custom_port_range(self):
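        """The experiment should run with the custom port range 50000:50100."""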
        result = self.execute(
            "user/user/integers_addition/1/integers_addition",
            [{"sum": 495, "nb": 9}],
            port_range="50000:50100",
        )

        nose.tools.assert_is_none(result)

    @slow
    def test_databases_sharing(self):
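        """Share the database raw data folder with the container: the offset
        written to datafile.txt shifts each of the 9 integers, so the expected
        sum is 495 + 9 * offset."""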
        db = Database(test_prefix, "integers_db/1")
        nose.tools.assert_true(db.valid, db.errors)

        data_sharing_path = db.data["root_folder"]

        offset = 12
        with open(os.path.join(data_sharing_path, "datafile.txt"), "wt") as data_file:
            data_file.write("{}".format(offset))

        result = self.execute(
            "user/user/integers_addition/1/shared_datasets",
            [{"sum": 495 + 9 * offset, "nb": 9}],
            share_databases=True,
        )

        nose.tools.assert_is_none(result)

    @slow
    def test_single_1_prepare_error(self):
        result = self.execute("errors/user/single/1/prepare_error", [None])

        nose.tools.eq_(result["status"], 1)
        nose.tools.eq_(
            result["user_error"], "'Could not prepare algorithm (returned False)'"
        )

    @slow
    def test_single_1_setup_error(self):
        result = self.execute("errors/user/single/1/setup_error", [None])

        nose.tools.eq_(result["status"], 1)
        nose.tools.eq_(
            result["user_error"], "'Could not setup algorithm (returned False)'"
        )

    # NOT COMPATIBLE YET WITH THE NEW API
    # @slow
    # def test_cxx_double_1(self):
    #     assert self.execute('user/user/double/1/cxx_double', [{'out_data': 42}]) is None

    @slow
    def test_cxx_double_legacy(self):
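        """Build and run the legacy-API C++ integers echo algorithm."""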
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_echo_legacy")

        result = self.execute(
            "user/user/double/1/cxx_double_legacy",
            [{"out_data": 42}],
            datasets_uid=datasets_uid,
        )
        nose.tools.assert_is_none(result)

    @slow
    def test_cxx_double_sequential(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_echo_sequential")

        nose.tools.assert_is_none(
            self.execute(
                "user/user/double/1/cxx_double_sequential",
                [{"out_data": 42}],
                datasets_uid=datasets_uid,
            )
        )

    @slow
    def test_cxx_double_offsetting_sequential(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_offsetter_sequential")

        nose.tools.assert_is_none(
            self.execute(
                "user/user/double/1/cxx_offsetting_sequential",
                [{"out_data": 77}],
                datasets_uid=datasets_uid,
            )
        )

    @slow
    def test_cxx_double_autonomous(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_echo_autonomous")

        nose.tools.assert_is_none(
            self.execute(
                "user/user/double/1/cxx_double_autonomous",
                [{"out_data": 42}],
                datasets_uid=datasets_uid,
            )
        )

    @slow
    def test_cxx_analyzer_error(self):
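        """C++ algorithms cannot be used as analyzers; the execution must fail."""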
        datasets_uid = os.getuid()
        needed_algorithms = [
            "cxx_integers_echo_sequential",
            "cxx_integers_echo_analyzer",
        ]

        for algorithm in needed_algorithms:
            self.build_algorithm("prefix/algorithms/user/%s" % algorithm)

        result = self.execute(
            "errors/user/double/1/cxx_analyzer_error",
            [{"out_data": 42}],
            datasets_uid=datasets_uid,
        )

        nose.tools.eq_(result["status"], 255)
        nose.tools.assert_true(
            "[sys] C++ algorithm can't be analyzers" in result["stderr"]
        )

    @slow
    def test_read_only_error(self):
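        """The algorithm's write attempt must fail with a read-only error."""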
        result = self.execute("errors/user/single/1/write_error", [{"out_data": 42}])

        nose.tools.eq_(result["status"], 1)
        nose.tools.assert_true("Read-only" in result["user_error"])

    @slow
    def test_user_mismatch_error(self):
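        """Requesting datasets_uid=0 (root) must fail when creating the container user."""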
        result = self.execute(
            "errors/user/single/1/write_error", [{"out_data": 42}], datasets_uid=0
        )

        nose.tools.eq_(result["status"], 1)
        nose.tools.assert_true("Failed to create an user" in result["stderr"])

    @slow
    def test_loop_mix_db_env_error(self):
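        """Mixing databases with and without an environment must raise a RuntimeError."""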
        with nose.tools.assert_raises(RuntimeError) as context:
            self.execute(
                "errors/user/loop/1/loop_mix_db_env", [None],
            )

        nose.tools.assert_true(
            "are not all providing an environment" in context.exception.args[0]
        )

    @slow
    def test_loop_two_db_env_error(self):
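        """Databases requesting different environments must raise a RuntimeError."""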
        with nose.tools.assert_raises(RuntimeError) as context:
            self.execute(
                "errors/user/loop/1/loop_two_db_environments", [None],
            )

        nose.tools.assert_true(
            "are requesting different environments" in context.exception.args[0]
        )

    @slow
    def test_single_not_existing_db_env_error(self):
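        """Requesting an unknown database environment must raise a RuntimeError."""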
        with nose.tools.assert_raises(RuntimeError) as context:
            self.execute(
                "errors/user/single/1/not_existing_db_env", [None],
            )

        nose.tools.assert_true(
            "not found - available environments are" in context.exception.args[0]
        )

    @slow
    def test_loop_1_two_db_env(self):
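        """The two-database loop experiment should complete successfully."""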
        nose.tools.assert_is_none(
            self.execute(
                "user/user/loop/1/loop_two_db_env",
                [{"sum": 135, "nb": 9}, {"sum": 9, "nb": 9}],
            )
        )