#!/usr/bin/env python
# vim: set fileencoding=utf-8 :

###################################################################################
#                                                                                 #
# Copyright (c) 2019 Idiap Research Institute, http://www.idiap.ch/               #
# Contact: beat.support@idiap.ch                                                  #
#                                                                                 #
# Redistribution and use in source and binary forms, with or without              #
# modification, are permitted provided that the following conditions are met:     #
#                                                                                 #
# 1. Redistributions of source code must retain the above copyright notice, this  #
# list of conditions and the following disclaimer.                                #
#                                                                                 #
# 2. Redistributions in binary form must reproduce the above copyright notice,    #
# this list of conditions and the following disclaimer in the documentation       #
# and/or other materials provided with the distribution.                          #
#                                                                                 #
# 3. Neither the name of the copyright holder nor the names of its contributors   #
# may be used to endorse or promote products derived from this software without   #
# specific prior written permission.                                              #
#                                                                                 #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND #
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED   #
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE          #
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE    #
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL      #
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR      #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER      #
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,   #
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE   #
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.            #
#                                                                                 #
###################################################################################


# Tests for experiment execution within Docker containers

import os
import subprocess  # nosec

import nose.tools

from beat.core.database import Database

from ..dock import Host
from ..execution import DockerExecutor
from . import DOCKER_NETWORK_TEST_ENABLED
from . import network_name
from . import prefix as test_prefix
from . import prefix_folder
from .test_execution import BaseExecutionMixIn
from .utils import DOCKER_TEST_IMAGES
from .utils import cleanup
from .utils import skipif
from .utils import slow

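# Docker image used to compile the C++ test algorithms (see build_algorithm below)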
BUILDER_CONTAINER_NAME = "docker.idiap.ch/beat/beat.env.builder/beat.env.cxxdev"
BUILDER_IMAGE = (
    BUILDER_CONTAINER_NAME + ":" + DOCKER_TEST_IMAGES[BUILDER_CONTAINER_NAME]
)

# ----------------------------------------------------------


def write_rawdata_for_database(database_name, raw_data):
    """Generate raw data for give database"""

    db = Database(test_prefix, database_name)
    nose.tools.assert_true(db.valid, db.errors)

    data_sharing_path = db.data["root_folder"]

    with open(os.path.join(data_sharing_path, "datafile.txt"), "wt") as data_file:
        data_file.write("{}".format(raw_data))


# ----------------------------------------------------------


class TestDockerExecution(BaseExecutionMixIn):
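    """Run the shared execution test suite through the DockerExecutor."""
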
    @classmethod
    def setup_class(cls):
        cls.host = Host(raise_on_errors=False)

    @classmethod
    def teardown_class(cls):
        cls.host.teardown()
        cleanup()

    def teardown(self):
        self.host.teardown()

    def create_executor(
        self,
        prefix,
        configuration,
        tmp_prefix,
        dataformat_cache,
        database_cache,
        algorithm_cache,
    ):
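        """Instantiate a DockerExecutor bound to the test's Docker host."""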
        executor = DockerExecutor(
            self.host,
            prefix,
            configuration,
            tmp_prefix,
            dataformat_cache,
            database_cache,
            algorithm_cache,
        )

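        # Enable the executor's debug mode when DOCKER_TEST_DEBUG is set to "True"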
        executor.debug = os.environ.get("DOCKER_TEST_DEBUG", False) == "True"
        return executor

    def build_algorithm(self, algorithm):
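        """Compile the given C++ algorithm inside the builder image.

        The resulting shared objects (*.so) are rsync'ed back into the test
        prefix so that the compiled algorithm can be used by the tests below.
        """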
        test_folder = os.path.abspath(os.path.join(os.path.dirname(__file__)))
        scripts_folder = os.path.abspath(os.path.join(test_folder, "scripts"))
        sources_folder = os.path.abspath(os.path.join(test_folder, algorithm))
        cmd = ["/build.sh"]
        builder_container = self.host.create_container(BUILDER_IMAGE, cmd)
        builder_container.add_volume("%s/build.sh" % scripts_folder, "/build.sh")
        builder_container.add_volume(sources_folder, "/sources", read_only=False)
        builder_container.uid = os.getuid()
        builder_container.set_workdir("/sources")
        builder_container.set_entrypoint("bash")

        self.host.start(builder_container)
        status = self.host.wait(builder_container)
        if status != 0:
            print(self.host.logs(builder_container))

        self.host.rm(builder_container)
        nose.tools.eq_(status, 0)

        # Update the tmp prefix with the latest content
        subprocess.check_call(  # nosec
            [
                "rsync",
                "-arz",
                '--exclude="*"',
                '--include="*.so"',
                os.path.join(test_folder, "prefix"),
                prefix_folder,
            ]
        )

    @slow
    @skipif(not DOCKER_NETWORK_TEST_ENABLED, "Network test disabled")
    def test_custom_network(self):
        result = self.execute(
            "user/user/integers_addition/1/integers_addition",
            [{"sum": 495, "nb": 9}],
            network_name=network_name,
        )

        nose.tools.assert_is_none(result)

    @slow
    def test_custom_port_range(self):
        result = self.execute(
            "user/user/integers_addition/1/integers_addition",
            [{"sum": 495, "nb": 9}],
            port_range="50000:50100",
        )

        nose.tools.assert_is_none(result)

    @slow
    def test_database_rawdata_access(self):
        offset = 12

        write_rawdata_for_database("simple_rawdata_access/1", offset)

        result = self.execute(
            "user/user/single/1/single_rawdata_access", [{"out_data": 42 + offset}]
        )

        nose.tools.assert_is_none(result)

    @slow
    def test_database_no_rawdata_access(self):
        write_rawdata_for_database("simple/1", "should not be loaded")

        result = self.execute("errors/user/single/1/single_no_rawdata_access", [None])

        nose.tools.eq_(result["status"], 1)
        nose.tools.assert_true("FileNotFoundError" in result["user_error"])

    @slow
    def test_single_1_prepare_error(self):
        result = self.execute("errors/user/single/1/prepare_error", [None])

        nose.tools.eq_(result["status"], 1)
        nose.tools.eq_(
            result["user_error"], "'Could not prepare algorithm (returned False)'"
        )

    @slow
    def test_single_1_setup_error(self):
        result = self.execute("errors/user/single/1/setup_error", [None])

        nose.tools.eq_(result["status"], 1)
        nose.tools.eq_(
            result["user_error"], "'Could not setup algorithm (returned False)'"
        )

    # NOT COMPATIBLE YET WITH THE NEW API
    # @slow
    # def test_cxx_double_1(self):
    #     assert self.execute('user/user/double/1/cxx_double', [{'out_data': 42}]) is None

    @slow
    def test_cxx_double_legacy(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_echo_legacy")

        result = self.execute(
            "user/user/double/1/cxx_double_legacy",
            [{"out_data": 42}],
            datasets_uid=datasets_uid,
        )
        nose.tools.assert_is_none(result)

    @slow
    def test_cxx_double_sequential(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_echo_sequential")

        nose.tools.assert_is_none(
            self.execute(
                "user/user/double/1/cxx_double_sequential",
                [{"out_data": 42}],
                datasets_uid=datasets_uid,
            )
        )

    @slow
    def test_cxx_double_offsetting_sequential(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_offsetter_sequential")

        nose.tools.assert_is_none(
            self.execute(
                "user/user/double/1/cxx_offsetting_sequential",
                [{"out_data": 77}],
                datasets_uid=datasets_uid,
            )
        )

    @slow
    def test_cxx_double_autonomous(self):
        datasets_uid = os.getuid()
        self.build_algorithm("prefix/algorithms/user/cxx_integers_echo_autonomous")

        nose.tools.assert_is_none(
            self.execute(
                "user/user/double/1/cxx_double_autonomous",
                [{"out_data": 42}],
                datasets_uid=datasets_uid,
            )
        )

    @slow
    def test_cxx_analyzer_error(self):
        datasets_uid = os.getuid()
        needed_algorithms = [
            "cxx_integers_echo_sequential",
            "cxx_integers_echo_analyzer",
        ]

        for algorithm in needed_algorithms:
            self.build_algorithm("prefix/algorithms/user/%s" % algorithm)

        result = self.execute(
            "errors/user/double/1/cxx_analyzer_error",
            [{"out_data": 42}],
            datasets_uid=datasets_uid,
        )

        nose.tools.eq_(result["status"], 255)
        nose.tools.assert_true(
            "[sys] C++ algorithm can't be analyzers" in result["stderr"]
        )

    @slow
    def test_read_only_error(self):
        result = self.execute("errors/user/single/1/write_error", [{"out_data": 42}])

        nose.tools.eq_(result["status"], 1)
        nose.tools.assert_true("Read-only" in result["user_error"])

    @slow
    def test_user_mismatch_error(self):
        result = self.execute(
            "errors/user/single/1/write_error", [{"out_data": 42}], datasets_uid=0
        )

        nose.tools.eq_(result["status"], 1)
        nose.tools.assert_true("Failed to create an user" in result["stderr"])

    @slow
    def test_loop_mix_db_env_error(self):
        with nose.tools.assert_raises(RuntimeError) as context:
            self.execute(
                "errors/user/loop/1/loop_mix_db_env", [None],
            )

        nose.tools.assert_true(
            "are not all providing an environment" in context.exception.args[0]
        )

    @slow
    def test_loop_two_db_env_error(self):
        with nose.tools.assert_raises(RuntimeError) as context:
            self.execute(
                "errors/user/loop/1/loop_two_db_environments", [None],
            )

        nose.tools.assert_true(
            "are requesting different environments" in context.exception.args[0]
        )

    @slow
    def test_single_not_existing_db_env_error(self):
        with nose.tools.assert_raises(RuntimeError) as context:
            self.execute(
                "errors/user/single/1/not_existing_db_env", [None],
            )

        nose.tools.assert_true(
            "not found - available environments are" in context.exception.args[0]
        )

    @slow
    def test_loop_1_two_db_env(self):
        nose.tools.assert_is_none(
            self.execute(
                "user/user/loop/1/loop_two_db_env",
                [{"sum": 135, "nb": 9}, {"sum": 9, "nb": 9}],
            )
        )