Commit 101ea79c authored by André Anjos

Merge branch '76_implement_support_for_database_sharing' into 'master'

Add option to share the database with the algorithm container when running an experiment

Closes #76

See merge request !103
parents cbfb5f6f 78acb9d2
Pipeline #45316 passed with stages in 10 minutes and 28 seconds
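The change threads a new --share-databases flag from the command line down to the configuration handed to the Docker executor. For illustration only (assuming the standard beat console entry point and the experiments command group shown in this diff), running an experiment with sharing enabled would look like:

    beat experiments run --docker --share-databases user/user/integers_addition/1/shared_datasets

The flag defaults to off, so existing invocations keep their current behaviour.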
@@ -75,7 +75,14 @@ logger = logging.getLogger(__name__)
 def run_experiment(
-    configuration, name, force, use_docker, use_local, run_environment_path, quiet
+    configuration,
+    name,
+    force,
+    use_docker,
+    use_local,
+    run_environment_path,
+    quiet,
+    share_databases,
 ):
     """Run experiments locally"""
@@ -168,7 +175,7 @@ def run_experiment(
     database_cache = {}
     algorithm_cache = {}
     library_cache = {}
-    # from .test.utils import set_trace; set_trace()
+
     experiment = Experiment(
         configuration.path,
         name,
@@ -227,10 +234,12 @@ def run_experiment(
                 python_path=run_environment_path,
             )
         elif use_docker:
+            exp_configuration = value["configuration"]
+            exp_configuration["share_databases"] = share_databases
             executor = DockerExecutor(
                 host=host,
                 prefix=configuration.path,
-                data=value["configuration"],
+                data=exp_configuration,
                 cache=configuration.cache,
                 dataformat_cache=dataformat_cache,
                 database_cache=database_cache,
@@ -774,12 +783,20 @@ commands.initialise_asset_commands(experiments, CMD_LIST, ExperimentCommand)
     mutually_exclusive=["docker", "local"],
 )
 @click.option("--quiet", help="Be less verbose", is_flag=True)
+@click.option(
+    "--share-databases",
+    help="Share the database files with the algorithm container",
+    is_flag=True,
+    default=False,
+)
 @click.pass_context
 @raise_on_error
-def run(ctx, name, force, docker, local, environment, quiet):
+def run(ctx, name, force, docker, local, environment, quiet, share_databases):
     """ Runs an experiment locally"""
     config = ctx.meta.get("config")
-    return run_experiment(config, name, force, docker, local, environment, quiet)
+    return run_experiment(
+        config, name, force, docker, local, environment, quiet, share_databases
+    )


 @experiments.command()
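The command-line flag ends up as a share_databases key inside the experiment configuration passed to DockerExecutor; what the executor does with it is implemented in beat.core and is not part of this diff. Conceptually, sharing a database with the algorithm container means bind-mounting the database's root_folder into that container. A minimal sketch of the idea with the docker Python SDK, using hypothetical names (run_with_shared_database, the /beat/databases mount point) that are not the executor's actual code:

    import docker


    def run_with_shared_database(image, command, db_root_folder, share_databases):
        """Illustration only: bind-mount the database root folder read-only when sharing is enabled."""
        client = docker.from_env()
        volumes = {}
        if share_databases:
            # expose the raw database files at a fixed path inside the container
            volumes[db_root_folder] = {"bind": "/beat/databases", "mode": "ro"}
        return client.containers.run(image, command, volumes=volumes, remove=True)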
@@ -39,13 +39,14 @@
 import contextlib
 import os
 import shutil
-import subprocess  # nosec
 import sys
 import tempfile
 import urllib

 import pkg_resources

+from beat.core.test import initialize_db_root_folder
+from beat.core.test import sync_prefixes
 from beat.core.test import teardown_package as bc_teardown_package
 from beat.core.test import tmp_prefix  # noqa forward import
@@ -108,13 +109,18 @@ prefix = os.path.join(prefix_folder, "prefix")
 def setup_package():
     prefixes = [
-        pkg_resources.resource_filename("beat.backend.python.test", "prefix"),
-        pkg_resources.resource_filename("beat.core.test", "prefix"),
-        pkg_resources.resource_filename("beat.cmdline.test", "prefix"),
+        pkg_resources.resource_filename(f"beat.{resource}.test", "prefix")
+        for resource in ["backend.python", "core", "cmdline"]
     ]

-    for path in prefixes:
-        subprocess.check_call(["rsync", "-arz", path, prefix_folder])  # nosec
+    sync_prefixes(
+        prefixes, prefix_folder,
+    )
+
+    initialize_db_root_folder(
+        os.path.join(prefix_folder, "beat_cmdline_test"),
+        os.path.join(prefix, "databases"),
+    )


 def teardown_package():
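sync_prefixes and initialize_db_root_folder are imported from beat.core.test; their implementations are not part of this diff. Judging from the rsync loop they replace and from the call sites above, a plausible sketch (a hypothetical reconstruction, not the actual beat.core code) is:

    import glob
    import json
    import os
    import subprocess  # nosec


    def sync_prefixes(prefixes, target_folder):
        """Copy each test prefix into the shared prefix folder, as the removed rsync loop did."""
        for path in prefixes:
            subprocess.check_call(["rsync", "-arz", path, target_folder])  # nosec


    def initialize_db_root_folder(db_root_folder, databases_prefix):
        """Create the database root folder and point each database declaration at it."""
        os.makedirs(db_root_folder, exist_ok=True)
        # "root_folder" is the field the test below reads back via db.data["root_folder"];
        # the glob pattern for database declarations is an assumption.
        for declaration in glob.glob(
            os.path.join(databases_prefix, "**", "*.json"), recursive=True
        ):
            with open(declaration, "rt") as f:
                data = json.load(f)
            data["root_folder"] = db_root_folder
            with open(declaration, "wt") as f:
                json.dump(data, f, indent=4)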
@@ -227,3 +227,30 @@ class TestDockerExperimentRun(core.AssetBaseTest):
         nose.tools.eq_(exit_code, 1, outputs)
         exit_code, outputs = self.call("run", obj, "--docker", cache=tmp_prefix)
         nose.tools.eq_(exit_code, 1, outputs)
+
+    def run_database_sharing(self, share_databases, expected_output):
+        """Test that the database sharing works"""
+
+        db = Database(prefix, "integers_db/1")
+        nose.tools.assert_true(db.valid, db.errors)
+
+        data_sharing_path = db.data["root_folder"]
+
+        offset = 12
+        with open(os.path.join(data_sharing_path, "datafile.txt"), "wt") as data_file:
+            data_file.write("{}".format(offset))
+
+        obj = "user/user/integers_addition/1/shared_datasets"
+        args = ["run", obj, "--docker"]
+        if share_databases:
+            args.append("--share-databases")
+
+        exit_code, outputs = self.call(*args, cache=tmp_prefix)
+        nose.tools.eq_(exit_code, expected_output, outputs)
+
+    @slow
+    def test_run_database_sharing(self):
+        for share, result in [(False, 1), (True, 0)]:
+            yield self.run_database_sharing, share, result
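test_run_database_sharing relies on the nose test-generator idiom: each yielded (callable, *args) tuple is collected and run as its own test case, so the non-sharing scenario (expected exit code 1) and the sharing scenario (expected exit code 0) report as two separate results. A minimal, self-contained illustration of the idiom, unrelated to the BEAT code:

    # Run with nose: each yielded (callable, *args) tuple is executed as a separate test case.
    def check_doubling(value, expected):
        assert value * 2 == expected


    def test_doubling():
        for value, expected in [(1, 2), (3, 6)]:
            yield check_doubling, value, expected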