Commit ce5fd206 authored by André Anjos

Merge branch 'make_install_more_flexible' into 'master'

Make install more flexible

See merge request !418
parents 3ae3ba7d 46f65300
Pipeline #47887 passed with stages in 20 minutes and 5 seconds
......@@ -126,7 +126,11 @@ class BackendUtilitiesMixin(object):
for contribution in ["system", "test"]:
install.install_contributions(
source_prefix, contribution, template_data, db_root_file_path
source_prefix=source_prefix,
project=contribution,
assets=list(install.ASSET_UPLOADER_MAP.keys()),
template_data=template_data,
db_root_file=db_root_file_path,
)
if not os.path.exists(settings.CACHE_ROOT):
......
......@@ -170,6 +170,16 @@ def create_users(username, passwd):
return system_user, plot_user, user
def get_users(username):
from django.contrib.auth.models import User
system_user = User.objects.get(username=settings.SYSTEM_ACCOUNT)
plot_user = User.objects.get(username=settings.PLOT_ACCOUNT)
user = User.objects.get(username=username)
return system_user, plot_user, user
def list_objects(prefix, project, category, fnfilter):
"""Lists all objects matching a certain filter"""
......@@ -919,6 +929,18 @@ def upload_plotter(prefix, name, data):
return True
ASSET_UPLOADER_MAP = collections.OrderedDict(
dataformats=upload_dataformat,
protocoltemplates=upload_protocoltemplate,
databases=upload_database,
toolchains=upload_toolchain,
libraries=upload_library,
algorithms=upload_algorithm,
plotters=upload_plotter,
experiments=upload_experiment,
)
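
Because ASSET_UPLOADER_MAP is an OrderedDict, its key order doubles as the canonical installation order, which the --assets handling further down re-sorts user input against. A quick illustration of what the map exposes (output shown as comments, abbreviated):

for asset_type, uploader in ASSET_UPLOADER_MAP.items():
    print(asset_type, uploader.__name__)
# dataformats upload_dataformat
# protocoltemplates upload_protocoltemplate
# ... and so on, in declaration order, down to:
# experiments upload_experiment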
def upload_dispatcher(prefix, project, type, name, data):
"""Uploads the experiment to the running platform
......@@ -940,27 +962,18 @@ def upload_dispatcher(prefix, project, type, name, data):
Returns:
bool: Indicates if the operation was succesful
bool: Indicates if the operation was successful
"""
base_subdir = os.path.join(prefix, project)
valid_types = {
"dataformats": upload_dataformat,
"protocoltemplates": upload_protocoltemplate,
"databases": upload_database,
"libraries": upload_library,
"algorithms": upload_algorithm,
"toolchains": upload_toolchain,
"experiments": upload_experiment,
"plotters": upload_plotter,
}
if type not in valid_types:
raise KeyError("Type must be one of `%s'" % ", ".join(valid_types.keys()))
if type not in ASSET_UPLOADER_MAP:
raise KeyError(
"Type must be one of `%s'" % ", ".join(ASSET_UPLOADER_MAP.keys())
)
upload_function = valid_types[type]
upload_function = ASSET_UPLOADER_MAP[type]
try:
......@@ -979,6 +992,9 @@ def upload_dispatcher(prefix, project, type, name, data):
def link_contribution_versions(klass):
"""Link object versions together"""
if not hasattr(klass, "version"):
return
for obj in klass.objects.all():
if obj.version > 1:
# search for similar
......@@ -1002,7 +1018,9 @@ def link_contribution_versions(klass):
pass # ignores
def install_contributions(source_prefix, project, template_data, db_root_file=None):
def install_contributions(
source_prefix, project, assets, template_data, db_root_file=None
):
"""Installs all contributions for a given project
......@@ -1014,6 +1032,10 @@ def install_contributions(source_prefix, project, template_data, db_root_file=No
project (str): The project within the ``source_prefix`` where to install
objects from.
assets (list): The list of assets to install. This allows installing only
a subset of them, e.g. for testing purposes, or to add only a new type of
asset on a production system.
template_data (dict): A dictionary containing standard template data for
completing template objects installed on the project.
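
With the new assets parameter, a caller can restrict installation to a subset of asset types. A minimal sketch of such a call, assuming the new keyword-argument style shown in the test mixin above (the template_data contents here are placeholders):

install_contributions(
    source_prefix="/path/to/src/prefix",   # placeholder path
    project="test",
    assets=["dataformats", "toolchains"],  # only these two types
    template_data={"system_user": system_user, "plot_user": plot_user},
)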
......@@ -1023,96 +1045,86 @@ def install_contributions(source_prefix, project, template_data, db_root_file=No
"""
# Dataformat adding requires a special trick as there are dependencies
# between different dataformats. Our recipe: we try to upload all of them
# one after the other. If one fails, we retry on the next loop, until all
# formats have been uploaded.
dataformat_filenames_next = list_objects(
source_prefix, project, "dataformats", "*.json"
)
dataformat_filenames_cur = []
while True:
if not dataformat_filenames_next:
break
if len(dataformat_filenames_cur) == len(dataformat_filenames_next):
break
dataformat_filenames_cur = dataformat_filenames_next
dataformat_filenames_next = []
for k in dataformat_filenames_cur:
if not upload_dispatcher(
source_prefix, project, "dataformats", k, template_data
):
dataformat_filenames_next.append(k)
from ....dataformats.models import DataFormat
link_contribution_versions(DataFormat)
# Template protocols
from ....protocoltemplates.models import ProtocolTemplate
for object_ in list_objects(source_prefix, project, "protocoltemplates", "*.json"):
upload_dispatcher(
source_prefix, project, "protocoltemplates", object_, template_data
)
link_contribution_versions(ProtocolTemplate)
# Reads database root file, if provided
db_root = {}
if db_root_file:
db_root.update(load_database_folders(db_root_file))
for k in list_objects(source_prefix, project, "databases", "*.json"):
if k in db_root:
template_data["root_folder"] = db_root[k]
upload_dispatcher(source_prefix, project, "databases", k, template_data)
from ....databases.models import Database
link_contribution_versions(Database)
for k in list_objects(source_prefix, project, "toolchains", "*.json"):
upload_dispatcher(source_prefix, project, "toolchains", k, template_data)
from ....toolchains.models import Toolchain
link_contribution_versions(Toolchain)
# Libraries adding requires a special trick as there are
# dependencies between different libraries and algorithms. Our
# recipe: we use the same technique as for dataformats.
library_filenames_next = list_objects(source_prefix, project, "libraries", "*.json")
library_filenames_cur = []
while True:
if not library_filenames_next:
break
if len(library_filenames_cur) == len(library_filenames_next):
break
library_filenames_cur = library_filenames_next
library_filenames_next = []
for k in library_filenames_cur:
if not upload_dispatcher(
source_prefix, project, "libraries", k, template_data
):
library_filenames_next.append(k)
from ....libraries.models import Library
link_contribution_versions(Library)
for k in list_objects(source_prefix, project, "algorithms", "*.json"):
upload_dispatcher(source_prefix, project, "algorithms", k, template_data)
from ....algorithms.models import Algorithm
link_contribution_versions(Algorithm)
for k in list_objects(source_prefix, project, "plotters", "*.json"):
upload_dispatcher(source_prefix, project, "plotters", k, template_data)
from ....plotters.models import Plotter
link_contribution_versions(Plotter)
for k in list_objects(source_prefix, project, "experiments", "*.json"):
upload_dispatcher(source_prefix, project, "experiments", k, template_data)
for asset in assets:
if asset == "dataformats":
# Dataformat adding requires a special trick as there are dependencies
# between different dataformats. Our recipe: we try to upload all of them
# one after the other. If one fails, we retry on the next loop, until all
# formats have been uploaded.
dataformat_filenames_next = list_objects(
source_prefix, project, asset, "*.json"
)
dataformat_filenames_cur = []
while True:
if not dataformat_filenames_next:
break
if len(dataformat_filenames_cur) == len(dataformat_filenames_next):
break
dataformat_filenames_cur = dataformat_filenames_next
dataformat_filenames_next = []
for k in dataformat_filenames_cur:
if not upload_dispatcher(
source_prefix, project, asset, k, template_data
):
dataformat_filenames_next.append(k)
elif asset == "libraries":
# Libraries adding requires a special trick as there are
# dependencies between different libraries and algorithms. Our
# recipe: we use the same technique as for dataformats.
library_filenames_next = list_objects(
source_prefix, project, asset, "*.json"
)
library_filenames_cur = []
while True:
if not library_filenames_next:
break
if len(library_filenames_cur) == len(library_filenames_next):
break
library_filenames_cur = library_filenames_next
library_filenames_next = []
for k in library_filenames_cur:
if not upload_dispatcher(
source_prefix, project, asset, k, template_data
):
library_filenames_next.append(k)
elif asset == "databases":
db_root = {}
if db_root_file:
db_root.update(load_database_folders(db_root_file))
for object_ in list_objects(source_prefix, project, asset, "*.json"):
if object_ in db_root:
template_data["root_folder"] = db_root[object_]
upload_dispatcher(source_prefix, project, asset, object_, template_data)
else:
for object_ in list_objects(source_prefix, project, asset, "*.json"):
upload_dispatcher(source_prefix, project, asset, object_, template_data)
from ....algorithms.models import Algorithm
from ....databases.models import Database
from ....dataformats.models import DataFormat
from ....experiments.models import Experiment
from ....libraries.models import Library
from ....plotters.models import Plotter
from ....protocoltemplates.models import ProtocolTemplate
from ....toolchains.models import Toolchain
klass_map = {
"dataformats": DataFormat,
"protocoltemplates": ProtocolTemplate,
"databases": Database,
"libraries": Library,
"algorithms": Algorithm,
"toolchains": Toolchain,
"experiments": Experiment,
"plotters": Plotter,
}
link_contribution_versions(klass_map[asset])
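
The dataformats and libraries branches above implement the same fixed-point retry: keep re-attempting failed uploads until a full pass makes no progress, so items whose dependencies landed in an earlier pass succeed in a later one. Stripped of the surrounding machinery, the pattern is (a sketch, with try_upload standing in for the upload_dispatcher call):

def upload_until_fixed_point(filenames, try_upload):
    """Retries failed uploads until a whole pass makes no progress."""
    pending = list(filenames)
    while pending:
        current, pending = pending, []
        for name in current:
            if not try_upload(name):
                pending.append(name)  # dependency missing; retry next pass
        if len(pending) == len(current):
            break  # no progress in this pass: give up on the rest
    return pending  # unresolved items; empty on full success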
class Command(BaseCommand):
......@@ -1123,6 +1135,7 @@ class Command(BaseCommand):
super(Command, self).__init__()
self.assets = list(ASSET_UPLOADER_MAP.keys())
self.prefix = os.path.join(
os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0]))),
"src",
......@@ -1223,6 +1236,24 @@ class Command(BaseCommand):
"%s]" % ", ".join(self.projects),
)
parser.add_argument(
"--assets",
nargs="*",
type=str,
default=self.assets,
help="The assets that you wish to install [default: %(default)s] "
"WARNING: order is important, dataformats before any other assets "
"that uses them, protocol templates before databases, etc.",
)
parser.add_argument(
"--assets-only",
action="store_true",
dest="assets_only",
default=False,
help="Only operate the asset installation part",
)
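
Together, the two flags make partial installs scriptable. A hypothetical invocation through Django's call_command, assuming the management command is exposed under this module's name, install:

from django.core.management import call_command

# Hypothetical: add only protocol templates and databases to an already
# bootstrapped instance, skipping site/user/queue setup.
call_command(
    "install",
    assets=["protocoltemplates", "databases"],
    assets_only=True,
    username="admin",  # placeholder; get_users() must find this account
)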
def handle(self, *ignored, **arguments):
# Setup this command's logging level
......@@ -1246,6 +1277,14 @@ class Command(BaseCommand):
)
sys.exit(1)
assets = arguments["assets"]
if not all(item in ASSET_UPLOADER_MAP.keys() for item in assets):
logger.error(f"Invalid asset list {assets}")
sys.exit(1)
assets = sorted(assets, key=self.assets.index)
# Creates the prefix directory
if not os.path.exists(settings.PREFIX):
logger.info("Creating prefix directory `%s'...", settings.PREFIX)
......@@ -1261,24 +1300,29 @@ class Command(BaseCommand):
call_command("migrate", interactive=False, verbosity=1)
# Setup sites: 1.Development; 2.Staging; 3.Production
create_sites()
if not arguments["assets_only"]:
# Setup sites: 1.Development; 2.Staging; 3.Production
create_sites()
system_user, plot_user, user = create_users(
arguments["username"], arguments["password"]
)
system_user, plot_user, user = create_users(
arguments["username"], arguments["password"]
)
# Sets up initial groups
add_group("Default")
# Sets up initial groups
add_group("Default")
# Sets up the queue and environments
setup_environment(arguments["queue_configuration"], arguments["verbosity"])
# Sets up the queue and environments
setup_environment(arguments["queue_configuration"], arguments["verbosity"])
from ....backend.models import EnvironmentLanguage
else:
system_user, plot_user, user = get_users(arguments["username"])
from ....backend.models import Environment
from ....code.models import Code
environment = (
EnvironmentLanguage.objects.filter(language=Code.PYTHON).first().environment
)
environment = Environment.objects.filter(
languages__language=Code.PYTHON, active=True
).first()
queue = environment.queues.first()
# Iterates over projects to install
......@@ -1295,5 +1339,9 @@ class Command(BaseCommand):
logger.info("Adding objects for project `%s'...", project)
install_contributions(
self.prefix, project, template_data, arguments["database_root_file"]
self.prefix,
project,
assets,
template_data,
arguments["database_root_file"],
)
......@@ -69,7 +69,7 @@ from django.core.management.base import BaseCommand
import beat.core.database
from beat.web.databases.models import Database
from .install import link_database_versions
from .install import link_contribution_versions
from .install import list_objects
from .install import load_database_folders
from .install import upload_database
......@@ -197,4 +197,4 @@ class Command(BaseCommand):
logger.error("Failed to install %s", key)
if not dry_run:
link_database_versions()
link_contribution_versions(Database)