Commit 880d942b authored by Flavio TARSETTI

Merge branch 'cleanup_utils' into 'django3_migration'

Cleanup utils

See merge request !369
parents 43962cc5 f6c46821
Pipeline #42706 passed in 14 minutes and 53 seconds
......@@ -25,22 +25,22 @@
# #
###############################################################################
import json
import sys
from actstream.models import Action
from django.contrib.contenttypes.models import ContentType
from django.core.serializers.base import DeserializationError
from django.core.serializers.python import Deserializer as PythonDeserializer, _get_model
from django.core.serializers.json import Serializer as JSONSerializer
from django.core.serializers.python import Deserializer as PythonDeserializer
from django.core.serializers.python import _get_model
from django.utils.encoding import force_text
from django.contrib.contenttypes.models import ContentType
from actstream.models import Action
import sys
import json
elements_to_convert = [('actor_object_id', 'actor_content_type'),
('action_object_object_id', 'action_object_content_type'),
('target_object_id', 'target_content_type')]
elements_to_convert = [
("actor_object_id", "actor_content_type"),
("action_object_object_id", "action_object_content_type"),
("target_object_id", "target_content_type"),
]
class Serializer(JSONSerializer):
......@@ -48,13 +48,14 @@ class Serializer(JSONSerializer):
Custom JSON serializer that replaces the actstream Action model ids by
their natural key counterpart.
"""
def get_dump_object(self, obj):
data = {
"model": force_text(obj._meta),
"fields": self._current,
}
if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'):
if not self.use_natural_primary_keys or not hasattr(obj, "natural_key"):
data["pk"] = force_text(obj._get_pk_val(), strings_only=True)
if self.use_natural_primary_keys and isinstance(obj, Action):
......@@ -62,15 +63,19 @@ class Serializer(JSONSerializer):
element_object_id = element[0]
element_content_type = element[1]
element_info = data['fields'][element_content_type]
element_info = data["fields"][element_content_type]
if element_info:
element_app = element_info[0]
element_model = element_info[1]
element_type = ContentType.objects.get(app_label=element_app, model=element_model)
element_id = data['fields'][element_object_id]
element_object = element_type.get_object_for_this_type(id=int(element_id))
data['fields'][element_object_id] = element_object.natural_key()
element_type = ContentType.objects.get(
app_label=element_app, model=element_model
)
element_id = data["fields"][element_object_id]
element_object = element_type.get_object_for_this_type(
id=int(element_id)
)
data["fields"][element_object_id] = element_object.natural_key()
return data
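As a side note, a custom serializer module like this one is typically made visible to dumpdata through Django's SERIALIZATION_MODULES setting. A minimal sketch, assuming the module lives at beat.web.utils.actstream_serializer (the path is hypothetical, not taken from this merge request):
# settings.py -- register the custom format under its own name
# (the module path below is an assumption)
SERIALIZATION_MODULES = {
    "actstream_json": "beat.web.utils.actstream_serializer",
}
# The format can then be requested explicitly, e.g.:
#   python manage.py dumpdata actstream --format actstream_json --output actions.json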
......@@ -82,7 +87,7 @@ def Deserializer(stream_or_string, **options):
if not isinstance(stream_or_string, (bytes, str)):
stream_or_string = stream_or_string.read()
if isinstance(stream_or_string, bytes):
stream_or_string = stream_or_string.decode('utf-8')
stream_or_string = stream_or_string.decode("utf-8")
try:
objects = json.loads(stream_or_string)
......@@ -94,16 +99,20 @@ def Deserializer(stream_or_string, **options):
for element in elements_to_convert:
element_object_id = element[0]
element_content_type = element[1]
element_info = d['fields'][element_content_type]
element_info = d["fields"][element_content_type]
if element_info:
element_app = element_info[0]
element_model = element_info[1]
element_type = ContentType.objects.get(app_label=element_app, model=element_model)
element_natural_key = d['fields'][element_object_id]
element_object = element_type.model_class().objects.get_by_natural_key(*element_natural_key)
element_type = ContentType.objects.get(
app_label=element_app, model=element_model
)
element_natural_key = d["fields"][element_object_id]
element_object = element_type.model_class().objects.get_by_natural_key(
*element_natural_key
)
d['fields'][element_object_id] = str(element_object.id)
d["fields"][element_object_id] = str(element_object.id)
for obj in PythonDeserializer(objects, **options):
yield obj
......@@ -111,4 +120,4 @@ def Deserializer(stream_or_string, **options):
raise
except Exception as e:
# Map to deserializer error
raise(DeserializationError, DeserializationError(e), sys.exc_info()[2])
raise DeserializationError(e).with_traceback(sys.exc_info()[2])
......@@ -27,12 +27,11 @@
"""
Django REST framework helpers
"""
import logging
from rest_framework.views import exception_handler
from rest_framework.exceptions import APIException
from rest_framework.status import is_client_error
from rest_framework.views import exception_handler
logger = logging.getLogger("beat.drf_exceptions")
......
......@@ -26,40 +26,41 @@
###############################################################################
import copy
import datetime
import logging
logger = logging.getLogger(__name__)
import os
import time
import copy
import shutil
import tarfile
import tempfile
import datetime
import time
from django.apps import apps
from django.apps import registry
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.conf import settings
from django.apps import apps, registry
from .... import __version__
APPS = [ #dump and load order are respected
'authtoken',
'backend',
'statistics',
'dataformats',
'databases',
'libraries',
'algorithms',
'plotters',
'toolchains',
'experiments',
'attestations',
'search',
'reports',
'actstream',
'post_office',
from ..version import __version__
logger = logging.getLogger(__name__)
APPS = [ # dump and load order are respected
"authtoken",
"backend",
"statistics",
"dataformats",
"databases",
"libraries",
"algorithms",
"plotters",
"toolchains",
"experiments",
"attestations",
"search",
"reports",
"actstream",
"post_office",
]
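Since the comment above notes that dump and load order are respected, a restore is expected to walk the same list in sequence. A minimal sketch of such a loop, assuming one fixture per app in a backup directory (the layout is an assumption):
import os

from django.core.management import call_command


def load_backup(backup_dir):
    # Re-load fixtures in the exact order they were dumped (APPS defined above)
    for app in APPS:
        fixture = os.path.join(backup_dir, "%s.json" % app)
        if os.path.exists(fixture):
            call_command("loaddata", fixture)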
......@@ -75,63 +76,87 @@ def _check(app, queryset):
# checks the forks
if k.fork_of and k.fork_of.id not in declared:
logger.warn("** Inconsistent creation date - %s `%s' (%s) which is a fork of `%s' (%s) must be dumped after, but was created before", app, k, k.creation_date, k.fork_of, k.fork_of.creation_date)
logger.warn(" -> Correcting creation date of %s to be (a second after) %s", k, k.fork_of)
logger.warn(
"** Inconsistent creation date - %s `%s' (%s) which is a fork of `%s' (%s) must be dumped after, but was created before",
app,
k,
k.creation_date,
k.fork_of,
k.fork_of.creation_date,
)
logger.warn(
" -> Correcting creation date of %s to be (a second after) %s",
k,
k.fork_of,
)
k.creation_date = k.fork_of.creation_date + datetime.timedelta(seconds=1)
k.save()
errors += 1
# checks previous versions
if k.previous_version and k.previous_version.id not in declared:
logger.warn("** Inconsistent creation date - %s `%s' (%s) which has `%s' (%s) as a previous version must be dumped after, but was created before", app, k, k.creation_date, k.previous_version, k.previous_version.creation_date)
logger.warn(" -> Correcting creation date of %s to be (a second after) %s", k, k.previous_version)
k.creation_date = k.previous_version.creation_date + datetime.timedelta(seconds=1)
logger.warn(
"** Inconsistent creation date - %s `%s' (%s) which has `%s' (%s) as a previous version must be dumped after, but was created before",
app,
k,
k.creation_date,
k.previous_version,
k.previous_version.creation_date,
)
logger.warn(
" -> Correcting creation date of %s to be (a second after) %s",
k,
k.previous_version,
)
k.creation_date = k.previous_version.creation_date + datetime.timedelta(
seconds=1
)
k.save()
errors += 1
declared.append(k.id)
return errors
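To illustrate the correction performed by _check() above: an object dumped before its fork parent or previous version would break the load order, so its creation date is pushed one second past the parent's. A simplified toy example (the real check keys on dump order, not on a direct date comparison):
import datetime

parent_created = datetime.datetime(2020, 1, 1, 10, 0, 5)
fork_created = datetime.datetime(2020, 1, 1, 10, 0, 0)  # inconsistent: earlier than its parent

if fork_created <= parent_created:
    # same correction as _check(): one second after the parent
    fork_created = parent_created + datetime.timedelta(seconds=1)

assert fork_created == datetime.datetime(2020, 1, 1, 10, 0, 6)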
class Command(BaseCommand):
help = 'Backs-up current database and prefix in a single tarball'
help = "Backs-up current database and prefix in a single tarball"
def handle(self, *ignored, **arguments):
# Setup this command's logging level
global logger
arguments['verbosity'] = int(arguments['verbosity'])
if arguments['verbosity'] >= 1:
if arguments['verbosity'] == 1: logger.setLevel(logging.INFO)
elif arguments['verbosity'] >= 2: logger.setLevel(logging.DEBUG)
arguments["verbosity"] = int(arguments["verbosity"])
if arguments["verbosity"] >= 1:
if arguments["verbosity"] == 1:
logger.setLevel(logging.INFO)
elif arguments["verbosity"] >= 2:
logger.setLevel(logging.DEBUG)
# for these apps, only backs-up these particular object types
only = dict(
dataformats = 'dataformats.DataFormat',
libraries = 'libraries.Library',
algorithms = 'algorithms.Algorithm',
databases = 'databases.Database',
toolchains = 'toolchains.Toolchain',
dataformats="dataformats.DataFormat",
libraries="libraries.Library",
algorithms="algorithms.Algorithm",
databases="databases.Database",
toolchains="toolchains.Toolchain",
)
dump_arguments = dict(
indent = 2,
verbosity=arguments.get('verbosity'),
indent=2,
verbosity=arguments.get("verbosity"),
interactive=False,
use_natural_primary_keys = True,
use_natural_foreign_keys = True,
format = 'json',
exclude = [
'sessions',
'admin',
'contenttypes',
'auth.Permission',
'backend.Job',
'backend.JobSplit',
use_natural_primary_keys=True,
use_natural_foreign_keys=True,
format="json",
exclude=[
"sessions",
"admin",
"contenttypes",
"auth.Permission",
"backend.Job",
"backend.JobSplit",
],
)
......@@ -141,59 +166,64 @@ class Command(BaseCommand):
use_apps = [app for app in APPS if app in installed_apps]
try:
tmpdir = tempfile.mkdtemp('.backup', 'beat.web-')
tmpdir = tempfile.mkdtemp(".backup", "beat.web-")
# backs-up everything else first
arguments = copy.deepcopy(dump_arguments)
arguments['exclude'] += use_apps
destfile = os.path.join(tmpdir, 'initial.json')
arguments["exclude"] += use_apps
destfile = os.path.join(tmpdir, "initial.json")
logger.info("Dumping initial (unspecified) data -> `%s'", destfile)
arguments['output'] = destfile #new in Django-1.8.x
call_command('xdumpdata', **arguments)
arguments["output"] = destfile # new in Django-1.8.x
call_command("xdumpdata", **arguments)
# and backs-up the apps respecting the imposed order
for app in use_apps:
destfile = os.path.join(tmpdir, '%s.json' % app)
destfile = os.path.join(tmpdir, "%s.json" % app)
arguments = copy.deepcopy(dump_arguments)
logger.info("Dumping data for `%s' -> `%s'", app, destfile)
if app in only:
app, model = only[app].split('.')
app, model = only[app].split(".")
model = apps.get_model(app, model)
order = ('creation_date',)
order = ("creation_date",)
# This will check and correct objects with weird creation
# dates so that the dump order is consistent
while True:
queryset = model.objects.order_by(*order)
err = _check(app, queryset)
if not err: break
if not err:
break
arguments['primary_keys'] = \
','.join([str(k.id) for k in queryset])
arguments["primary_keys"] = ",".join([str(k.id) for k in queryset])
arguments['output'] = destfile #new in Django-1.8.x
call_command('xdumpdata', only.get(app, app), **arguments)
arguments["output"] = destfile # new in Django-1.8.x
call_command("xdumpdata", only.get(app, app), **arguments)
# copy prefix data
path = os.path.join(settings.PREFIX, app)
if os.path.exists(path):
destdir = os.path.join(tmpdir, 'prefix', app)
logger.info("Backing up core objects for `%s' -> `%s'",
app, destdir)
destdir = os.path.join(tmpdir, "prefix", app)
logger.info(
"Backing up core objects for `%s' -> `%s'", app, destdir
)
shutil.copytree(path, destdir)
else:
logger.info("No disk presence found for `%s'", app)
# tarball and remove directory
compress = 'bz2'
tarball = time.strftime('%Y.%m.%d-%Hh%Mm%S') + \
('-v%s' % __version__) + '.tar.' + compress
compress = "bz2"
tarball = (
time.strftime("%Y.%m.%d-%Hh%Mm%S")
+ ("-v%s" % __version__)
+ ".tar."
+ compress
)
logger.info("Writing archive `%s'", tarball)
with tarfile.open(tarball, "w:%s" % compress) as tar:
tar.add(tmpdir, arcname='')
tar.add(tmpdir, arcname="")
finally:
......
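For reference, the strftime pattern and version suffix above yield archive names like the one below (timestamp and version are invented for illustration):
#   2020.03.15-14h03m27-v1.5.0.tar.bz2
# which can later be unpacked with, for example:
#   tar xjf 2020.03.15-14h03m27-v1.5.0.tar.bz2 -C /some/restore/directory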
......@@ -31,9 +31,9 @@ import logging
from django.core.management.base import BaseCommand
from django.db import transaction
from beat.web.backend.models import Worker
from beat.core.bcpapi.broker import BeatComputationBroker
from beat.core.utils import setup_logging
from beat.web.backend.models import Worker
logger = logging.getLogger(__name__)
......
......@@ -46,73 +46,76 @@ Examples:
$ manage.py update_installed_databases -v1 --dry-run
"""
import os
import sys
import logging
from django.core.management.base import BaseCommand
from django.conf import settings
from beat.web.databases.models import Database
from .install import load_database_folders
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Change the root path of the databases listed in the given conf file'
help = "Change the root path of the databases listed in the given conf file"
def add_arguments(self, parser):
from argparse import RawDescriptionHelpFormatter
parser.epilog = __doc__
parser.formatter_class = RawDescriptionHelpFormatter
parser.add_argument('database_root_file', type=str,
help='The JSON file containing ' \
'the root directories of the databases installed ' \
'on the platform.')
parser.add_argument('--dry-run', '-d', action='store_true',
dest='dry_run', default=False, help='Set this flag to ' \
'simulate a run.')
parser.add_argument(
"database_root_file",
type=str,
help="The JSON file containing "
"the root directories of the databases installed "
"on the platform.",
)
parser.add_argument(
"--dry-run",
"-d",
action="store_true",
dest="dry_run",
default=False,
help="Set this flag to " "simulate a run.",
)
def handle(self, *ignored, **arguments):
# Setup this command's logging level
global logger
arguments['verbosity'] = int(arguments['verbosity'])
if arguments['verbosity'] >= 1:
if arguments['verbosity'] == 1:
arguments["verbosity"] = int(arguments["verbosity"])
if arguments["verbosity"] >= 1:
if arguments["verbosity"] == 1:
logger.setLevel(logging.INFO)
elif arguments['verbosity'] >= 2:
elif arguments["verbosity"] >= 2:
logger.setLevel(logging.DEBUG)
dry_run = arguments['dry_run']
dry_run = arguments["dry_run"]
# Reads database root file, if provided
db_root_file = arguments['database_root_file']
db_root_file = arguments["database_root_file"]
db_root = load_database_folders(db_root_file)
for db, path in db_root.items():
name, version = db.split('/')
name, version = db.split("/")
try:
database = Database.objects.get(name=name, version=int(version))
except Database.DoesNotExist:
logger.error("Failed to find %s", db)
else:
if dry_run:
logger.info("Would change %s for %s" %(database.declaration['root_folder'],
path))
logger.info(
"Would change %s for %s"
% (database.declaration["root_folder"], path)
)
else:
logger.info("Changing %s path for %s" %(db, path))
logger.info("Changing %s path for %s" % (db, path))
declaration = database.declaration
declaration['root_folder'] = path
declaration["root_folder"] = path
database.declaration = declaration
database.save()
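For readability, the loop above implies that the file parsed by load_database_folders() maps "name/version" keys to root folders. A sketch of the expected shape, with invented entries:
# Shape of database_root_file as implied by the loop above
# ("name/version" -> root folder); the entries are made-up examples:
example_database_roots = {
    "atnt/1": "/datasets/atnt",
    "mobio/2": "/datasets/mobio",
}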
......@@ -27,22 +27,24 @@
###############################################################################
from django.core.management.base import BaseCommand, CommandError
from actstream.models import Action
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from actstream.models import Action
from django.core.management.base import BaseCommand
from beat.web.common.models import Shareable
class Command(BaseCommand):
help = 'Cleanup wrongly made public actions'
help = "Cleanup wrongly made public actions"
def handle(self, *args, **options):
ctype = ContentType.objects.get_for_model(User)
for action in Action.objects.filter(public=True).filter(actor_content_type=ctype):
if action.action_object and hasattr(action.action_object, 'sharing'):
for action in Action.objects.filter(public=True).filter(
actor_content_type=ctype
):
if action.action_object and hasattr(action.action_object, "sharing"):
if action.action_object.sharing != Shareable.PUBLIC:
action.public = False
action.save()
......@@ -30,10 +30,10 @@ import logging
import multiprocessing
import signal
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.conf import settings
from django import db
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand
from beat.core.utils import find_free_port
......
......@@ -27,9 +27,9 @@
###############################################################################
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from django.contrib.sessions.models import Session
from django.core.management.base import BaseCommand
from django.utils import timezone
......@@ -49,30 +49,40 @@ def get_current_users():
user_id_list = []
for session in active_sessions:
data = session.get_decoded()
user_id_list.append(data.get('_auth_user_id', None))
user_id_list.append(data.get("_auth_user_id", None))
# Query all logged in users based on id list
return User.objects.filter(id__in=user_id_list)
class Command(BaseCommand):
help = 'Get active users'
help = "Get active users"
def add_arguments(self, parser):
parser.add_argument('--email', '-e', action='store_true',
dest='show_email', default=False, help='Set this flag'\
'to also print email addresse')
parser.add_argument('--only-email', '-o', action='store_true',
dest='show_only_email', default=False, help='Set this'\
'flag to only print email addresse')
parser.add_argument(
"--email",
"-e",
action="store_true",
dest="show_email",
default=False,
help="Set this flag" "to also print email addresse",
)
parser.add_argument(
"--only-email",
"-o",
action="store_true",
dest="show_only_email",
default=False,
help="Set this" "flag to only print email addresse",
)
def handle(self, *args, **options):
current_users = get_current_users()
for user in current_users:
if options['show_only_email']:
if options["show_only_email"]:
print(user.email)
else:
user_data = "{} {}".format(user.first_name, user.last_name)
if options['show_email']:
if options["show_email"]:
user_data += " {}".format(user.email)
print(user_data)
\ No newline at end of file
print(user_data)
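With the formatting above, the command prints one user per line; with --email the output would look like this (names and addresses invented):
#   Jane Doe jane.doe@example.com
#   John Smith john.smith@example.com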
......@@ -33,23 +33,20 @@ import tarfile
import tempfile
import django.core.serializers.base
from django.apps import apps
from django.apps import registry
from django.conf import settings
from django.contrib.auth.models import User
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.apps import apps, registry
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from django.db import connection
# Overrides deserialization to affect OneToOneFields for Users correctly
from ....navigation.models import Agreement
from ....accounts.models import AccountSettings
from ....experiments.models import Experiment
from ....navigation.models import Agreement
from .backup import APPS
logger = logging.getLogger(__name__)
_original_save = django.core.serializers.base.DeserializedObject.save
......@@ -99,6 +96,7 @@ class Command(BaseCommand):
if connection.vendor == "sqlite":
import sqlite3 # noqa
from pkg_resources import parse_version # noqa
if parse_version(sqlite3.sqlite_version) >= parse_version("3.26"):
......
......@@ -26,26 +26,26 @@
# #
###############################################################################
import json
import logging
import signal
import json
import sys
from django.core.management.base import BaseCommand
from django.conf import settings
from django.core.management.base import BaseCommand
from beat.core.bcpapi import BCP
from beat.core.bcpapi.client import BeatComputationClient
from ....backend.models import JobSplit
from ....backend.helpers import split_new_jobs
from ....backend.helpers import process_newly_cancelled_experiments