Commit 95f2641b authored by Flavio TARSETTI

Merge branch 'cleanup_statistics' into 'django3_migration'

Cleanup statistics

See merge request !363
parents 40b8919b 6e70390a
Pipeline #42676 passed in 14 minutes and 55 seconds
@@ -25,28 +25,29 @@
# #
###############################################################################
from .models import HourlyStatistics as HourlyStatisticsModel
from django.contrib import admin
from .models import HourlyStatistics as HourlyStatisticsModel
#----------------------------------------------------------
# ----------------------------------------------------------
class HourlyStatistics(admin.ModelAdmin):
list_display = (
'id',
'date',
'hour',
'cpu_time',
'max_memory',
'data_read_size',
'data_read_nb_blocks',
'data_read_time',
'data_written_size',
'data_written_nb_blocks',
'data_written_time',
list_display = (
"id",
"date",
"hour",
"cpu_time",
"max_memory",
"data_read_size",
"data_read_nb_blocks",
"data_read_time",
"data_written_size",
"data_written_nb_blocks",
"data_written_time",
)
list_display_links = ('id', )
list_display_links = ("id",)
admin.site.register(HourlyStatisticsModel, HourlyStatistics)
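
For reference, the same registration can also be written with Django's admin.register decorator instead of the trailing admin.site.register call; a minimal equivalent sketch, not part of this merge request:

from django.contrib import admin

from .models import HourlyStatistics as HourlyStatisticsModel


@admin.register(HourlyStatisticsModel)
class HourlyStatistics(admin.ModelAdmin):
    # Same field list as in the hunk above, shortened here for brevity.
    list_display = ("id", "date", "hour", "cpu_time", "max_memory")
    list_display_links = ("id",)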
@@ -27,32 +27,38 @@
from __future__ import unicode_literals
from django.db import migrations, models
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
]
dependencies = []
operations = [
migrations.CreateModel(
name='HourlyStatistics',
name="HourlyStatistics",
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateField()),
('hour', models.IntegerField()),
('cpu_time', models.FloatField(default=0)),
('max_memory', models.BigIntegerField(default=0)),
('data_read_size', models.BigIntegerField(default=0)),
('data_read_nb_blocks', models.IntegerField(default=0)),
('data_read_time', models.FloatField(default=0)),
('data_written_size', models.BigIntegerField(default=0)),
('data_written_nb_blocks', models.IntegerField(default=0)),
('data_written_time', models.FloatField(default=0)),
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
("date", models.DateField()),
("hour", models.IntegerField()),
("cpu_time", models.FloatField(default=0)),
("max_memory", models.BigIntegerField(default=0)),
("data_read_size", models.BigIntegerField(default=0)),
("data_read_nb_blocks", models.IntegerField(default=0)),
("data_read_time", models.FloatField(default=0)),
("data_written_size", models.BigIntegerField(default=0)),
("data_written_nb_blocks", models.IntegerField(default=0)),
("data_written_time", models.FloatField(default=0)),
],
options={
'verbose_name_plural': 'Hourly statistics',
},
options={"verbose_name_plural": "Hourly statistics"},
),
]
@@ -27,24 +27,28 @@
from django.db import models
# ----------------------------------------------------------
#----------------------------------------------------------
class HourlyStatistics(models.Model):
date = models.DateField()
hour = models.IntegerField()
cpu_time = models.FloatField(default=0)
max_memory = models.BigIntegerField(default=0)
data_read_size = models.BigIntegerField(default=0)
data_read_nb_blocks = models.IntegerField(default=0)
data_read_time = models.FloatField(default=0)
data_written_size = models.BigIntegerField(default=0)
data_written_nb_blocks = models.IntegerField(default=0)
data_written_time = models.FloatField(default=0)
date = models.DateField()
hour = models.IntegerField()
cpu_time = models.FloatField(default=0)
max_memory = models.BigIntegerField(default=0)
data_read_size = models.BigIntegerField(default=0)
data_read_nb_blocks = models.IntegerField(default=0)
data_read_time = models.FloatField(default=0)
data_written_size = models.BigIntegerField(default=0)
data_written_nb_blocks = models.IntegerField(default=0)
data_written_time = models.FloatField(default=0)
def __str__(self):
return 'Hourly statistics #%d (%s, hour %s)' % (self.id, self.date.strftime('%b %d, %Y'), self.hour)
return "Hourly statistics #%d (%s, hour %s)" % (
self.id,
self.date.strftime("%b %d, %Y"),
self.hour,
)
class Meta:
verbose_name_plural = "Hourly statistics"
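
To see what the reworked __str__ produces, run something like the following inside the project's manage.py shell; the import path is an assumption about where the statistics app lives:

from datetime import date

from beat.web.statistics.models import HourlyStatistics  # assumed module path

row = HourlyStatistics(id=1, date=date(2013, 10, 5), hour=6)  # unsaved, for illustration only
print(row)  # -> Hourly statistics #1 (Oct 05, 2013, hour 6)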
@@ -2,21 +2,21 @@
{% comment %}
* Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
* Contact: beat.support@idiap.ch
*
*
* This file is part of the beat.web module of the BEAT platform.
*
*
* Commercial License Usage
* Licensees holding valid commercial BEAT licenses may use this file in
* accordance with the terms contained in a written agreement between you
* and Idiap. For further information contact tto@idiap.ch
*
*
* Alternatively, this file may be used under the terms of the GNU Affero
* Public License version 3 as published by the Free Software and appearing
* in the file LICENSE.AGPL included in the packaging of this file.
* The BEAT platform is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
*
*
* You should have received a copy of the GNU Affero Public License along
* with the BEAT platform. If not, see http://www.gnu.org/licenses/.
{% endcomment %}
@@ -27,18 +27,16 @@
from django import template
from django.conf import settings
from ..views import calculate_totals
register = template.Library()
@register.inclusion_tag('statistics/panels/status.html', takes_context=True)
@register.inclusion_tag("statistics/panels/status.html", takes_context=True)
def status(context, id):
return {
'request': context['request'],
'panel_id': id,
'totals': calculate_totals(),
"request": context["request"],
"panel_id": id,
"totals": calculate_totals(),
}
@@ -25,51 +25,41 @@
# #
###############################################################################
from django.test import TestCase
from django.contrib.auth.models import User
from django.urls import reverse
from datetime import datetime
from datetime import date
from datetime import timedelta
from .models import HourlyStatistics
from .utils import updateStatistics
from ..dataformats.models import DataFormat
from ..common.testutils import BaseTestCase
import nose.tools
from django.test import TestCase
import beat.core.stats
import nose.tools
from .models import HourlyStatistics
from .utils import updateStatistics
STATS_1 = {
'cpu': { 'user': 1, 'system': 1.5, },
'memory': { 'rss': 100, },
'data': {
'volume': { 'read': 1000, 'write': 2000, },
'blocks': { 'read': 100, 'write': 200, },
'time': { 'read': 10000, 'write': 20000, },
'generated_files': [],
}
"cpu": {"user": 1, "system": 1.5},
"memory": {"rss": 100},
"data": {
"volume": {"read": 1000, "write": 2000},
"blocks": {"read": 100, "write": 200},
"time": {"read": 10000, "write": 20000},
"generated_files": [],
},
}
STATS_2 = {
'cpu': { 'user': 2, 'system': 3, },
'memory': { 'rss': 200, },
'data': {
'volume': { 'read': 500, 'write': 600, },
'blocks': { 'read': 50, 'write': 60, },
'time': { 'read': 5000, 'write': 6000, },
'generated_files': [],
}
"cpu": {"user": 2, "system": 3},
"memory": {"rss": 200},
"data": {
"volume": {"read": 500, "write": 600},
"blocks": {"read": 50, "write": 60},
"time": {"read": 5000, "write": 6000},
"generated_files": [],
},
}
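
For context when reading the assertions below: updateStatistics accumulates cpu user + system, so STATS_1 contributes 2.5 of CPU time and STATS_2 contributes 5.0; the expected values in the tests (2.5 after a single update, 7.5 CPU time and 300 max_memory when both updates land in the same hour, and two separate rows otherwise) follow directly from these two fixtures.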
class UpdateStatisticsFunction(TestCase):
def test_first_update(self):
statistics = beat.core.stats.Statistics(STATS_1)
@@ -82,15 +72,14 @@ class UpdateStatisticsFunction(TestCase):
nose.tools.eq_(hourly.date, date1.date())
nose.tools.eq_(hourly.hour, 6)
nose.tools.eq_(hourly.cpu_time , 2.5)
nose.tools.eq_(hourly.max_memory , 100)
nose.tools.eq_(hourly.data_read_size , 1000)
nose.tools.eq_(hourly.data_read_nb_blocks , 100)
nose.tools.eq_(hourly.data_read_time , 10000)
nose.tools.eq_(hourly.data_written_size , 2000)
nose.tools.eq_(hourly.data_written_nb_blocks , 200)
nose.tools.eq_(hourly.data_written_time , 20000)
nose.tools.eq_(hourly.cpu_time, 2.5)
nose.tools.eq_(hourly.max_memory, 100)
nose.tools.eq_(hourly.data_read_size, 1000)
nose.tools.eq_(hourly.data_read_nb_blocks, 100)
nose.tools.eq_(hourly.data_read_time, 10000)
nose.tools.eq_(hourly.data_written_size, 2000)
nose.tools.eq_(hourly.data_written_nb_blocks, 200)
nose.tools.eq_(hourly.data_written_time, 20000)
def test_two_updates_in_the_same_hour(self):
statistics = beat.core.stats.Statistics(STATS_1)
@@ -103,22 +92,20 @@ class UpdateStatisticsFunction(TestCase):
date2 = datetime(2013, 10, 5, 6, 45, 0)
updateStatistics(statistics, date=date2)
nose.tools.eq_(HourlyStatistics.objects.count(), 1)
hourly = HourlyStatistics.objects.all()[0]
nose.tools.eq_(hourly.date, date1.date())
nose.tools.eq_(hourly.hour, 6)
nose.tools.eq_(hourly.cpu_time , 7.5)
nose.tools.eq_(hourly.max_memory , 300)
nose.tools.eq_(hourly.data_read_size , 1500)
nose.tools.eq_(hourly.data_read_nb_blocks , 150)
nose.tools.eq_(hourly.data_read_time , 15000)
nose.tools.eq_(hourly.data_written_size , 2600)
nose.tools.eq_(hourly.data_written_nb_blocks , 260)
nose.tools.eq_(hourly.data_written_time , 26000)
nose.tools.eq_(hourly.cpu_time, 7.5)
nose.tools.eq_(hourly.max_memory, 300)
nose.tools.eq_(hourly.data_read_size, 1500)
nose.tools.eq_(hourly.data_read_nb_blocks, 150)
nose.tools.eq_(hourly.data_read_time, 15000)
nose.tools.eq_(hourly.data_written_size, 2600)
nose.tools.eq_(hourly.data_written_nb_blocks, 260)
nose.tools.eq_(hourly.data_written_time, 26000)
def test_two_updates_in_different_hours(self):
statistics = beat.core.stats.Statistics(STATS_1)
@@ -131,36 +118,33 @@ class UpdateStatisticsFunction(TestCase):
date2 = datetime(2013, 10, 5, 7, 45, 0)
updateStatistics(statistics, date=date2)
nose.tools.eq_(HourlyStatistics.objects.count(), 2)
hourly = HourlyStatistics.objects.all()[0]
nose.tools.eq_(hourly.date, date1.date())
nose.tools.eq_(hourly.hour, 6)
nose.tools.eq_(hourly.cpu_time , 2.5)
nose.tools.eq_(hourly.max_memory , 100)
nose.tools.eq_(hourly.data_read_size , 1000)
nose.tools.eq_(hourly.data_read_nb_blocks , 100)
nose.tools.eq_(hourly.data_read_time , 10000)
nose.tools.eq_(hourly.data_written_size , 2000)
nose.tools.eq_(hourly.data_written_nb_blocks , 200)
nose.tools.eq_(hourly.data_written_time , 20000)
nose.tools.eq_(hourly.cpu_time, 2.5)
nose.tools.eq_(hourly.max_memory, 100)
nose.tools.eq_(hourly.data_read_size, 1000)
nose.tools.eq_(hourly.data_read_nb_blocks, 100)
nose.tools.eq_(hourly.data_read_time, 10000)
nose.tools.eq_(hourly.data_written_size, 2000)
nose.tools.eq_(hourly.data_written_nb_blocks, 200)
nose.tools.eq_(hourly.data_written_time, 20000)
hourly = HourlyStatistics.objects.all()[1]
nose.tools.eq_(hourly.date, date2.date())
nose.tools.eq_(hourly.hour, 7)
nose.tools.eq_(hourly.cpu_time , 5.0)
nose.tools.eq_(hourly.max_memory , 200)
nose.tools.eq_(hourly.data_read_size , 500)
nose.tools.eq_(hourly.data_read_nb_blocks , 50)
nose.tools.eq_(hourly.data_read_time , 5000)
nose.tools.eq_(hourly.data_written_size , 600)
nose.tools.eq_(hourly.data_written_nb_blocks , 60)
nose.tools.eq_(hourly.data_written_time , 6000)
nose.tools.eq_(hourly.cpu_time, 5.0)
nose.tools.eq_(hourly.max_memory, 200)
nose.tools.eq_(hourly.data_read_size, 500)
nose.tools.eq_(hourly.data_read_nb_blocks, 50)
nose.tools.eq_(hourly.data_read_time, 5000)
nose.tools.eq_(hourly.data_written_size, 600)
nose.tools.eq_(hourly.data_written_nb_blocks, 60)
nose.tools.eq_(hourly.data_written_time, 6000)
def test_two_updates_in_different_days(self):
statistics = beat.core.stats.Statistics(STATS_1)
@@ -173,35 +157,33 @@ class UpdateStatisticsFunction(TestCase):
date2 = datetime(2013, 10, 6, 7, 45, 0)
updateStatistics(statistics, date=date2)
nose.tools.eq_(HourlyStatistics.objects.count(), 2)
hourly = HourlyStatistics.objects.all()[0]
nose.tools.eq_(hourly.date, date1.date())
nose.tools.eq_(hourly.hour, 6)
nose.tools.eq_(hourly.cpu_time , 2.5)
nose.tools.eq_(hourly.max_memory , 100)
nose.tools.eq_(hourly.data_read_size , 1000)
nose.tools.eq_(hourly.data_read_nb_blocks , 100)
nose.tools.eq_(hourly.data_read_time , 10000)
nose.tools.eq_(hourly.data_written_size , 2000)
nose.tools.eq_(hourly.data_written_nb_blocks , 200)
nose.tools.eq_(hourly.data_written_time , 20000)
nose.tools.eq_(hourly.cpu_time, 2.5)
nose.tools.eq_(hourly.max_memory, 100)
nose.tools.eq_(hourly.data_read_size, 1000)
nose.tools.eq_(hourly.data_read_nb_blocks, 100)
nose.tools.eq_(hourly.data_read_time, 10000)
nose.tools.eq_(hourly.data_written_size, 2000)
nose.tools.eq_(hourly.data_written_nb_blocks, 200)
nose.tools.eq_(hourly.data_written_time, 20000)
hourly = HourlyStatistics.objects.all()[1]
nose.tools.eq_(hourly.date, date2.date())
nose.tools.eq_(hourly.hour, 7)
nose.tools.eq_(hourly.cpu_time , 5.0)
nose.tools.eq_(hourly.max_memory , 200)
nose.tools.eq_(hourly.data_read_size , 500)
nose.tools.eq_(hourly.data_read_nb_blocks , 50)
nose.tools.eq_(hourly.data_read_time , 5000)
nose.tools.eq_(hourly.data_written_size , 600)
nose.tools.eq_(hourly.data_written_nb_blocks , 60)
nose.tools.eq_(hourly.data_written_time , 6000)
nose.tools.eq_(hourly.cpu_time, 5.0)
nose.tools.eq_(hourly.max_memory, 200)
nose.tools.eq_(hourly.data_read_size, 500)
nose.tools.eq_(hourly.data_read_nb_blocks, 50)
nose.tools.eq_(hourly.data_read_time, 5000)
nose.tools.eq_(hourly.data_written_size, 600)
nose.tools.eq_(hourly.data_written_nb_blocks, 60)
nose.tools.eq_(hourly.data_written_time, 6000)
def test_two_updates_in_different_weeks(self):
statistics = beat.core.stats.Statistics(STATS_1)
@@ -214,31 +196,30 @@ class UpdateStatisticsFunction(TestCase):
date2 = datetime(2013, 10, 7, 7, 45, 0)
updateStatistics(statistics, date=date2)
nose.tools.eq_(HourlyStatistics.objects.count(), 2)
hourly = HourlyStatistics.objects.all()[0]
nose.tools.eq_(hourly.date, date1.date())
nose.tools.eq_(hourly.hour, 6)
nose.tools.eq_(hourly.cpu_time , 2.5)
nose.tools.eq_(hourly.max_memory , 100)
nose.tools.eq_(hourly.data_read_size , 1000)
nose.tools.eq_(hourly.data_read_nb_blocks , 100)
nose.tools.eq_(hourly.data_read_time , 10000)
nose.tools.eq_(hourly.data_written_size , 2000)
nose.tools.eq_(hourly.data_written_nb_blocks , 200)
nose.tools.eq_(hourly.data_written_time , 20000)
nose.tools.eq_(hourly.cpu_time, 2.5)
nose.tools.eq_(hourly.max_memory, 100)
nose.tools.eq_(hourly.data_read_size, 1000)
nose.tools.eq_(hourly.data_read_nb_blocks, 100)
nose.tools.eq_(hourly.data_read_time, 10000)
nose.tools.eq_(hourly.data_written_size, 2000)
nose.tools.eq_(hourly.data_written_nb_blocks, 200)
nose.tools.eq_(hourly.data_written_time, 20000)
hourly = HourlyStatistics.objects.all()[1]
nose.tools.eq_(hourly.date, date2.date())
nose.tools.eq_(hourly.hour, 7)
nose.tools.eq_(hourly.cpu_time , 5.0)
nose.tools.eq_(hourly.max_memory , 200)
nose.tools.eq_(hourly.data_read_size , 500)
nose.tools.eq_(hourly.data_read_nb_blocks , 50)
nose.tools.eq_(hourly.data_read_time , 5000)
nose.tools.eq_(hourly.data_written_size , 600)
nose.tools.eq_(hourly.data_written_nb_blocks , 60)
nose.tools.eq_(hourly.data_written_time , 6000)
nose.tools.eq_(hourly.cpu_time, 5.0)
nose.tools.eq_(hourly.max_memory, 200)
nose.tools.eq_(hourly.data_read_size, 500)
nose.tools.eq_(hourly.data_read_nb_blocks, 50)
nose.tools.eq_(hourly.data_read_time, 5000)
nose.tools.eq_(hourly.data_written_size, 600)
nose.tools.eq_(hourly.data_written_nb_blocks, 60)
nose.tools.eq_(hourly.data_written_time, 6000)
@@ -25,9 +25,11 @@
# #
###############################################################################
from .models import HourlyStatistics
from datetime import datetime
from .models import HourlyStatistics
def updateStatistics(stats, date=None):
"""Update the hourly statistics
@@ -41,34 +43,22 @@ def updateStatistics(stats, date=None):
date = date or datetime.now()
# Retrieve the current obj entry (if it exists)
try:
obj = HourlyStatistics.objects.order_by('-date', '-hour')[0]
if (obj.date != date.date()) or (obj.hour != date.hour):
obj = None
except:
obj = None
# Create an obj entry if necessary
if obj is None:
obj = HourlyStatistics()
obj.date = date.date()
obj.hour = date.hour
obj, _ = HourlyStatistics.objects.get_or_create(date=date.date(), hour=date.hour)
# Modify the obj entry
obj.cpu_time += stats.cpu['user'] + stats.cpu['system']
obj.max_memory += stats.memory['rss']
obj.cpu_time += stats.cpu["user"] + stats.cpu["system"]
obj.max_memory += stats.memory["rss"]
if 'volume' in stats.data:
obj.data_read_size += stats.data['volume'].get('read', 0)
obj.data_written_size += stats.data['volume'].get('write', 0)
if "volume" in stats.data:
obj.data_read_size += stats.data["volume"].get("read", 0)
obj.data_written_size += stats.data["volume"].get("write", 0)
if 'blocks' in stats.data:
obj.data_read_nb_blocks += stats.data['blocks'].get('read', 0)
obj.data_written_nb_blocks += stats.data['blocks'].get('write', 0)
if "blocks" in stats.data:
obj.data_read_nb_blocks += stats.data["blocks"].get("read", 0)
obj.data_written_nb_blocks += stats.data["blocks"].get("write", 0)
if 'time' in stats.data:
obj.data_read_time += stats.data['time'].get('read', 0)
obj.data_written_time += stats.data['time'].get('write', 0)
if "time" in stats.data:
obj.data_read_time += stats.data["time"].get("read", 0)
obj.data_written_time += stats.data["time"].get("write", 0)
obj.save()
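
The notable behavioral change in this hunk is the switch from the manual "latest row" lookup (guarded by a bare except) to get_or_create keyed on (date, hour): an existing row for that hour is now reused even when updates arrive out of order, and query errors are no longer silently swallowed. A minimal sketch of the pattern, written as it would appear inside the same module and assuming a configured Django environment:

from datetime import datetime

from .models import HourlyStatistics

now = datetime.now()
# Fetch the row for this (date, hour) pair, or create it with the model's
# field defaults; `created` tells us which of the two happened.
obj, created = HourlyStatistics.objects.get_or_create(date=now.date(), hour=now.hour)
obj.cpu_time += 1.5  # accumulate as updateStatistics() does, then persist
obj.save()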
@@ -34,11 +34,10 @@ except ImportError:
from itertools import zip_longest as izip_longest
import simplejson
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseForbidden
from django.db.models import Sum
from django.http import HttpResponseForbidden
from django.shortcuts import render
from .models import HourlyStatistics
@@ -47,20 +46,23 @@ def calculate_totals():
"""Caculates all totals required by the statistics display"""
from django.contrib.auth.models import User
from ..accounts.models import Profile
from ..algorithms.models import Algorithm
from ..attestations.models import Attestation
from ..backend.models import Environment
from ..backend.models import Queue
from ..backend.models import Worker
from ..databases.models import Database
from ..backend.models import Environment, Queue, Worker
from ..dataformats.models import DataFormat
from ..experiments.models import Experiment
from ..toolchains.models import Toolchain
from ..algorithms.models import Algorithm
from ..libraries.models import Library
from ..dataformats.models import DataFormat
from ..team.models import Team
from ..attestations.models import Attestation
from ..reports.models import Report
from ..plotters.models import Plotter
from ..plotters.models import PlotterParameter
from ..reports.models import Report
from ..search.models import Search
from ..accounts.models import Profile
from ..team.models import Team
from ..toolchains.models import Toolchain
# for calculating the total cpu time, we use the HourlyStatistics and
# accumulate over the whole history
@@ -68,38 +70,44 @@ def calculate_totals():
counter = objects.count()
details = objects.aggregate(