Commit 95f2641b authored by Flavio TARSETTI

Merge branch 'cleanup_statistics' into 'django3_migration'

Cleanup statistics

See merge request !363
parents 40b8919b 6e70390a
2 merge requests: !363 Cleanup statistics, !342 Django 3 migration
Pipeline #42676 passed
@@ -25,28 +25,29 @@
# #
###############################################################################
from .models import HourlyStatistics as HourlyStatisticsModel
from django.contrib import admin
from .models import HourlyStatistics as HourlyStatisticsModel
#----------------------------------------------------------
# ----------------------------------------------------------
class HourlyStatistics(admin.ModelAdmin):
list_display = (
'id',
'date',
'hour',
'cpu_time',
'max_memory',
'data_read_size',
'data_read_nb_blocks',
'data_read_time',
'data_written_size',
'data_written_nb_blocks',
'data_written_time',
list_display = (
"id",
"date",
"hour",
"cpu_time",
"max_memory",
"data_read_size",
"data_read_nb_blocks",
"data_read_time",
"data_written_size",
"data_written_nb_blocks",
"data_written_time",
)
list_display_links = ('id', )
list_display_links = ("id",)
admin.site.register(HourlyStatisticsModel, HourlyStatistics)
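For comparison, the same registration can be written with Django's `@admin.register` decorator; a minimal sketch of the equivalent form (not what this commit does):

    from django.contrib import admin

    from .models import HourlyStatistics as HourlyStatisticsModel


    @admin.register(HourlyStatisticsModel)
    class HourlyStatistics(admin.ModelAdmin):
        # decorator replaces the explicit admin.site.register() call below
        list_display = ("id", "date", "hour", "cpu_time", "max_memory")
        list_display_links = ("id",)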
@@ -27,32 +27,38 @@
from __future__ import unicode_literals
from django.db import migrations, models
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
]
dependencies = []
operations = [
migrations.CreateModel(
name='HourlyStatistics',
name="HourlyStatistics",
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateField()),
('hour', models.IntegerField()),
('cpu_time', models.FloatField(default=0)),
('max_memory', models.BigIntegerField(default=0)),
('data_read_size', models.BigIntegerField(default=0)),
('data_read_nb_blocks', models.IntegerField(default=0)),
('data_read_time', models.FloatField(default=0)),
('data_written_size', models.BigIntegerField(default=0)),
('data_written_nb_blocks', models.IntegerField(default=0)),
('data_written_time', models.FloatField(default=0)),
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
("date", models.DateField()),
("hour", models.IntegerField()),
("cpu_time", models.FloatField(default=0)),
("max_memory", models.BigIntegerField(default=0)),
("data_read_size", models.BigIntegerField(default=0)),
("data_read_nb_blocks", models.IntegerField(default=0)),
("data_read_time", models.FloatField(default=0)),
("data_written_size", models.BigIntegerField(default=0)),
("data_written_nb_blocks", models.IntegerField(default=0)),
("data_written_time", models.FloatField(default=0)),
],
options={
'verbose_name_plural': 'Hourly statistics',
},
options={"verbose_name_plural": "Hourly statistics"},
),
]
@@ -27,24 +27,28 @@
from django.db import models
# ----------------------------------------------------------
#----------------------------------------------------------
class HourlyStatistics(models.Model):
date = models.DateField()
hour = models.IntegerField()
cpu_time = models.FloatField(default=0)
max_memory = models.BigIntegerField(default=0)
data_read_size = models.BigIntegerField(default=0)
data_read_nb_blocks = models.IntegerField(default=0)
data_read_time = models.FloatField(default=0)
data_written_size = models.BigIntegerField(default=0)
data_written_nb_blocks = models.IntegerField(default=0)
data_written_time = models.FloatField(default=0)
date = models.DateField()
hour = models.IntegerField()
cpu_time = models.FloatField(default=0)
max_memory = models.BigIntegerField(default=0)
data_read_size = models.BigIntegerField(default=0)
data_read_nb_blocks = models.IntegerField(default=0)
data_read_time = models.FloatField(default=0)
data_written_size = models.BigIntegerField(default=0)
data_written_nb_blocks = models.IntegerField(default=0)
data_written_time = models.FloatField(default=0)
def __str__(self):
return 'Hourly statistics #%d (%s, hour %s)' % (self.id, self.date.strftime('%b %d, %Y'), self.hour)
return "Hourly statistics #%d (%s, hour %s)" % (
self.id,
self.date.strftime("%b %d, %Y"),
self.hour,
)
class Meta:
verbose_name_plural = "Hourly statistics"
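For reference, the reformatted `__str__` produces output like the following (a doctest-style sketch; the unsaved instance and explicit `id` are illustrative only):

    >>> from datetime import date
    >>> stats = HourlyStatistics(id=1, date=date(2013, 10, 5), hour=6)
    >>> str(stats)
    'Hourly statistics #1 (Oct 05, 2013, hour 6)'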
@@ -2,21 +2,21 @@
{% comment %}
* Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
* Contact: beat.support@idiap.ch
*
*
* This file is part of the beat.web module of the BEAT platform.
*
*
* Commercial License Usage
* Licensees holding valid commercial BEAT licenses may use this file in
* accordance with the terms contained in a written agreement between you
* and Idiap. For further information contact tto@idiap.ch
*
*
* Alternatively, this file may be used under the terms of the GNU Affero
* Public License version 3 as published by the Free Software and appearing
* in the file LICENSE.AGPL included in the packaging of this file.
* The BEAT platform is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
*
*
* You should have received a copy of the GNU Affero Public License along
* with the BEAT platform. If not, see http://www.gnu.org/licenses/.
{% endcomment %}
@@ -27,18 +27,16 @@
from django import template
from django.conf import settings
from ..views import calculate_totals
register = template.Library()
@register.inclusion_tag('statistics/panels/status.html', takes_context=True)
@register.inclusion_tag("statistics/panels/status.html", takes_context=True)
def status(context, id):
return {
'request': context['request'],
'panel_id': id,
'totals': calculate_totals(),
"request": context["request"],
"panel_id": id,
"totals": calculate_totals(),
}
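Usage of this inclusion tag from a template would look roughly like this (the tag library name `statistics_tags` is an assumption; the real module name is not visible in this diff):

    {% load statistics_tags %}
    {% status "status-panel" %}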
@@ -25,51 +25,41 @@
# #
###############################################################################
from django.test import TestCase
from django.contrib.auth.models import User
from django.urls import reverse
from datetime import datetime
from datetime import date
from datetime import timedelta
from .models import HourlyStatistics
from .utils import updateStatistics
from ..dataformats.models import DataFormat
from ..common.testutils import BaseTestCase
import nose.tools
from django.test import TestCase
import beat.core.stats
import nose.tools
from .models import HourlyStatistics
from .utils import updateStatistics
STATS_1 = {
'cpu': { 'user': 1, 'system': 1.5, },
'memory': { 'rss': 100, },
'data': {
'volume': { 'read': 1000, 'write': 2000, },
'blocks': { 'read': 100, 'write': 200, },
'time': { 'read': 10000, 'write': 20000, },
'generated_files': [],
}
"cpu": {"user": 1, "system": 1.5},
"memory": {"rss": 100},
"data": {
"volume": {"read": 1000, "write": 2000},
"blocks": {"read": 100, "write": 200},
"time": {"read": 10000, "write": 20000},
"generated_files": [],
},
}
STATS_2 = {
'cpu': { 'user': 2, 'system': 3, },
'memory': { 'rss': 200, },
'data': {
'volume': { 'read': 500, 'write': 600, },
'blocks': { 'read': 50, 'write': 60, },
'time': { 'read': 5000, 'write': 6000, },
'generated_files': [],
}
"cpu": {"user": 2, "system": 3},
"memory": {"rss": 200},
"data": {
"volume": {"read": 500, "write": 600},
"blocks": {"read": 50, "write": 60},
"time": {"read": 5000, "write": 6000},
"generated_files": [],
},
}
class UpdateStatisticsFunction(TestCase):
def test_first_update(self):
statistics = beat.core.stats.Statistics(STATS_1)
@@ -82,15 +72,14 @@ class UpdateStatisticsFunction(TestCase):
nose.tools.eq_(hourly.date, date1.date())
nose.tools.eq_(hourly.hour, 6)
nose.tools.eq_(hourly.cpu_time , 2.5)
nose.tools.eq_(hourly.max_memory , 100)
nose.tools.eq_(hourly.data_read_size , 1000)
nose.tools.eq_(hourly.data_read_nb_blocks , 100)
nose.tools.eq_(hourly.data_read_time , 10000)
nose.tools.eq_(hourly.data_written_size , 2000)
nose.tools.eq_(hourly.data_written_nb_blocks , 200)
nose.tools.eq_(hourly.data_written_time , 20000)
nose.tools.eq_(hourly.cpu_time, 2.5)
nose.tools.eq_(hourly.max_memory, 100)
nose.tools.eq_(hourly.data_read_size, 1000)
nose.tools.eq_(hourly.data_read_nb_blocks, 100)
nose.tools.eq_(hourly.data_read_time, 10000)
nose.tools.eq_(hourly.data_written_size, 2000)
nose.tools.eq_(hourly.data_written_nb_blocks, 200)
nose.tools.eq_(hourly.data_written_time, 20000)
def test_two_updates_in_the_same_hour(self):
statistics = beat.core.stats.Statistics(STATS_1)
@@ -103,22 +92,20 @@ class UpdateStatisticsFunction(TestCase):
date2 = datetime(2013, 10, 5, 6, 45, 0)
updateStatistics(statistics, date=date2)
nose.tools.eq_(HourlyStatistics.objects.count(), 1)
hourly = HourlyStatistics.objects.all()[0]
nose.tools.eq_(hourly.date, date1.date())
nose.tools.eq_(hourly.hour, 6)
nose.tools.eq_(hourly.cpu_time , 7.5)
nose.tools.eq_(hourly.max_memory , 300)
nose.tools.eq_(hourly.data_read_size , 1500)
nose.tools.eq_(hourly.data_read_nb_blocks , 150)
nose.tools.eq_(hourly.data_read_time , 15000)
nose.tools.eq_(hourly.data_written_size , 2600)
nose.tools.eq_(hourly.data_written_nb_blocks , 260)
nose.tools.eq_(hourly.data_written_time , 26000)
nose.tools.eq_(hourly.cpu_time, 7.5)
nose.tools.eq_(hourly.max_memory, 300)
nose.tools.eq_(hourly.data_read_size, 1500)
nose.tools.eq_(hourly.data_read_nb_blocks, 150)
nose.tools.eq_(hourly.data_read_time, 15000)
nose.tools.eq_(hourly.data_written_size, 2600)
nose.tools.eq_(hourly.data_written_nb_blocks, 260)
nose.tools.eq_(hourly.data_written_time, 26000)
def test_two_updates_in_different_hours(self):
statistics = beat.core.stats.Statistics(STATS_1)
@@ -131,36 +118,33 @@ class UpdateStatisticsFunction(TestCase):
date2 = datetime(2013, 10, 5, 7, 45, 0)
updateStatistics(statistics, date=date2)
nose.tools.eq_(HourlyStatistics.objects.count(), 2)
hourly = HourlyStatistics.objects.all()[0]
nose.tools.eq_(hourly.date, date1.date())
nose.tools.eq_(hourly.hour, 6)
nose.tools.eq_(hourly.cpu_time , 2.5)
nose.tools.eq_(hourly.max_memory , 100)
nose.tools.eq_(hourly.data_read_size , 1000)
nose.tools.eq_(hourly.data_read_nb_blocks , 100)
nose.tools.eq_(hourly.data_read_time , 10000)
nose.tools.eq_(hourly.data_written_size , 2000)
nose.tools.eq_(hourly.data_written_nb_blocks , 200)
nose.tools.eq_(hourly.data_written_time , 20000)
nose.tools.eq_(hourly.cpu_time, 2.5)
nose.tools.eq_(hourly.max_memory, 100)
nose.tools.eq_(hourly.data_read_size, 1000)
nose.tools.eq_(hourly.data_read_nb_blocks, 100)
nose.tools.eq_(hourly.data_read_time, 10000)
nose.tools.eq_(hourly.data_written_size, 2000)
nose.tools.eq_(hourly.data_written_nb_blocks, 200)
nose.tools.eq_(hourly.data_written_time, 20000)
hourly = HourlyStatistics.objects.all()[1]
nose.tools.eq_(hourly.date, date2.date())
nose.tools.eq_(hourly.hour, 7)
nose.tools.eq_(hourly.cpu_time , 5.0)
nose.tools.eq_(hourly.max_memory , 200)
nose.tools.eq_(hourly.data_read_size , 500)
nose.tools.eq_(hourly.data_read_nb_blocks , 50)
nose.tools.eq_(hourly.data_read_time , 5000)
nose.tools.eq_(hourly.data_written_size , 600)
nose.tools.eq_(hourly.data_written_nb_blocks , 60)
nose.tools.eq_(hourly.data_written_time , 6000)
nose.tools.eq_(hourly.cpu_time, 5.0)
nose.tools.eq_(hourly.max_memory, 200)
nose.tools.eq_(hourly.data_read_size, 500)
nose.tools.eq_(hourly.data_read_nb_blocks, 50)
nose.tools.eq_(hourly.data_read_time, 5000)
nose.tools.eq_(hourly.data_written_size, 600)
nose.tools.eq_(hourly.data_written_nb_blocks, 60)
nose.tools.eq_(hourly.data_written_time, 6000)
def test_two_updates_in_different_days(self):
statistics = beat.core.stats.Statistics(STATS_1)
@@ -173,35 +157,33 @@ class UpdateStatisticsFunction(TestCase):
date2 = datetime(2013, 10, 6, 7, 45, 0)
updateStatistics(statistics, date=date2)
nose.tools.eq_(HourlyStatistics.objects.count(), 2)
hourly = HourlyStatistics.objects.all()[0]
nose.tools.eq_(hourly.date, date1.date())
nose.tools.eq_(hourly.hour, 6)
nose.tools.eq_(hourly.cpu_time , 2.5)
nose.tools.eq_(hourly.max_memory , 100)
nose.tools.eq_(hourly.data_read_size , 1000)
nose.tools.eq_(hourly.data_read_nb_blocks , 100)
nose.tools.eq_(hourly.data_read_time , 10000)
nose.tools.eq_(hourly.data_written_size , 2000)
nose.tools.eq_(hourly.data_written_nb_blocks , 200)
nose.tools.eq_(hourly.data_written_time , 20000)
nose.tools.eq_(hourly.cpu_time, 2.5)
nose.tools.eq_(hourly.max_memory, 100)
nose.tools.eq_(hourly.data_read_size, 1000)
nose.tools.eq_(hourly.data_read_nb_blocks, 100)
nose.tools.eq_(hourly.data_read_time, 10000)
nose.tools.eq_(hourly.data_written_size, 2000)
nose.tools.eq_(hourly.data_written_nb_blocks, 200)
nose.tools.eq_(hourly.data_written_time, 20000)
hourly = HourlyStatistics.objects.all()[1]
nose.tools.eq_(hourly.date, date2.date())
nose.tools.eq_(hourly.hour, 7)
nose.tools.eq_(hourly.cpu_time , 5.0)
nose.tools.eq_(hourly.max_memory , 200)
nose.tools.eq_(hourly.data_read_size , 500)
nose.tools.eq_(hourly.data_read_nb_blocks , 50)
nose.tools.eq_(hourly.data_read_time , 5000)
nose.tools.eq_(hourly.data_written_size , 600)
nose.tools.eq_(hourly.data_written_nb_blocks , 60)
nose.tools.eq_(hourly.data_written_time , 6000)
nose.tools.eq_(hourly.cpu_time, 5.0)
nose.tools.eq_(hourly.max_memory, 200)
nose.tools.eq_(hourly.data_read_size, 500)
nose.tools.eq_(hourly.data_read_nb_blocks, 50)
nose.tools.eq_(hourly.data_read_time, 5000)
nose.tools.eq_(hourly.data_written_size, 600)
nose.tools.eq_(hourly.data_written_nb_blocks, 60)
nose.tools.eq_(hourly.data_written_time, 6000)
def test_two_updates_in_different_weeks(self):
statistics = beat.core.stats.Statistics(STATS_1)
@@ -214,31 +196,30 @@ class UpdateStatisticsFunction(TestCase):
date2 = datetime(2013, 10, 7, 7, 45, 0)
updateStatistics(statistics, date=date2)
nose.tools.eq_(HourlyStatistics.objects.count(), 2)
hourly = HourlyStatistics.objects.all()[0]
nose.tools.eq_(hourly.date, date1.date())
nose.tools.eq_(hourly.hour, 6)
nose.tools.eq_(hourly.cpu_time , 2.5)
nose.tools.eq_(hourly.max_memory , 100)
nose.tools.eq_(hourly.data_read_size , 1000)
nose.tools.eq_(hourly.data_read_nb_blocks , 100)
nose.tools.eq_(hourly.data_read_time , 10000)
nose.tools.eq_(hourly.data_written_size , 2000)
nose.tools.eq_(hourly.data_written_nb_blocks , 200)
nose.tools.eq_(hourly.data_written_time , 20000)
nose.tools.eq_(hourly.cpu_time, 2.5)
nose.tools.eq_(hourly.max_memory, 100)
nose.tools.eq_(hourly.data_read_size, 1000)
nose.tools.eq_(hourly.data_read_nb_blocks, 100)
nose.tools.eq_(hourly.data_read_time, 10000)
nose.tools.eq_(hourly.data_written_size, 2000)
nose.tools.eq_(hourly.data_written_nb_blocks, 200)
nose.tools.eq_(hourly.data_written_time, 20000)
hourly = HourlyStatistics.objects.all()[1]
nose.tools.eq_(hourly.date, date2.date())
nose.tools.eq_(hourly.hour, 7)
nose.tools.eq_(hourly.cpu_time , 5.0)
nose.tools.eq_(hourly.max_memory , 200)
nose.tools.eq_(hourly.data_read_size , 500)
nose.tools.eq_(hourly.data_read_nb_blocks , 50)
nose.tools.eq_(hourly.data_read_time , 5000)
nose.tools.eq_(hourly.data_written_size , 600)
nose.tools.eq_(hourly.data_written_nb_blocks , 60)
nose.tools.eq_(hourly.data_written_time , 6000)
nose.tools.eq_(hourly.cpu_time, 5.0)
nose.tools.eq_(hourly.max_memory, 200)
nose.tools.eq_(hourly.data_read_size, 500)
nose.tools.eq_(hourly.data_read_nb_blocks, 50)
nose.tools.eq_(hourly.data_read_time, 5000)
nose.tools.eq_(hourly.data_written_size, 600)
nose.tools.eq_(hourly.data_written_nb_blocks, 60)
nose.tools.eq_(hourly.data_written_time, 6000)
@@ -25,9 +25,11 @@
# #
###############################################################################
from .models import HourlyStatistics
from datetime import datetime
from .models import HourlyStatistics
def updateStatistics(stats, date=None):
"""Update the hourly statistics
@@ -41,34 +43,22 @@ def updateStatistics(stats, date=None):
date = date or datetime.now()
# Retrieve the current obj entry (if it exists)
try:
obj = HourlyStatistics.objects.order_by('-date', '-hour')[0]
if (obj.date != date.date()) or (obj.hour != date.hour):
obj = None
except:
obj = None
# Create an obj entry if necessary
if obj is None:
obj = HourlyStatistics()
obj.date = date.date()
obj.hour = date.hour
obj, _ = HourlyStatistics.objects.get_or_create(date=date.date(), hour=date.hour)
# Modify the obj entry
obj.cpu_time += stats.cpu['user'] + stats.cpu['system']
obj.max_memory += stats.memory['rss']
obj.cpu_time += stats.cpu["user"] + stats.cpu["system"]
obj.max_memory += stats.memory["rss"]
if 'volume' in stats.data:
obj.data_read_size += stats.data['volume'].get('read', 0)
obj.data_written_size += stats.data['volume'].get('write', 0)
if "volume" in stats.data:
obj.data_read_size += stats.data["volume"].get("read", 0)
obj.data_written_size += stats.data["volume"].get("write", 0)
if 'blocks' in stats.data:
obj.data_read_nb_blocks += stats.data['blocks'].get('read', 0)
obj.data_written_nb_blocks += stats.data['blocks'].get('write', 0)
if "blocks" in stats.data:
obj.data_read_nb_blocks += stats.data["blocks"].get("read", 0)
obj.data_written_nb_blocks += stats.data["blocks"].get("write", 0)
if 'time' in stats.data:
obj.data_read_time += stats.data['time'].get('read', 0)
obj.data_written_time += stats.data['time'].get('write', 0)
if "time" in stats.data:
obj.data_read_time += stats.data["time"].get("read", 0)
obj.data_written_time += stats.data["time"].get("write", 0)
obj.save()
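The refactoring replaces the manual lookup-or-create logic with `get_or_create`, which returns an `(object, created)` tuple and inserts a new row only when no match exists; because all counter fields default to 0, the `+=` accumulation works for both fresh and pre-existing rows. A minimal sketch of the semantics, with a hypothetical timestamp:

    from datetime import datetime

    when = datetime(2013, 10, 5, 6, 30)  # hypothetical timestamp
    obj, created = HourlyStatistics.objects.get_or_create(
        date=when.date(), hour=when.hour
    )
    # created is True only when no row existed for this (date, hour);
    # otherwise the existing row is returned and accumulated into.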
@@ -34,11 +34,10 @@ except ImportError:
from itertools import zip_longest as izip_longest
import simplejson
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseForbidden
from django.db.models import Sum
from django.http import HttpResponseForbidden
from django.shortcuts import render
from .models import HourlyStatistics
@@ -47,20 +46,23 @@ def calculate_totals():
"""Caculates all totals required by the statistics display"""
from django.contrib.auth.models import User
from ..accounts.models import Profile
from ..algorithms.models import Algorithm
from ..attestations.models import Attestation
from ..backend.models import Environment
from ..backend.models import Queue
from ..backend.models import Worker
from ..databases.models import Database
from ..backend.models import Environment, Queue, Worker
from ..dataformats.models import DataFormat
from ..experiments.models import Experiment
from ..toolchains.models import Toolchain
from ..algorithms.models import Algorithm
from ..libraries.models import Library
from ..dataformats.models import DataFormat
from ..team.models import Team
from ..attestations.models import Attestation
from ..reports.models import Report
from ..plotters.models import Plotter
from ..plotters.models import PlotterParameter
from ..reports.models import Report
from ..search.models import Search
from ..accounts.models import Profile
from ..team.models import Team
from ..toolchains.models import Toolchain
# for calculating the total cpu time, we use the HourlyStatistics and
# accumulate over the whole history
@@ -68,38 +70,44 @@ def calculate_totals():
counter = objects.count()
details = objects.aggregate(
cpu_time=Sum('cpu_time'),
max_memory=Sum('max_memory'),
data_read_size=Sum('data_read_size'),
data_read_time=Sum('data_read_time'),
data_written_size=Sum('data_written_size'),
data_written_time=Sum('data_written_time'),
cpu_time=Sum("cpu_time"),
max_memory=Sum("max_memory"),
data_read_size=Sum("data_read_size"),
data_read_time=Sum("data_read_time"),
data_written_size=Sum("data_written_size"),
data_written_time=Sum("data_written_time"),
)
cpu_time = details['cpu_time']
memory = details['max_memory']
input_size = details['data_read_size']
input_time = details['data_read_time']
output_size = details['data_written_size']
output_time = details['data_written_time']
cpu_time = details["cpu_time"]
memory = details["max_memory"]
input_size = details["data_read_size"]
input_time = details["data_read_time"]
output_size = details["data_written_size"]
output_time = details["data_written_time"]
new_users = User.objects.filter(profile__status = Profile.NEWUSER)
waiting_validation_users = User.objects.filter(profile__status = Profile.WAITINGVALIDATION)
accepted_users = User.objects.filter(profile__status = Profile.ACCEPTED)
rejected_users = User.objects.filter(profile__status = Profile.REJECTED)
yearrevalidation_users = User.objects.filter(profile__status = Profile.YEARREVALIDATION)
blocked_users = User.objects.filter(profile__status = Profile.BLOCKED)
new_users = User.objects.filter(profile__status=Profile.NEWUSER)
waiting_validation_users = User.objects.filter(
profile__status=Profile.WAITINGVALIDATION
)
accepted_users = User.objects.filter(profile__status=Profile.ACCEPTED)
rejected_users = User.objects.filter(profile__status=Profile.REJECTED)
yearrevalidation_users = User.objects.filter(
profile__status=Profile.YEARREVALIDATION
)
blocked_users = User.objects.filter(profile__status=Profile.BLOCKED)
return dict(
counter=counter,
cpu_time_hours=int(cpu_time/float(60**2)) if cpu_time else 0,
memory_gb=int(memory/float(2**30)) if memory else 0,
input_gb = int(input_size/float(2**30)) if input_size else 0,
input_bw_mb_s = int((input_size/float(2**20))/input_time) if input_size else 0,
output_gb = int(output_size/float(2**30)) if output_size else 0,
output_bw_mb_s = int((output_size/float(2**20))/output_time) if output_size else 0,
cpu_time_hours=int(cpu_time / float(60 ** 2)) if cpu_time else 0,
memory_gb=int(memory / float(2 ** 30)) if memory else 0,
input_gb=int(input_size / float(2 ** 30)) if input_size else 0,
input_bw_mb_s=int((input_size / float(2 ** 20)) / input_time)
if input_size
else 0,
output_gb=int(output_size / float(2 ** 30)) if output_size else 0,
output_bw_mb_s=int((output_size / float(2 ** 20)) / output_time)
if output_size
else 0,
users=User.objects.count(),
newusers=new_users.count(),
waitingvalidationusers=waiting_validation_users.count(),
@@ -111,20 +119,17 @@ def calculate_totals():
environments=Environment.objects.count(),
queues=Queue.objects.count(),
workers=Worker.objects.count(),
experiments=Experiment.objects.count(),
toolchains=Toolchain.objects.count(),
algorithms=Algorithm.objects.count(),
libraries=Library.objects.count(),
dataformats=DataFormat.objects.count(),
teams=Team.objects.count(),
attestations=Attestation.objects.count(),
searches=Search.objects.count(),
reports=Report.objects.count(),
plotters=Plotter.objects.count(),
plotterparameters=PlotterParameter.objects.count(),
)
@@ -133,23 +138,29 @@ def convert_values(stats):
# transform into plottable data
def _seconds_to_hours(s, field):
if s is not None: return s[field]/float(60**2) #hours
return 0.
if s is not None:
return s[field] / float(60 ** 2) # hours
return 0.0
def _bytes_to_gb(s, field):
if s is not None: return s[field]/float(2**30) #gigabytes
return 0.
if s is not None:
return s[field] / float(2 ** 30) # gigabytes
return 0.0
def _bw_in_mbs(s, num, den):
if s is None: return 0.
if not s[den]: return 0. #avoid division by zero
return (s[num]/float(2**20))/s[den] #mb per sec
if s is None:
return 0.0
if not s[den]:
return 0.0 # avoid division by zero
return (s[num] / float(2 ** 20)) / s[den] # mb per sec
return dict(
cpu_time=[_seconds_to_hours(k, 'cpu_time') for k in stats],
memory=[_bytes_to_gb(k, 'max_memory') for k in stats],
input_bw=[_bw_in_mbs(k, 'data_read_size', 'data_read_time') for k in stats],
output_bw=[_bw_in_mbs(k, 'data_written_size', 'data_written_time') for k in stats],
cpu_time=[_seconds_to_hours(k, "cpu_time") for k in stats],
memory=[_bytes_to_gb(k, "max_memory") for k in stats],
input_bw=[_bw_in_mbs(k, "data_read_size", "data_read_time") for k in stats],
output_bw=[
_bw_in_mbs(k, "data_written_size", "data_written_time") for k in stats
],
)
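The conversion helpers can be checked against hand-computed values (a doctest-style sketch; the helpers are nested inside `convert_values`, and the aggregate row below is hypothetical):

    >>> row = {"cpu_time": 7200.0, "max_memory": 2 ** 30,
    ...        "data_read_size": 10 * 2 ** 20, "data_read_time": 5.0}
    >>> _seconds_to_hours(row, "cpu_time")   # 7200 s -> 2 h
    2.0
    >>> _bytes_to_gb(row, "max_memory")      # 2**30 bytes -> 1 GB
    1.0
    >>> _bw_in_mbs(row, "data_read_size", "data_read_time")  # 10 MB over 5 s
    2.0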
@@ -165,25 +176,24 @@ def get_statistics(hours_to_go_back, cluster_by):
# entries for X days, including today
now = datetime.datetime.now()
back = now - datetime.timedelta(hours=hours_to_go_back)
available = HourlyStatistics.objects.filter(date__gt=back).order_by('date',
'hour')
available = HourlyStatistics.objects.filter(date__gt=back).order_by("date", "hour")
# this must be true!
assert available.count() <= hours_to_go_back
if available.count() > hours_to_go_back:
raise RuntimeError("Too many entries in the hourly statistics")
# annotate real entries with the actual timestamp
onehour = datetime.timedelta(hours=1)
start = back + onehour
def _hours_since_back(o):
timestamp = datetime.datetime.combine(o.date, datetime.time(o.hour))
return int((timestamp - back).total_seconds()/(60**2))
return int((timestamp - back).total_seconds() / (60 ** 2))
annotated = dict([(_hours_since_back(k), k) for k in available])
# complete the holes as there must be X entries available
stats = hours_to_go_back * [None]
for k in annotated: stats[k] = annotated[k]
for k in annotated:
stats[k] = annotated[k]
# cluster using the amount of hours provided by ``cluster_by``
groups = grouper(cluster_by, stats)
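The `grouper` helper is defined outside the hunks shown here; it presumably follows the classic itertools recipe built on the `izip_longest` imported at the top of the file (a sketch under that assumption, matching the `grouper(n, iterable)` argument order used above):

    def grouper(n, iterable, fillvalue=None):
        """Collect data into fixed-length chunks: grouper(3, 'ABCDEFG') -> ABC DEF G--"""
        args = [iter(iterable)] * n
        return izip_longest(fillvalue=fillvalue, *args)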
@@ -195,8 +205,14 @@ def get_statistics(hours_to_go_back, cluster_by):
retval[f] = sum([getattr(k, f) for k in group if k is not None])
return retval
fields = ('cpu_time', 'max_memory', 'data_read_size', 'data_read_time',
'data_written_size', 'data_written_time')
fields = (
"cpu_time",
"max_memory",
"data_read_size",
"data_read_time",
"data_written_size",
"data_written_time",
)
summaries = [_totals(g, fields) for g in groups]
# setup data so information is shown for every field across groups
@@ -207,12 +223,12 @@ def hourly_charts():
"""Returns the data for hourly charts on various axes"""
# a set of intuitive labels for all data points
labels = 24*['']
labels[-1] = 'now'
labels[17] = '-6h'
labels[11] = '-12h'
labels[5] = '-18h'
labels[0] = '-23h'
labels = 24 * [""]
labels[-1] = "now"
labels[17] = "-6h"
labels[11] = "-12h"
labels[5] = "-18h"
labels[0] = "-23h"
retval = dict(labels=labels)
retval.update(get_statistics(hours_to_go_back=24, cluster_by=1))
@@ -223,15 +239,15 @@ def daily_charts():
"""Returns the data for daily charts on various axes"""
# a set of intuitive labels for all data points
labels = 30*['']
labels[-1] = 'today'
labels[22] = '-7d'
labels[14] = '-15d'
labels[7] = '-22d'
labels[0] = '-29d'
labels = 30 * [""]
labels[-1] = "today"
labels[22] = "-7d"
labels[14] = "-15d"
labels[7] = "-22d"
labels[0] = "-29d"
retval = dict(labels=labels)
retval.update(get_statistics(hours_to_go_back=24*30, cluster_by=24))
retval.update(get_statistics(hours_to_go_back=24 * 30, cluster_by=24))
return simplejson.dumps(retval)
@@ -239,16 +255,15 @@ def weekly_charts():
"""Returns the data for weekly charts on various axes"""
# a set of intuitive labels for all data points
labels = 24*['']
labels[-1] = 'this week'
labels[17] = '-6w'
labels[11] = '-12w'
labels[5] = '-18w'
labels[0] = '-23w'
labels = 24 * [""]
labels[-1] = "this week"
labels[17] = "-6w"
labels[11] = "-12w"
labels[5] = "-18w"
labels[0] = "-23w"
retval = dict(labels=labels)
retval.update(get_statistics(hours_to_go_back=24*7*24,
cluster_by=24*7))
retval.update(get_statistics(hours_to_go_back=24 * 7 * 24, cluster_by=24 * 7))
return simplejson.dumps(retval)
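The weekly window arithmetic lines up with the 24 chart labels above (an illustrative check, not part of the commit):

    hours = 24 * 7 * 24   # 24 weeks of hourly rows = 4032 entries
    cluster = 24 * 7      # one group per 168-hour week
    assert hours // cluster == 24  # one summary per chart label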
@@ -271,13 +286,16 @@ def statistics(request):
"""
if not(request.user.is_superuser): return HttpResponseForbidden()
return render(request,
'statistics/statistics.html',
dict(
totals=calculate_totals(),
hourly_chart_data=hourly_charts(),
daily_chart_data=daily_charts(),
weekly_chart_data=weekly_charts(),
))
if not (request.user.is_superuser):
return HttpResponseForbidden()
return render(
request,
"statistics/statistics.html",
dict(
totals=calculate_totals(),
hourly_chart_data=hourly_charts(),
daily_chart_data=daily_charts(),
weekly_chart_data=weekly_charts(),
),
)
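The superuser gate in this view can be exercised with a small test (a hypothetical sketch; the username and the URL are assumptions, as the real route is not shown in this diff):

    from django.contrib.auth.models import User
    from django.test import TestCase


    class StatisticsViewPermissions(TestCase):
        def test_forbidden_for_regular_users(self):
            # hypothetical non-superuser account
            User.objects.create_user("jane", password="secret")
            self.client.login(username="jane", password="secret")
            response = self.client.get("/statistics/")  # assumed URL
            self.assertEqual(response.status_code, 403)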