beat / beat.web · Commits

Commit 6e70390a
Authored Sep 11, 2020 by Samuel GAIST; committed by Flavio TARSETTI, Sep 11, 2020
Parent: c6ca27d8
Pipeline #42675 passed in 15 minutes · Changes: 5 files · Pipelines: 1

[statistics][all] Pre-commit cleanup
beat/web/statistics/admin.py

@@ -25,28 +25,29 @@
 #                                                                             #
 ###############################################################################

-from .models import HourlyStatistics as HourlyStatisticsModel
 from django.contrib import admin

+from .models import HourlyStatistics as HourlyStatisticsModel
+
-#----------------------------------------------------------
+# ----------------------------------------------------------


 class HourlyStatistics(admin.ModelAdmin):
-    list_display = ('id', 'date', 'hour', 'cpu_time', 'max_memory',
-                    'data_read_size', 'data_read_nb_blocks', 'data_read_time',
-                    'data_written_size', 'data_written_nb_blocks',
-                    'data_written_time',
-                    )
-    list_display_links = ('id', )
+    list_display = (
+        "id",
+        "date",
+        "hour",
+        "cpu_time",
+        "max_memory",
+        "data_read_size",
+        "data_read_nb_blocks",
+        "data_read_time",
+        "data_written_size",
+        "data_written_nb_blocks",
+        "data_written_time",
+    )
+    list_display_links = ("id",)


 admin.site.register(HourlyStatisticsModel, HourlyStatistics)
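The admin class above only tweaks presentation: list_display selects the columns shown on the change-list page and list_display_links restricts which of them link to the edit form. A minimal sketch of the equivalent decorator-based registration (illustrative only; the file itself keeps the explicit admin.site.register call):

    from django.contrib import admin

    from .models import HourlyStatistics as HourlyStatisticsModel

    @admin.register(HourlyStatisticsModel)
    class HourlyStatisticsAdmin(admin.ModelAdmin):
        list_display = ("id", "date", "hour", "cpu_time")  # columns on the change list
        list_display_links = ("id",)  # only the "id" column links to the detail form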
beat/web/statistics/models.py

@@ -27,24 +27,28 @@

 from django.db import models

 # ----------------------------------------------------------


 class HourlyStatistics(models.Model):
     date = models.DateField()
     hour = models.IntegerField()
     cpu_time = models.FloatField(default=0)
     max_memory = models.BigIntegerField(default=0)
     data_read_size = models.BigIntegerField(default=0)
     data_read_nb_blocks = models.IntegerField(default=0)
     data_read_time = models.FloatField(default=0)
     data_written_size = models.BigIntegerField(default=0)
     data_written_nb_blocks = models.IntegerField(default=0)
     data_written_time = models.FloatField(default=0)

     def __str__(self):
-        return 'Hourly statistics #%d (%s, hour %s)' % (
-            self.id, self.date.strftime('%b %d, %Y'), self.hour)
+        return "Hourly statistics #%d (%s, hour %s)" % (
+            self.id,
+            self.date.strftime("%b %d, %Y"),
+            self.hour,
+        )

     class Meta:
         verbose_name_plural = "Hourly statistics"
beat/web/statistics/tests.py

@@ -25,51 +25,41 @@
 #                                                                             #
 ###############################################################################

-from django.test import TestCase
-from django.contrib.auth.models import User
-from django.urls import reverse
 from datetime import datetime
-from datetime import date
-from datetime import timedelta
-from .models import HourlyStatistics
-from .utils import updateStatistics
-from ..dataformats.models import DataFormat
-from ..common.testutils import BaseTestCase
-import nose.tools
+
+from django.test import TestCase
+
+import beat.core.stats
+import nose.tools
+
+from .models import HourlyStatistics
+from .utils import updateStatistics

-STATS_1 = {
-    'cpu': {'user': 1, 'system': 1.5,},
-    'memory': {'rss': 100,},
-    'data': {
-        'volume': {'read': 1000, 'write': 2000,},
-        'blocks': {'read': 100, 'write': 200,},
-        'time': {'read': 10000, 'write': 20000,},
-        'generated_files': [],
-    }
-}
+STATS_1 = {
+    "cpu": {"user": 1, "system": 1.5},
+    "memory": {"rss": 100},
+    "data": {
+        "volume": {"read": 1000, "write": 2000},
+        "blocks": {"read": 100, "write": 200},
+        "time": {"read": 10000, "write": 20000},
+        "generated_files": [],
+    },
+}

-STATS_2 = {
-    'cpu': {'user': 2, 'system': 3,},
-    'memory': {'rss': 200,},
-    'data': {
-        'volume': {'read': 500, 'write': 600,},
-        'blocks': {'read': 50, 'write': 60,},
-        'time': {'read': 5000, 'write': 6000,},
-        'generated_files': [],
-    }
-}
+STATS_2 = {
+    "cpu": {"user": 2, "system": 3},
+    "memory": {"rss": 200},
+    "data": {
+        "volume": {"read": 500, "write": 600},
+        "blocks": {"read": 50, "write": 60},
+        "time": {"read": 5000, "write": 6000},
+        "generated_files": [],
+    },
+}


 class UpdateStatisticsFunction(TestCase):
     def test_first_update(self):
         statistics = beat.core.stats.Statistics(STATS_1)
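The fixture values make the expected database contents easy to derive by hand: cpu_time is the sum of user and system CPU seconds, and the remaining fields come straight from the data sub-dictionaries (this is the accumulation rule implemented in updateStatistics, shown in utils.py further down). For a single update with STATS_1:

    # Expected HourlyStatistics fields after one updateStatistics(STATS_1) call:
    #   cpu_time  = 1 + 1.5 = 2.5        max_memory = 100
    #   read  : size 1000, blocks 100, time 10000
    #   write : size 2000, blocks 200, time 20000
    STATS_1["cpu"]["user"] + STATS_1["cpu"]["system"]  # 2.5, asserted in the hunks below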
@@ -82,15 +72,14 @@ class UpdateStatisticsFunction(TestCase):

         nose.tools.eq_(hourly.date, date1.date())
         nose.tools.eq_(hourly.hour, 6)
         nose.tools.eq_(hourly.cpu_time, 2.5)
         nose.tools.eq_(hourly.max_memory, 100)
         nose.tools.eq_(hourly.data_read_size, 1000)
         nose.tools.eq_(hourly.data_read_nb_blocks, 100)
         nose.tools.eq_(hourly.data_read_time, 10000)
         nose.tools.eq_(hourly.data_written_size, 2000)
         nose.tools.eq_(hourly.data_written_nb_blocks, 200)
         nose.tools.eq_(hourly.data_written_time, 20000)

     def test_two_updates_in_the_same_hour(self):
         statistics = beat.core.stats.Statistics(STATS_1)
@@ -103,22 +92,20 @@ class UpdateStatisticsFunction(TestCase):

         date2 = datetime(2013, 10, 5, 6, 45, 0)
         updateStatistics(statistics, date=date2)

         nose.tools.eq_(HourlyStatistics.objects.count(), 1)
         hourly = HourlyStatistics.objects.all()[0]

         nose.tools.eq_(hourly.date, date1.date())
         nose.tools.eq_(hourly.hour, 6)
         nose.tools.eq_(hourly.cpu_time, 7.5)
         nose.tools.eq_(hourly.max_memory, 300)
         nose.tools.eq_(hourly.data_read_size, 1500)
         nose.tools.eq_(hourly.data_read_nb_blocks, 150)
         nose.tools.eq_(hourly.data_read_time, 15000)
         nose.tools.eq_(hourly.data_written_size, 2600)
         nose.tools.eq_(hourly.data_written_nb_blocks, 260)
         nose.tools.eq_(hourly.data_written_time, 26000)

     def test_two_updates_in_different_hours(self):
         statistics = beat.core.stats.Statistics(STATS_1)
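When a second update built from STATS_2 falls into the same hour, the same row is reused and every field accumulates, which is what the hunk above asserts: cpu_time 2.5 + (2 + 3) = 7.5, max_memory 100 + 200 = 300, read size 1000 + 500 = 1500, read blocks 100 + 50 = 150, read time 10000 + 5000 = 15000, written size 2000 + 600 = 2600, written blocks 200 + 60 = 260 and written time 20000 + 6000 = 26000. The remaining tests place the second update in a different hour, day or week, so each row keeps its own, non-accumulated values.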
@@ -131,36 +118,33 @@ class UpdateStatisticsFunction(TestCase):

         date2 = datetime(2013, 10, 5, 7, 45, 0)
         updateStatistics(statistics, date=date2)

         nose.tools.eq_(HourlyStatistics.objects.count(), 2)

         hourly = HourlyStatistics.objects.all()[0]
         nose.tools.eq_(hourly.date, date1.date())
         nose.tools.eq_(hourly.hour, 6)
         nose.tools.eq_(hourly.cpu_time, 2.5)
         nose.tools.eq_(hourly.max_memory, 100)
         nose.tools.eq_(hourly.data_read_size, 1000)
         nose.tools.eq_(hourly.data_read_nb_blocks, 100)
         nose.tools.eq_(hourly.data_read_time, 10000)
         nose.tools.eq_(hourly.data_written_size, 2000)
         nose.tools.eq_(hourly.data_written_nb_blocks, 200)
         nose.tools.eq_(hourly.data_written_time, 20000)

         hourly = HourlyStatistics.objects.all()[1]
         nose.tools.eq_(hourly.date, date2.date())
         nose.tools.eq_(hourly.hour, 7)
         nose.tools.eq_(hourly.cpu_time, 5.0)
         nose.tools.eq_(hourly.max_memory, 200)
         nose.tools.eq_(hourly.data_read_size, 500)
         nose.tools.eq_(hourly.data_read_nb_blocks, 50)
         nose.tools.eq_(hourly.data_read_time, 5000)
         nose.tools.eq_(hourly.data_written_size, 600)
         nose.tools.eq_(hourly.data_written_nb_blocks, 60)
         nose.tools.eq_(hourly.data_written_time, 6000)

     def test_two_updates_in_different_days(self):
         statistics = beat.core.stats.Statistics(STATS_1)
@@ -173,35 +157,33 @@ class UpdateStatisticsFunction(TestCase):

         date2 = datetime(2013, 10, 6, 7, 45, 0)
         updateStatistics(statistics, date=date2)

         nose.tools.eq_(HourlyStatistics.objects.count(), 2)

         hourly = HourlyStatistics.objects.all()[0]
         nose.tools.eq_(hourly.date, date1.date())
         nose.tools.eq_(hourly.hour, 6)
         nose.tools.eq_(hourly.cpu_time, 2.5)
         nose.tools.eq_(hourly.max_memory, 100)
         nose.tools.eq_(hourly.data_read_size, 1000)
         nose.tools.eq_(hourly.data_read_nb_blocks, 100)
         nose.tools.eq_(hourly.data_read_time, 10000)
         nose.tools.eq_(hourly.data_written_size, 2000)
         nose.tools.eq_(hourly.data_written_nb_blocks, 200)
         nose.tools.eq_(hourly.data_written_time, 20000)

         hourly = HourlyStatistics.objects.all()[1]
         nose.tools.eq_(hourly.date, date2.date())
         nose.tools.eq_(hourly.hour, 7)
         nose.tools.eq_(hourly.cpu_time, 5.0)
         nose.tools.eq_(hourly.max_memory, 200)
         nose.tools.eq_(hourly.data_read_size, 500)
         nose.tools.eq_(hourly.data_read_nb_blocks, 50)
         nose.tools.eq_(hourly.data_read_time, 5000)
         nose.tools.eq_(hourly.data_written_size, 600)
         nose.tools.eq_(hourly.data_written_nb_blocks, 60)
         nose.tools.eq_(hourly.data_written_time, 6000)

     def test_two_updates_in_different_weeks(self):
         statistics = beat.core.stats.Statistics(STATS_1)
@@ -214,31 +196,30 @@ class UpdateStatisticsFunction(TestCase):

         date2 = datetime(2013, 10, 7, 7, 45, 0)
         updateStatistics(statistics, date=date2)

         nose.tools.eq_(HourlyStatistics.objects.count(), 2)

         hourly = HourlyStatistics.objects.all()[0]
         nose.tools.eq_(hourly.date, date1.date())
         nose.tools.eq_(hourly.hour, 6)
         nose.tools.eq_(hourly.cpu_time, 2.5)
         nose.tools.eq_(hourly.max_memory, 100)
         nose.tools.eq_(hourly.data_read_size, 1000)
         nose.tools.eq_(hourly.data_read_nb_blocks, 100)
         nose.tools.eq_(hourly.data_read_time, 10000)
         nose.tools.eq_(hourly.data_written_size, 2000)
         nose.tools.eq_(hourly.data_written_nb_blocks, 200)
         nose.tools.eq_(hourly.data_written_time, 20000)

         hourly = HourlyStatistics.objects.all()[1]
         nose.tools.eq_(hourly.date, date2.date())
         nose.tools.eq_(hourly.hour, 7)
         nose.tools.eq_(hourly.cpu_time, 5.0)
         nose.tools.eq_(hourly.max_memory, 200)
         nose.tools.eq_(hourly.data_read_size, 500)
         nose.tools.eq_(hourly.data_read_nb_blocks, 50)
         nose.tools.eq_(hourly.data_read_time, 5000)
         nose.tools.eq_(hourly.data_written_size, 600)
         nose.tools.eq_(hourly.data_written_nb_blocks, 60)
         nose.tools.eq_(hourly.data_written_time, 6000)
beat/web/statistics/utils.py

@@ -25,9 +25,11 @@
 #                                                                             #
 ###############################################################################

-from .models import HourlyStatistics
 from datetime import datetime

+from .models import HourlyStatistics
+

 def updateStatistics(stats, date=None):
     """Update the hourly statistics
@@ -41,34 +43,22 @@ def updateStatistics(stats, date=None):

     date = date or datetime.now()

-    # Retrieve the current obj entry (if it exists)
-    try:
-        obj = HourlyStatistics.objects.order_by('-date', '-hour')[0]
-        if (obj.date != date.date()) or (obj.hour != date.hour):
-            obj = None
-    except:
-        obj = None
-
-    # Create an obj entry if necessary
-    if obj is None:
-        obj = HourlyStatistics()
-        obj.date = date.date()
-        obj.hour = date.hour
+    obj, _ = HourlyStatistics.objects.get_or_create(date=date.date(), hour=date.hour)

     # Modify the obj entry
-    obj.cpu_time += stats.cpu['user'] + stats.cpu['system']
-    obj.max_memory += stats.memory['rss']
+    obj.cpu_time += stats.cpu["user"] + stats.cpu["system"]
+    obj.max_memory += stats.memory["rss"]

-    if 'volume' in stats.data:
-        obj.data_read_size += stats.data['volume'].get('read', 0)
-        obj.data_written_size += stats.data['volume'].get('write', 0)
+    if "volume" in stats.data:
+        obj.data_read_size += stats.data["volume"].get("read", 0)
+        obj.data_written_size += stats.data["volume"].get("write", 0)

-    if 'blocks' in stats.data:
-        obj.data_read_nb_blocks += stats.data['blocks'].get('read', 0)
-        obj.data_written_nb_blocks += stats.data['blocks'].get('write', 0)
+    if "blocks" in stats.data:
+        obj.data_read_nb_blocks += stats.data["blocks"].get("read", 0)
+        obj.data_written_nb_blocks += stats.data["blocks"].get("write", 0)

-    if 'time' in stats.data:
-        obj.data_read_time += stats.data['time'].get('read', 0)
-        obj.data_written_time += stats.data['time'].get('write', 0)
+    if "time" in stats.data:
+        obj.data_read_time += stats.data["time"].get("read", 0)
+        obj.data_written_time += stats.data["time"].get("write", 0)

     obj.save()
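The more substantial change in this hunk replaces the manual lookup of the latest row (with a bare except resetting obj to None) by get_or_create, which returns the existing row for the (date, hour) pair or a freshly created one in a single call. A minimal sketch of the semantics, with the import path assumed from the file layout above:

    from datetime import datetime

    from beat.web.statistics.models import HourlyStatistics  # path assumed

    now = datetime.now()
    obj, created = HourlyStatistics.objects.get_or_create(date=now.date(), hour=now.hour)
    # created is True when no row existed yet for this (date, hour); the numeric
    # fields then start at their default of 0, so the "+=" accumulation works either way.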
beat/web/statistics/views.py

@@ -34,11 +34,10 @@ except ImportError:
     from itertools import zip_longest as izip_longest

 import simplejson

-from django.shortcuts import render
 from django.contrib.auth.decorators import login_required
-from django.http import HttpResponseForbidden
 from django.db.models import Sum
+from django.http import HttpResponseForbidden
+from django.shortcuts import render

 from .models import HourlyStatistics
@@ -47,20 +46,23 @@ def calculate_totals():
     """Caculates all totals required by the statistics display"""
     from django.contrib.auth.models import User

+    from ..accounts.models import Profile
+    from ..algorithms.models import Algorithm
+    from ..attestations.models import Attestation
+    from ..backend.models import Environment
+    from ..backend.models import Queue
+    from ..backend.models import Worker
     from ..databases.models import Database
-    from ..backend.models import Environment, Queue, Worker
     from ..dataformats.models import DataFormat
     from ..experiments.models import Experiment
-    from ..toolchains.models import Toolchain
-    from ..algorithms.models import Algorithm
     from ..libraries.models import Library
-    from ..dataformats.models import DataFormat
-    from ..team.models import Team
-    from ..attestations.models import Attestation
-    from ..reports.models import Report
     from ..plotters.models import Plotter
     from ..plotters.models import PlotterParameter
+    from ..reports.models import Report
     from ..search.models import Search
-    from ..accounts.models import Profile
+    from ..team.models import Team
+    from ..toolchains.models import Toolchain

     # for calculating the total cpu time, we use the HourlyStatistics and
     # accumulate over the whole history
@@ -68,38 +70,44 @@ def calculate_totals():

     counter = objects.count()
     details = objects.aggregate(
-        cpu_time=Sum('cpu_time'),
-        max_memory=Sum('max_memory'),
-        data_read_size=Sum('data_read_size'),
-        data_read_time=Sum('data_read_time'),
-        data_written_size=Sum('data_written_size'),
-        data_written_time=Sum('data_written_time'),
+        cpu_time=Sum("cpu_time"),
+        max_memory=Sum("max_memory"),
+        data_read_size=Sum("data_read_size"),
+        data_read_time=Sum("data_read_time"),
+        data_written_size=Sum("data_written_size"),
+        data_written_time=Sum("data_written_time"),
     )

-    cpu_time = details['cpu_time']
-    memory = details['max_memory']
-    input_size = details['data_read_size']
-    input_time = details['data_read_time']
-    output_size = details['data_written_size']
-    output_time = details['data_written_time']
+    cpu_time = details["cpu_time"]
+    memory = details["max_memory"]
+    input_size = details["data_read_size"]
+    input_time = details["data_read_time"]
+    output_size = details["data_written_size"]
+    output_time = details["data_written_time"]

     new_users = User.objects.filter(profile__status=Profile.NEWUSER)
     waiting_validation_users = User.objects.filter(profile__status=Profile.WAITINGVALIDATION)
     accepted_users = User.objects.filter(profile__status=Profile.ACCEPTED)
     rejected_users = User.objects.filter(profile__status=Profile.REJECTED)
     yearrevalidation_users = User.objects.filter(profile__status=Profile.YEARREVALIDATION)
     blocked_users = User.objects.filter(profile__status=Profile.BLOCKED)

     return dict(
         counter=counter,
         cpu_time_hours=int(cpu_time / float(60 ** 2)) if cpu_time else 0,
         memory_gb=int(memory / float(2 ** 30)) if memory else 0,
         input_gb=int(input_size / float(2 ** 30)) if input_size else 0,
         input_bw_mb_s=int((input_size / float(2 ** 20)) / input_time) if input_size else 0,
         output_gb=int(output_size / float(2 ** 30)) if output_size else 0,
         output_bw_mb_s=int((output_size / float(2 ** 20)) / output_time) if output_size else 0,
         users=User.objects.count(),
         newusers=new_users.count(),
         waitingvalidationusers=waiting_validation_users.count(),
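The totals returned above convert raw sums into display units: CPU seconds divided by 60 ** 2 give hours, byte counts divided by 2 ** 30 give gigabytes, and the bandwidth figures divide a 2 ** 20-scaled byte count by the accumulated transfer time to get MB/s. A worked example with assumed totals:

    cpu_time = 7200.0        # seconds -> int(7200 / float(60 ** 2)) == 2 hours
    memory = 3 * 2 ** 30     # bytes   -> int(memory / float(2 ** 30)) == 3 GB
    input_size = 2 ** 30     # one gigabyte read in...
    input_time = 512.0       # ...512 seconds -> int((2 ** 30 / float(2 ** 20)) / 512.0) == 2 MB/s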
@@ -111,20 +119,17 @@ def calculate_totals():

         environments=Environment.objects.count(),
         queues=Queue.objects.count(),
         workers=Worker.objects.count(),
         experiments=Experiment.objects.count(),
         toolchains=Toolchain.objects.count(),
         algorithms=Algorithm.objects.count(),
         libraries=Library.objects.count(),
         dataformats=DataFormat.objects.count(),
         teams=Team.objects.count(),
         attestations=Attestation.objects.count(),
         searches=Search.objects.count(),
         reports=Report.objects.count(),
         plotters=Plotter.objects.count(),
         plotterparameters=PlotterParameter.objects.count(),
     )
@@ -133,23 +138,29 @@ def convert_values(stats):

     # transform into plottable data

     def _seconds_to_hours(s, field):
-        if s is not None: return s[field] / float(60 ** 2) #hours
-        return 0.
+        if s is not None:
+            return s[field] / float(60 ** 2)  # hours
+        return 0.0

     def _bytes_to_gb(s, field):
-        if s is not None: return s[field] / float(2 ** 30) #gigabytes
-        return 0.
+        if s is not None:
+            return s[field] / float(2 ** 30)  # gigabytes
+        return 0.0

     def _bw_in_mbs(s, num, den):
         if s is None:
             return 0.