Commit 39255663 authored by Samuel GAIST's avatar Samuel GAIST
Browse files

[backend][tests] Fix prefix setup

This commit also cleans up the code.
parent 49f1e195
......@@ -25,20 +25,21 @@
# #
###############################################################################
import os
import beat.core.hash
from django.test import TestCase
from django.conf import settings
from ...utils.management.commands import install
from ...experiments.models import Experiment
from ...experiments.models import Block
from ...experiments.models import CachedFile
from ...algorithms.models import Algorithm
from ...common.testutils import tearDownModule # noqa test runner will call it
from ..models import Queue
from ..models import Worker
from ..models import Environment
from ..models import Job
from ..models import JobSplit
from ..utils import setup_backend
from ..management.commands import qsetup
......@@ -46,106 +47,80 @@ from ..management.commands import qsetup
from beat.core.dataformat import DataFormat
from beat.core.data import CachedDataSink
from beat.core.database import Database
import beat.core.hash
import os
import sys
#----------------------------------------------------------
# ----------------------------------------------------------
# Backend configuration used by the tests: a single queue served by two
# single-core workers, with one Python 2.7 environment.
# (Reconstructed: the diff scrape had both pre- and post-change lines of
# this literal interleaved, which is not valid Python.)
ONE_QUEUE_TWO_WORKERS = {
    "queues": {
        "queue": {
            "memory-limit": 4 * 1024,
            "time-limit": 1440,  # 1 day
            "cores-per-slot": 1,
            "max-slots-per-user": 2,
            "environments": ["Python 2.7 (1.3.0)"],
            "slots": {
                "node1": {"quantity": 1, "priority": 0},
                "node2": {"quantity": 1, "priority": 0},
            },
            "groups": ["Default"],
        }
    },
    "workers": {
        "node1": {"cores": 1, "memory": 16 * 1024},
        "node2": {"cores": 1, "memory": 16 * 1024},
    },
    "environments": {
        "Python 2.7 (1.3.0)": {
            "name": "Python 2.7",
            "version": "1.3.0",
            "short_description": "Test",
            "description": "Test environment",
            "languages": ["python"],
        }
    },
}
#----------------------------------------------------------
# ----------------------------------------------------------
class BackendUtilitiesMixin(object):
@classmethod
def setup_test_data(cls):
    """Populate the test database with the baseline objects.

    Creates sites, users and the default group, installs the backend
    configuration, activates the workers, and installs the example
    contributions found under ``settings.BASE_DIR/src/beat.examples``.
    """
    install.create_sites()
    system_user, plot_user, user = install.create_users("user", "user")
    install.add_group("Default")

    setup_backend(qsetup.DEFAULT_CONFIGURATION)
    Worker.objects.update(active=True)

    env = Environment.objects.get(name="Python 2.7")
    queue = Queue.objects.first()

    template_data = dict(
        system_user=system_user,
        plot_user=plot_user,
        user=user,
        private=False,
        queue=queue.name,
        environment=dict(name=env.name, version=env.version),
    )

    # Prefix fix from this commit: resolve the examples checkout from
    # settings.BASE_DIR instead of deriving it from sys.argv[0], which
    # is unreliable under different test runners.
    source_prefix = os.path.join(settings.BASE_DIR, "src", "beat.examples")

    install.install_contributions(source_prefix, "system", template_data)
    install.install_contributions(source_prefix, "test", template_data)

    if not os.path.exists(settings.CACHE_ROOT):
        os.mkdir(settings.CACHE_ROOT)
def clean_cache(self):
for p, dirs, files in os.walk(settings.CACHE_ROOT, topdown=False):
files = [f for f in files if not f.startswith('.')]
dirs[:] = [d for d in dirs if not d.startswith('.')] #note: in-place
files = [f for f in files if not f.startswith(".")]
dirs[:] = [d for d in dirs if not d.startswith(".")] # note: in-place
for f in files:
fullpath = os.path.join(p, f)
......@@ -155,9 +130,9 @@ class BackendUtilitiesMixin(object):
fullpath = os.path.join(p, d)
os.rmdir(fullpath)
def set_experiment_state(self, experiment, experiment_status=None, block_status=None,
cache_status=None):
def set_experiment_state(
self, experiment, experiment_status=None, block_status=None, cache_status=None
):
if block_status:
for name, status in block_status.items():
block = experiment.blocks.get(name=name)
......@@ -175,9 +150,8 @@ class BackendUtilitiesMixin(object):
experiment.status = experiment_status
experiment.save()
def generate_cached_files(self, hash, splits):
dataformat = DataFormat(settings.PREFIX, 'system/integer/1')
dataformat = DataFormat(settings.PREFIX, "system/integer/1")
path = os.path.join(settings.CACHE_ROOT, beat.core.hash.toPath(hash))
os.makedirs(os.path.dirname(path))
......@@ -200,80 +174,76 @@ class BackendUtilitiesMixin(object):
start = indices[0]
end = indices[1]
sink.write({
'value': value,
},
start_data_index = start,
end_data_index = end
)
sink.write({"value": value}, start_data_index=start, end_data_index=end)
value += 1
sink.close()
def prepare_databases(self, configuration):
    """Index every dataset view referenced by *configuration*.

    For each entry of ``configuration["datasets"]`` the cached index
    file is created under ``settings.CACHE_ROOT`` unless it already
    exists.
    """
    for _, cfg in configuration["datasets"].items():
        path = beat.core.hash.toPath(
            beat.core.hash.hashDataset(cfg["database"], cfg["protocol"], cfg["set"]),
            suffix=".db",
        )
        if not os.path.exists(os.path.join(settings.CACHE_ROOT, path)):
            database = Database(settings.PREFIX, cfg["database"])
            view = database.view(cfg["protocol"], cfg["set"])
            view.index(os.path.join(settings.CACHE_ROOT, path))
#----------------------------------------------------------
# ----------------------------------------------------------
class BaseBackendTestCase(TestCase, BackendUtilitiesMixin):
    """Base class for backend tests: installs the test data once per
    class and keeps the cache clean around each test."""

    @classmethod
    def setUpTestData(cls):
        cls.setup_test_data()

    def setUp(self):
        self.clean_cache()

    def tearDown(self):
        self.clean_cache()

    def check_single(self, xp):
        """Checks user/user/single/1/single"""
        self.assertEqual(xp.blocks.count(), 2)

        b0 = xp.blocks.all()[0]
        self.assertEqual(b0.name, "echo")
        self.assertEqual(b0.status, Block.PENDING)
        self.assertEqual(b0.algorithm, Algorithm.objects.get(name="integers_echo"))
        self.assertEqual(b0.dependencies.count(), 0)
        self.assertEqual(b0.dependents.count(), 1)
        self.assertEqual(b0.queue.name, "queue")
        self.assertEqual(b0.environment.name, "Python 2.7")
        self.assertEqual(b0.required_slots, 1)
        self.assertEqual(b0.inputs.count(), 1)
        self.assertEqual(b0.outputs.count(), 1)
        self.assertEqual(b0.job.splits.count(), 0)  # not scheduled yet
        self.assertFalse(b0.done())

        b1 = xp.blocks.all()[1]
        self.assertEqual(b1.name, "analysis")
        self.assertEqual(b1.status, Block.PENDING)
        self.assertEqual(
            b1.algorithm, Algorithm.objects.get(name="integers_echo_analyzer")
        )
        self.assertEqual(b1.dependencies.count(), 1)
        self.assertEqual(b1.dependents.count(), 0)
        self.assertEqual(b1.queue.name, "queue")
        self.assertEqual(b1.environment.name, "Python 2.7")
        self.assertEqual(b1.required_slots, 1)
        self.assertEqual(b1.inputs.count(), 1)
        self.assertEqual(b1.outputs.count(), 1)
        self.assertEqual(b1.job.splits.count(), 0)  # not scheduled yet
        self.assertFalse(b1.done())
......@@ -25,25 +25,24 @@
# #
###############################################################################
from ...common.testutils import BaseTestCase as APITestCase
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from ...common.testutils import BaseTestCase as APITestCase
from ...common.testutils import tearDownModule # noqa test runner will call it
class CancelAllExperimentsAPI(APITestCase):
    """Access-control tests for the cancel-all-experiments endpoint."""

    def setUp(self):
        self.url = reverse("backend:cancel-experiments")

    def test_no_access_for_anonymous_user(self):
        response = self.client.get(self.url)
        self.checkResponse(response, 302)  # redirects to login page

    def test_no_access_for_non_superuser(self):
        PASSWORD = "1234"
        User.objects.create_user("johndoe", "johndoe@test.org", PASSWORD)
        self.client.login(username="johndoe", password=PASSWORD)
        response = self.client.get(self.url)
        self.checkResponse(response, 403)
......@@ -26,7 +26,6 @@
###############################################################################
import os
import sys
import time
import shutil
import tempfile
......@@ -34,43 +33,36 @@ import tempfile
from django.core import management
from django.test import TestCase
from ...common.testutils import tearDownModule # noqa test runner will call it
from ..utils import cleanup_cache
class CacheCleanUp(TestCase):
def setUp(self):
    # Each test gets its own throw-away cache directory.
    self.cache = tempfile.mkdtemp(prefix="beat_")
def tearDown(self):
    # Remove the temporary cache directory created in setUp().
    shutil.rmtree(self.cache)
def touch(self, f, times=None):
    """Replicates the `touch' command-line utility.

    Creates *f* if needed and sets its access/modification times to
    *times* (``(atime, mtime)``) or to "now" when *times* is None.
    """
    with open(f, "a"):
        os.utime(f, times)
def J(self, *args):
    """Join *args* as path components under the test cache root."""
    return os.path.join(*((self.cache,) + args))
def prepare_cleanup_full(self):
    """Create a nested directory structure with two files in it."""
    # creates a temporary directory structure
    os.makedirs(self.J("a", "b", "c"))
    os.makedirs(self.J("a", "c", "d"))
    os.makedirs(self.J("a", "c", "e"))

    self.touch(self.J("a", "b", "c", "d.json"))
    self.touch(self.J("a", "c", "d", "e.json"))
def check_cleanup_full(self):
    """Verify that a full cleanup left the cache directory empty.

    NOTE: the commit's conversion used ``assertIsNotNone(os.listdir(...))``,
    which always passes because os.listdir() always returns a list.  The
    original ``assert not os.listdir(...)`` checks emptiness, so the
    faithful unittest form is ``assertFalse``.
    """
    self.assertFalse(os.listdir(self.cache))
def test_cache_cleanup_full(self):
......@@ -78,34 +70,31 @@ class CacheCleanUp(TestCase):
cleanup_cache(self.cache, delete=True)
self.check_cleanup_full()
def test_cmd_cleanup_full(self):
    # The management command with delete=True must empty the cache.
    self.prepare_cleanup_full()
    management.call_command(
        "cleanup_cache", path=self.cache, verbosity=0, delete=True
    )
    self.check_cleanup_full()
def prepare_cleanup_aged(self):
    """Create a directory structure with one file back-dated two minutes."""
    two_min_ago = time.time() - 60 * 2

    # creates a temporary directory structure
    os.makedirs(self.J("a", "b", "c"))
    os.makedirs(self.J("a", "c", "d"))
    os.makedirs(self.J("a", "c", "e"))

    self.touch(self.J("a", "b", "c", "d.json"), (two_min_ago, two_min_ago))
    self.touch(self.J("a", "c", "d", "e.json"))
def check_cleanup_aged(self):
    """Only the back-dated file (and its empty parents) must be gone."""
    self.assertTrue(os.path.exists(self.J("a", "c", "d", "e.json")))
    self.assertFalse(os.path.exists(self.J("a", "b", "c")))
    self.assertFalse(os.path.exists(self.J("a", "b", "c", "d.json")))
    self.assertFalse(os.path.exists(self.J("a", "b", "e")))
def test_cache_cleanup_aged(self):
......@@ -113,37 +102,34 @@ class CacheCleanUp(TestCase):
cleanup_cache(self.cache, age_in_minutes=2, delete=True)
self.check_cleanup_aged()
def test_cmd_cleanup_aged(self):
    # olderthan=2 must only delete entries older than two minutes.
    self.prepare_cleanup_aged()
    management.call_command(
        "cleanup_cache", path=self.cache, verbosity=0, olderthan=2, delete=True
    )
    self.check_cleanup_aged()
def prepare_cleanup_lock(self):
    """Create back-dated files, one of them protected by a .lock file."""
    two_min_ago = time.time() - 60 * 2
    ten_min_ago = time.time() - 60 * 10

    # creates a temporary directory structure
    os.makedirs(self.J("a", "b", "c"))
    os.makedirs(self.J("a", "c", "d"))
    os.makedirs(self.J("a", "c", "e"))

    self.touch(self.J("a", "b", "c", "d.json"), (two_min_ago, two_min_ago))
    self.touch(self.J("a", "c", "d", "e.json"), (ten_min_ago, ten_min_ago))

    self.touch(self.J("a", "c", "d", "e.lock"))  # create a lock
def check_cleanup_lock(self):
    """The locked file must survive; the unlocked old one must be gone."""
    self.assertTrue(os.path.exists(self.J("a", "c", "d", "e.json")))
    self.assertFalse(os.path.exists(self.J("a", "b", "c")))
    self.assertFalse(os.path.exists(self.J("a", "b", "c", "d.json")))
    self.assertFalse(os.path.exists(self.J("a", "b", "e")))
def test_cache_cleanup_lock(self):
......@@ -151,10 +137,10 @@ class CacheCleanUp(TestCase):
cleanup_cache(self.cache, delete=True)
self.check_cleanup_lock()
def test_cmd_cleanup_lock(self):
    # A .lock sibling must protect its file from the management command.
    self.prepare_cleanup_lock()
    management.call_command(
        "cleanup_cache", path=self.cache, verbosity=0, delete=True
    )
    self.check_cleanup_lock()
This diff is collapsed.
This diff is collapsed.
......@@ -25,15 +25,12 @@
# #
###############################################################################
import time
import collections
from django.core import management
from .common import BaseBackendTestCase
from ...experiments.models import Experiment
from ..models import Queue
from ..models import Worker
from ..models import Slot
......@@ -41,206 +38,180 @@ from ..utils import dump_backend
from ..utils import setup_backend
from ..management.commands import qsetup
from ...experiments.models import Experiment
from ...common.testutils import tearDownModule # noqa test runner will call it
# Example configuration with 3 queues with an increasing amount of resources
# running on the same host
QUEUES_WITHOUT_PRIORITY = {
"queues": collections.OrderedDict([
("q1", {
"memory-limit": 4*1024,
"time-limit": 180, #3 hours
"cores-per-slot": 1,
"max-slots-per-user": 4,
"environments": ['Python 2.7 (1.3.0)'],
"groups": [
"Default",
],
"slots": {
"node1": {
"quantity": 4,
"priority": 0
}
}
}
),
("q2", {
"memory-limit": 8*1024,
"time-limit": 360, #6 hours
"cores-per-slot": 2,
"max-slots-per-user": 2,
"environments": ['Python 2.7 (1.3.0)'],
"groups": [
"Default",
],
"slots": {
"node1": {
"quantity": 2,
"priority": 0
"queues": collections.OrderedDict(
[
(
"q1",
{
"memory-limit": 4 * 1024,
"time-limit": 180, # 3 hours
"cores-per-slot": 1,
"max-slots-per-user": 4,
"environments": ["Python 2.7 (1.3.0)"],
"groups": ["Default"],
"slots": {"node1": {"quantity": 4, "priority": 0}},
},
}
}
),
("q4", {
"memory-limit": 16*1024,
"time-limit": 720, #12 hours
"cores-per-slot": 4,
"max-slots-per-user": 1,
"environments": ['Python 2.7 (1.3.0)'],
"groups": [
"Default",
],
"slots": {
"node1": {
"quantity": 1,
"priority": 0
),
(
"q2",
{