diff --git a/beat/web/algorithms/models.py b/beat/web/algorithms/models.py
index dbd68887bb63f06d3cd2552cb140b8ce2022a2cd..c4ddf0554e32d0c5e7eb2ebf1abffdfb5f4fa112 100755
--- a/beat/web/algorithms/models.py
+++ b/beat/web/algorithms/models.py
@@ -201,7 +201,7 @@ class Algorithm(Code):
 
         envs = Environment.objects.filter(blocks__in=self.blocks.filter(
             Q(experiment__status=Experiment.DONE) | \
-            ((~Q(experiment__status=Experiment.DONE)) & Q(status=Block.CACHED))
+            ((~Q(experiment__status=Experiment.DONE)) & Q(status=Block.DONE))
             )).annotate(itemcount=Count('id')).order_by('-creation_date').distinct()
 
         return [(k, k.itemcount) for k in envs]
diff --git a/beat/web/backend/models.py b/beat/web/backend/models.py
index 9ea3d5a4d5c216565a26cc9f3e94b52d035b592b..053b42af697acb962ebd0ab4ef697f4b95b1ef2e 100755
--- a/beat/web/backend/models.py
+++ b/beat/web/backend/models.py
@@ -718,7 +718,7 @@ class Result(models.Model):
 class Job(models.Model):
     '''Class describing the execution of a Job on the backend'''
 
-    QUEUED = 'N' #Block.NOT_CACHED
+    QUEUED = 'N' #Block.PENDING
     PROCESSING = 'P' #Block.PROCESSING
     COMPLETED = 'C' #Block.COMPLETED
     FAILED = 'F' #Block.FAILED
diff --git a/beat/web/backend/tests.py b/beat/web/backend/tests.py
index 2087c3a65f072e6bcdd68b9d0d16756cd1d1a5c1..2f57e7cd4258e93841bdbe3304a2d4c590c37806 100755
--- a/beat/web/backend/tests.py
+++ b/beat/web/backend/tests.py
@@ -411,7 +411,7 @@ class BaseBackendTestCase(TestCase):
 
         b0 = xp.blocks.all()[0]
         self.assertEqual(b0.name, 'echo')
-        self.assertEqual(b0.status, Block.NOT_CACHED)
+        self.assertEqual(b0.status, Block.PENDING)
         self.assertEqual(b0.algorithm, Algorithm.objects.get(name='integers_echo'))
         self.assertEqual(b0.dependencies.count(), 0)
@@ -432,7 +432,7 @@ class BaseBackendTestCase(TestCase):
 
         b1 = xp.blocks.all()[1]
         self.assertEqual(b1.name, 'analysis')
-        self.assertEqual(b1.status, Block.NOT_CACHED)
+        self.assertEqual(b1.status, Block.PENDING)
         self.assertEqual(b1.algorithm, Algorithm.objects.get(name='integers_echo_analyzer'))
         self.assertEqual(b1.dependencies.count(), 1)
@@ -775,7 +775,7 @@ class Scheduling(BaseBackendTestCase):
         # simulate end job signal
         split.end(Result(status=0))
         self.assertEqual(split.job.status, Job.COMPLETED)
-        self.assertEqual(split.job.block.status, Block.CACHED)
+        self.assertEqual(split.job.block.status, Block.DONE)
         self.assertEqual(split.job.block.experiment.status, Experiment.RUNNING)
 
         # checks the number of statistics objects has increased by 1
@@ -823,7 +823,7 @@
         self.assertTrue(HourlyStatistics.objects.count() > current_stats)
 
         self.assertEqual(split.job.status, Job.COMPLETED)
-        self.assertEqual(split.job.block.status, Block.CACHED)
+        self.assertEqual(split.job.block.status, Block.DONE)
         self.assertEqual(split.job.block.experiment.status, Experiment.DONE)
 
         self.check_stats_success(split)
@@ -983,7 +983,7 @@ class Scheduling(BaseBackendTestCase):
         # simulate end job signal
         split.end(Result(status=0))
         self.assertEqual(split.job.status, Job.COMPLETED)
-        self.assertEqual(split.job.block.status, Block.CACHED)
+        self.assertEqual(split.job.block.status, Block.DONE)
         self.assertEqual(split.job.block.experiment.status, Experiment.RUNNING)
 
         # checks the number of statistics objects has increased by 1
@@ -1120,7 +1120,7 @@ class Scheduling(BaseBackendTestCase):
         # simulate end job signal
         split.end(Result(status=0))
         self.assertEqual(split.job.status, Job.COMPLETED)
-        self.assertEqual(split.job.block.status, Block.CACHED)
+        self.assertEqual(split.job.block.status, Block.DONE)
         self.assertEqual(split.job.block.experiment.status, Experiment.RUNNING)
 
         # checks the number of statistics objects has increased by 1
@@ -1143,7 +1143,7 @@ class Scheduling(BaseBackendTestCase):
 
         self.assertEqual(
             [str(k) for k in xp.blocks.order_by('id').values_list('status', flat=True)],
-            [Block.CACHED, Block.CANCELLED]
+            [Block.DONE, Block.CANCELLED]
             )
 
         self.assertEqual(xp.status, Experiment.FAILED)
@@ -1336,10 +1336,10 @@ class Scheduling(BaseBackendTestCase):
 
         # simulate end job signal
         split.end(Result(status=0))
         self.assertEqual(split.job.status, Job.COMPLETED)
-        self.assertEqual(split.job.block.status, Block.CACHED)
+        self.assertEqual(split.job.block.status, Block.DONE)
         self.assertEqual(split.job.block.experiment.status, Experiment.RUNNING)
         self.assertEqual(split.job.child.status, Job.COMPLETED)
-        self.assertEqual(split.job.child.block.status, Block.CACHED)
+        self.assertEqual(split.job.child.block.status, Block.DONE)
         self.assertEqual(split.job.child.block.experiment.status, Experiment.RUNNING)
 
@@ -1395,10 +1395,10 @@ class Scheduling(BaseBackendTestCase):
 
         self.assertTrue(HourlyStatistics.objects.count() > current_stats)
 
         self.assertEqual(split.job.status, Job.COMPLETED)
-        self.assertEqual(split.job.block.status, Block.CACHED)
+        self.assertEqual(split.job.block.status, Block.DONE)
         self.assertEqual(split.job.block.experiment.status, Experiment.DONE)
         self.assertEqual(split.job.child.status, Job.COMPLETED)
-        self.assertEqual(split.job.child.block.status, Block.CACHED)
+        self.assertEqual(split.job.child.block.status, Block.DONE)
         self.assertEqual(split.job.child.block.experiment.status, Experiment.DONE)
@@ -1546,10 +1546,10 @@ class Scheduling(BaseBackendTestCase):
 
         # simulate end job signal
         split.end(Result(status=0))
         self.assertEqual(split.job.status, Job.COMPLETED)
-        self.assertEqual(split.job.block.status, Block.CACHED)
+        self.assertEqual(split.job.block.status, Block.DONE)
         self.assertEqual(split.job.block.experiment.status, Experiment.RUNNING)
         self.assertEqual(split.job.child.status, Job.COMPLETED)
-        self.assertEqual(split.job.child.block.status, Block.CACHED)
+        self.assertEqual(split.job.child.block.status, Block.DONE)
         self.assertEqual(split.job.child.block.experiment.status, Experiment.RUNNING)
@@ -1572,7 +1572,7 @@ class Scheduling(BaseBackendTestCase):
         xp.cancel()
         self.assertEqual(
             [str(k) for k in xp.blocks.order_by('id').values_list('status', flat=True)],
-            [Block.CACHED, Block.CANCELLED]
+            [Block.DONE, Block.CANCELLED]
             )
 
         self.assertEqual(xp.status, Experiment.FAILED)
@@ -1753,10 +1753,10 @@ class Scheduling(BaseBackendTestCase):
 
         # simulate end job signal
         split.end(Result(status=0))
         self.assertEqual(split.job.status, Job.COMPLETED)
-        self.assertEqual(split.job.block.status, Block.CACHED)
+        self.assertEqual(split.job.block.status, Block.DONE)
         self.assertEqual(split.job.block.experiment.status, Experiment.RUNNING)
         self.assertEqual(split.job.child.status, Job.COMPLETED)
-        self.assertEqual(split.job.child.block.status, Block.CACHED)
+        self.assertEqual(split.job.child.block.status, Block.DONE)
         self.assertEqual(split.job.child.block.experiment.status, Experiment.RUNNING)
@@ -1770,7 +1770,7 @@ class Scheduling(BaseBackendTestCase):
 
         self.assertEqual(
             [str(k) for k in xpc.blocks.order_by('id').values_list('status', flat=True)],
-            [Block.CACHED, Block.CANCELLED]
+            [Block.DONE, Block.CANCELLED]
             )
 
         self.assertEqual(xpc.status, Experiment.FAILED)
@@ -2154,7 +2154,7 @@ class Working(BaseBackendTestCase):
         # at this point, job should have been successful
         xp.refresh_from_db()
         block = xp.blocks.first()
-        self.assertEqual(block.status, Block.CACHED)
+        self.assertEqual(block.status, Block.DONE)
         self.assertEqual(xp.status, Experiment.RUNNING)
 
         # all caches must be have been generated
@@ -2194,7 +2194,7 @@ class Working(BaseBackendTestCase):
 
         xp.refresh_from_db()
         block = xp.blocks.last()
-        self.assertEqual(block.status, Block.CACHED)
+        self.assertEqual(block.status, Block.DONE)
         self.assertEqual(xp.status, Experiment.DONE)
 
         # all caches must be have been generated
@@ -2303,7 +2303,7 @@ class Working(BaseBackendTestCase):
         # at this point, job should have been successful
         xp.refresh_from_db()
         block = xp.blocks.first()
-        self.assertEqual(block.status, Block.CACHED)
+        self.assertEqual(block.status, Block.DONE)
         self.assertEqual(block.experiment.status, Experiment.RUNNING)
 
         # all caches must be have been generated
@@ -2343,7 +2343,7 @@ class Working(BaseBackendTestCase):
 
         xp.refresh_from_db()
         block = xp.blocks.last()
-        self.assertEqual(block.status, Block.CACHED)
+        self.assertEqual(block.status, Block.DONE)
         self.assertEqual(block.experiment.status, Experiment.DONE)
 
         # all caches must be have been generated
@@ -2402,7 +2402,7 @@ class Working(BaseBackendTestCase):
         # at this point, job should have been successful
         xp.refresh_from_db()
         block = xp.blocks.first()
-        self.assertEqual(block.status, Block.CACHED)
+        self.assertEqual(block.status, Block.DONE)
         self.assertEqual(block.experiment.status, Experiment.RUNNING)
 
         # all caches must be have been generated
@@ -2429,7 +2429,7 @@ class Working(BaseBackendTestCase):
 
         # schedules the first runnable block
         assert not hasattr(xpc.blocks.first(), 'job')
-        assert xpc.blocks.first().status == Block.CACHED
+        assert xpc.blocks.first().status == Block.DONE
 
         # since this job was successful, the next one should be ready to run
@@ -2453,7 +2453,7 @@ class Working(BaseBackendTestCase):
 
         xpc.refresh_from_db()
         block = xpc.blocks.last()
-        self.assertEqual(block.status, Block.CACHED)
+        self.assertEqual(block.status, Block.DONE)
         self.assertEqual(block.experiment.status, Experiment.DONE)
 
         # all caches must be have been generated
@@ -2472,7 +2472,7 @@ class Working(BaseBackendTestCase):
 
         self.assertEqual(
             [str(k) for k in xp.blocks.order_by('id').values_list('status', flat=True)],
-            [Block.CACHED, Block.CANCELLED]
+            [Block.DONE, Block.CANCELLED]
             )
 
         self.assertEqual(xp.status, Experiment.FAILED)
@@ -2513,7 +2513,7 @@ class Working(BaseBackendTestCase):
         # at this point, job should have been successful
         xp.refresh_from_db()
         block = xp.blocks.first()
-        self.assertEqual(block.status, Block.CACHED)
+        self.assertEqual(block.status, Block.DONE)
         self.assertEqual(block.experiment.status, Experiment.RUNNING)
 
         # all caches must be have been generated
@@ -2537,7 +2537,7 @@ class Working(BaseBackendTestCase):
         xpc.schedule()
 
         self.assertEqual([k.status for k in xpc.blocks.all()],
-            [Block.CACHED, Block.NOT_CACHED])
+            [Block.DONE, Block.PENDING])
 
         assert xpc.blocks.last().job.parent == xp.blocks.last().job
 
@@ -2621,7 +2621,7 @@ class WorkingExternally(TransactionTestCase):
 
         def condition():
             xp.refresh_from_db()
             block = xp.blocks.first()
-            return block.status == Block.CACHED
+            return block.status == Block.DONE
 
         _sleep(120, condition)
@@ -2630,7 +2630,7 @@
 
         xp.refresh_from_db()
         block = xp.blocks.first()
-        self.assertEqual(block.status, Block.CACHED)
+        self.assertEqual(block.status, Block.DONE)
         self.assertEqual(xp.status, Experiment.RUNNING)
 
         # all caches must be have been generated
@@ -2677,7 +2677,7 @@ class WorkingExternally(TransactionTestCase):
 
         xp.refresh_from_db()
         block = xp.blocks.last()
-        self.assertEqual(block.status, Block.CACHED)
+        self.assertEqual(block.status, Block.DONE)
         self.assertEqual(xp.status, Experiment.DONE)
 
         # all caches must be have been generated
diff --git a/beat/web/experiments/migrations/0008_block_status.py b/beat/web/experiments/migrations/0008_block_status.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5a3d74d559c75dc29a41e1904f26e87e317eaad
--- /dev/null
+++ b/beat/web/experiments/migrations/0008_block_status.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.9.13 on 2017-09-27 16:48
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('experiments', '0007_cachedfile_status'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='block',
+            name='status',
+            field=models.CharField(choices=[(b'N', b'Pending'), (b'P', b'Processing'), (b'C', b'Done'), (b'F', b'Failed'), (b'S', b'Skipped'), (b'L', b'Cancelled')], default=b'N', max_length=1),
+        ),
+    ]
diff --git a/beat/web/experiments/models/block.py b/beat/web/experiments/models/block.py
index c70c983c30c3693a82d4ec64ce6997df7961b215..8c21605e418f6adc0480edb2c34a566b22d5ad33 100755
--- a/beat/web/experiments/models/block.py
+++ b/beat/web/experiments/models/block.py
@@ -68,17 +68,17 @@ class BlockManager(models.Manager):
 
 class Block(models.Model):
 
-    NOT_CACHED = 'N'
+    PENDING = 'N'
     PROCESSING = 'P'
-    CACHED = 'C'
+    DONE = 'C'
     FAILED = 'F'
     SKIPPED = 'S'
     CANCELLED = 'L'
 
     STATUS = (
-        (NOT_CACHED, 'Not cached'),
+        (PENDING, 'Pending'),
         (PROCESSING, 'Processing'),
-        (CACHED, 'Cached'),
+        (DONE, 'Done'),
         (FAILED, 'Failed'),
         (SKIPPED, 'Skipped'),
         (CANCELLED, 'Cancelled'),
@@ -88,7 +88,7 @@ class Block(models.Model):
                                    on_delete=models.CASCADE)
     name = models.CharField(max_length=200)
     command = models.TextField(null=True, blank=True)
-    status = models.CharField(max_length=1, choices=STATUS, default=NOT_CACHED)
+    status = models.CharField(max_length=1, choices=STATUS, default=PENDING)
     analyzer = models.BooleanField(default=False)
     algorithm = models.ForeignKey(Algorithm, related_name='blocks',
                                   on_delete=models.CASCADE)
@@ -254,7 +254,7 @@ class Block(models.Model):
     def done(self):
        '''Says whether the block has finished or not'''
 
-        return self.status not in (Block.NOT_CACHED, Block.PROCESSING)
+        return self.status not in (Block.PENDING, Block.PROCESSING)
 
 
     def _cancel(self):
@@ -280,7 +280,7 @@ class Block(models.Model):
     def is_runnable(self):
         '''Checks if a block is runnable presently'''
 
-        return all([k.status in (Block.CACHED, Block.SKIPPED) \
+        return all([k.status in (Block.DONE, Block.SKIPPED) \
                 for k in self.dependencies.all()]) and \
                 (hasattr(self, 'job') and self.job.parent is None)
 
@@ -375,7 +375,7 @@ class Block(models.Model):
             self.outputs.update(**info)
 
         if self.job.status == Block.SKIPPED:
-            self.status = Block.CACHED
+            self.status = Block.DONE
         else:
             self.status = self.job.status
 
@@ -390,7 +390,7 @@ class Block(models.Model):
             cached_file.update(self.status)
 
         # Loads Results from cache
-        if self.job.result and self.analyzer and self.status == Block.CACHED:
+        if self.job.result and self.analyzer and self.status == Block.DONE:
             cache = self.first_cache()
             data_source = beat.core.data.CachedDataSource()
             data_source.setup(os.path.join(settings.CACHE_ROOT,
diff --git a/beat/web/experiments/models/cached_file.py b/beat/web/experiments/models/cached_file.py
index bb79e07c5eb53dad3f92776543f2e74e89e03d41..6c8e2b5aaeb00859ccfa875e09a2239a6e7951a3 100755
--- a/beat/web/experiments/models/cached_file.py
+++ b/beat/web/experiments/models/cached_file.py
@@ -129,7 +129,7 @@ class CachedFile(models.Model):
     def update(self, block_status):
         from . import Block
 
-        if (block_status == Block.CACHED) and (self.status != CachedFile.CACHED):
+        if (block_status == Block.DONE) and (self.status != CachedFile.CACHED):
             self.status = CachedFile.CACHED
             self.save()
@@ -137,7 +137,7 @@ class CachedFile(models.Model):
             self.status = CachedFile.PROCESSING
             self.save()
 
-        elif (block_status != Block.CACHED) and (self.status == CachedFile.PROCESSING):
+        elif (block_status != Block.DONE) and (self.status == CachedFile.PROCESSING):
             self.status = CachedFile.NOT_CACHED
             self.save()
diff --git a/beat/web/experiments/models/experiment.py b/beat/web/experiments/models/experiment.py
index 7d7d68d76e94f64d1c8cfa6fce0f38be09df57ac..6b4f35ccbc8f09544f347e96dc332c5c6fa33570 100755
--- a/beat/web/experiments/models/experiment.py
+++ b/beat/web/experiments/models/experiment.py
@@ -568,7 +568,7 @@ class Experiment(Shareable):
 
             b.execution_order = order_0 + 1
             b.command = simplejson.dumps(job_description, indent=4)
-            b.status = Block.NOT_CACHED
+            b.status = Block.PENDING
             b.analyzer = algorithm.analysis()
             b.environment = env
             b.queue = queue
@@ -691,7 +691,7 @@ class Experiment(Shareable):
 
         if len(blocks) == 0:
             return 0
-        return int(100 * float(len(filter(lambda x: x.status == Block.CACHED, blocks))) / len(blocks))
+        return int(100 * float(len(filter(lambda x: x.status == Block.DONE, blocks))) / len(blocks))
 
 
     def all_needed_dataformats(self):
@@ -712,7 +712,7 @@ class Experiment(Shareable):
         if not self.is_done(): return #can only reset experiments which are done
 
         self.blocks.update(
-            status=Block.NOT_CACHED,
+            status=Block.PENDING,
             start_date=None,
             end_date=None,
         )
@@ -794,12 +794,12 @@ class Experiment(Shareable):
             self.status = Experiment.FAILED
 
         elif (Block.PROCESSING in block_statuses) or \
-            ((Block.NOT_CACHED in block_statuses or \
+            ((Block.PENDING in block_statuses or \
             Block.SKIPPED in block_statuses) and \
-            Block.CACHED in block_statuses):
+            Block.DONE in block_statuses):
             self.status = Experiment.RUNNING
 
-        elif Block.NOT_CACHED not in block_statuses:
+        elif Block.PENDING not in block_statuses:
             self.status = Experiment.DONE
 
         else:
diff --git a/beat/web/experiments/serializers.py b/beat/web/experiments/serializers.py
index 4dadd9e09505efa39db0bb2bf5faa640ffea0fca..deb9c6709b7c6ff8d80e4132c3dddba3b21316bc 100755
--- a/beat/web/experiments/serializers.py
+++ b/beat/web/experiments/serializers.py
@@ -228,9 +228,9 @@ class ExperimentResultsSerializer(ShareableSerializer):
     def get_blocks_status(self, obj):
         results = {}
         for block in obj.blocks.iterator():
-            if block.status == Block.CACHED:
+            if block.status == Block.DONE:
                 results[block.name] = 'generated'
-            elif block.status == Block.NOT_CACHED:
+            elif block.status == Block.PENDING:
                 results[block.name] = 'pending'
             elif block.status == Block.FAILED:
                 results[block.name] = 'failed'
diff --git a/beat/web/experiments/signals.py b/beat/web/experiments/signals.py
index 42258984286f46310b2c7de52cb246169f1c7cfb..aa4967670a99ab9fe097d016a3cf73d957e2e1d8 100644
--- a/beat/web/experiments/signals.py
+++ b/beat/web/experiments/signals.py
@@ -87,7 +87,7 @@ def log_dates_on_state_change(sender, instance, **kwargs):
 
     if old_status != new_status: #status has changed
 
-        if new_status == Block.NOT_CACHED: #admin reset
+        if new_status == Block.PENDING: #admin reset
            instance.results.all().delete()
 
         elif new_status == Block.PROCESSING: #started to process
diff --git a/beat/web/experiments/tests.py b/beat/web/experiments/tests.py
index f82abacef56d810f4a6cc4d46ee4907b25b4873d..209ab725f9389ca3d6331da868217c28179d243a 100755
--- a/beat/web/experiments/tests.py
+++ b/beat/web/experiments/tests.py
@@ -970,7 +970,7 @@ class ExperimentStartingAPI(ExperimentTestBase):
         self.assertEqual(experiment.blocks.count(), 3)
 
         block = experiment.blocks.get(name='addition1')
-        self.assertEqual(Block.NOT_CACHED, block.status)
+        self.assertEqual(Block.PENDING, block.status)
         self.assertFalse(block.analyzer)
         self.assertEqual(0, block.results.count())
 
@@ -981,7 +981,7 @@ class ExperimentStartingAPI(ExperimentTestBase):
         self.assertEqual(CachedFile.NOT_CACHED, cached_file.status)
 
         block = experiment.blocks.get(name='addition2')
-        self.assertEqual(Block.NOT_CACHED, block.status)
+        self.assertEqual(Block.PENDING, block.status)
         self.assertFalse(block.analyzer)
         self.assertEqual(0, block.results.count())
 
@@ -992,7 +992,7 @@ class ExperimentStartingAPI(ExperimentTestBase):
         self.assertEqual(CachedFile.NOT_CACHED, cached_file.status)
 
         block = experiment.blocks.get(name='analysis')
-        self.assertEqual(Block.NOT_CACHED, block.status)
+        self.assertEqual(Block.PENDING, block.status)
         self.assertTrue(block.analyzer)
         self.assertEqual(0, block.results.count())
 
@@ -1228,7 +1228,7 @@ class ResultsAPI(ExperimentTestBase):
     def test_retrieve_done_experiment(self):
         for name in ['addition1', 'addition2', 'analysis']:
             block = self.experiment.blocks.get(name=name)
-            block.status = Block.CACHED
+            block.status = Block.DONE
             block.save()
 
         self.experiment.status = Experiment.DONE
@@ -1313,7 +1313,7 @@ class ResultsAPI(ExperimentTestBase):
     def test_retrieve_certified_experiment(self):
         for name in ['addition1', 'addition2', 'analysis']:
             block = self.experiment.blocks.get(name=name)
-            block.status = Block.CACHED
+            block.status = Block.DONE
             block.save()
 
         self.experiment.status = Experiment.DONE
@@ -1351,7 +1351,7 @@ class ResultsAPI(ExperimentTestBase):
 
     def test_retrieve_done_experiment_results_data(self):
         analysis_block = self.experiment2.blocks.get(name='analysis')
-        analysis_block.status = Block.CACHED
+        analysis_block.status = Block.DONE
         analysis_block.save()
 
         db_result = Result()
@@ -1396,7 +1396,7 @@ class ResultsAPI(ExperimentTestBase):
 
     def test_retrieve_certified_experiment_results(self):
         analysis_block = self.experiment2.blocks.get(name='analysis')
-        analysis_block.status = Block.CACHED
+        analysis_block.status = Block.DONE
         analysis_block.save()
 
         db_result = Result()
diff --git a/beat/web/libraries/models.py b/beat/web/libraries/models.py
index e2ec84b87b38d8c8f59924dcf7ea941bd4120daa..ffc0d27f3174a51893465f3a1d54baf20760157e 100644
--- a/beat/web/libraries/models.py
+++ b/beat/web/libraries/models.py
@@ -240,7 +240,7 @@ class Library(Code):
 
         envs = Environment.objects.filter(blocks__in=Block.objects.filter( \
             algorithm__in=self.used_by_algorithms.all()).filter( \
             Q(experiment__status=Experiment.DONE) | \
-            ((~Q(experiment__status=Experiment.DONE)) & Q(status=Block.CACHED))
+            ((~Q(experiment__status=Experiment.DONE)) & Q(status=Block.DONE))
             )).annotate(itemcount=Count('id')).order_by('-creation_date' \
             ).distinct()
diff --git a/beat/web/reports/tests.py b/beat/web/reports/tests.py
index 693a884b1097e1a96aca4e9456975451e7ea0900..2da6c2f42b662062ec3d4e349fdde8be6daaa903 100755
--- a/beat/web/reports/tests.py
+++ b/beat/web/reports/tests.py
@@ -685,7 +685,7 @@ class ReportTestCase(APITestCase):
         self.client.logout()
 
         for block in experiment.blocks.all():
-            block.status = Block.CACHED
+            block.status = Block.DONE
             block.save()
 
         experiment.status = status