From f393d1fc6a235bfaf914ff8effc6be3adb25e107 Mon Sep 17 00:00:00 2001
From: Andre Anjos <andre.dos.anjos@gmail.com>
Date: Tue, 5 Apr 2016 12:30:57 +0200
Subject: [PATCH] [experiments,backend] API adaptations to the new experiment
 models

---
 beat/web/backend/api.py             | 13 +++++--------
 beat/web/experiments/serializers.py |  4 ++--
 2 files changed, 7 insertions(+), 10 deletions(-)

diff --git a/beat/web/backend/api.py b/beat/web/backend/api.py
index c3e9b64f2..959f91a31 100644
--- a/beat/web/backend/api.py
+++ b/beat/web/backend/api.py
@@ -350,7 +350,7 @@ def cache_cleanup(request):
     # Reset the DB representation of the cache
     data = json.loads(data)
     if len(data) > 0:
-        blocks = Block.objects.filter(hashes__hash__in=data)
+        blocks = Block.objects.filter(outputs__hash__in=data)
         for block in blocks:
             block.status = Block.NOT_CACHED
             block.save()
@@ -424,7 +424,7 @@ def block_started(request):
     # Update all the other similar not-cached blocks and associated scheduled
     # experiments. Note we don't updated failed blocks or unscheduled
     # experiments as not to reset experiments that have already been run.
-    similar_blocks = Block.objects.filter(hashes__in=block.hashes.all()).exclude(pk=block.pk).order_by('pk').distinct()
+    similar_blocks = Block.objects.filter(outputs__in=block.outputs.all()).exclude(pk=block.pk).order_by('pk').distinct()
     similar_blocks.filter(status=Block.NOT_CACHED).update(status=Block.PROCESSING)
     Experiment.objects.filter(blocks__in=similar_blocks, status=Experiment.SCHEDULED).update(start_date=datetime.now(), status=Experiment.RUNNING)
 
@@ -513,10 +513,7 @@ def block_finished(request):
     # Create or retrieve cached files -- attach to block
     all_cached_files = []
     for hash in data['outputs']:
-        cache, created = CachedFile.objects.get_or_create(hash=hash)
-        if created:
-            cache.hash = hash
-            cache.save()
+        cache = CachedFile.objects.get(hash=hash)
         cache.blocks.add(block)
         all_cached_files.append(cache)
 
@@ -581,7 +578,7 @@ def block_finished(request):
     if block.analyzer and (block_state == 'processed'):
         data_source = beat.core.data.CachedDataSource()
         data_source.setup(os.path.join(settings.CACHE_ROOT,
-            beat.core.hash.toPath(block.hashes.all()[0].hash)),
+            beat.core.hash.toPath(block.outputs.all()[0].hash)),
             settings.PREFIX)
         output_data = data_source.next()[0]
         if output_data is not None:
@@ -589,7 +586,7 @@ def block_finished(request):
                 block.algorithm.fullname())
             for field, value in output_data.as_dict().items():
                 result_entry = Result()
-                result_entry.block = block
+                result_entry.cache = block.first_cache()
                 result_entry.primary = algorithm.results[field]['display']
                 result_entry.name = field
                 result_entry.type = algorithm.results[field]["type"]
diff --git a/beat/web/experiments/serializers.py b/beat/web/experiments/serializers.py
index cd0bbaf3f..bab80c88b 100644
--- a/beat/web/experiments/serializers.py
+++ b/beat/web/experiments/serializers.py
@@ -31,7 +31,7 @@
 from ..common.serializers import ShareableSerializer
 from ..common.fields import JSONSerializerField
 from ..ui.templatetags.markup import restructuredtext
-from .models import Experiment, Block, CachedFile
+from .models import Experiment, Block
 
 from datetime import datetime
 
@@ -252,7 +252,7 @@ class ExperimentResultsSerializer(ShareableSerializer):
         return results
 
     def get_errors(self, obj):
-        serializer = BlockErrorSerializer(obj.blocks.filter(hashes__error_report__isnull=False), many=True)
+        serializer = BlockErrorSerializer(obj.blocks.filter(outputs__error_report__isnull=False), many=True)
         return serializer.data
 
     def get_html_description(self, obj):
--
GitLab