Commit 6acb6096 authored by André Anjos

[experiments] New block relationships and constraints

parent 0fa5b4ba
Merge request: !194 Scheduler
@@ -32,17 +32,19 @@ from django.contrib import admin
from django.core.files.base import ContentFile
from django.utils import six
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.db.models import Max
from django.db.models import Max, Count
from .models import Experiment as ExperimentModel
from .models import Block as BlockModel
from .models import Result as ResultModel
from .models import CachedFile as CachedFileModel
from .models import BlockInput as BlockInputModel
from .models import validate_experiment
from ..ui.forms import CodeMirrorJSONFileField, CodeMirrorRSTFileField, \
NameField
NameField, CodeMirrorJSONCharField
from ..common.texts import Messages
@@ -113,6 +115,27 @@ class ExperimentModelForm(forms.ModelForm):
self.data['declaration_file'] = ContentFile(self.data['declaration_file'], name='unsaved')
class BlockInline(admin.TabularInline):
model = BlockModel
extra = 0
readonly_fields = ['link', 'algorithm', 'analyzer', 'status']
ordering = ['id']
fields = readonly_fields
def link(self, obj):
url = reverse('admin:experiments_block_change', args=(obj.pk,))
return mark_safe('<a href="%s">%s</a>' % (url, obj.name))
link.short_description = 'name'
def has_delete_permission(self, request, obj=None):
return False
def has_add_permission(self, request):
return False
#----------------------------------------------------------
@@ -163,6 +186,10 @@ class Experiment(admin.ModelAdmin):
'shared_with_team'
]
inlines = [
BlockInline,
]
fieldsets = (
(None,
dict(
@@ -205,9 +232,140 @@ admin.site.register(ExperimentModel, Experiment)
#----------------------------------------------------------
class BlockInputInline(admin.TabularInline):
model = BlockInputModel
verbose_name = 'Input'
verbose_name_plural = 'Inputs'
extra = 0
ordering = ['database', 'cache']
readonly_fields = ['input', 'channel']
fields = readonly_fields
def input(self, obj):
if obj.database:
url = reverse('admin:databases_databaseset_change',
args=(obj.database.set.pk,))
text = '%s (%s)' % (obj.database, obj.database.hash)
what = 'Dataset Output'
else:
url = reverse('admin:experiments_cachedfile_change',
args=(obj.cache.pk,))
text = obj.cache.hash
what = 'Cached File'
return mark_safe('%s: <a href="%s">%s</a>' % (what, url, text))
def has_delete_permission(self, request, obj=None):
return False
def has_add_permission(self, request):
return False
class CachedFileInline(admin.TabularInline):
model = CachedFileModel.blocks.through
verbose_name = 'Output'
verbose_name_plural = 'Outputs'
extra = 0
readonly_fields = ['output']
fields = readonly_fields
def output(self, obj):
url = reverse('admin:experiments_cachedfile_change', args=(obj.cachedfile.pk,))
text = obj.cachedfile.hash
what = 'Cached File'
return mark_safe('%s: <a href="%s">%s</a>' % (what, url, text))
def has_delete_permission(self, request, obj=None):
return False
def has_add_permission(self, request):
return False
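A note on the pattern above: a `ManyToManyField` cannot be inlined directly, so `CachedFileInline` targets the auto-generated through model (`CachedFileModel.blocks.through`). Django names the through model's foreign keys after the lowercased endpoint models, which is why the `output` column reads `obj.cachedfile`. A minimal sketch of what that implicit table looks like (the class itself is generated by Django, never written by hand):

class CachedFile_blocks(models.Model):  # illustrative only: Django creates this automatically
    cachedfile = models.ForeignKey('experiments.CachedFile', on_delete=models.CASCADE)
    block = models.ForeignKey('experiments.Block', on_delete=models.CASCADE)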
class BlockDependentsInline(admin.TabularInline):
model = BlockModel.dependencies.through
verbose_name = 'Dependent'
verbose_name_plural = 'Dependents'
fk_name = 'to_block'
extra = 0
readonly_fields = ['name', 'algorithm', 'analyzer', 'status']
ordering = ['id']
fields = readonly_fields
def name(self, obj):
url = reverse('admin:experiments_block_change', args=(obj.from_block.pk,))
return mark_safe('<a href="%s">%s</a>' % (url, obj.from_block.name))
def algorithm(self, obj):
return obj.from_block.algorithm
def analyzer(self, obj):
return obj.from_block.analyzer
analyzer.boolean = True
def status(self, obj):
return obj.from_block.get_status_display()
def has_delete_permission(self, request, obj=None):
return False
def has_add_permission(self, request):
return False
class BlockDependenciesInline(admin.TabularInline):
model = BlockModel.dependencies.through
verbose_name = 'Dependency'
verbose_name_plural = 'Dependencies'
fk_name = 'from_block'
extra = 0
readonly_fields = ['name', 'algorithm', 'analyzer', 'status']
ordering = ['id']
fields = readonly_fields
def name(self, obj):
url = reverse('admin:experiments_block_change', args=(obj.to_block.pk,))
return mark_safe('<a href="%s">%s</a>' % (url, obj.to_block.name))
def algorithm(self, obj):
return obj.to_block.algorithm
def analyzer(self, obj):
return obj.to_block.analyzer
analyzer.boolean = True
def status(self, obj):
return obj.to_block.get_status_display()
def has_delete_permission(self, request, obj=None):
return False
def has_add_permission(self, request):
return False
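Both dependency inlines reuse the same through table because `dependencies` is declared as a non-symmetrical self-referential many-to-many (see the model change further down); `fk_name` picks the direction each inline walks. A sketch of the equivalent ORM queries, assuming a `Block` instance `b`:

b.dependencies.all()  # blocks b depends on (through rows with from_block == b)
b.dependents.all()    # blocks depending on b (through rows with to_block == b)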
class BlockModelForm(forms.ModelForm):
command = CodeMirrorJSONCharField(
help_text=Messages['json'],
readonly=True,
)
class Meta:
model = BlockModel
exclude = []
class Block(admin.ModelAdmin):
list_display = ('id', 'experiment', 'name', 'algorithm', 'analyzer', 'status', 'environment')
list_display = ('id', 'author', 'toolchain', 'xp', 'name', 'algorithm', 'analyzer', 'status', 'ins', 'outs', 'environment', 'q')
search_fields = ['name',
'experiment__author__username',
'experiment__toolchain__author__username',
@@ -220,6 +378,84 @@ class Block(admin.ModelAdmin):
]
list_display_links = ('id', 'name')
inlines = [
BlockDependenciesInline,
BlockInputInline,
CachedFileInline,
BlockDependentsInline,
]
exclude = ['dependencies']
def get_queryset(self, request):
qs = super(Block, self).get_queryset(request)
return qs.annotate(Count('outputs'))
def author(self, obj):
return obj.experiment.author
def toolchain(self, obj):
return obj.experiment.toolchain
def xp(self, obj):
return obj.experiment.name
xp.short_description = 'experiment'
def ins(self, obj):
return obj.inputs.count()
def outs(self, obj):
return obj.outputs__count
outs.admin_order_field = 'outputs__count'
def q(self, obj):
if obj.queue: return obj.queue.name
return None
q.short_description = 'queue'
def get_readonly_fields(self, request, obj=None):
return list(self.readonly_fields) + \
[field.name for field in obj._meta.fields if field.name != 'command']
def has_delete_permission(self, request, obj=None):
return False
def has_add_permission(self, request):
return False
form = BlockModelForm
fieldsets = (
(None,
dict(
fields=('id', 'name', 'experiment'),
),
),
('Status and dates',
dict(
fields=('creation_date', 'start_date', 'end_date', 'runnable_date', 'status'),
),
),
('Code',
dict(
classes=('collapse',),
fields=('algorithm', 'analyzer',),
),
),
('Backend',
dict(
classes=('collapse',),
fields=('environment', 'queue', 'required_slots', 'channel'),
),
),
('Command',
dict(
classes=('collapse',),
fields=('command',),
),
),
)
admin.site.register(BlockModel, Block)
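The `get_queryset` override in the admin above annotates every row with `Count('outputs')`; that single GROUP BY is what lets the `outs` column declare `admin_order_field = 'outputs__count'` and stay sortable without issuing one COUNT query per block. The same pattern outside the admin, as a sketch against the models in this commit:

from django.db.models import Count

qs = BlockModel.objects.annotate(Count('outputs')).order_by('-outputs__count')
for block in qs[:10]:
    print(block.name, block.outputs__count)  # the annotation appears as an attribute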
@@ -228,19 +464,25 @@ admin.site.register(BlockModel, Block)
class Result(admin.ModelAdmin):
list_display = ('id', 'block', 'name', 'type', 'primary', 'data_value')
list_display = ('id', 'cache', 'name', 'type', 'primary', 'data_value')
search_fields = [
'name',
'type',
'block__name',
'block__experiment__name',
'block__experiment__author__username',
'block__experiment__toolchain__name',
'cache__hash',
]
list_display_links = ('id', 'name')
def get_readonly_fields(self, request, obj=None):
return list(self.readonly_fields) + \
[field.name for field in obj._meta.fields]
def has_delete_permission(self, request, obj=None):
return False
def has_add_permission(self, request):
return False
admin.site.register(ResultModel, Result)
@@ -334,4 +576,16 @@ class CachedFile(admin.ModelAdmin):
),
)
readonly_fields = ['blocks']
def get_readonly_fields(self, request, obj=None):
return list(self.readonly_fields) + \
[field.name for field in obj._meta.fields]
def has_delete_permission(self, request, obj=None):
return False
def has_add_permission(self, request):
return False
admin.site.register(CachedFileModel, CachedFile)
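All of the admins above share the same lock-down recipe: `get_readonly_fields` returns every concrete model field, and the add/delete permissions are switched off, so the admin becomes a browser over scheduler state rather than an editor. A condensed sketch of that recipe, using an illustrative mixin name that is not part of this commit:

class ReadOnlyAdminMixin(object):  # hypothetical helper, for illustration only
    def get_readonly_fields(self, request, obj=None):
        return list(self.readonly_fields) + \
            [field.name for field in self.model._meta.fields]
    def has_add_permission(self, request):
        return False
    def has_delete_permission(self, request, obj=None):
        return False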
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, utils
from django.conf import settings
import simplejson
import beat.core.experiment
from ...common import storage
def move_result_to_cache(apps, schema_editor):
'''Moves the result association from the block to the related cache file'''
Result = apps.get_model("experiments", "Result")
total = Result.objects.count()
if total: print('')
for i, r in enumerate(Result.objects.all()):
print("Resetting result %d/%d..." % (i+1, total))
r.cache = r.block.hashes.first()
r.save()
def reset_blocks(apps, schema_editor):
'''Resets block dependencies and queue relationship'''
Experiment = apps.get_model("experiments", "Experiment")
Block = apps.get_model("experiments", "Block")
BlockInput = apps.get_model("experiments", "BlockInput")
CachedFile = apps.get_model("experiments", "CachedFile")
Queue = apps.get_model("backend", "Queue")
Environment = apps.get_model("backend", "Environment")
Algorithm = apps.get_model("algorithms", "Algorithm")
DatabaseSetOutput = apps.get_model("databases", "DatabaseSetOutput")
total = Experiment.objects.count()
for i, e in enumerate(Experiment.objects.order_by('id')):
fullname = '%s/%s/%s/%d/%s' % (
e.author.username,
e.toolchain.author.username,
e.toolchain.name,
e.toolchain.version,
e.name,
)
print("Updating blocks for experiment %d/%d (%s, id=%d)..." % (i+1, total, fullname, e.id))
xp_decl = simplejson.loads(storage.get_file_content(e,
'declaration_file'))
tc_decl = simplejson.loads(storage.get_file_content(e.toolchain,
'declaration_file'))
xp = beat.core.experiment.Experiment(settings.PREFIX, (xp_decl,
tc_decl))
if xp.errors:
message = "The experiment `%s' isn't valid (skipping " \
"block update), due to the following errors:\n * %s"
print(message % (fullname, '\n * '.join(xp.errors)))
continue
# Loads the experiment execution description, creating the Blocks,
# BlockInputs and BlockOutputs as required.
for block_name, description in xp.setup().items():
# Checks that the Queue/Environment exists
job_description = description['configuration']
env = Environment.objects.filter(
name=job_description['environment']['name'],
version=job_description['environment']['version'],
)
if not env:
print("Cannot find environment `%s (%s)' - not setting" % \
(job_description['environment']['name'],
job_description['environment']['version']))
env = None
else:
env = env[0]
# Search for a queue that contains the specific environment. Notice
# we don't require the environment to exist in relation to the
# queue, as it may have been removed already.
queue = Queue.objects.filter(name=job_description['queue'])
if not queue:
print("Cannot find queue `%s'" % job_description['queue'])
queue = None
else:
queue = queue[0]
parts = job_description['algorithm'].split('/')
algorithm = Algorithm.objects.get(
author__username=parts[0],
name=parts[1],
version=parts[2],
)
# Ties the block in
slots = job_description.get('nb_slots')
try:
b, _ = Block.objects.get_or_create(experiment=e,
name=block_name, algorithm=algorithm)
except utils.IntegrityError as exc:
print("Block `%s' for experiment `%s' already exists - " \
"modifying entry for migration purposes. This " \
"issue is due a misconnection on the toolchain level " \
"(known case: tpereira/full_isv/2)" % \
(block_name, fullname))
b = Block.objects.get(experiment=e, name=block_name)
b.command=simplejson.dumps(job_description, indent=4)
b.status='N' if (e.status == 'P') else b.status
b.environment=env
b.queue=queue
b.algorithm = algorithm
b.analyzer = (algorithm.result_dataformat is not None)
b.required_slots=job_description['nb_slots']
b.channel=job_description['channel']
b.save()
# from this point: requires block to have an assigned id
b.dependencies.add(*[e.blocks.get(name=k) \
for k in description['dependencies']])
# reset inputs and outputs - create them only if necessary
for v in job_description['inputs'].values():
if 'database' in v: #database input
db = DatabaseSetOutput.objects.get(hash=v['hash'])
BlockInput.objects.get_or_create(block=b,
channel=v['channel'], database=db)
else:
cache = CachedFile.objects.get(hash=v['hash'])
BlockInput.objects.get_or_create(block=b,
channel=v['channel'], cache=cache)
outputs = job_description.get('outputs',
{'': job_description.get('result')})
for v in outputs.values():
cache, cr = CachedFile.objects.get_or_create(hash=v['hash'])
if cr:
print("CachedFile (hash=%s) created for block `%s' of " \
"experiment `%s' which is in state `%s'" % \
(cache.hash, block_name, fullname,
b.get_status_display()))
cache.blocks.add(b)
# asserts that all blocks (except analyzer blocks) have dependents
for b in e.blocks.all():
assert (b.analyzer and b.dependents.count() == 0) or b.dependents.count() > 0
class Migration(migrations.Migration):
dependencies = [
('backend', '0002_auto_20160330_0005'),
('databases', '0002_auto_20160329_1733'),
('experiments', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='result',
name='cache',
field=models.ForeignKey(related_name='results', to='experiments.CachedFile', null=True),
),
migrations.RunPython(move_result_to_cache),
migrations.RemoveField(
model_name='result',
name='block',
),
migrations.CreateModel(
name='BlockInput',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('channel', models.CharField(default=b'', help_text=b'Synchronization channel within the toolchain', max_length=200, blank=True)),
('block', models.ForeignKey(related_name='inputs', to='experiments.Block', null=True)),
('cache', models.ForeignKey(related_name='inputs', to='experiments.CachedFile', null=True)),
('database', models.ForeignKey(related_name='blocks', to='databases.DatabaseSetOutput', null=True)),
],
),
migrations.AddField(
model_name='block',
name='channel',
field=models.CharField(default=b'', help_text=b'Synchronization channel within the toolchain', max_length=200, blank=True),
),
migrations.AddField(
model_name='block',
name='command',
field=models.TextField(null=True, blank=True),
),
migrations.AddField(
model_name='block',
name='dependencies',
field=models.ManyToManyField(related_name='dependents', to='experiments.Block', blank=True),
),
migrations.AlterField(
model_name='block',
name='environment',
field=models.ForeignKey(related_name='blocks', on_delete=models.deletion.SET_NULL, to='backend.Environment', null=True),
),
migrations.AddField(
model_name='block',
name='queue',
field=models.ForeignKey(related_name='blocks', on_delete=models.deletion.SET_NULL, to='backend.Queue', null=True),
),
migrations.AddField(
model_name='block',
name='required_slots',
field=models.PositiveIntegerField(default=1),
),
migrations.AddField(
model_name='block',
name='runnable_date',
field=models.DateTimeField(null=True, blank=True),
),
migrations.AlterField(
model_name='block',
name='status',
field=models.CharField(default=b'N', max_length=1, choices=[(b'N', b'Not cached'), (b'P', b'Processing'), (b'C', b'Cached'), (b'F', b'Failed'), (b'S', b'Skipped'), (b'L', b'Cancelled')]),
),
migrations.AlterUniqueTogether(
name='block',
unique_together=set([('experiment', 'name')]),
),
migrations.AlterField(
model_name='cachedfile',
name='blocks',
field=models.ManyToManyField(related_name='outputs', to='experiments.Block', blank=True),
),
migrations.RunPython(reset_blocks),
]
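Operation order matters here: `cache` is added to `Result` and populated by `move_result_to_cache` before the old `block` foreign key is dropped, and `reset_blocks` runs last, once every schema change it relies on (command, dependencies, queue, required_slots, the `outputs` related name) is in place. As written, the two `RunPython` operations are one-way; a common pattern to keep a migration unapplyable, shown only as an illustration, is to pass a no-op reverse callable:

migrations.RunPython(move_result_to_cache, migrations.RunPython.noop)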
@@ -40,7 +40,6 @@ import beat.core.experiment
from beat.core.utils import NumpyJSONEncoder
from ..algorithms.models import Algorithm
from ..databases.models import DatabaseSet
from ..toolchains.models import Toolchain
from ..common.models import Shareable
@@ -55,6 +54,7 @@ from ..common.exceptions import ShareError
from ..common.texts import Messages
from ..common.storage import OverwriteStorage
from ..backend.models import Queue, Environment
from ..databases.models import DatabaseSet, DatabaseSetOutput
from ..import __version__
@@ -217,8 +217,10 @@ class Experiment(Shareable):
#_____ Fields __________
author = models.ForeignKey(User, related_name='experiments')
toolchain = models.ForeignKey(Toolchain, related_name='experiments')
author = models.ForeignKey(User, related_name='experiments',
on_delete=models.CASCADE)
toolchain = models.ForeignKey(Toolchain,
related_name='experiments', on_delete=models.CASCADE)
name = models.CharField(max_length=200)
short_description = models.CharField(max_length=100, default='', blank=True, help_text=Messages['short_description'])
status = models.CharField(max_length=1, choices=STATUS, default=PENDING)
@@ -244,8 +246,8 @@ class Experiment(Shareable):
# read-only parameters that are updated at every save(), if required
hash = models.CharField(max_length=64)
referenced_datasets = models.ManyToManyField(DatabaseSet, related_name='experiments', blank=True)
hash = models.CharField(max_length=64)
referenced_datasets = models.ManyToManyField(DatabaseSet, related_name='experiments', blank=True)
referenced_algorithms = models.ManyToManyField(Algorithm, related_name='experiments', blank=True)
objects = ExperimentManager()
@@ -409,6 +411,9 @@ class Experiment(Shareable):
storage.rename_file(self, 'description_file', self.description_filename())
if content_modified:
# Creates experiment blocks and sets up dependencies
self.update_blocks()
# Link the experiment to the datasets
self.referenced_datasets.clear()
for dataset_declaration in xp.datasets.values():
@@ -485,6 +490,88 @@ class Experiment(Shareable):
super(Experiment, self).share(users=users, teams=teams)
def update_blocks(self):
"""Updates internal block representation of an experiment"""
corexp = self.core()
# Loads the experiment execution description, creating the Blocks,
# BlockInputs and BlockOutputs as required.
for block_name, description in corexp.setup().items():
# Checks that the Queue/Environment exists
job_description = description['configuration']
env = Environment.objects.filter(
name=job_description['environment']['name'],
version=job_description['environment']['version'],
)
if not env:
logger.warn("Cannot find environment `%s (%s)' - not setting",
job_description['environment']['name'],
job_description['environment']['version'])
env = None
else:
env = env[0]
# Search for a queue that contains the specific environment
if env:
queue = Queue.objects.filter(name=job_description['queue'],
environments__in=[env])
else:
queue = Queue.objects.filter(name=job_description['queue'])
if not queue:
env_name = env.fullname() if env else 'NULL'
logger.warn("Cannot find queue `%s' which contains " \
"environment `%s' - not setting",
job_description['queue'], env_name)
queue = None
else:
queue = queue[0]
parts = job_description['algorithm'].split('/')
algorithm = Algorithm.objects.get(
author__username=parts[0],
name=parts[1],
version=parts[2],
)
# Ties the block in
slots = job_description.get('nb_slots')
b, _ = Block.objects.get_or_create(experiment=self,
name=block_name, algorithm=algorithm)
b.command=simplejson.dumps(job_description, indent=4)
b.status=Block.NOT_CACHED if (self.status == Experiment.PENDING) else b.status
b.analyzer=algorithm.analysis()
b.environment=env
b.queue=queue
b.required_slots=job_description['nb_slots']
b.channel=job_description['channel']
b.save()
# from this point: requires block to have an assigned id
b.dependencies.add(*[self.blocks.get(name=k) \
for k in description['dependencies']])
# reset inputs and outputs - create them only if necessary
for v in job_description['inputs'].values():
if 'database' in v: #database input
db = DatabaseSetOutput.objects.get(hash=v['hash'])
BlockInput.objects.get_or_create(block=b,
channel=v['channel'], database=db)
else:
cache = CachedFile.objects.get(hash=v['hash'])
BlockInput.objects.get_or_create(block=b,
channel=v['channel'], cache=cache)
outputs = job_description.get('outputs',
{'': job_description.get('result')})
for v in outputs.values():
cache, _ = CachedFile.objects.get_or_create(hash=v['hash'])
cache.blocks.add(b)
#_____ Methods __________
def is_busy(self):
@@ -649,26 +736,54 @@ class Block(models.Model):
PROCESSING = 'P'
CACHED = 'C'
FAILED = 'F'
SKIPPED = 'S'
CANCELLED = 'L'
STATUS = (
(NOT_CACHED, 'Not cached'),
(PROCESSING, 'Processing'),
(CACHED, 'Cached'),
(FAILED, 'Failed'),
(SKIPPED, 'Skipped'),
(CANCELLED, 'Cancelled'),
)
experiment = models.ForeignKey(Experiment, related_name='blocks')
name = models.CharField(max_length=200)
status = models.CharField(max_length=1, choices=STATUS, default=NOT_CACHED)
analyzer = models.BooleanField(default=False)
algorithm = models.ForeignKey(Algorithm, related_name='blocks')
creation_date = models.DateTimeField(null=True, blank=True, auto_now_add=True)
start_date = models.DateTimeField(null=True, blank=True)
end_date = models.DateTimeField(null=True, blank=True)
environment = models.ForeignKey(Environment, related_name='blocks', null=True)
experiment = models.ForeignKey(Experiment, related_name='blocks',
on_delete=models.CASCADE)
name = models.CharField(max_length=200)
command = models.TextField(null=True, blank=True)
status = models.CharField(max_length=1, choices=STATUS, default=NOT_CACHED)
analyzer = models.BooleanField(default=False)
algorithm = models.ForeignKey(Algorithm, related_name='blocks',
on_delete=models.CASCADE)
creation_date = models.DateTimeField(null=True, blank=True,
auto_now_add=True)
runnable_date = models.DateTimeField(null=True, blank=True)
start_date = models.DateTimeField(null=True, blank=True)
end_date = models.DateTimeField(null=True, blank=True)
environment = models.ForeignKey(Environment, related_name='blocks',
null=True, on_delete=models.SET_NULL)
queue = models.ForeignKey(Queue, related_name='blocks', null=True,
on_delete=models.SET_NULL)
required_slots = models.PositiveIntegerField(default=1)
channel = models.CharField(max_length=200, default='', blank=True,
help_text="Synchronization channel within the toolchain")
# relationship to blocks on which this block depends
dependencies = models.ManyToManyField('self',
related_name='dependents',
blank=True,
symmetrical=False,
)
objects = BlockManager()
class Meta:
unique_together = ('experiment', 'name')
def __str__(self):
return self.experiment.fullname() + ', ' + self.name + ' (%s)' % self.get_status_display()
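Together, `dependencies`, `status` and `runnable_date` give a scheduler what it needs to pick the next work items: a block is runnable when it is not yet cached and every block it depends on already is. A sketch of that selection, assuming the model above (the status constants come from `Block.STATUS`):

runnable = Block.objects.filter(status=Block.NOT_CACHED).exclude(
    dependencies__status__in=[Block.NOT_CACHED, Block.PROCESSING,
                              Block.FAILED, Block.SKIPPED, Block.CANCELLED])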
@@ -685,12 +800,12 @@ class Block(models.Model):
# Accessors for statistics
def __return_first__(self, field):
if not self.hashes.count(): return ''
return getattr(self.hashes.first(), field)
if not self.outputs.count(): return ''
return getattr(self.outputs.first(), field)
def first_cache(self):
if not self.hashes.count(): return None
return self.hashes.first()
if not self.outputs.count(): return None
return self.outputs.first()
def error_report(self):
return self.__return_first__('error_report')
@@ -737,6 +852,9 @@ class Block(models.Model):
def data_written_time(self):
return self.__return_first__('data_written_time') or 0.
# Accessor for results
results = property(lambda self: self.__return_first__('results'))
#----------------------------------------------------------
@@ -749,7 +867,7 @@ class CachedFileManager(models.Manager):
class CachedFile(models.Model):
blocks = models.ManyToManyField(Block, related_name='hashes', blank=True)
blocks = models.ManyToManyField(Block, related_name='outputs', blank=True)
hash = models.CharField(max_length=64, unique=True)
# the total amount of time this block took to run considering the
@@ -781,6 +899,9 @@ class CachedFile(models.Model):
data_written_nb_blocks = models.IntegerField(default=0)
data_written_time = models.FloatField(default=0.)
objects = CachedFileManager()
def __str__(self):
return self.hash
@@ -791,30 +912,60 @@ class CachedFile(models.Model):
#----------------------------------------------------------
class BlockInputManager(models.Manager):
def get_by_natural_key(self, hash):
candidate = self.filter(cache__hash=hash)
if candidate:
return candidate[0]
else:
return self.get(database__hash=hash)
class BlockInput(models.Model):
block = models.ForeignKey(Block, related_name='inputs', null=True,
on_delete=models.CASCADE)
# if the input comes from another block, then this one is set
cache = models.ForeignKey(CachedFile, related_name='inputs', null=True,
on_delete=models.CASCADE)
# if the input comes from a dataset, then this one is set
database = models.ForeignKey(DatabaseSetOutput, related_name='blocks',
null=True, on_delete=models.CASCADE)
channel = models.CharField(max_length=200, default='', blank=True,
help_text="Synchronization channel within the toolchain")
objects = BlockInputManager()
def natural_key(self):
return self.hash,
#----------------------------------------------------------
class ResultManager(models.Manager):
def get_by_natural_key(self, name, block_name, experiment_author,
toolchain_author, toolchain_name,
toolchain_version, experiment_name):
def get_by_natural_key(self, name, hash):
return self.get(
name=name,
block__name=block_name,
block__experiment__author__username=experiment_author,
block__experiment__toolchain__author__username=toolchain_author,
block__experiment__toolchain__name=toolchain_name,
block__experiment__toolchain__version=toolchain_version,
block__experiment__name=experiment_name,
cache__hash=hash,
)
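Natural keys exist so fixtures can reference rows without database ids; after this change a `Result` is identified by `(name, cache hash)` instead of the seven-field walk through block, experiment and toolchain. A sketch of a serialization call that exercises them, assuming Django's standard serialization framework:

from django.core import serializers

payload = serializers.serialize('json', Result.objects.all(),
                                use_natural_foreign_keys=True,
                                use_natural_primary_keys=True)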
class Result(models.Model):
SIMPLE_TYPE_NAMES = ('int32', 'float32', 'bool', 'string')
block = models.ForeignKey(Block, related_name='results')
name = models.CharField(max_length=200)
type = models.CharField(max_length=200)
primary = models.BooleanField(default=False)
data_value = models.TextField(null=True, blank=True)
cache = models.ForeignKey(CachedFile, related_name='results', null=True,
on_delete=models.CASCADE)
name = models.CharField(max_length=200)
type = models.CharField(max_length=200)
primary = models.BooleanField(default=False)
data_value = models.TextField(null=True, blank=True)
objects = ResultManager()
@@ -824,12 +975,7 @@ class Result(models.Model):
def natural_key(self):
return (
self.name,
self.block.name,
self.block.experiment.author.username,
self.block.experiment.toolchain.author.username,
self.block.experiment.toolchain.name,
self.block.experiment.toolchain.version,
self.block.experiment.name,
self.cache.hash,
)
def value(self):