Added a new TextField to the BackupData model to store additional (optional) files uploaded by the user.

dvanaken 2020-04-13 11:31:44 -04:00 committed by Dana Van Aken
parent 52843f6fc3
commit 4e1233ded2
4 changed files with 62 additions and 33 deletions

.pylintrc

@@ -13,7 +13,7 @@ profile=no
 # Add files or directories to the blacklist. They should be base names, not
 # paths.
-ignore=CVS,.git,manage.py,0001_initial.py,0002_enable_compression.py,0003_load_initial_data.py,0004_add_lhs.py,0005_add_workload_field.py,0006_session_hyperparameters.py,0007_executiontime.py,0008_change_result_taskids_field.py,0009_change_executiontime_function_field.py,0010_add_pipeline_data_field.py,0011_knob_bound_fields.py,0012_make_workload_status_editable.py,credentials.py,create_knob_settings.py
+ignore=CVS,.git,manage.py,0001_initial.py,0002_enable_compression.py,0003_load_initial_data.py,0004_add_lhs.py,0005_add_workload_field.py,0006_session_hyperparameters.py,0007_executiontime.py,0008_change_result_taskids_field.py,0009_change_executiontime_function_field.py,0010_add_pipeline_data_field.py,0011_knob_bound_fields.py,0012_make_workload_status_editable.py,0013_backupdata_other.py,credentials.py,create_knob_settings.py
 # ignore-patterns=**/migrations/*.py

website/migrations/0013_backupdata_other.py

@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-04-03 14:31
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('website', '0012_make_workload_status_editable'),
    ]

    operations = [
        migrations.AddField(
            model_name='backupdata',
            name='other',
            field=models.TextField(default='{}'),
        ),
    ]
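Because the field is added with default='{}', Django backfills every pre-existing BackupData row with the empty JSON object when the migration is applied (python manage.py migrate website). A minimal sketch of verifying that; the import path and query below are illustrative, not part of this commit:

    # Illustrative check after applying the migration: old rows should
    # carry the empty-JSON default rather than NULL.
    from website.models import BackupData  # assumed import path

    assert all(bd.other == '{}' for bd in BackupData.objects.all()[:10])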

website/models.py

@@ -506,6 +506,7 @@ class BackupData(BaseModel):
     raw_summary = models.TextField()
     knob_log = models.TextField()
     metric_log = models.TextField()
+    other = models.TextField(default='{}')


 class ExecutionTime(models.Model):
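Since the new column holds a JSON-serialized dict mapping filenames to file contents, reading the extra files back is a single loads call. A minimal sketch, assuming JSONUtil lives at website.utils and that some_result is an existing Result; both names are illustrative:

    # Sketch: recover the user's extra uploaded files from the new field.
    from website.models import BackupData   # assumed import path
    from website.utils import JSONUtil      # assumed import path

    backup = BackupData.objects.get(result=some_result)  # some_result is illustrative
    other_files = JSONUtil.loads(backup.other)           # dict: filename -> contents
    for name, contents in other_files.items():
        print(name, len(contents))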

website/views.py

@@ -529,7 +529,7 @@ def handle_result_files(session, files, execution_times=None):
         worst_result = Result.objects.filter(metric_data=worst_metric).first()
         last_result = Result.objects.filter(session=session).order_by("-id").first()
-        backup_data = BackupData.objects.filter(result=worst_result).first()
+        #backup_data = BackupData.objects.filter(result=worst_result).first()
         last_conf = JSONUtil.loads(last_result.next_configuration)
         last_conf = last_conf["recommendation"]
         last_conf = parser.convert_dbms_knobs(last_result.dbms.pk, last_conf)
@@ -581,10 +581,11 @@ def handle_result_files(session, files, execution_times=None):
         result.save()
         result = Result.objects.filter(session=session).order_by("-id").first()
-        backup_data.pk = None
-        backup_data.result = result
-        backup_data.creation_time = now()
-        backup_data.save()
+        knob_diffs, metric_diffs = {}, {}
+        #backup_data.pk = None
+        #backup_data.result = result
+        #backup_data.creation_time = now()
+        #backup_data.save()

     else:
         dbms_type = DBMSType.type(summary['database_type'])
@@ -688,6 +689,30 @@ def handle_result_files(session, files, execution_times=None):
         workload.status = WorkloadStatusType.MODIFIED
         workload.save()

+    other_data = {}
+    if execution_times:
+        other_data['execution_times.csv'] = execution_times
+        try:
+            batch = []
+            f = StringIO(execution_times)
+            reader = csv.reader(f, delimiter=',')
+            for module, fn, tag, start_ts, end_ts in reader:
+                start_ts = float(start_ts)
+                end_ts = float(end_ts)
+                exec_time = end_ts - start_ts
+                start_time = datetime.fromtimestamp(int(start_ts), timezone(TIME_ZONE))
+                batch.append(
+                    ExecutionTime(module=module, function=fn, tag=tag, start_time=start_time,
+                                  execution_time=exec_time, result=result))
+            ExecutionTime.objects.bulk_create(batch)
+        except Exception:  # pylint: disable=broad-except
+            LOG.warning("Error parsing execution times:\n%s", execution_times, exc_info=True)
+
+    for filename, filedata in files.items():
+        if filename not in ('knobs', 'metrics_before', 'metrics_after', 'summary'):
+            other_data[filename] = filedata
+
     # Save all original data
     backup_data = BackupData.objects.create(
         result=result, raw_knobs=files['knobs'],
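For reference, the parsing loop above expects each execution_times row to carry five comma-separated fields: module, function, tag, start timestamp, and end timestamp (epoch seconds). A standalone sketch of that layout; the sample row is illustrative, not real data:

    # Standalone sketch of the expected execution_times CSV layout.
    import csv
    from io import StringIO

    execution_times = "celery.tasks,aggregate_target_results,,1585922400.0,1585922403.5\n"
    for module, fn, tag, start_ts, end_ts in csv.reader(StringIO(execution_times)):
        print(module, fn, tag or "(no tag)", float(end_ts) - float(start_ts))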
@@ -695,7 +720,8 @@ def handle_result_files(session, files, execution_times=None):
         raw_final_metrics=files['metrics_after'],
         raw_summary=files['summary'],
         knob_log=JSONUtil.dumps(knob_diffs, pprint=True),
-        metric_log=JSONUtil.dumps(metric_diffs, pprint=True))
+        metric_log=JSONUtil.dumps(metric_diffs, pprint=True),
+        other=JSONUtil.dumps(other_data))
     backup_data.save()

     session.project.last_update = now()
@@ -742,24 +768,6 @@ def handle_result_files(session, files, execution_times=None):
     result.task_ids = JSONUtil.dumps(response.as_tuple())
     result.save()

-    if execution_times:
-        try:
-            batch = []
-            f = StringIO(execution_times)
-            reader = csv.reader(f, delimiter=',')
-            for module, fn, tag, start_ts, end_ts in reader:
-                start_ts = float(start_ts)
-                end_ts = float(end_ts)
-                exec_time = end_ts - start_ts
-                start_time = datetime.fromtimestamp(int(start_ts), timezone(TIME_ZONE))
-                batch.append(
-                    ExecutionTime(module=module, function=fn, tag=tag, start_time=start_time,
-                                  execution_time=exec_time, result=result))
-            ExecutionTime.objects.bulk_create(batch)
-        except Exception:  # pylint: disable=broad-except
-            LOG.warning("Error parsing execution times:\n%s", execution_times, exc_info=True)
-
     return HttpResponse("Result stored successfully! Running tuner...({}, status={}) Result ID:{}"
                         .format(celery_status, response.status, result_id))