Initial code to send/manage DB error
This commit is contained in:
parent e80639c859
commit 0dff040012
@@ -114,6 +114,7 @@ def restart_database():
         run_sql_script('restartOracle.sh')
     else:
         raise Exception("Database Type {} Not Implemented !".format(dconf.DB_TYPE))
+    return True


 @task
@@ -518,7 +519,19 @@ def loop(i):
     clean_logs()

     # restart database
-    restart_database()
+    restart_succeeded = restart_database()
+    if not restart_succeeded:
+        files = {'summary':b'{error:"DB_RESTART_ERROR"}',
+                 'knobs':b'',
+                 'metrics_before':b'',
+                 'metrics_after':b''}
+        response = requests.post(dconf.WEBSITE_URL + '/new_result/', files=files,
+                                 data={'upload_code': dconf.UPLOAD_CODE})
+        response = get_result()
+        save_next_config(response, t=result_timestamp)
+        change_conf(response['recommendation'])
+        return
+
     time.sleep(dconf.RESTART_SLEEP_SEC)

     # check disk usage
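A minimal standalone sketch of the driver-side reporting path added above, pulled out of loop() for readability. The WEBSITE_URL and UPLOAD_CODE constants here are hypothetical placeholders (the real driver reads both from dconf); the /new_result/ endpoint and the payload keys ('summary', 'knobs', 'metrics_before', 'metrics_after') are taken from the hunk above.

import requests

WEBSITE_URL = 'http://127.0.0.1:8000'  # hypothetical; the driver uses dconf.WEBSITE_URL
UPLOAD_CODE = 'XXXXXXXX'               # hypothetical; the driver uses dconf.UPLOAD_CODE


def report_restart_failure():
    # Upload an otherwise-empty result whose summary carries the DB_RESTART_ERROR
    # marker, so the server can recognize that the database failed to restart.
    files = {'summary': b'{error:"DB_RESTART_ERROR"}',
             'knobs': b'',
             'metrics_before': b'',
             'metrics_after': b''}
    return requests.post(WEBSITE_URL + '/new_result/', files=files,
                         data={'upload_code': UPLOAD_CODE})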
@@ -455,85 +455,124 @@ def handle_result_files(session, files):

     # Load the contents of the controller's summary file
     summary = JSONUtil.loads(files['summary'])
-    dbms_type = DBMSType.type(summary['database_type'])
-    dbms_version = summary['database_version'] # TODO: fix parse_version_string
-    workload_name = summary['workload_name']
-    observation_time = summary['observation_time']
-    start_time = datetime.fromtimestamp(
-        # int(summary['start_time']), # unit: seconds
-        int(float(summary['start_time']) / 1000), # unit: ms
-        timezone(TIME_ZONE))
-    end_time = datetime.fromtimestamp(
-        # int(summary['end_time']), # unit: seconds
-        int(float(summary['end_time']) / 1000), # unit: ms
-        timezone(TIME_ZONE))

-    # Check if workload name only contains alpha-numeric, underscore and hyphen
-    if not re.match('^[a-zA-Z0-9_-]+$', workload_name):
-        return HttpResponse('Your workload name ' + workload_name + ' contains '
-                            'invalid characters! It should only contain '
-                            'alpha-numeric, underscore(_) and hyphen(-)')
-
-    try:
-        # Check that we support this DBMS and version
-        dbms = DBMSCatalog.objects.get(
-            type=dbms_type, version=dbms_version)
-    except ObjectDoesNotExist:
-        return HttpResponse('{} v{} is not yet supported.'.format(
-            dbms_type, dbms_version))
-
-    if dbms != session.dbms:
-        return HttpResponse('The DBMS must match the type and version '
-                            'specified when creating the session. '
-                            '(expected=' + session.dbms.full_name + ') '
-                            '(actual=' + dbms.full_name + ')')
-
-    # Load, process, and store the knobs in the DBMS's configuration
-    knob_dict, knob_diffs = parser.parse_dbms_knobs(
-        dbms.pk, JSONUtil.loads(files['knobs']))
-    tunable_knob_dict = parser.convert_dbms_knobs(
-        dbms.pk, knob_dict)
-    knob_data = KnobData.objects.create_knob_data(
-        session, JSONUtil.dumps(knob_dict, pprint=True, sort=True),
-        JSONUtil.dumps(tunable_knob_dict, pprint=True, sort=True), dbms)
-
-    # Load, process, and store the runtime metrics exposed by the DBMS
-    initial_metric_dict, initial_metric_diffs = parser.parse_dbms_metrics(
-        dbms.pk, JSONUtil.loads(files['metrics_before']))
-    final_metric_dict, final_metric_diffs = parser.parse_dbms_metrics(
-        dbms.pk, JSONUtil.loads(files['metrics_after']))
-    metric_dict = parser.calculate_change_in_metrics(
-        dbms.pk, initial_metric_dict, final_metric_dict)
-    initial_metric_diffs.extend(final_metric_diffs)
-    numeric_metric_dict = parser.convert_dbms_metrics(
-        dbms.pk, metric_dict, observation_time, session.target_objective)
-    metric_data = MetricData.objects.create_metric_data(
-        session, JSONUtil.dumps(metric_dict, pprint=True, sort=True),
-        JSONUtil.dumps(numeric_metric_dict, pprint=True, sort=True), dbms)
-
-    # Create a new workload if this one does not already exist
-    workload = Workload.objects.create_workload(
-        dbms, session.hardware, workload_name)
-
-    # Save this result
-    result = Result.objects.create_result(
-        session, dbms, workload, knob_data, metric_data,
-        start_time, end_time, observation_time)
-    result.save()
-
-    # Workload is now modified so backgroundTasks can make calculationw
-    workload.status = WorkloadStatusType.MODIFIED
-    workload.save()
-
-    # Save all original data
-    backup_data = BackupData.objects.create(
-        result=result, raw_knobs=files['knobs'],
-        raw_initial_metrics=files['metrics_before'],
-        raw_final_metrics=files['metrics_after'],
-        raw_summary=files['summary'],
-        knob_log=knob_diffs,
-        metric_log=initial_metric_diffs)
-    backup_data.save()
+    # If database crashed on restart, pull latest result and worst throughput so far
+    if 'error' in summary and summary['error']=="DB_RESTART_ERROR":
+
+        LOG.debug("Error in restarting database")
+        # Find worst throughput
+        past_configs = MetricData.objects.filter(session=session)
+        worst_throughput = None
+        for curr_config in past_configs:
+            throughput = JSONUtil.loads(curr_config.data)["throughput_txn_per_sec"]
+            if worst_throughput is None or throughput < worst_throughput:
+                worst_throughput = throughput
+        LOG.debug("Worst throughput so far is:%d",worst_throughput)
+
+        # Copy latest data and modify
+        knob_data = KnobData.objects.filter(session=session).order_by("-id").first()
+        knob_data.pk = None
+        knob_data.save()
+
+        metric_data = MetricData.objects.filter(session=session).order_by("-id").first()
+        metric_cpy = JSONUtil.loads(metric_data.data)
+        metric_cpy["throughput_txn_per_sec"]=worst_throughput
+        metric_cpy = JSONUtil.dumps(metric_cpy)
+        metric_data.pk = None
+        metric_data.data = metric_cpy
+        metric_data.save()
+
+        result = Result.objects.filter(session=session).order_by("-id").first()
+        result.pk = None
+        result.knob_data = knob_data
+        result.metric_data = metric_data
+        result.save()
+
+        backup_data = BackupData.objects.filter(result=result).first()
+        backup_data.pk = None
+        backup_data.result = result
+        backup_data.save()
+
+    else:
+        dbms_type = DBMSType.type(summary['database_type'])
+        dbms_version = summary['database_version'] # TODO: fix parse_version_string
+        workload_name = summary['workload_name']
+        observation_time = summary['observation_time']
+        start_time = datetime.fromtimestamp(
+            # int(summary['start_time']), # unit: seconds
+            int(float(summary['start_time']) / 1000), # unit: ms
+            timezone(TIME_ZONE))
+        end_time = datetime.fromtimestamp(
+            # int(summary['end_time']), # unit: seconds
+            int(float(summary['end_time']) / 1000), # unit: ms
+            timezone(TIME_ZONE))
+
+        # Check if workload name only contains alpha-numeric, underscore and hyphen
+        if not re.match('^[a-zA-Z0-9_-]+$', workload_name):
+            return HttpResponse('Your workload name ' + workload_name + ' contains '
+                                'invalid characters! It should only contain '
+                                'alpha-numeric, underscore(_) and hyphen(-)')
+
+        try:
+            # Check that we support this DBMS and version
+            dbms = DBMSCatalog.objects.get(
+                type=dbms_type, version=dbms_version)
+        except ObjectDoesNotExist:
+            return HttpResponse('{} v{} is not yet supported.'.format(
+                dbms_type, dbms_version))
+
+        if dbms != session.dbms:
+            return HttpResponse('The DBMS must match the type and version '
+                                'specified when creating the session. '
+                                '(expected=' + session.dbms.full_name + ') '
+                                '(actual=' + dbms.full_name + ')')
+
+        # Load, process, and store the knobs in the DBMS's configuration
+        knob_dict, knob_diffs = parser.parse_dbms_knobs(
+            dbms.pk, JSONUtil.loads(files['knobs']))
+        tunable_knob_dict = parser.convert_dbms_knobs(
+            dbms.pk, knob_dict)
+        knob_data = KnobData.objects.create_knob_data(
+            session, JSONUtil.dumps(knob_dict, pprint=True, sort=True),
+            JSONUtil.dumps(tunable_knob_dict, pprint=True, sort=True), dbms)
+
+        # Load, process, and store the runtime metrics exposed by the DBMS
+        initial_metric_dict, initial_metric_diffs = parser.parse_dbms_metrics(
+            dbms.pk, JSONUtil.loads(files['metrics_before']))
+        final_metric_dict, final_metric_diffs = parser.parse_dbms_metrics(
+            dbms.pk, JSONUtil.loads(files['metrics_after']))
+        metric_dict = parser.calculate_change_in_metrics(
+            dbms.pk, initial_metric_dict, final_metric_dict)
+        initial_metric_diffs.extend(final_metric_diffs)
+        numeric_metric_dict = parser.convert_dbms_metrics(
+            dbms.pk, metric_dict, observation_time, session.target_objective)
+        metric_data = MetricData.objects.create_metric_data(
+            session, JSONUtil.dumps(metric_dict, pprint=True, sort=True),
+            JSONUtil.dumps(numeric_metric_dict, pprint=True, sort=True), dbms)
+
+        # Create a new workload if this one does not already exist
+        workload = Workload.objects.create_workload(
+            dbms, session.hardware, workload_name)
+
+        # Save this result
+        result = Result.objects.create_result(
+            session, dbms, workload, knob_data, metric_data,
+            start_time, end_time, observation_time)
+        result.save()
+
+        # Workload is now modified so backgroundTasks can make calculationw
+        workload.status = WorkloadStatusType.MODIFIED
+        workload.save()
+
+        # Save all original data
+        backup_data = BackupData.objects.create(
+            result=result, raw_knobs=files['knobs'],
+            raw_initial_metrics=files['metrics_before'],
+            raw_final_metrics=files['metrics_after'],
+            raw_summary=files['summary'],
+            knob_log=knob_diffs,
+            metric_log=initial_metric_diffs)
+        backup_data.save()

     session.project.last_update = now()
     session.last_update = now()
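A minimal sketch of the worst-throughput fallback that the server-side hunk introduces, pulled out of the Django view for readability. It assumes, as in the hunk above, that each stored metric row carries a JSON string with a throughput_txn_per_sec field; the standard json module stands in for JSONUtil here, and both function names are hypothetical.

import json


def worst_throughput_so_far(metric_rows):
    # metric_rows: any iterable of objects whose .data attribute is a JSON string,
    # e.g. MetricData.objects.filter(session=session) in the view above.
    worst = None
    for row in metric_rows:
        throughput = json.loads(row.data)["throughput_txn_per_sec"]
        if worst is None or throughput < worst:
            worst = throughput
    return worst


def penalize_metrics(latest_metric_json, worst):
    # Clone the latest metric blob and overwrite its throughput with the worst
    # value seen so far, mirroring how the view rewrites the copied MetricData row.
    metrics = json.loads(latest_metric_json)
    metrics["throughput_txn_per_sec"] = worst
    return json.dumps(metrics)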