Support user-defined metrics
This commit is contained in:
committed by
Dana Van Aken
parent
d4b3364819
commit
346a2470b5
@@ -357,7 +357,7 @@ class BaseParser:
|
||||
LOG.warning("Changing metric %s from COUNTER to STATISTICS", met_name)
|
||||
met_info.metric_type = MetricType.STATISTICS
|
||||
met_info.save()
|
||||
if allow_negative:
|
||||
if allow_negative and adj_val < 0:
|
||||
LOG.warning('%s metric type %s value is negative (start=%s, end=%s, diff=%s)',
|
||||
met_name, MetricType.name(met_info.metric_type), start_val, end_val,
|
||||
end_val - start_val)
|
||||
|
||||
@@ -16,7 +16,7 @@ LOG = logging.getLogger(__name__)
|
||||
LESS_IS_BETTER = '(less is better)'
|
||||
MORE_IS_BETTER = '(more is better)'
|
||||
THROUGHPUT = 'throughput_txn_per_sec'
|
||||
|
||||
USER_DEFINED_TARGET = 'user_defined_metric'
|
||||
|
||||
class BaseMetric:
|
||||
|
||||
@@ -73,6 +73,30 @@ class BaseThroughput(BaseTargetObjective):
|
||||
return float(num_txns) / observation_time
|
||||
|
||||
|
||||
class UserDefinedTargetObjective(BaseTargetObjective):
    """Target objective backed by a metric supplied by the user.

    The instance starts as an unregistered placeholder (its name is
    USER_DEFINED_TARGET); register_target() rebinds it to a concrete
    user-supplied metric.
    """

    _improvement_choices = (LESS_IS_BETTER, MORE_IS_BETTER, '')

    def __init__(self):
        super().__init__(name=USER_DEFINED_TARGET, pprint='User Defined Metric', unit='unknown',
                         short_unit='unknown', improvement='')

    def is_registered(self):
        # Registered once the placeholder name has been replaced by a real one.
        return USER_DEFINED_TARGET != self.name

    def register_target(self, name, more_is_better, unit, short_unit, pprint='User Defined Metric'):
        """Bind this objective to a concrete user-defined metric.

        Args:
            name: full metric name (e.g. 'udm.<metric>').
            more_is_better: True if larger values are better (must be bool).
            unit: long unit string for display.
            short_unit: abbreviated unit string.
            pprint: pretty-print label for the UI.
        """
        # Validate *before* mutating any state so a failed call leaves the
        # instance unchanged (the original assigned self.name first, which
        # left the object half-registered on a bad more_is_better value).
        assert isinstance(more_is_better, bool), 'more_is_better should be bool type'
        self.name = name
        self.improvement = MORE_IS_BETTER if more_is_better else LESS_IS_BETTER
        self.unit = unit
        self.short_unit = short_unit
        self.pprint = pprint

    def compute(self, metrics, observation_time):
        # observation_time is unused: the user uploads an already-computed value.
        return metrics[self.name]
|
||||
|
||||
class TargetObjectives:
|
||||
LESS_IS_BETTER = LESS_IS_BETTER
|
||||
MORE_IS_BETTER = MORE_IS_BETTER
|
||||
@@ -81,6 +105,7 @@ class TargetObjectives:
|
||||
def __init__(self):
    """Set up empty registries; they are populated lazily by register()."""
    # dbms_id -> {target_name: target objective instance}
    self._registry = {}
    # dbms_id -> [(metric_name, BaseMetric)] for built-in numeric metrics
    self._metric_metadatas = {}
    # dbms_id -> [(metric_name, BaseMetric)] for user defined metrics
    self._udm_metadatas = {}
    # Objective used when a session does not pick one explicitly.
    self._default_target_objective = THROUGHPUT
|
||||
|
||||
def register(self):
|
||||
@@ -109,9 +134,33 @@ class TargetObjectives:
|
||||
self._metric_metadatas[dbms_id] = [(mname, BaseMetric(mname)) for mname
|
||||
in sorted(numeric_metrics)]
|
||||
|
||||
LOG.info('Registering user defined target objectives...')
|
||||
dbmss = models.DBMSCatalog.objects.all()
|
||||
for dbms in dbmss:
|
||||
dbms_id = int(dbms.pk)
|
||||
if dbms_id not in self._registry:
|
||||
self._registry[dbms_id] = {}
|
||||
self._registry[dbms_id][USER_DEFINED_TARGET] = UserDefinedTargetObjective()
|
||||
|
||||
|
||||
def registered(self):
    """Return True once register() has populated the objective registry."""
    # A non-empty dict is truthy, so this matches len(...) > 0 exactly.
    return bool(self._registry)
|
||||
|
||||
def udm_registered(self, dbms_id):
    """Return True if user-defined metrics exist for the given DBMS id."""
    udm_map = self._udm_metadatas
    return dbms_id in udm_map
|
||||
|
||||
def register_udm(self, dbms_id, metrics):
    """Register user-defined metrics (UDMs) for a DBMS.

    Args:
        dbms_id: primary key of the DBMS the metrics belong to.
        metrics: dict mapping metric name -> info dict containing at
            least 'unit' and 'short_unit' keys.

    Metric names are namespaced with a 'udm.' prefix. Calling this again
    for the same dbms_id appends to the existing list, but only metrics
    whose names are not already registered — the original appended
    unconditionally, so re-uploading the same metrics created duplicate
    (name, BaseMetric) entries.
    """
    if dbms_id in self._udm_metadatas:
        LOG.warning('User Defined Metrics have already been registered, append to existing one')
        metadatas = self._udm_metadatas[dbms_id]
    else:
        metadatas = []
    # Names already registered for this DBMS; used to skip duplicates.
    existing_names = {name for name, _ in metadatas}
    for name, info in metrics.items():
        name = 'udm.' + name
        if name in existing_names:
            continue
        metadatas.append((name,
                          BaseMetric(name, unit=info['unit'], short_unit=info['short_unit'])))
    self._udm_metadatas[dbms_id] = metadatas
|
||||
|
||||
def get_metric_metadata(self, dbms_id, target_objective):
|
||||
if not self.registered():
|
||||
self.register()
|
||||
@@ -121,8 +170,13 @@ class TargetObjectives:
|
||||
if target_name == target_objective:
|
||||
targets_list.insert(0, (target_name, target_instance))
|
||||
else:
|
||||
targets_list.append((target_name, target_instance))
|
||||
metadata = targets_list + list(self._metric_metadatas[dbms_id])
|
||||
if target_name != USER_DEFINED_TARGET:
|
||||
targets_list.append((target_name, target_instance))
|
||||
if dbms_id in self._udm_metadatas:
|
||||
metadata = targets_list + list(self._udm_metadatas[dbms_id]) +\
|
||||
list(self._metric_metadatas[dbms_id])
|
||||
else:
|
||||
metadata = targets_list + list(self._metric_metadatas[dbms_id])
|
||||
return OrderedDict(metadata)
|
||||
|
||||
def default(self):
|
||||
|
||||
@@ -12,6 +12,7 @@ import re
|
||||
import shutil
|
||||
import socket
|
||||
import time
|
||||
import copy
|
||||
from collections import OrderedDict
|
||||
from io import StringIO
|
||||
|
||||
@@ -51,6 +52,7 @@ from .utils import (JSONUtil, LabelUtil, MediaUtil, TaskUtil)
|
||||
from .settings import LOG_DIR, TIME_ZONE, CHECK_CELERY
|
||||
|
||||
from .set_default_knobs import set_default_knobs
|
||||
from .db.base.target_objective import USER_DEFINED_TARGET
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
@@ -486,6 +488,54 @@ def handle_result_files(session, files, execution_times=None):
|
||||
# Load the contents of the controller's summary file
|
||||
summary = JSONUtil.loads(files['summary'])
|
||||
|
||||
dbms_id = session.dbms.pk
|
||||
udm_before = {}
|
||||
udm_after = {}
|
||||
if ('user_defined_metrics' not in files) and (USER_DEFINED_TARGET == session.target_objective):
|
||||
return HttpResponse('ERROR: user defined target objective is not uploaded!')
|
||||
# User defined metrics
|
||||
udm = {}
|
||||
if 'user_defined_metrics' in files:
|
||||
udm = JSONUtil.loads(files['user_defined_metrics'])
|
||||
if len(udm) > 0:
|
||||
udm_target = udm['target_objective']
|
||||
udm_not_target = udm['metrics']
|
||||
udm_all = copy.deepcopy(udm_not_target)
|
||||
if (udm_target is None) and (USER_DEFINED_TARGET == session.target_objective):
|
||||
return HttpResponse('ERROR: user defined target objective is not uploaded!')
|
||||
if udm_target is not None:
|
||||
udm_all.update(udm_target)
|
||||
if not target_objectives.udm_registered(dbms_id):
|
||||
target_objectives.register_udm(dbms_id, udm_all)
|
||||
for name, info in udm_all.items():
|
||||
udm_name = 'udm.' + name
|
||||
udm_before[name] = 0
|
||||
udm_after[name] = info['value']
|
||||
if MetricCatalog.objects.filter(dbms=session.dbms, name=udm_name).exists():
|
||||
continue
|
||||
udm_catalog = MetricCatalog.objects.create(dbms=session.dbms,
|
||||
name=udm_name,
|
||||
vartype=info['type'],
|
||||
scope='global',
|
||||
metric_type=MetricType.STATISTICS)
|
||||
udm_catalog.summary = 'user defined metric, not target objective'
|
||||
udm_catalog.save()
|
||||
if udm_target is not None:
|
||||
target_name = 'udm.' + list(udm_target.keys())[0]
|
||||
pprint_name = 'udf.' + list(udm_target.keys())[0]
|
||||
info = list(udm_target.values())[0]
|
||||
if USER_DEFINED_TARGET != session.target_objective:
|
||||
LOG.warning('the target objective is not user defined metric (UDM),\
|
||||
please disable UDM target objective in driver')
|
||||
else:
|
||||
udm_instance = target_objectives.get_instance(dbms_id, USER_DEFINED_TARGET)
|
||||
if not udm_instance.is_registered():
|
||||
udm_instance.register_target(name=target_name,
|
||||
more_is_better=info['more_is_better'],
|
||||
unit=info['unit'],
|
||||
short_unit=info['short_unit'],
|
||||
pprint=pprint_name)
|
||||
|
||||
# Find worst throughput
|
||||
past_metrics = MetricData.objects.filter(session=session)
|
||||
metric_meta = target_objectives.get_instance(session.dbms.pk, session.target_objective)
|
||||
@@ -643,10 +693,16 @@ def handle_result_files(session, files, execution_times=None):
|
||||
JSONUtil.dumps(converted_knob_dict, pprint=True, sort=True), dbms)
|
||||
|
||||
# Load, process, and store the runtime metrics exposed by the DBMS
|
||||
metrics_before = JSONUtil.loads(files['metrics_before'])
|
||||
metrics_after = JSONUtil.loads(files['metrics_after'])
|
||||
# Add user defined metrics
|
||||
if len(udm_before) > 0:
|
||||
metrics_before['global']['udm'] = udm_before
|
||||
metrics_after['global']['udm'] = udm_after
|
||||
initial_metric_dict, initial_metric_diffs = parser.parse_dbms_metrics(
|
||||
dbms.pk, JSONUtil.loads(files['metrics_before']))
|
||||
dbms.pk, metrics_before)
|
||||
final_metric_dict, final_metric_diffs = parser.parse_dbms_metrics(
|
||||
dbms.pk, JSONUtil.loads(files['metrics_after']))
|
||||
dbms.pk, metrics_after)
|
||||
metric_dict = parser.calculate_change_in_metrics(
|
||||
dbms.pk, initial_metric_dict, final_metric_dict)
|
||||
metric_diffs = OrderedDict([
|
||||
|
||||
Reference in New Issue
Block a user