update udm

This commit is contained in:
bohanjason 2020-05-05 03:26:06 -04:00 committed by Dana Van Aken
parent f14f87c817
commit 906d528357
6 changed files with 72 additions and 95 deletions

View File

@@ -643,6 +643,9 @@ def loop(i):
# remove oltpbench log and controller log
clean_logs()
if dconf.ENABLE_UDM is True:
clean_oltpbench_results()
# check disk usage
if check_disk_usage() > dconf.MAX_DISK_USAGE:
LOG.warning('Exceeds max disk usage %s', dconf.MAX_DISK_USAGE)
@@ -676,7 +679,6 @@ def loop(i):
# add user defined metrics
if dconf.ENABLE_UDM is True:
add_udm()
clean_oltpbench_results()
# save result
result_timestamp = save_dbms_result()
@@ -722,8 +724,8 @@ def run_loops(max_iter=10):
# reload database periodically
if dconf.RELOAD_INTERVAL > 0:
time.sleep(15)
if i % dconf.RELOAD_INTERVAL == 0:
is_ready_db(interval_sec=10)
if i == 0 and dump is False:
restore_database()
elif i > 0:

View File

@@ -12,17 +12,12 @@ parser = argparse.ArgumentParser() # pylint: disable=invalid-name
parser.add_argument("result_dir")
args = parser.parse_args() # pylint: disable=invalid-name
HAS_TARGET_OBJECTIVE = True
USER_DEINFED_METRICS = {
"target_objective": {
"throughput": {
"more_is_better": True,
"unit": "transaction / second",
"short_unit": "txn/s",
"type": VarType.INTEGER
}
},
"metrics": {
"latency_99": {
"unit": "microseconds",
"short_unit": "us",
@@ -34,7 +29,6 @@ USER_DEINFED_METRICS = {
"type": VarType.INTEGER
}
}
}
def get_udm():
@@ -42,16 +36,10 @@ def get_udm():
with open(summary_path, 'r') as f:
info = json.load(f)
metrics = copy.deepcopy(USER_DEINFED_METRICS)
if HAS_TARGET_OBJECTIVE is False:
metrics["target_objective"] = None
else:
assert len(metrics["target_objective"]) == 1, "It should have only one target objective"
metrics["target_objective"]["throughput"]["value"] =\
info["Throughput (requests/second)"]
metrics["metrics"]["latency_99"]["value"] =\
metrics["throughput"]["value"] = info["Throughput (requests/second)"]
metrics["latency_99"]["value"] =\
info["Latency Distribution"]["99th Percentile Latency (microseconds)"]
metrics["metrics"]["latency_95"]["value"] =\
metrics["latency_95"]["value"] =\
info["Latency Distribution"]["95th Percentile Latency (microseconds)"]
return metrics

View File

@@ -0,0 +1 @@
sudo -b -E env "PATH=$PATH" nohup fab run_loops:100 > loop.log 2>&1 < /dev/null

View File

@@ -16,7 +16,6 @@ LOG = logging.getLogger(__name__)
LESS_IS_BETTER = '(less is better)'
MORE_IS_BETTER = '(more is better)'
THROUGHPUT = 'throughput_txn_per_sec'
USER_DEFINED_TARGET = 'user_defined_metric'
class BaseMetric:
@@ -52,6 +51,9 @@ class BaseTargetObjective(BaseMetric):
def compute(self, metrics, observation_time):
raise NotImplementedError()
def is_udf(self): # pylint: disable=no-self-use
return False
class BaseThroughput(BaseTargetObjective):
@@ -73,29 +75,25 @@ class BaseThroughput(BaseTargetObjective):
return float(num_txns) / observation_time
class UserDefinedTargetObjective(BaseTargetObjective):
class BaseUserDefinedTarget(BaseTargetObjective):
_improvement_choices = (LESS_IS_BETTER, MORE_IS_BETTER, '')
def __init__(self):
super().__init__(name=USER_DEFINED_TARGET, pprint='User Defined Metric', unit='unknown',
short_unit='unknown', improvement='')
def __init__(self, target_name, improvement, unit='unknown', short_unit='unknown', pprint=None):
if pprint is None:
pprint = 'udf.' + target_name
super().__init__(name=target_name, pprint=pprint, unit=unit,
short_unit=short_unit, improvement=improvement)
def is_registered(self):
return USER_DEFINED_TARGET != self.name
def register_target(self, name, more_is_better, unit, short_unit, pprint='User Defined Metric'):
self.name = name
assert isinstance(more_is_better, bool), 'more_is_better should be bool type'
if more_is_better:
self.improvement = MORE_IS_BETTER
else:
self.improvement = LESS_IS_BETTER
self.unit = unit
self.short_unit = short_unit
self.pprint = pprint
def is_udf(self):
return True
def compute(self, metrics, observation_time):
return metrics.get(self.name, 0)
name = 'udm.' + self.name
if name not in metrics:
LOG.warning('cannot find the user defined target objective %s,\
return 0 instead', self.name)
return metrics.get(name, 0)
class TargetObjectives:
LESS_IS_BETTER = LESS_IS_BETTER
@@ -134,15 +132,6 @@ class TargetObjectives:
self._metric_metadatas[dbms_id] = [(mname, BaseMetric(mname)) for mname
in sorted(numeric_metrics)]
LOG.info('Registering user defined target objectives...')
dbmss = models.DBMSCatalog.objects.all()
for dbms in dbmss:
dbms_id = int(dbms.pk)
if dbms_id not in self._registry:
self._registry[dbms_id] = {}
self._registry[dbms_id][USER_DEFINED_TARGET] = UserDefinedTargetObjective()
def registered(self):
return len(self._registry) > 0
@@ -169,15 +158,24 @@ class TargetObjectives:
for target_name, target_instance in self._registry[dbms_id].items():
if target_name == target_objective:
targets_list.insert(0, (target_name, target_instance))
else:
if target_name != USER_DEFINED_TARGET:
targets_list.append((target_name, target_instance))
if dbms_id in self._udm_metadatas:
metadata = targets_list + list(self._udm_metadatas[dbms_id]) +\
list(self._metric_metadatas[dbms_id])
else:
metadata = targets_list + list(self._metric_metadatas[dbms_id])
return OrderedDict(metadata)
metric_meta = list(self._metric_metadatas[dbms_id])
udm_metric_meta = []
db_metric_meta = []
for metric_name, metric in metric_meta:
if metric_name.startswith('udm.'):
udm_metric_meta.append((metric_name, metric))
else:
db_metric_meta.append((metric_name, metric))
metadata = targets_list + udm_metric_meta + db_metric_meta
meta_dict = OrderedDict()
for metric_name, metric in metadata:
if metric_name not in meta_dict:
meta_dict[metric_name] = metric
return meta_dict
def default(self):
return self._default_target_objective

View File

@@ -5,10 +5,15 @@
#
from website.types import DBMSType
from ..base.target_objective import BaseThroughput # pylint: disable=relative-beyond-top-level
from ..base.target_objective import (BaseThroughput, BaseUserDefinedTarget,
LESS_IS_BETTER, MORE_IS_BETTER) # pylint: disable=relative-beyond-top-level
target_objective_list = tuple((DBMSType.MYSQL, target_obj) for target_obj in [ # pylint: disable=invalid-name
BaseThroughput(transactions_counter=('innodb_metrics.trx_rw_commits',
'innodb_metrics.trx_ro_commits',
'innodb_metrics.trx_nl_ro_commits'))
'innodb_metrics.trx_nl_ro_commits')),
BaseUserDefinedTarget(target_name='latency_99', improvement=LESS_IS_BETTER,
unit='microseconds', short_unit='us'),
BaseUserDefinedTarget(target_name='throughput', improvement=MORE_IS_BETTER,
unit='transactions / seconds', short_unit='txn/s')
])

View File

@@ -52,7 +52,6 @@ from .utils import (JSONUtil, LabelUtil, MediaUtil, TaskUtil)
from .settings import LOG_DIR, TIME_ZONE, CHECK_CELERY
from .set_default_knobs import set_default_knobs
from .db.base.target_objective import USER_DEFINED_TARGET
LOG = logging.getLogger(__name__)
@@ -491,20 +490,20 @@ def handle_result_files(session, files, execution_times=None):
dbms_id = session.dbms.pk
udm_before = {}
udm_after = {}
if ('user_defined_metrics' not in files) and (USER_DEFINED_TARGET == session.target_objective):
return HttpResponse('ERROR: user defined target objective is not uploaded!')
# User defined metrics
udm = {}
udm_all = {}
if 'user_defined_metrics' in files:
udm = JSONUtil.loads(files['user_defined_metrics'])
if len(udm) > 0:
udm_target = udm['target_objective']
udm_not_target = udm['metrics']
udm_all = copy.deepcopy(udm_not_target)
if (udm_target is None) and (USER_DEFINED_TARGET == session.target_objective):
return HttpResponse('ERROR: user defined target objective is not uploaded!')
if udm_target is not None:
udm_all.update(udm_target)
udm_all = JSONUtil.loads(files['user_defined_metrics'])
target_name = session.target_objective
target_instance = target_objectives.get_instance(dbms_id, target_name)
if target_instance.is_udf() and len(udm_all) == 0:
return HttpResponse('ERROR: user defined target objective {} is not uploaded!'.format(
target_name))
if len(udm_all) > 0:
# Note: Here we assume that for sessions with same dbms, user defined metrics are same.
# Otherwise there may exist inconsistency, it becomes worse after restarting web server.
if target_instance.is_udf() and (target_name not in udm_all.keys()):
return HttpResponse('ERROR: user defined target objective {} is not uploaded!'.format(
target_name))
if not target_objectives.udm_registered(dbms_id):
target_objectives.register_udm(dbms_id, udm_all)
for name, info in udm_all.items():
@@ -520,22 +519,6 @@ def handle_result_files(session, files, execution_times=None):
metric_type=MetricType.STATISTICS)
udm_catalog.summary = 'user defined metric, not target objective'
udm_catalog.save()
if udm_target is not None:
target_name = 'udm.' + list(udm_target.keys())[0]
pprint_name = 'udf.' + list(udm_target.keys())[0]
info = list(udm_target.values())[0]
if USER_DEFINED_TARGET != session.target_objective:
LOG.warning('the target objective is not user defined metric (UDM),\
please disable UDM target objective in driver')
else:
udm_instance = target_objectives.get_instance(dbms_id, USER_DEFINED_TARGET)
if not udm_instance.is_registered():
udm_instance.register_target(name=target_name,
more_is_better=info['more_is_better'],
unit=info['unit'],
short_unit=info['short_unit'],
pprint=pprint_name)
# Find worst throughput
past_metrics = MetricData.objects.filter(session=session)
metric_meta = target_objectives.get_instance(session.dbms.pk, session.target_objective)