Do not use range_test results for tuning

Authored by yangdsh on 2020-03-17 05:04:46 +00:00, committed by Dana Van Aken
parent 52f4a5801e
commit 240be46d58
3 changed files with 6 additions and 1 deletion


@@ -448,6 +448,9 @@ def train_ddpg(train_ddpg_input):
     params = JSONUtil.loads(session.hyperparameters)
     session_results = Result.objects.filter(session=session,
                                             creation_time__lt=result.creation_time)
+    for i, result in enumerate(session_results):
+        if 'range_test' in result.metric_data.name:
+            session_results.pop(i)
     target_data = {}
     target_data['newest_result_id'] = result_id
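A note on the added loop: popping from a sequence while enumerating it skips the element that follows each removal, a Django QuerySet has no pop() until it is materialized into a list, and the loop variable re-binds the outer result that the filter itself uses. A minimal sketch of an equivalent guard, assuming metric_data is a queryable relation on Result with a name field (not confirmed by this diff), is to exclude the rows up front:

# Query-level exclusion (assumes Result.metric_data is a ForeignKey with a name field):
session_results = Result.objects.filter(
    session=session,
    creation_time__lt=result.creation_time,
).exclude(metric_data__name__contains='range_test')

# Or, materialized as a plain list without mutating during iteration:
session_results = [r for r in session_results
                   if 'range_test' not in r.metric_data.name]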


@@ -153,6 +153,8 @@ class DataUtil(object):
         rowlabels = np.empty(len(results), dtype=int)
         for i, result in enumerate(results):
+            if 'range_test' in result.metric_data.name:
+                continue
             param_data = JSONUtil.loads(result.knob_data.data)
             if len(param_data) != len(knob_labels):
                 raise Exception(
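The continue here skips range_test rows, but rowlabels was preallocated with len(results) slots and is presumably indexed by i, so skipped indices would stay uninitialized. A self-contained sketch of the same guard applied before allocation, using hypothetical SimpleNamespace stand-ins for the Result rows (the real objects are Django models):

import numpy as np
from types import SimpleNamespace

# Hypothetical stand-ins mirroring only the attributes the loop touches.
results = [
    SimpleNamespace(metric_data=SimpleNamespace(name='throughput')),
    SimpleNamespace(metric_data=SimpleNamespace(name='range_test_throughput*')),
]

# Filter first so the preallocated array is sized to the rows actually used.
valid_results = [r for r in results if 'range_test' not in r.metric_data.name]
rowlabels = np.empty(len(valid_results), dtype=int)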


@@ -645,7 +645,7 @@ def handle_result_files(session, files, execution_times=None):
     if 'status' in summary and summary['status'] == "range_test":
         # The metric should not be used for learning because the driver did not run the workload.
         # We tag the metric as invalid so that it will later be set to the worst result.
-        metric_data.name = metric_data.name + '*'
+        metric_data.name = 'range_test_' + metric_data.name + '*'
         metric_data.save()
     # Create a new workload if this one does not already exist
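Taken together, the three hunks establish a small tagging convention: a metric collected during a range test is renamed with a 'range_test_' prefix (on top of the existing '*' invalid marker), and the training paths skip any metric whose name contains that substring. A minimal, self-contained sketch of the round trip:

def tag_range_test(name):
    # Mirrors the rename in handle_result_files above.
    return 'range_test_' + name + '*'

def is_range_test(name):
    # Mirrors the substring checks in train_ddpg and DataUtil.
    return 'range_test' in name

assert is_range_test(tag_range_test('throughput'))
assert not is_range_test('throughput')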