add integration test

bohanjason 2019-10-24 11:59:36 -04:00 committed by Dana Van Aken
parent dd05e2c63c
commit 2fa97149d5
5 changed files with 89 additions and 12 deletions


@@ -647,24 +647,38 @@ def run_loops(max_iter=1):
        LOG.info('The %s-th Loop Ends / Total Loops %s', i + 1, max_iter)


@task
def rename_batch(result_dir=None):
    result_dir = result_dir or CONF['save_path']
    results = glob.glob(os.path.join(result_dir, '*__summary.json'))
    results = sorted(results)
-    count = len(results)
    for i, result in enumerate(results):
        prefix = os.path.basename(result)
        prefix_len = os.path.basename(result).find('_') + 2
        prefix = prefix[:prefix_len]
        new_prefix = str(i) + '__'
-        files = {}
        for base in ('summary', 'knobs', 'metrics_before', 'metrics_after'):
            fpath = os.path.join(result_dir, prefix + base + '.json')
            rename_path = os.path.join(result_dir, new_prefix + base + '.json')
            os.rename(fpath, rename_path)
+
+
+def wait_pipeline_data_ready(max_time_sec=800, interval_sec=10):
+    max_time_sec = int(max_time_sec)
+    interval_sec = int(interval_sec)
+    elapsed = 0
+    while elapsed <= max_time_sec:
+        response = requests.get(CONF['upload_url'] + '/test/pipeline/')
+        response = response.content
+        LOG.info(response)
+        if 'False' in response:
+            time.sleep(interval_sec)
+            elapsed += interval_sec
+        else:
+            return
+
+
@task
def integration_tests():
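The new wait_pipeline_data_ready helper polls the /test/pipeline/ endpoint added in this commit until the server reports that background pipeline data exists, or until max_time_sec elapses. Below is a minimal standalone sketch of the same polling pattern; it is not part of the commit: it takes the upload URL as an argument instead of reading CONF, checks response.text rather than response.content so the substring test also works under Python 3, and returns a boolean instead of falling through silently on timeout.

import time
import requests

def wait_pipeline_data_ready_sketch(upload_url, max_time_sec=800, interval_sec=10):
    # Poll the server until it stops reporting "Pipeline data ready: False".
    elapsed = 0
    while elapsed <= max_time_sec:
        body = requests.get(upload_url + '/test/pipeline/').text
        if 'False' not in body:
            return True  # background pipeline data is available
        time.sleep(interval_sec)
        elapsed += interval_sec
    return False  # timed out waiting for the pipeline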
@@ -674,17 +688,22 @@ def integration_tests():
    # Upload training data
    LOG.info('Upload training data to no tuning session')
-    upload_batch(result_dir='../../integrationTests/data/',upload_code='ottertuneTestNoTuning')
+    upload_batch(result_dir='../../integrationTests/data/', upload_code='ottertuneTestNoTuning')

    # TO DO: BG ready
+    response = requests.get(CONF['upload_url'] + '/test/pipeline/')
+    LOG.info(response.content)

    # Test DNN
    LOG.info('Test DNN (deep neural network)')
-    upload_result(result_dir='../../integrationTests/data/', prefix='0__', upload_code='ottertuneTestTuningDNN')
+    upload_result(result_dir='../../integrationTests/data/', prefix='0__',
+                  upload_code='ottertuneTestTuningDNN')
    response = get_result(upload_code='ottertuneTestTuningDNN')
    assert response['status'] == 'good'

    # Test GPR
    LOG.info('Test GPR (gaussian process regression)')
-    upload_result(result_dir='../../integrationTests/data/', prefix='0__', upload_code='ottertuneTestTuningGPR')
+    upload_result(result_dir='../../integrationTests/data/', prefix='0__',
+                  upload_code='ottertuneTestTuningGPR')
    response = get_result(upload_code='ottertuneTestTuningGPR')
    assert response['status'] == 'good'
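Taken together, the driver-side changes support an end-to-end flow like the sketch below. This is an illustrative wrapper, not code from the commit; it assumes it lives in the same fabfile so that CONF, upload_batch, upload_result, get_result, and the new wait_pipeline_data_ready are all in scope, and the explicit call to /test/create/ (the reset endpoint added on the server side) is an assumption about how a run would be bootstrapped.

def run_integration_test():
    # Recreate the test user, project, and test sessions on the server.
    requests.get(CONF['upload_url'] + '/test/create/')

    # Seed the no-tuning session with previously collected results.
    upload_batch(result_dir='../../integrationTests/data/',
                 upload_code='ottertuneTestNoTuning')

    # Block until the background pipeline (PipelineRun) has produced data.
    wait_pipeline_data_ready()

    # Ask each tuner for a recommendation and sanity-check the response.
    for code in ('ottertuneTestTuningDNN', 'ottertuneTestTuningGPR'):
        upload_result(result_dir='../../integrationTests/data/', prefix='0__',
                      upload_code=code)
        assert get_result(upload_code=code)['status'] == 'good'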


@@ -182,17 +182,22 @@ class Session(BaseModel):

class SessionKnobManager(models.Manager):
    @staticmethod
-    def get_knobs_for_session(session):
+    def get_knobs_for_session(session, only_session_knobs=False):
        # Returns a dict of the knob
        knobs = KnobCatalog.objects.filter(dbms=session.dbms)
        knob_dicts = list(knobs.values())
+        session_knob_dicts = []
        for i, _ in enumerate(knob_dicts):
            if SessionKnob.objects.filter(session=session, knob=knobs[i]).exists():
                new_knob = SessionKnob.objects.filter(session=session, knob=knobs[i])[0]
                knob_dicts[i]["minval"] = new_knob.minval
                knob_dicts[i]["maxval"] = new_knob.maxval
                knob_dicts[i]["tunable"] = new_knob.tunable
-        knob_dicts = [knob for knob in knob_dicts if knob["tunable"]]
+                session_knob_dicts.append(new_knob)
+        if only_session_knobs:
+            knob_dicts = session_knob_dicts
+        else:
+            knob_dicts = [knob for knob in knob_dicts if knob["tunable"]]
        return knob_dicts

    @staticmethod
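The new only_session_knobs flag changes what the manager returns. By default the method behaves as before: it returns the tunable catalog knob dicts for the session's DBMS, with any per-session minval/maxval/tunable overrides folded in. With only_session_knobs=True it returns only the SessionKnob entries defined for that session, which is how configuration_recommendation now restricts the knobs it sets bounds for (see the async_tasks change below). A short usage sketch, assuming a session object is already loaded:

# Old behavior: all tunable knob dicts for the session's DBMS, with overrides applied.
knob_dicts = SessionKnob.objects.get_knobs_for_session(session)

# New behavior: only knobs that have an explicit SessionKnob row for this session.
session_only = SessionKnob.objects.get_knobs_for_session(session, only_session_knobs=True)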


@@ -544,7 +544,7 @@ def configuration_recommendation(recommendation_input):
    X_max = np.empty(X_scaled.shape[1])
    X_scaler_matrix = np.zeros([1, X_scaled.shape[1]])

-    session_knobs = SessionKnob.objects.get_knobs_for_session(newest_result.session)
+    session_knobs = SessionKnob.objects.get_knobs_for_session(newest_result.session, only_session_knobs=True)

    # Set min/max for knob values
    for i in range(X_scaled.shape[1]):


@@ -70,6 +70,10 @@ urlpatterns = [
    # train ddpg with results in the given session
    url(r'^train_ddpg/sessions/(?P<session_id>[0-9]+)$', website_views.train_ddpg_loops, name='train_ddpg_loops'),
+
+    # Test
+    url(r'^test/create/', website_views.create_test_website, name='create_test_website'),
+    url(r'^test/pipeline/', website_views.pipeline_data_ready, name='pipeline_data_ready')
]

if settings.DEBUG:
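These two routes are what the driver's integration test calls. A quick way to exercise them by hand with requests; the local host/port below is an assumption (a development server), and the driver itself uses CONF['upload_url'] instead:

import requests

BASE = 'http://127.0.0.1:8000'  # assumed local dev server

# Recreate the test user, project, and test sessions.
print(requests.get(BASE + '/test/create/').content)

# Check whether the background pipeline has produced data yet.
print(requests.get(BASE + '/test/pipeline/').content)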


@@ -29,8 +29,8 @@ from pytz import timezone

from .db import parser, target_objectives
from .forms import NewResultForm, ProjectForm, SessionForm, SessionKnobForm
-from .models import (BackupData, DBMSCatalog, KnobCatalog, KnobData, MetricCatalog,
-                     MetricData, Project, Result, Session, Workload, SessionKnob)
+from .models import (BackupData, DBMSCatalog, KnobCatalog, KnobData, MetricCatalog, User, Hardware,
+                     MetricData, Project, Result, Session, Workload, SessionKnob, PipelineRun)
from .tasks import (aggregate_target_results, map_workload, train_ddpg,
                    configuration_recommendation, configuration_recommendation_ddpg)
from .types import (DBMSType, KnobUnitType, MetricType,
@@ -1041,3 +1041,52 @@ def train_ddpg_loops(request, session_id):  # pylint: disable=unused-argument
    for result in results:
        train_ddpg(result.pk)
    return HttpResponse()
+
+
+# integration test
+@csrf_exempt
+def pipeline_data_ready(request):  # pylint: disable=unused-argument
+    LOG.info(PipelineRun.objects.get_latest())
+    if PipelineRun.objects.get_latest() is None:
+        response = "Pipeline data ready: False"
+    else:
+        response = "Pipeline data ready: True"
+    return HttpResponse(response)
+
+
+# integration test
+@csrf_exempt
+def create_test_website(request):  # pylint: disable=unused-argument
+    if User.objects.filter(username='ottertune_test_user').exists():
+        User.objects.filter(username='ottertune_test_user').delete()
+    if Hardware.objects.filter(pk=1).exists():
+        test_hardware = Hardware.objects.get(pk=1)
+    else:
+        test_hardware = Hardware.objects.create(pk=1)
+
+    test_user = User.objects.create_user(username='ottertune_test_user',
+                                         password='ottertune_test_user')
+    test_project = Project.objects.create(user=test_user, name='ottertune_test_project',
+                                          creation_time=now(), last_update=now())
+    # create no tuning session
+    Session.objects.create(name='test_session_no_tuning', tuning_session='no_tuning_session',
+                           dbms_id=1, hardware=test_hardware, project=test_project,
+                           creation_time=now(), last_update=now(), user=test_user,
+                           upload_code='ottertuneTestNoTuning')
+    # create gpr session
+    Session.objects.create(name='test_session_gpr', user=test_user, tuning_session='tuning_session',
+                           dbms_id=1, hardware=test_hardware, project=test_project,
+                           creation_time=now(), last_update=now(), algorithm=AlgorithmType.GPR,
+                           upload_code='ottertuneTestTuningGPR')
+    # create dnn session
+    Session.objects.create(name='test_session_dnn', user=test_user, tuning_session='tuning_session',
+                           dbms_id=1, hardware=test_hardware, project=test_project,
+                           creation_time=now(), last_update=now(), algorithm=AlgorithmType.DNN,
+                           upload_code='ottertuneTestTuningDNN')
+    # create ddpg session
+    Session.objects.create(name='test_session_ddpg', tuning_session='tuning_session',
+                           dbms_id=1, hardware=test_hardware, project=test_project,
+                           creation_time=now(), last_update=now(), user=test_user,
+                           upload_code='ottertuneTestTuningDDPG', algorithm=AlgorithmType.DDPG)
+    response = HttpResponse("Success: create test website successfully")
+    return response
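A hedged sketch of how these views could be covered by a Django unit test. The URL paths come from this commit's urls.py; the test class, the website.models import path, and the fixture name that supplies a DBMSCatalog row for dbms_id=1 are assumptions about the project's test setup, not part of the commit.

from django.test import TestCase

from website.models import Session  # assumed import path for the website app

class IntegrationEndpointTests(TestCase):
    fixtures = ['dbms_catalog.json']  # assumed fixture providing dbms_id=1

    def test_create_test_website(self):
        response = self.client.get('/test/create/')
        self.assertEqual(response.status_code, 200)
        # The view should have created the upload codes the driver relies on.
        self.assertTrue(Session.objects.filter(upload_code='ottertuneTestNoTuning').exists())
        self.assertTrue(Session.objects.filter(upload_code='ottertuneTestTuningDDPG').exists())

    def test_pipeline_data_ready(self):
        response = self.client.get('/test/pipeline/')
        # With no PipelineRun rows yet, the endpoint reports that data is not ready.
        self.assertIn(b'False', response.content)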