From c37ef9c07242699b5b082cd6a7433d88381eec25 Mon Sep 17 00:00:00 2001
From: bohanjason
Date: Sat, 28 Sep 2019 01:42:56 -0400
Subject: [PATCH] move dnn parameters to constant file

---
 server/website/website/settings/constants.py | 23 +++++++++++++++++---
 server/website/website/tasks/async_tasks.py  | 19 ++++++++++------
 server/website/website/views.py              |  4 +++-
 3 files changed, 35 insertions(+), 11 deletions(-)

diff --git a/server/website/website/settings/constants.py b/server/website/website/settings/constants.py
index cdbe721..100058e 100644
--- a/server/website/website/settings/constants.py
+++ b/server/website/website/settings/constants.py
@@ -42,20 +42,37 @@ NUM_THREADS = 4
 # the maximum iterations of gradient descent
 MAX_ITER = 500
 
+DEFAULT_LEARNING_RATE = 0.01
+
+# ---GRADIENT DESCENT FOR GPR---
 # a small bias when using training data points as starting points.
 GPR_EPS = 0.001
 
 DEFAULT_RIDGE = 0.01
 
-DEFAULT_LEARNING_RATE = 0.01
-
 DEFAULT_EPSILON = 1e-6
 
 DEFAULT_SIGMA_MULTIPLIER = 3.0
 
 DEFAULT_MU_MULTIPLIER = 1.0
 
-# ---CONSTRAINTS CONSTANTS---
+# ---GRADIENT DESCENT FOR DNN---
+DNN_TRAIN_ITER = 500
+
+DNN_EXPLORE = False
+
+DNN_EXPLORE_ITER = 500
+
+# noise scale for parameter space exploration
+DNN_NOISE_SCALE_BEGIN = 0.1
+
+DNN_NOISE_SCALE_END = 0.0
+
+DNN_DEBUG = True
+
+DNN_DEBUG_INTERVAL = 100
+
+# ---DDPG CONSTRAINTS CONSTANTS---
 # Batch size in DDPG model
 DDPG_BATCH_SIZE = 32
 
diff --git a/server/website/website/tasks/async_tasks.py b/server/website/website/tasks/async_tasks.py
index 3d0408d..9a87023 100644
--- a/server/website/website/tasks/async_tasks.py
+++ b/server/website/website/tasks/async_tasks.py
@@ -31,7 +31,10 @@ from website.settings import (DEFAULT_LENGTH_SCALE, DEFAULT_MAGNITUDE,
                               DEFAULT_EPSILON, MAX_ITER, GPR_EPS,
                               DEFAULT_SIGMA_MULTIPLIER, DEFAULT_MU_MULTIPLIER,
                               DDPG_BATCH_SIZE, ACTOR_LEARNING_RATE,
-                              CRITIC_LEARNING_RATE, GAMMA, TAU)
+                              CRITIC_LEARNING_RATE, GAMMA, TAU,
+                              DNN_TRAIN_ITER, DNN_EXPLORE, DNN_EXPLORE_ITER,
+                              DNN_NOISE_SCALE_BEGIN, DNN_NOISE_SCALE_END,
+                              DNN_DEBUG, DNN_DEBUG_INTERVAL)
 from website.settings import INIT_FLIP_PROB, FLIP_PROB_DECAY
 from website.settings import MODEL_DIR
 
@@ -553,12 +556,14 @@ def configuration_recommendation(recommendation_input):
         model_nn = NeuralNet(weights_file=full_path,
                              n_input=X_samples.shape[1],
                              batch_size=X_samples.shape[0],
-                             explore_iters=500,
-                             noise_scale_begin=0.1,
-                             noise_scale_end=0,
-                             debug=True)
-        model_nn.fit(X_scaled, y_scaled)
-        res = model_nn.recommend(X_samples, X_min, X_max, explore=True)
+                             explore_iters=DNN_EXPLORE_ITER,
+                             noise_scale_begin=DNN_NOISE_SCALE_BEGIN,
+                             noise_scale_end=DNN_NOISE_SCALE_END,
+                             debug=DNN_DEBUG,
+                             debug_interval=DNN_DEBUG_INTERVAL)
+        model_nn.fit(X_scaled, y_scaled, fit_epochs=DNN_TRAIN_ITER)
+        res = model_nn.recommend(X_samples, X_min, X_max,
+                                 explore=DNN_EXPLORE, recommend_epochs=MAX_ITER)
     elif algorithm == 'gpr':
         # default gpr model
         model = GPRGD(length_scale=DEFAULT_LENGTH_SCALE,
diff --git a/server/website/website/views.py b/server/website/website/views.py
index 793db20..e9b618b 100644
--- a/server/website/website/views.py
+++ b/server/website/website/views.py
@@ -539,11 +539,13 @@ def handle_result_files(session, files):
         response = chain(aggregate_target_results.s(result.pk, 'dnn'),
                          map_workload.s(),
                          configuration_recommendation.s()).apply_async()
+
     taskmeta_ids = []
     current_task = response
     while current_task:
-        taskmeta_ids.append(current_task.id)
+        taskmeta_ids.insert(0, current_task.id)
         current_task = current_task.parent
+
     result.task_ids = ','.join(taskmeta_ids)
     result.save()
     return HttpResponse("Result stored successfully! Running tuner...(status={}) Result ID:{} "
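
Background on the views.py hunk: chain(...).apply_async() returns the result of the *last* task in the Celery chain, and following .parent walks backwards toward the first task, so switching from taskmeta_ids.append(...) to taskmeta_ids.insert(0, ...) stores the task IDs in execution order. Below is a minimal, self-contained sketch of that traversal; it is not part of the patch and uses a hypothetical FakeTask class in place of Celery's AsyncResult.

# Sketch only: FakeTask is a made-up stand-in for celery.result.AsyncResult.
# It mimics the two attributes the patched loop relies on: .id and .parent.

class FakeTask(object):
    def __init__(self, task_id, parent=None):
        self.id = task_id
        self.parent = parent

# Parent chain mirrors aggregate -> map_workload -> recommendation;
# apply_async() would hand back the last node (here, 'response').
first = FakeTask('aggregate_target_results')
second = FakeTask('map_workload', parent=first)
response = FakeTask('configuration_recommendation', parent=second)

taskmeta_ids = []
current_task = response
while current_task:
    # Prepend so the earliest task ends up first, matching execution order.
    taskmeta_ids.insert(0, current_task.id)
    current_task = current_task.parent

assert taskmeta_ids == ['aggregate_target_results', 'map_workload',
                        'configuration_recommendation']
print(','.join(taskmeta_ids))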