add GPFLOW to website

yangdsh 2019-11-23 19:52:28 +00:00 committed by Dana Van Aken
parent dceee6e0ba
commit b149684d77
3 changed files with 55 additions and 14 deletions


@@ -16,6 +16,15 @@ from analysis.util import get_analysis_logger
LOG = get_analysis_logger(__name__)


+class GPRGDResult():
+
+    def __init__(self, ypreds=None, sigmas=None, minl=None, minl_conf=None):
+        self.ypreds = ypreds
+        self.sigmas = sigmas
+        self.minl = minl
+        self.minl_conf = minl_conf
+
+
def tf_optimize(model, Xnew_arr, learning_rate=0.01, maxiter=100, ucb_beta=3.,
                active_dims=None, bounds=None):
    Xnew_arr = check_array(Xnew_arr, copy=False, warn_on_dtype=True, dtype=FLOAT_DTYPES)
@@ -61,4 +70,4 @@ def tf_optimize(model, Xnew_arr, learning_rate=0.01, maxiter=100, ucb_beta=3.,
    assert_all_finite(y_mean_value)
    assert_all_finite(y_var_value)
    assert_all_finite(loss_value)
-    return Xnew_value, y_mean_value, y_var_value, loss_value
+    return GPRGDResult(y_mean_value, y_var_value, loss_value, Xnew_value)
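Note: this hunk changes the return type of tf_optimize (imported further below as analysis.gpr.optimize) from a positional tuple to a GPRGDResult with named fields. A minimal sketch of what that means for a caller; it is not part of the commit and assumes a model m built by gpr_models.create_model() and a candidate matrix X_samples, as in the configuration_recommendation() hunk further below:

# Illustration only, not from the diff: consuming the new GPRGDResult return value.
from analysis.gpr.optimize import tf_optimize

res = tf_optimize(m.model, X_samples)

# Old call sites unpacked a tuple:
#     Xnew_value, y_mean_value, y_var_value, loss_value = tf_optimize(m.model, X_samples)
# New call sites read named attributes (constructor order: ypreds, sigmas, minl, minl_conf):
best_confs = res.minl_conf   # optimized inputs (formerly Xnew_value)
y_means = res.ypreds         # predicted means (formerly y_mean_value)
y_vars = res.sigmas          # predicted variances (formerly y_var_value)
losses = res.minl            # final loss values (formerly loss_value)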


@@ -25,6 +25,8 @@ INIT_FLIP_PROB = 0.3
FLIP_PROB_DECAY = 0.5

# ---GPR CONSTANTS---
+USE_GPFLOW = True
+
DEFAULT_LENGTH_SCALE = 1.0
DEFAULT_MAGNITUDE = 1.0
@@ -56,6 +58,13 @@ DEFAULT_SIGMA_MULTIPLIER = 3.0
DEFAULT_MU_MULTIPLIER = 1.0
+DEFAULT_UCB_SCALE = 0.2
+
+# ---HYPERPARAMETER TUNING FOR GPR---
+HP_MAX_ITER = 5000
+HP_LEARNING_RATE = 0.001
+
# ---GRADIENT DESCENT FOR DNN---
DNN_TRAIN_ITER = 500
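The new constants split the GPR budgets: per the HYPERPARAMETER TUNING FOR GPR comment, HP_MAX_ITER and HP_LEARNING_RATE govern fitting the gpflow model's hyperparameters, while the existing MAX_ITER and DEFAULT_LEARNING_RATE still govern the gradient-descent search over candidate configurations, and DEFAULT_UCB_SCALE scales the UCB acquisition term. A rough sketch of how they are wired together, mirroring the configuration_recommendation() change below (illustration only, not part of the commit):

# Illustration only: how the new settings feed the gpflow path in the hunk below.
from website.settings import (USE_GPFLOW, HP_LEARNING_RATE, HP_MAX_ITER,
                              DEFAULT_LEARNING_RATE, MAX_ITER, DEFAULT_UCB_SCALE)

if USE_GPFLOW:
    # Budget for fitting the GP hyperparameters ...
    model_kwargs = {'model_learning_rate': HP_LEARNING_RATE, 'model_maxiter': HP_MAX_ITER}
    # ... and a separate budget for the gradient-descent recommendation search,
    # plus the scale of the UCB exploration term.
    opt_kwargs = {'learning_rate': DEFAULT_LEARNING_RATE, 'maxiter': MAX_ITER}
    ucb_scale = DEFAULT_UCB_SCALE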


@@ -6,6 +6,8 @@
import random
import queue
import numpy as np
+import tensorflow as tf
+import gpflow
from pyDOE import lhs
from scipy.stats import uniform
@@ -18,6 +20,9 @@ from analysis.ddpg.ddpg import DDPG
from analysis.gp import GPRNP
from analysis.gp_tf import GPRGD
from analysis.nn_tf import NeuralNet
+from analysis.gpr import gpr_models
+from analysis.gpr import ucb
+from analysis.gpr.optimize import tf_optimize
from analysis.preprocessing import Bin, DummyEncoder
from analysis.constraints import ParamConstraintHelper
from website.models import PipelineData, PipelineRun, Result, Workload, KnobCatalog, SessionKnob
@@ -25,11 +30,12 @@ from website import db
from website.types import PipelineTaskType, AlgorithmType
from website.utils import DataUtil, JSONUtil
from website.settings import IMPORTANT_KNOB_NUMBER, NUM_SAMPLES, TOP_NUM_CONFIG  # pylint: disable=no-name-in-module
-from website.settings import (DEFAULT_LENGTH_SCALE, DEFAULT_MAGNITUDE,
+from website.settings import (USE_GPFLOW, DEFAULT_LENGTH_SCALE, DEFAULT_MAGNITUDE,
                              MAX_TRAIN_SIZE, BATCH_SIZE, NUM_THREADS,
                              DEFAULT_RIDGE, DEFAULT_LEARNING_RATE,
                              DEFAULT_EPSILON, MAX_ITER, GPR_EPS,
                              DEFAULT_SIGMA_MULTIPLIER, DEFAULT_MU_MULTIPLIER,
+                              DEFAULT_UCB_SCALE, HP_LEARNING_RATE, HP_MAX_ITER,
                              DDPG_BATCH_SIZE, ACTOR_LEARNING_RATE,
                              CRITIC_LEARNING_RATE, UPDATE_EPOCHS,
                              ACTOR_HIDDEN_SIZES, CRITIC_HIDDEN_SIZES,
@@ -613,6 +619,23 @@ def configuration_recommendation(recommendation_input):
    elif algorithm == AlgorithmType.GPR:
        # default gpr model
+        if USE_GPFLOW:
+            model_kwargs = {}
+            model_kwargs['model_learning_rate'] = HP_LEARNING_RATE
+            model_kwargs['model_maxiter'] = HP_MAX_ITER
+            opt_kwargs = {}
+            opt_kwargs['learning_rate'] = DEFAULT_LEARNING_RATE
+            opt_kwargs['maxiter'] = MAX_ITER
+            opt_kwargs['bounds'] = [X_min, X_max]
+            ucb_beta = 'get_beta_td'
+            opt_kwargs['ucb_beta'] = ucb.get_ucb_beta(ucb_beta, scale=DEFAULT_UCB_SCALE,
+                                                      t=i + 1., ndim=X_scaled.shape[1])
+            tf.reset_default_graph()
+            graph = tf.get_default_graph()
+            gpflow.reset_default_session(graph=graph)
+            m = gpr_models.create_model('BasicGP', X=X_scaled, y=y_scaled, **model_kwargs)
+            res = tf_optimize(m.model, X_samples, **opt_kwargs)
+        else:
+            model = GPRGD(length_scale=DEFAULT_LENGTH_SCALE,
+                          magnitude=DEFAULT_MAGNITUDE,
+                          max_train_size=MAX_TRAIN_SIZE,