add tests for new gpr models

bohanjason 2019-11-30 15:58:25 -05:00 committed by Dana Van Aken
parent 7ebb2ea473
commit 7b57eb98a2
3 changed files with 84 additions and 4 deletions


@@ -36,7 +36,7 @@ class GPRGDResult(GPRResult):
 class GPR(object):
 
-    def __init__(self, length_scale=1.0, magnitude=1.0, ridge=1.0, max_train_size=7000,
+    def __init__(self, length_scale=2.0, magnitude=1.0, ridge=1.0, max_train_size=7000,
                  batch_size=3000, num_threads=4, check_numerics=True, debug=False,
                  hyperparameter_trainable=False):
         assert np.isscalar(length_scale)
@@ -348,7 +348,7 @@ class GPRGD(GPR):
     GP_BETA_CONST = "CONST"
 
     def __init__(self,
-                 length_scale=1.0,
+                 length_scale=2.0,
                  magnitude=1.0,
                  ridge=1.0,
                  max_train_size=7000,
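The two hunks above only raise the default length_scale from 1.0 to 2.0 for GPR and GPRGD. As a quick illustration, not part of this diff and relying solely on the GPRGD interface exercised by the tests below (fit(X_train, y_train, X_min, X_max), then predict(X_test) returning ypreds/sigmas), constructing the model without an explicit length_scale now picks up 2.0:

import numpy as np
from analysis.gp_tf import GPRGD

# Toy data purely for illustration; shapes mirror the tests below
# (2-D feature matrix, column-vector targets).
X_train = np.random.rand(50, 4)
y_train = np.random.rand(50, 1)
X_test = np.random.rand(2, 4)

# length_scale is omitted, so the new default of 2.0 applies.
model = GPRGD(magnitude=1.0, ridge=1.0, max_iter=10, learning_rate=0.01)
model.fit(X_train, y_train, np.min(X_train, 0), np.max(X_train, 0))
result = model.predict(X_test)  # result.ypreds / result.sigmas, as asserted in the tests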


@@ -4,11 +4,16 @@
 # Copyright (c) 2017-18, Carnegie Mellon University Database Group
 #
 import unittest
+import random
 import numpy as np
+import gpflow
+import tensorflow as tf
 from sklearn import datasets
 from analysis.gp import GPRNP
 from analysis.gp_tf import GPR
 from analysis.gp_tf import GPRGD
+from analysis.gpr import gpr_models
+from analysis.gpr.optimize import tf_optimize
 # test numpy version GPR
 class TestGPRNP(unittest.TestCase):
@@ -88,3 +93,78 @@ class TestGPRGD(unittest.TestCase):
         sigmas_round = [round(x[0], 4) for x in self.gpr_result.sigmas]
         expected_sigmas = [1.4142, 1.4142, 1.4142, 1.4142, 1.4142, 1.4142]
         self.assertEqual(sigmas_round, expected_sigmas)
+
+
+# test Gradient Descent in GPFlow model
+class TestGPRGP(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        super(TestGPRGP, cls).setUpClass()
+        boston = datasets.load_boston()
+        data = boston['data']
+        X_train = data[0:500]
+        X_test = data[500:501]
+        y_train = boston['target'][0:500].reshape(500, 1)
+        X_min = np.min(X_train, 0)
+        X_max = np.max(X_train, 0)
+        random.seed(0)
+        np.random.seed(0)
+        tf.set_random_seed(0)
+        model_kwargs = {}
+        opt_kwargs = {}
+        opt_kwargs['learning_rate'] = 0.01
+        opt_kwargs['maxiter'] = 10
+        opt_kwargs['bounds'] = [X_min, X_max]
+        opt_kwargs['ucb_beta'] = 1.0
+        tf.reset_default_graph()
+        graph = tf.get_default_graph()
+        gpflow.reset_default_session(graph=graph)
+        cls.m = gpr_models.create_model('BasicGP', X=X_train, y=y_train, **model_kwargs)
+        cls.gpr_result = tf_optimize(cls.m.model, X_test, **opt_kwargs)
+
+    def test_gprnp_ypreds(self):
+        ypreds_round = [round(x[0], 4) for x in self.gpr_result.ypreds]
+        expected_ypreds = [0.5272]
+        self.assertEqual(ypreds_round, expected_ypreds)
+
+    def test_gprnp_sigmas(self):
+        sigmas_round = [round(x[0], 4) for x in self.gpr_result.sigmas]
+        expected_sigmas = [1.4153]
+        self.assertEqual(sigmas_round, expected_sigmas)
+
+
+# test Gradient Descent in Tensorflow GPRGD model
+class TestGPRGDGD(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        super(TestGPRGDGD, cls).setUpClass()
+        boston = datasets.load_boston()
+        data = boston['data']
+        X_train = data[0:500]
+        X_test = data[500:501]
+        y_train = boston['target'][0:500].reshape(500, 1)
+        Xmin = np.min(X_train, 0)
+        Xmax = np.max(X_train, 0)
+        random.seed(0)
+        np.random.seed(0)
+        tf.set_random_seed(0)
+        cls.model = GPRGD(length_scale=2.0, magnitude=1.0, max_iter=10, learning_rate=0.01,
+                          ridge=1.0, hyperparameter_trainable=True, sigma_multiplier=1.0)
+        cls.model.fit(X_train, y_train, Xmin, Xmax)
+        cls.gpr_result = cls.model.predict(X_test)
+
+    def test_gprnp_ypreds(self):
+        ypreds_round = [round(x[0], 4) for x in self.gpr_result.ypreds]
+        expected_ypreds = [0.5272]
+        self.assertEqual(ypreds_round, expected_ypreds)
+
+    def test_gprnp_sigmas(self):
+        sigmas_round = [round(x[0], 4) for x in self.gpr_result.sigmas]
+        expected_sigmas = [1.4153]
+        self.assertEqual(sigmas_round, expected_sigmas)
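For reference, a minimal way to run just the two new test cases is sketched below. It is not part of this diff and assumes the test module above is executed directly (its file path is not shown here); it uses only standard unittest APIs.

# Hypothetical runner snippet; TestGPRGP and TestGPRGDGD are defined above.
import unittest

if __name__ == '__main__':
    loader = unittest.defaultTestLoader
    suite = unittest.TestSuite()
    suite.addTests(loader.loadTestsFromTestCase(TestGPRGP))
    suite.addTests(loader.loadTestsFromTestCase(TestGPRGDGD))
    unittest.TextTestRunner(verbosity=2).run(suite)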


@@ -27,7 +27,7 @@ FLIP_PROB_DECAY = 0.5
 # ---GPR CONSTANTS---
 USE_GPFLOW = True
-DEFAULT_LENGTH_SCALE = 1.0
+DEFAULT_LENGTH_SCALE = 2.0
 DEFAULT_MAGNITUDE = 1.0
@@ -54,7 +54,7 @@ DEFAULT_RIDGE = 1.0
 DEFAULT_EPSILON = 1e-6
-DEFAULT_SIGMA_MULTIPLIER = 3.0
+DEFAULT_SIGMA_MULTIPLIER = 1.0
 DEFAULT_MU_MULTIPLIER = 1.0
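These constants presumably feed GPR model construction elsewhere in the codebase. The wiring below is an assumption, not taken from this diff (the constants' import path is hypothetical), and is shown only to make the effect of the new defaults concrete:

# Assumed wiring -- the module path `analysis.constants` is hypothetical.
from analysis.constants import (DEFAULT_LENGTH_SCALE, DEFAULT_MAGNITUDE,
                                DEFAULT_RIDGE, DEFAULT_SIGMA_MULTIPLIER)
from analysis.gp_tf import GPRGD

model = GPRGD(length_scale=DEFAULT_LENGTH_SCALE,          # now 2.0 instead of 1.0
              magnitude=DEFAULT_MAGNITUDE,
              ridge=DEFAULT_RIDGE,
              sigma_multiplier=DEFAULT_SIGMA_MULTIPLIER)  # now 1.0 instead of 3.0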