save dnn model into database
This commit is contained in:
parent c37ef9c072
commit 25d0838376
@@ -264,6 +264,8 @@ def clean_logs():

 @task
 def lhs_samples(count=10):
+    if not os.path.exists(CONF['lhs_save_path']):
+        os.makedirs(CONF['lhs_save_path'])
     cmd = 'python3 lhs.py {} {} {}'.format(count, CONF['lhs_knob_path'], CONF['lhs_save_path'])
     local(cmd)
@@ -8,6 +8,7 @@ Created on Sep 16, 2019
 @author: Bohan Zhang
 '''

+import pickle
 import numpy as np
 import tensorflow as tf
 from tensorflow import keras
@@ -28,7 +29,6 @@ class NeuralNet(object):

     def __init__(self,
                  n_input,
-                 weights_file,
                  learning_rate=0.01,
                  debug=False,
                  debug_interval=100,
@@ -36,9 +36,6 @@ class NeuralNet(object):
                  explore_iters=500,
                  noise_scale_begin=0.1,
                  noise_scale_end=0):
-        # absolute path for the model weitghs file
-        # one model for each (project, session)
-        self.weights_file = weights_file

         self.history = None
         self.recommend_iters = 0
@@ -58,7 +55,6 @@ class NeuralNet(object):
             layers.Dense(64, activation=tf.nn.relu),
             layers.Dense(1)
         ])
-        self.load_weights()
         self.model.compile(loss='mean_squared_error',
                            optimizer=self.optimizer,
                            metrics=['mean_squared_error', 'mean_absolute_error'])
@@ -66,17 +62,28 @@ class NeuralNet(object):
         self.ops = {}
         self.build_graph()

-    def save_weights(self):
-        self.model.save_weights(self.weights_file)
+    def save_weights(self, weights_file):
+        self.model.save_weights(weights_file)

-    def load_weights(self):
+    def load_weights(self, weights_file):
         try:
-            self.model.load_weights(self.weights_file)
+            self.model.load_weights(weights_file)
             if self.debug:
                 LOG.info('Neural Network Model weights file exists, load weights from the file')
         except Exception: # pylint: disable=broad-except
             LOG.info('Weights file does not match neural network model, train model from scratch')

+    def get_weights_bin(self):
+        return pickle.dumps(self.model.get_weights())
+
+    def set_weights_bin(self, weights):
+        try:
+            self.model.set_weights(pickle.loads(weights))
+            if self.debug:
+                LOG.info('Neural Network Model weights exists, load the existing weights')
+        except Exception: # pylint: disable=broad-except
+            LOG.info('Weights does not match neural network model, train model from scratch')
+
     # Build same neural network as self.model, But input X is variables,
     # weights are placedholders. Find optimial X using gradient descent.
     def build_graph(self):
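The get_weights_bin()/set_weights_bin() pair above is the heart of the change: instead of writing a Keras checkpoint to a per-session file, the full list of weight tensors is pickled into a single bytes blob that can live in a database column. A minimal sketch of that round trip, assuming a stand-in Sequential model (layer sizes here are illustrative, not taken from this commit):

import pickle

from tensorflow import keras

# Stand-in model; the real NeuralNet builds its own architecture.
model = keras.Sequential([
    keras.layers.Dense(64, activation='relu', input_shape=(12,)),
    keras.layers.Dense(1),
])

# Serialize: get_weights() returns a list of numpy arrays.
blob = pickle.dumps(model.get_weights())
assert isinstance(blob, bytes)

# Deserialize into a model with the same architecture; mismatched
# shapes raise, which is why set_weights_bin() catches the error
# and lets the model train from scratch instead.
model.set_weights(pickle.loads(blob))

Note that a pickled blob is only readable by a compatible pickle/numpy stack, which is another reason set_weights_bin() falls back to fresh training on any load failure.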
@@ -109,8 +116,6 @@ class NeuralNet(object):
     def fit(self, X_train, y_train, fit_epochs=500):
         self.history = self.model.fit(
             X_train, y_train, epochs=fit_epochs, verbose=0)
-        # save model weights
-        self.save_weights()
         if self.debug:
             mse = self.history.history['mean_squared_error']
             i = 0
@@ -188,6 +188,7 @@ class Migration(migrations.Migration):
                 ('ddpg_actor_model', models.BinaryField(null=True, blank=True)),
                 ('ddpg_critic_model', models.BinaryField(null=True, blank=True)),
                 ('ddpg_reply_memory', models.BinaryField(null=True, blank=True)),
+                ('dnn_model', models.BinaryField(null=True, blank=True)),
                 ('creation_time', models.DateTimeField()),
                 ('last_update', models.DateTimeField()),
                 ('upload_code', models.CharField(max_length=30, unique=True)),
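This hunk adds the column directly to the initial migration, which only takes effect on fresh installs. On an already-migrated database, the same schema change would normally ship as a separate migration; a hypothetical sketch (the app label and migration numbering are assumed, not part of this commit):

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('website', '0001_initial'),  # assumed predecessor
    ]

    operations = [
        # Add the nullable binary column that holds the pickled DNN weights.
        migrations.AddField(
            model_name='session',
            name='dnn_model',
            field=models.BinaryField(null=True, blank=True),
        ),
    ]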
@@ -191,6 +191,7 @@ class Session(BaseModel):
     ddpg_actor_model = models.BinaryField(null=True, blank=True)
     ddpg_critic_model = models.BinaryField(null=True, blank=True)
     ddpg_reply_memory = models.BinaryField(null=True, blank=True)
+    dnn_model = models.BinaryField(null=True, blank=True)

     project = models.ForeignKey(Project)
     creation_time = models.DateTimeField()
@@ -33,9 +33,6 @@ CONFIG_DIR = join(PROJECT_ROOT, 'config')
 # Where the log files are stored
 LOG_DIR = join(PROJECT_ROOT, 'log')

-# Where the model weight files are stored
-MODEL_DIR = join(PROJECT_ROOT, 'model')
-
 # File/directory upload permissions
 FILE_UPLOAD_DIRECTORY_PERMISSIONS = 0o664
 FILE_UPLOAD_PERMISSIONS = 0o664
@@ -57,13 +54,6 @@ try:
 except OSError:  # Invalid permissions
     pass

-# Try to create the model directory
-try:
-    if not exists(MODEL_DIR):
-        os.mkdir(MODEL_DIR)
-except OSError:  # Invalid permissions
-    pass
-
 # ==============================================
 # DEBUG CONFIGURATION
 # ==============================================
@@ -3,7 +3,6 @@
 #
 # Copyright (c) 2017-18, Carnegie Mellon University Database Group
 #
-import os
 import random
 import queue
 import numpy as np
@@ -37,7 +36,6 @@ from website.settings import (DEFAULT_LENGTH_SCALE, DEFAULT_MAGNITUDE,
                               DNN_DEBUG, DNN_DEBUG_INTERVAL)

 from website.settings import INIT_FLIP_PROB, FLIP_PROB_DECAY
-from website.settings import MODEL_DIR
 from website.types import VarType
@@ -543,27 +541,27 @@ def configuration_recommendation(recommendation_input):
         except queue.Empty:
             break

-    # one model for each (project, session)
-    session = newest_result.session.pk
-    project = newest_result.session.project.pk
-    full_path = os.path.join(MODEL_DIR, 'p' + str(project) + '_s' + str(session) + '_nn.weights')
-
+    session = newest_result.session
     res = None
     assert algorithm in ['gpr', 'dnn']

     if algorithm == 'dnn':
         # neural network model
-        model_nn = NeuralNet(weights_file=full_path,
-                             n_input=X_samples.shape[1],
+        model_nn = NeuralNet(n_input=X_samples.shape[1],
                              batch_size=X_samples.shape[0],
                              explore_iters=DNN_EXPLORE_ITER,
                              noise_scale_begin=DNN_NOISE_SCALE_BEGIN,
                              noise_scale_end=DNN_NOISE_SCALE_END,
                              debug=DNN_DEBUG,
                              debug_interval=DNN_DEBUG_INTERVAL)
+        if session.dnn_model is not None:
+            model_nn.set_weights_bin(session.dnn_model)
         model_nn.fit(X_scaled, y_scaled, fit_epochs=DNN_TRAIN_ITER)
         res = model_nn.recommend(X_samples, X_min, X_max,
                                  explore=DNN_EXPLORE, recommend_epochs=MAX_ITER)
+        session.dnn_model = model_nn.get_weights_bin()
+        session.save()

     elif algorithm == 'gpr':
         # default gpr model
         model = GPRGD(length_scale=DEFAULT_LENGTH_SCALE,