Removed old LHS code from driver (the website already supports running LHS)

dvanaken 2019-11-04 21:35:49 -05:00 committed by Dana Van Aken
parent fcec765a7c
commit 159e26e1c7
4 changed files with 0 additions and 320 deletions


@@ -1,83 +0,0 @@
[
    {
        "name": "SHARED_POOL_SIZE",
        "tuning_range": {
            "minval": "500MB",
            "maxval": "2500MB"
        },
        "default": "1500MB",
        "type": "bytes"
    },
    {
        "name": "DB_CACHE_SIZE",
        "tuning_range": {
            "minval": "10GB",
            "maxval": "24GB"
        },
        "default": "14GB",
        "type": "bytes"
    },
    {
        "name": "LOG_BUFFER",
        "tuning_range": {
            "minval": "10MB",
            "maxval": "1000MB"
        },
        "default": "20MB",
        "type": "bytes"
    },
    {
        "name": "LARGE_POOL_SIZE",
        "tuning_range": {
            "minval": "10MB",
            "maxval": "1000MB"
        },
        "default": "100MB",
        "type": "bytes"
    },
    {
        "name": "STREAMS_POOL_SIZE",
        "tuning_range": {
            "minval": "10MB",
            "maxval": "1000MB"
        },
        "default": "100MB",
        "type": "bytes"
    },
    {
        "name": "bitmap_merge_area_size",
        "tuning_range": {
            "minval": "1000000",
            "maxval": "20000000"
        },
        "default": "1MB",
        "type": "integer"
    },
    {
        "name": "create_bitmap_area_size",
        "tuning_range": {
            "minval": "1000000",
            "maxval": "100000000"
        },
        "default": "8MB",
        "type": "integer"
    },
    {
        "name": "hash_area_size",
        "tuning_range": {
            "minval": "65536",
            "maxval": "1000000"
        },
        "default": "65536",
        "type": "integer"
    },
    {
        "name": "sort_area_size",
        "tuning_range": {
            "minval": "128000",
            "maxval": "2000000"
        },
        "default": "128000",
        "type": "integer"
    }
]
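
Note: minval/maxval strings like "500MB" above are converted to raw numeric values by the removed lhs.py further down. A minimal sketch of that conversion, assuming the same suffix table as the script's BYTES_SYSTEM (illustrative only, not the exact removed code):

# Sketch: convert a human-readable size such as "500MB" to raw bytes.
BYTES_SYSTEM = [(1024 ** 3, 'GB'), (1024 ** 2, 'MB')]

def to_raw_bytes(value):
    for factor, suffix in BYTES_SYSTEM:
        if value.endswith(suffix):
            return int(value[:-len(suffix)]) * factor
    return int(value)  # plain integer-typed knobs pass through unchanged

assert to_raw_bytes("500MB") == 500 * 1024 ** 2
assert to_raw_bytes("24GB") == 24 * 1024 ** 3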


@@ -1,110 +0,0 @@
[
    {
        "name": "effective_cache_size",
        "tuning_range": {
            "minval": "4GB",
            "maxval": "16GB"
        },
        "default": "4GB",
        "type": "bytes"
    },
    {
        "name": "shared_buffers",
        "tuning_range": {
            "minval": "128MB",
            "maxval": "12GB"
        },
        "default": "128MB",
        "type": "bytes"
    },
    {
        "name": "max_parallel_workers_per_gather",
        "tuning_range": {
            "minval": 0,
            "maxval": 8
        },
        "default": 0,
        "type": "integer"
    },
    {
        "name": "default_statistics_target",
        "tuning_range": {
            "minval": 100,
            "maxval": 2048
        },
        "default": 100,
        "type": "integer"
    },
    {
        "name": "bgwriter_lru_maxpages",
        "tuning_range": {
            "minval": 0,
            "maxval": 1000
        },
        "default": 10,
        "type": "integer"
    },
    {
        "name": "bgwriter_delay",
        "tuning_range": {
            "minval": "10ms",
            "maxval": "1min"
        },
        "default": "200ms",
        "type": "time"
    },
    {
        "name": "random_page_cost",
        "tuning_range": {
            "minval": 1,
            "maxval": 10
        },
        "default": 4.0,
        "type": "float"
    },
    {
        "name": "checkpoint_completion_target",
        "tuning_range": {
            "minval": 0.1,
            "maxval": 0.9
        },
        "default": 0.5,
        "type": "float"
    },
    {
        "name": "checkpoint_timeout",
        "tuning_range": {
            "minval": "1min",
            "maxval": "30min"
        },
        "default": "5min",
        "type": "time"
    },
    {
        "name": "max_wal_size",
        "tuning_range": {
            "minval": "256MB",
            "maxval": "16GB"
        },
        "default": "1GB",
        "type": "bytes"
    },
    {
        "name": "temp_buffers",
        "tuning_range": {
            "minval": "8MB",
            "maxval": "1GB"
        },
        "default": "8MB",
        "type": "bytes"
    },
    {
        "name": "work_mem",
        "tuning_range": {
            "minval": "4MB",
            "maxval": "1GB"
        },
        "default": "4MB",
        "type": "bytes"
    }
]
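
For reference, each LHS sample over the knobs above was written out by the removed script as a small JSON file named config_<i>, of the form (knob values illustrative):

{"recommendation": {"shared_buffers": "5GB", "work_mem": "64MB", "checkpoint_timeout": "12min"}}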


@@ -1,126 +0,0 @@
#
# OtterTune - lhs.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
import sys
import json
import os

import numpy as np
from pyDOE import lhs
from scipy.stats import uniform
from hurry.filesize import size

BYTES_SYSTEM = [
    (1024 ** 5, 'PB'),
    (1024 ** 4, 'TB'),
    (1024 ** 3, 'GB'),
    (1024 ** 2, 'MB'),
    (1024 ** 1, 'kB'),
    (1024 ** 0, 'B'),
]

TIME_SYSTEM = [
    (1000 * 60 * 60 * 24, 'd'),
    (1000 * 60 * 60, 'h'),
    (1000 * 60, 'min'),
    (1000, 's'),
    (1, 'ms'),
]


def get_raw_size(value, system):
    # Convert a human-readable value like '500MB' or '5min' into raw units.
    for factor, suffix in system:
        if value.endswith(suffix):
            if len(value) == len(suffix):
                amount = 1
            else:
                try:
                    amount = int(value[:-len(suffix)])
                except ValueError:
                    continue
            return amount * factor
    return None


def get_knob_raw(value, knob_type):
    if knob_type == 'integer':
        return int(value)
    elif knob_type == 'float':
        return float(value)
    elif knob_type == 'bytes':
        return get_raw_size(value, BYTES_SYSTEM)
    elif knob_type == 'time':
        return get_raw_size(value, TIME_SYSTEM)
    else:
        raise Exception('Unsupported knob type: {}'.format(knob_type))


def get_knob_readable(value, knob_type):
    if knob_type == 'integer':
        return int(round(value))
    elif knob_type == 'float':
        return float(value)
    elif knob_type == 'bytes':
        value = int(round(value))
        return size(value, system=BYTES_SYSTEM)
    elif knob_type == 'time':
        value = int(round(value))
        return size(value, system=TIME_SYSTEM)
    else:
        raise Exception('Unsupported knob type: {}'.format(knob_type))


def get_knobs_readable(values, types):
    result = []
    for i, value in enumerate(values):
        result.append(get_knob_readable(value, types[i]))
    return result


def main(args):
    if len(args) != 4:
        raise Exception("Usage: python3 lhs.py [Samples Count] [Knob Path] [Save Path]")
    knob_path = args[2]
    save_path = args[3]
    with open(knob_path, "r") as f:
        tuning_knobs = json.load(f)

    names = []
    maxvals = []
    minvals = []
    types = []
    for knob in tuning_knobs:
        names.append(knob['name'])
        maxvals.append(get_knob_raw(knob['tuning_range']['maxval'], knob['type']))
        minvals.append(get_knob_raw(knob['tuning_range']['minval'], knob['type']))
        types.append(knob['type'])

    # Draw a Latin hypercube sample in [0, 1]^nfeats, then scale each
    # column onto the corresponding knob's [minval, maxval] range.
    nsamples = int(args[1])
    nfeats = len(tuning_knobs)
    samples = lhs(nfeats, samples=nsamples, criterion='maximin')
    maxvals = np.array(maxvals)
    minvals = np.array(minvals)
    scales = maxvals - minvals
    for fidx in range(nfeats):
        samples[:, fidx] = uniform(loc=minvals[fidx], scale=scales[fidx]).ppf(samples[:, fidx])

    samples_readable = []
    for sample in samples:
        samples_readable.append(get_knobs_readable(sample, types))

    # Write one config file per sample.
    config = {'recommendation': {}}
    for sidx in range(nsamples):
        for fidx in range(nfeats):
            config["recommendation"][names[fidx]] = samples_readable[sidx][fidx]
        with open(os.path.join(save_path, 'config_' + str(sidx)), 'w+') as f:
            f.write(json.dumps(config))


if __name__ == '__main__':
    main(sys.argv)
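
The core of the removed script is the scaling step in main(): pyDOE's lhs() returns an nsamples x nfeats matrix in [0, 1] with one stratum per sample along each axis, and uniform(loc=minval, scale=maxval - minval).ppf(x) maps each column linearly onto the knob's range (the ppf of a uniform distribution is just loc + scale * x). A self-contained sketch of the same idea without the pyDOE dependency (plain stratified LHS, no 'maximin' criterion; names and values here are illustrative):

import numpy as np

def simple_lhs(nfeats, nsamples, seed=0):
    # Latin hypercube on [0, 1): one point per stratum along each axis,
    # with strata paired randomly across features.
    rng = np.random.default_rng(seed)
    unit = np.empty((nsamples, nfeats))
    for f in range(nfeats):
        strata = (np.arange(nsamples) + rng.random(nsamples)) / nsamples
        unit[:, f] = rng.permutation(strata)
    return unit

# Map unit samples onto two byte-valued ranges from the postgres knob
# file above (effective_cache_size 4-16GB, shared_buffers 128MB-12GB).
minvals = np.array([4 * 1024 ** 3, 128 * 1024 ** 2], dtype=np.int64)
maxvals = np.array([16 * 1024 ** 3, 12 * 1024 ** 3], dtype=np.int64)
scaled = minvals + simple_lhs(2, 5) * (maxvals - minvals)
print(scaled.astype(np.int64))  # one row per sampled configuration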


@@ -1 +0,0 @@
sudo -b nohup fab run_lhs > lhs.log 2>&1 < /dev/null
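
(For context: sudo -b runs the command in the background, nohup keeps the Fabric task run_lhs alive after the shell exits, and stdout/stderr are redirected to lhs.log.)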