Changes:
- Fixed a bug in the validator that caused it to validate files in excluded directories, and updated the pre-commit hook.
- Hardware uniqueness is now based on the cpu, memory, storage, and storage_type fields (fixes #230).
- Initial minval/maxval settings for a session knob are calculated from its resource type and the user's hardware.
- Squashed migrations.
This commit is contained in:
parent c14a337695
commit e676433ece
@@ -14,29 +14,35 @@
 # ln -s ../../script/git-hooks/pre-commit ./pre-commit


-FILES=$(git diff --name-only HEAD --cached --diff-filter=d | grep '\.\(py\)$')
+FILES=$(git diff --name-only --cached --diff-filter=d | grep -E '*\.(py|java)$')

-SERVER_TESTS_RESULT=0
+WEBSITE_TESTS_RESULT=0
+ANALYSIS_TESTS_RESULT=0
 CONTROLLER_TESTS_RESULT=0
 VALIDATOR_RESULT=0

 if [ -n "$FILES" ]; then

-    # Uncomment to run the server tests
-    # cd server/website && python manage.py test -v 2
-    # SERVER_TESTS_RESULT=$?
+    # Uncomment to run the website tests
+    # cd server/website && python3 manage.py test --noinput -v 2
+    # WEBSITE_TESTS_RESULT=$?
     # cd ../..

+    # Uncomment to run the analysis tests
+    # cd server && python3 -m unittest discover -s analysis/tests -v
+    # ANALYSIS_TESTS_RESULT=$?
+    # cd ..
+
     # Uncomment to run the controller tests
     # cd controller && gradle build -q
     # CONTROLLER_TESTS_RESULT=$?
     # cd ..

     # Run source code validator
-    python script/validators/source_validator.py $FILES
+    python3 script/validators/source_validator.py $FILES
     VALIDATOR_RESULT=$?

-    if [ "$VALIDATOR_RESULT" -ne 0 ] || [ "$SERVER_TESTS_RESULT" -ne 0 ] || [ "$CONTROLLER_TESTS_RESULT" -ne 0 ]; then
+    if [ "$VALIDATOR_RESULT" -ne 0 ] || [ "$WEBSITE_TESTS_RESULT" -ne 0 ] || [ "$ANALYSIS_TESTS_RESULT" -ne 0 ] || [ "$CONTROLLER_TESTS_RESULT" -ne 0 ]; then

        echo " +------------------------------------------------------------+"
        echo " |                                                            |"
@@ -45,8 +51,13 @@ if [ -n "$FILES" ]; then
        echo " +------------------------------------------------------------+"
        echo ""

-       if [ "$SERVER_TESTS_RESULT" -ne 0 ]; then
-           echo " FAILED server tests!"
+       if [ "$WEBSITE_TESTS_RESULT" -ne 0 ]; then
+           echo " FAILED website tests!"
+           echo ""
+       fi
+
+       if [ "$ANALYSIS_TESTS_RESULT" -ne 0 ]; then
+           echo " FAILED analysis tests!"
            echo ""
        fi
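For reference, the staged-file selection the hook relies on can be reproduced from Python (a minimal sketch, not part of the commit; it assumes it runs inside the repository):

import re
import subprocess

# Staged, non-deleted files, mirroring `git diff --name-only --cached --diff-filter=d`.
staged = subprocess.check_output(
    ["git", "diff", "--name-only", "--cached", "--diff-filter=d"]).decode().split()
# Keep only Python and Java sources, as the hook's grep filter does.
targets = [f for f in staged if re.search(r"\.(py|java)$", f)]
print(targets)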
@@ -18,7 +18,6 @@ import logging
 import imp
 import os
 import re
-import subprocess
 import sys
 import json
 import functools
@@ -67,6 +66,9 @@ EXCLUDE_DIRECTORIES = [

     # Django manage.py extensions
     os.path.join(OTTERTUNE_DIR, "server/website/website/management"),

+    # Old management scripts
+    os.path.join(OTTERTUNE_DIR, "server/website/script/management"),
 ]

 # Files that should NOT be checked
@@ -151,6 +153,9 @@ def validate_file(file_path):
         return True
     if not file_path.endswith(".py") and not file_path.endswith(".java"):
         return True
+    for exclude_dir in EXCLUDE_DIRECTORIES:
+        if file_path.startswith(exclude_dir):
+            return True

     LOG.debug("Validating file: %s", file_path)
     status = True
@@ -172,13 +177,25 @@ def validate_file(file_path):

 # Validate all the files in the root_dir passed as argument
 def validate_dir(root_dir):
-    if root_dir in EXCLUDE_DIRECTORIES:
-        return True
+    for exclude_dir in EXCLUDE_DIRECTORIES:
+        if root_dir.startswith(exclude_dir):
+            return True

     status = True
     for root, dirs, files in os.walk(root_dir):  # pylint: disable=not-an-iterable
         # Remove excluded dirs from list
-        dirs[:] = [d for d in dirs if os.path.join(root, d) not in EXCLUDE_DIRECTORIES]
+        valid_dirs = []
+        for d in dirs:
+            valid = True
+            for exclude_dir in EXCLUDE_DIRECTORIES:
+                if d.startswith(exclude_dir):
+                    valid = False
+                    break
+            if valid:
+                valid_dirs.append(d)
+        dirs[:] = valid_dirs

+        # Validate files
         for file_path in files:
             file_path = os.path.join(root, file_path)

@@ -187,6 +204,35 @@ def validate_dir(root_dir):
     return status


+def get_git_files(state):
+    if state == 'staged':
+        # Files staged for commit
+        cmd = r"git diff --name-only --cached --diff-filter=d | grep -E '*\.(py|java)$'"
+
+    elif state == 'unstaged':
+        # Tracked files not staged for commit
+        cmd = r"git diff --name-only --diff-filter=d | grep -E '*\.(py|java)$'"
+
+    elif state == 'untracked':
+        # Untracked files not staged for commit
+        cmd = r"git ls-files --other --exclude-standard | grep -E '*\.(py|java)$'"
+
+    with settings(warn_only=True):
+        res = local(cmd, capture=True)
+
+    if res.succeeded:
+        targets = res.stdout.strip().split('\n')
+
+        if not targets:
+            LOG.warning("No %s files found.", state)
+    else:
+        LOG.error("An error occurred while fetching %s files (exit code %d). "
+                  "Exiting...\n\n%s\n", state, res.return_code, res.stderr)
+        sys.exit(EXIT_FAILURE)
+
+    return targets
+
+
 # ==============================================
 # VALIDATOR FUNCTION DEFINITIONS
 # ==============================================
@@ -204,9 +250,12 @@ def check_pylint(file_path, config_path=None):

     with settings(warn_only=True), quiet():
         res = local('pylint {} {}'.format(' '.join(options), file_path), capture=True)

     if res.stdout == '':
-        assert res.return_code == 0, 'return_code={}, expected=0\n{}'.format(
-            res.return_code, res.stderr)
+        if res.return_code != 0:
+            raise Exception(
+                'An error occurred while running pylint on {} (exit code {}).\n\n{}\n'.format(
+                    file_path, res.return_code, res.stderr))
         return True, None

     output = []
@@ -393,40 +442,66 @@ def main():
     parser = argparse.ArgumentParser(description="Validate OtterTune's source code")
     parser.add_argument('paths', metavar='PATH', type=str, nargs='*',
                         help='Files or directories to (recursively) validate')
+    parser.add_argument('-v', '--verbose', action='store_true',
+                        help='Enable verbose output')
     parser.add_argument('--staged-files', action='store_true',
                         help='Apply the selected action(s) to all staged files (git)')
+    parser.add_argument('--unstaged-files', action='store_true',
+                        help='Apply the selected action(s) to all unstaged tracked files (git)')
+    parser.add_argument('--untracked-files', action='store_true',
+                        help='Apply the selected action(s) to all untracked files (git)')
     args = parser.parse_args()

+    if args.verbose:
+        LOG.setLevel(logging.DEBUG)

     LOG.info('\nRunning source validators:\n%s\n',
              '\n'.join(' ' + v.name for v in VALIDATORS))
     for validator in VALIDATORS:
         if not validate_validator(validator.modules, validator.config_path):
             sys.exit(EXIT_FAILURE)

-    if args.staged_files:
-        targets = [os.path.abspath(os.path.join(OTTERTUNE_DIR, f))
-                   for f in subprocess.check_output(["git", "diff", "--name-only", "HEAD",
-                                                     "--cached", "--diff-filter=d"]).split()]
+    targets = []
+
+    if args.paths or args.staged_files or args.unstaged_files or args.untracked_files:
+        if args.paths:
+            targets += args.paths
+
+        if args.staged_files:
+            targets += get_git_files('staged')
+
+        if args.unstaged_files:
+            targets += get_git_files('unstaged')
+
+        if args.untracked_files:
+            targets += get_git_files('untracked')
+
         if not targets:
-            LOG.error("No staged files or not calling from a repository. Exiting...")
+            LOG.error("No files/directories found. Exiting...")
             sys.exit(EXIT_FAILURE)
-    elif args.paths:
-        targets = args.paths
     else:
         targets = DEFAULT_DIRS

+    targets = sorted(os.path.abspath(t) for t in targets)
+    LOG.info('\nFiles/directories to validate:\n%s\n',
+             '\n'.join(' ' + t for t in targets))
+
+    status = True
     for target in targets:
-        target = os.path.abspath(target)
         if os.path.isfile(target):
             LOG.debug("Scanning file: %s\n", target)
-            status = validate_file(target)
+            target_status = validate_file(target)
         elif os.path.isdir(target):
             LOG.debug("Scanning directory: %s\n", target)
-            status = validate_dir(target)
+            target_status = validate_dir(target)
         else:
             LOG.error("%s isn't a file or directory", target)
             sys.exit(EXIT_FAILURE)

+        if not target_status:
+            status = False
+
     if not status:
         LOG.info(SEPARATOR + '\n')
         LOG.info("Validation NOT successful\n")
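The exclusion logic above now matches by path prefix instead of exact membership, so files nested anywhere under an excluded directory are skipped. A minimal illustration (hypothetical absolute paths standing in for the OTTERTUNE_DIR-based entries):

# Hypothetical prefixes; the real list is built with os.path.join(OTTERTUNE_DIR, ...).
EXCLUDE_DIRECTORIES = [
    "/ottertune/server/website/website/management",
    "/ottertune/server/website/script/management",
]

def is_excluded(path):
    # Prefix match: anything under an excluded directory is skipped.
    return any(path.startswith(d) for d in EXCLUDE_DIRECTORIES)

assert is_excluded("/ottertune/server/website/website/management/commands/foo.py")
assert not is_excluded("/ottertune/server/website/website/models.py")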
@@ -3,6 +3,7 @@ celery==3.1.23
 Django==1.10.1
 django-celery==3.2.1
 django-debug-toolbar==1.5
+django-db-logger>=0.1.7
 django-request-logging==0.4.6
 mock==2.0.0
 Fabric3==1.13.1.post1
@@ -138,6 +138,18 @@ class SessionViewsTests(TestCase):

     fixtures = ['test_website.json']

+    post_data = {
+        'name': 'test_create_basic_session',
+        'description': 'testing create basic session...',
+        'tuning_session': 'no_tuning_session',
+        'algorithm': 1,
+        'cpu': '2',
+        'memory': '16',
+        'storage': '32',
+        'storage_type': 5,
+        'dbms': 1
+    }
+
     def setUp(self):
         self.client.login(username=TEST_USERNAME, password=TEST_PASSWORD)

@@ -154,17 +166,7 @@ class SessionViewsTests(TestCase):

     def test_create_basic_session_ok(self):
         form_addr = reverse('new_session', kwargs={'project_id': TEST_PROJECT_ID})
-        post_data = {
-            'name': 'test_create_basic_session',
-            'description': 'testing create basic session...',
-            'tuning_session': 'no_tuning_session',
-            'algorithm': 1,
-            'cpu': '2',
-            'memory': '16.0',
-            'storage': '32',
-            'dbms': 1
-        }
-        response = self.client.post(form_addr, post_data, follow=True)
+        response = self.client.post(form_addr, self.post_data, follow=True)
         self.assertEqual(response.status_code, 200)
         session_id = response.context['session'].pk
         self.assertRedirects(response, reverse('session',
@@ -173,17 +175,8 @@ class SessionViewsTests(TestCase):

     def test_create_tuning_session_ok(self):
         form_addr = reverse('new_session', kwargs={'project_id': TEST_PROJECT_ID})
-        post_data = {
-            'name': 'test_create_basic_session',
-            'description': 'testing create basic session...',
-            'tuning_session': 'tuning_session',
-            'cpu': '2',
-            'memory': '16.0',
-            'storage': '32',
-            'algorithm': 1,
-            'dbms': 1,
-            'target_objective': 'throughput_txn_per_sec'
-        }
+        post_data = dict(self.post_data)
+        post_data.update(tuning_session='tuning_session')
         response = self.client.post(form_addr, post_data, follow=True)
         self.assertEqual(response.status_code, 200)
         session_id = response.context['session'].pk
@@ -201,17 +194,8 @@ class SessionViewsTests(TestCase):
     def test_edit_basic_session_ok(self):
         form_addr = reverse('edit_session', kwargs={'project_id': TEST_PROJECT_ID,
                                                     'session_id': TEST_BASIC_SESSION_ID})
-        post_data = {
-            'name': 'new_session_name',
-            'description': 'testing edit basic session...',
-            'tuning_session': 'tuning_session',
-            'cpu': '2',
-            'memory': '16.0',
-            'storage': '32',
-            'algorithm': 1,
-            'dbms': 1,
-            'target_objective': 'throughput_txn_per_sec'
-        }
+        post_data = dict(self.post_data)
+        post_data.update(name='new_session_name')
         response = self.client.post(form_addr, post_data, follow=True)
         self.assertEqual(response.status_code, 200)
         self.assertRedirects(response, reverse('session',
@@ -255,18 +239,9 @@ class SessionViewsTests(TestCase):
     def test_delete_multiple_sessions(self):
         create_form_addr = reverse('new_session', kwargs={'project_id': TEST_PROJECT_ID})
         session_ids = []
+        post_data = dict(self.post_data)
         for i in range(5):
-            post_data = {
-                'name': 'session_{}'.format(i),
-                'description': "",
-                'tuning_session': 'no_tuning_session',
-                'cpu': '2',
-                'memory': '16.0',
-                'storage': '32',
-                'algorithm': 1,
-                'dbms': 1,
-                'target_objective': 'throughput_txn_per_sec'
-            }
+            post_data.update(name='session_{}'.format(i))
             response = self.client.post(create_form_addr, post_data, follow=True)
             self.assertEqual(response.status_code, 200)
             session_ids.append(response.context['session'].pk)
@@ -4,6 +4,7 @@
 # Copyright (c) 2017-18, Carnegie Mellon University Database Group
 #
 from django.contrib import admin
+from django.db.utils import ProgrammingError
 from django.utils.html import format_html
 from django_db_logger.admin import StatusLogAdmin
 from django_db_logger.models import StatusLog
@@ -185,6 +186,9 @@ UNUSED_DJCELERY_MODELS = (
     djcelery_models.WorkerState,
 )

-for model in UNUSED_DJCELERY_MODELS:
-    if model.objects.count() == 0:
-        admin.site.unregister(model)
+try:
+    for model in UNUSED_DJCELERY_MODELS:
+        if model.objects.count() == 0:
+            admin.site.unregister(model)
+except ProgrammingError:
+    pass
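The try/except around the unregister loop matters on a fresh install: model.objects.count() issues a real query, and before migrate has created the djcelery tables that query raises ProgrammingError, which would otherwise break loading the admin module. (This reading is inferred from the change itself, not stated in the commit.)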
@@ -84,7 +84,7 @@
     "category": "Resource Usage / Asynchronous Behavior",
     "maxval": 262143,
     "dbms": 4,
-    "resource": 4,
+    "resource": 2,
     "name": "global.max_worker_processes",
     "minval": 0,
     "default": 8,
@@ -3095,7 +3095,7 @@
     "summary": "Sets the maximum number of parallel processes per executor node.",
     "unit": "3",
     "description": "",
-    "resource": "4"
+    "resource": "2"
   },
   "model": "website.KnobCatalog"
 },
@@ -3255,7 +3255,7 @@
     "summary": "Sets the WAL size that triggers a checkpoint.",
     "unit": "1",
     "description": "",
-    "resource": "4"
+    "resource": "3"
   },
   "model": "website.KnobCatalog"
 },
@@ -3275,7 +3275,7 @@
     "summary": "Maximum number of concurrent worker processes.",
     "unit": "3",
     "description": "",
-    "resource": "4"
+    "resource": "2"
   },
   "model": "website.KnobCatalog"
 },
@@ -3315,7 +3315,7 @@
     "summary": "Sets the minimum size to shrink the WAL to.",
     "unit": "1",
     "description": "",
-    "resource": "4"
+    "resource": "3"
   },
   "model": "website.KnobCatalog"
 },
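These fixture edits reclassify knobs by resource type: worker-process knobs move from Other (4) to CPU (2), and WAL-size knobs to Storage (3), following the KnobResourceType choices (1=Memory, 2=CPU, 3=Storage, 4=Other) in the migration below. That classification is what lets a session knob's initial minval/maxval be derived from the user's hardware. A rough sketch of the idea only; the exact scaling rules are not visible in this diff, so the constants here are assumptions:

# Illustrative: derive an initial maxval from the session's hardware based on
# the knob's resource type. The real rules live in the session-knob code.
MEMORY, CPU, STORAGE, OTHER = 1, 2, 3, 4
GB = 1024 ** 3  # the same constant the commit adds in the knob-settings module

def initial_maxval(resource, hardware, catalog_maxval):
    if resource == CPU:
        return hardware['cpu'] * 2        # e.g. bound worker counts by core count
    if resource == MEMORY:
        return hardware['memory'] * GB    # memory knobs scale with RAM
    if resource == STORAGE:
        return hardware['storage'] * GB   # WAL-size knobs scale with disk size
    return catalog_maxval                 # OTHER: keep the catalog default

print(initial_maxval(CPU, {'cpu': 4, 'memory': 16, 'storage': 32}, 262143))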
@@ -12,13 +12,11 @@
 },
 {
   "fields": {
-    "type": 1,
-    "name": "New Hardware",
     "cpu": 4,
-    "memory": 16.0,
-    "storage": "32",
-    "storage_type": "",
-    "additional_specs": ""
+    "memory": 16,
+    "storage": 32,
+    "storage_type": 5,
+    "additional_specs": null
   },
   "model": "website.Hardware",
   "pk": 1
File diff suppressed because one or more lines are too long
@@ -10,9 +10,9 @@ Created on Jul 25, 2017
 '''

 from django import forms
-from django.db.models import Max

 from .models import Session, Project, Hardware, SessionKnob
+from .types import StorageType


 class NewResultForm(forms.Form):
@@ -44,18 +44,20 @@ class SessionForm(forms.ModelForm):
                                            required=False,
                                            label='Get new upload code')

-    cpu = forms.IntegerField(label='Number of Processors')
-    memory = forms.FloatField(label='RAM (GB)')
-    storage = forms.IntegerField(label='Storage (GB)')
+    cpu = forms.IntegerField(label='Number of CPUs', min_value=1)
+    memory = forms.IntegerField(label='Memory (GB)', min_value=1)
+    storage = forms.IntegerField(label='Storage (GB)', min_value=1)
+    storage_type = forms.ChoiceField(label='Storage Type', choices=StorageType.choices())

     def __init__(self, *args, **kwargs):
         super(SessionForm, self).__init__(*args, **kwargs)
         self.fields['description'].required = False
         self.fields['target_objective'].required = False
         self.fields['tuning_session'].required = True
-        self.fields['cpu'].initial = 2
-        self.fields['memory'].initial = 16.0
+        self.fields['cpu'].initial = 4
+        self.fields['memory'].initial = 16
         self.fields['storage'].initial = 32
+        self.fields['storage_type'].initial = StorageType.SSD

     def save(self, commit=True):
         model = super(SessionForm, self).save(commit=False)
@@ -63,23 +65,14 @@ class SessionForm(forms.ModelForm):
         cpu2 = self.cleaned_data['cpu']
         memory2 = self.cleaned_data['memory']
         storage2 = self.cleaned_data['storage']
+        storage_type2 = self.cleaned_data['storage_type']

-        if hasattr(model, 'hardware'):
-            model.hardware.cpu = cpu2
-            model.hardware.memory = memory2
-            model.hardware.storage = storage2
-            model.hardware.save()
-        else:
-            last_type = Hardware.objects.aggregate(Max('type'))['type__max']
-            if last_type is None:
-                last_type = 0
-            model.hardware = Hardware.objects.create(type=last_type + 1,
-                                                     name='New Hardware',
-                                                     cpu=cpu2,
+        hardware, _ = Hardware.objects.get_or_create(cpu=cpu2,
                                                      memory=memory2,
                                                      storage=storage2,
-                                                     storage_type='Default',
-                                                     additional_specs='{}')
+                                                     storage_type=storage_type2)
+        model.hardware = hardware

         if commit:
             model.save()
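With save() now calling get_or_create on (cpu, memory, storage, storage_type), two sessions that enter identical specs share a single Hardware row instead of each minting a new "New Hardware" entry; this is the #230 fix. A Django-shell style sketch of the resulting behavior:

hw1, created1 = Hardware.objects.get_or_create(cpu=4, memory=16, storage=32,
                                               storage_type=StorageType.SSD)
hw2, created2 = Hardware.objects.get_or_create(cpu=4, memory=16, storage=32,
                                               storage_type=StorageType.SSD)
assert hw1.pk == hw2.pk           # the same row is reused
assert created1 and not created2  # only the first call inserts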
@@ -26,7 +26,7 @@ class Command(BaseCommand):
             metavar='SESSIONNAME',
             help='Specifies the name of the session.')
         parser.add_argument(
-            '--upload-code',
+            '--uploadcode',
             metavar='UPLOADCODE',
             default=None,
             help='Specifies the value to set the upload code to.')
@@ -39,7 +39,7 @@ class Command(BaseCommand):
             project__name=projectname,
             name=sessionname).first()
         if session:
-            upload_code = options['upload_code'] or MediaUtil.upload_code_generator()
+            upload_code = options['uploadcode'] or MediaUtil.upload_code_generator()
             session.upload_code = upload_code
             session.save()
             self.stdout.write(self.style.NOTICE(upload_code))
@@ -87,7 +87,7 @@ class Command(BaseCommand):
         pipe = '' if 'console' in settings.LOGGING['loggers']['celery']['handlers'] \
             else '> /dev/null 2>&1'

-        with lcd(settings.PROJECT_ROOT), hide('command'):
+        with lcd(settings.PROJECT_ROOT), hide('commands'):
             if not options['celerybeat_only']:
                 local(self.celery_cmd(
                     cmd='celery worker', opts=' '.join(celery_options), pipe=pipe))
@@ -103,7 +103,7 @@ class Command(BaseCommand):
                 time.sleep(1)
                 wait_sec += 1

-            for i in range(len(pidfiles)):
+            for i in range(len(pidfiles))[::-1]:
                 if os.path.exists(pidfiles[i]):
                     pidfiles.pop(i)

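Iterating the pidfile indices in reverse matters because pop(i) shifts every later element left, so a forward walk would skip entries. Walking backwards keeps the remaining indices valid; a minimal sketch:

pidfiles = ['a.pid', 'b.pid', 'c.pid']
for i in range(len(pidfiles))[::-1]:
    pidfiles.pop(i)  # safe: indices below i are untouched by the pop
assert pidfiles == []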
@@ -1,12 +1,10 @@
 # -*- coding: utf-8 -*-
-# Generated by Django 1.10.1 on 2018-03-26 02:21
-
+# Generated by Django 1.10.1 on 2019-10-02 07:59
+from __future__ import unicode_literals

 from django.conf import settings
-import django.core.validators
 from django.db import migrations, models
 import django.db.models.deletion
-import re


 class Migration(migrations.Migration):
@@ -37,7 +35,7 @@ class Migration(migrations.Migration):
             name='DBMSCatalog',
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('type', models.IntegerField(choices=[(1, b'MySQL'), (2, b'Postgres'), (3, b'Db2'), (4, b'Oracle'), (5, b'SQL Server'), (6, b'SQLite'), (7, b'HStore'), (8, b'Vector'), (9, b'MyRocks')])),
+                ('type', models.IntegerField(choices=[(1, 'MySQL'), (2, 'Postgres'), (3, 'Db2'), (4, 'Oracle'), (6, 'SQLite'), (7, 'HStore'), (8, 'Vector'), (5, 'SQL Server'), (9, 'MyRocks')])),
                 ('version', models.CharField(max_length=16)),
             ],
             options={
@@ -48,36 +46,31 @@ class Migration(migrations.Migration):
             name='Hardware',
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('type', models.IntegerField()),
-                ('name', models.CharField(max_length=32)),
-                ('cpu', models.IntegerField()),
-                ('memory', models.FloatField()),
-                ('storage', models.CharField(max_length=64, validators=[django.core.validators.RegexValidator(re.compile('^\\d+(?:\\,\\d+)*\\Z'), code='invalid', message='Enter only digits separated by commas.')])),
-                ('storage_type', models.CharField(max_length=16)),
-                ('additional_specs', models.TextField(null=True)),
+                ('cpu', models.IntegerField(default=4, verbose_name='Number of CPUs')),
+                ('memory', models.IntegerField(default=16, verbose_name='Memory (GB)')),
+                ('storage', models.IntegerField(default=32, verbose_name='Storage (GB)')),
+                ('storage_type', models.IntegerField(choices=[(5, 'SSD'), (10, 'HDD')], default=5, verbose_name='Storage Type')),
+                ('additional_specs', models.TextField(default=None, null=True)),
             ],
-            options={
-                'abstract': False,
-            },
         ),
         migrations.CreateModel(
             name='KnobCatalog',
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                 ('name', models.CharField(max_length=128)),
-                ('vartype', models.IntegerField(choices=[(1, b'STRING'), (2, b'INTEGER'), (3, b'REAL'), (4, b'BOOL'), (5, b'ENUM'), (6, b'TIMESTAMP')], verbose_name=b'variable type')),
-                ('unit', models.IntegerField(choices=[(1, b'bytes'), (2, b'milliseconds'), (3, b'other')])),
+                ('vartype', models.IntegerField(choices=[(1, 'STRING'), (2, 'INTEGER'), (3, 'REAL'), (4, 'BOOL'), (5, 'ENUM'), (6, 'TIMESTAMP')], verbose_name='variable type')),
+                ('unit', models.IntegerField(choices=[(1, 'bytes'), (2, 'milliseconds'), (3, 'other')])),
                 ('category', models.TextField(null=True)),
-                ('summary', models.TextField(null=True, verbose_name=b'description')),
+                ('summary', models.TextField(null=True, verbose_name='description')),
                 ('description', models.TextField(null=True)),
                 ('scope', models.CharField(max_length=16)),
-                ('minval', models.CharField(max_length=32, null=True, verbose_name=b'minimum value')),
-                ('maxval', models.CharField(max_length=32, null=True, verbose_name=b'maximum value')),
-                ('default', models.TextField(verbose_name=b'default value')),
-                ('enumvals', models.TextField(null=True, verbose_name=b'valid values')),
+                ('minval', models.CharField(max_length=32, null=True, verbose_name='minimum value')),
+                ('maxval', models.CharField(max_length=32, null=True, verbose_name='maximum value')),
+                ('default', models.TextField(verbose_name='default value')),
+                ('enumvals', models.TextField(null=True, verbose_name='valid values')),
                 ('context', models.CharField(max_length=32)),
-                ('tunable', models.BooleanField(verbose_name=b'tunable')),
-                ('resource', models.IntegerField(choices=[(1, b'Memory'), (2, b'CPU'), (3, b'Storage'), (4, b'Other')], default=4)),
+                ('tunable', models.BooleanField(verbose_name='tunable')),
+                ('resource', models.IntegerField(choices=[(1, 'Memory'), (2, 'CPU'), (3, 'Storage'), (4, 'Other')], default=4)),
                 ('dbms', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.DBMSCatalog')),
             ],
             options={
@@ -103,10 +96,10 @@ class Migration(migrations.Migration):
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                 ('name', models.CharField(max_length=128)),
-                ('vartype', models.IntegerField(choices=[(1, b'STRING'), (2, b'INTEGER'), (3, b'REAL'), (4, b'BOOL'), (5, b'ENUM'), (6, b'TIMESTAMP')])),
-                ('summary', models.TextField(null=True, verbose_name=b'description')),
+                ('vartype', models.IntegerField(choices=[(1, 'STRING'), (2, 'INTEGER'), (3, 'REAL'), (4, 'BOOL'), (5, 'ENUM'), (6, 'TIMESTAMP')])),
+                ('summary', models.TextField(null=True, verbose_name='description')),
                 ('scope', models.CharField(max_length=16)),
-                ('metric_type', models.IntegerField(choices=[(1, b'COUNTER'), (2, b'INFO'), (3,b'STATISTICS')])),
+                ('metric_type', models.IntegerField(choices=[(1, 'COUNTER'), (2, 'INFO'), (3, 'STATISTICS')])),
                 ('dbms', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.DBMSCatalog')),
             ],
             options={
@@ -131,7 +124,7 @@ class Migration(migrations.Migration):
             name='PipelineData',
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('task_type', models.IntegerField(choices=[(1, b'Pruned Metrics'), (2, b'Ranked Knobs'), (3, b'Knob Data'), (4, b'Metric Data')])),
+                ('task_type', models.IntegerField(choices=[(1, 'Pruned Metrics'), (2, 'Ranked Knobs'), (3, 'Knob Data'), (4, 'Metric Data')])),
                 ('data', models.TextField()),
                 ('creation_time', models.DateTimeField()),
             ],
@@ -151,7 +144,7 @@ class Migration(migrations.Migration):
             name='Project',
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('name', models.CharField(max_length=64, verbose_name=b'project name')),
+                ('name', models.CharField(max_length=64, verbose_name='project name')),
                 ('description', models.TextField(blank=True, null=True)),
                 ('creation_time', models.DateTimeField()),
                 ('last_update', models.DateTimeField()),
@@ -183,17 +176,18 @@ class Migration(migrations.Migration):
             name='Session',
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('name', models.CharField(max_length=64, verbose_name=b'session name')),
+                ('name', models.CharField(max_length=64, verbose_name='session name')),
                 ('description', models.TextField(blank=True, null=True)),
-                ('ddpg_actor_model', models.BinaryField(null=True, blank=True)),
-                ('ddpg_critic_model', models.BinaryField(null=True, blank=True)),
-                ('ddpg_reply_memory', models.BinaryField(null=True, blank=True)),
-                ('dnn_model', models.BinaryField(null=True, blank=True)),
+                ('algorithm', models.IntegerField(choices=[(1, 'Gaussian Process Bandits'), (2, 'Deep Deterministic Policy Gradients'), (3, 'Deep Neural Network')], default=1)),
+                ('ddpg_actor_model', models.BinaryField(blank=True, null=True)),
+                ('ddpg_critic_model', models.BinaryField(blank=True, null=True)),
+                ('ddpg_reply_memory', models.BinaryField(blank=True, null=True)),
+                ('dnn_model', models.BinaryField(blank=True, null=True)),
                 ('creation_time', models.DateTimeField()),
                 ('last_update', models.DateTimeField()),
                 ('upload_code', models.CharField(max_length=30, unique=True)),
-                ('tuning_session', models.CharField(choices=[('tuning_session', 'Tuning Session'), ('no_tuning_session', 'No Tuning'), ('randomly_generate', 'Randomly Generate')], max_length=64)),
-                ('target_objective', models.CharField(choices=[(b'throughput_txn_per_sec', b'Throughput'), (b'99th_lat_ms', b'99 Percentile Latency')], max_length=64, null=True)),
+                ('tuning_session', models.CharField(choices=[('tuning_session', 'Tuning Session'), ('no_tuning_session', 'No Tuning'), ('randomly_generate', 'Randomly Generate')], default='tuning_session', max_length=64)),
+                ('target_objective', models.CharField(choices=[('throughput_txn_per_sec', 'Throughput'), ('99th_lat_ms', '99 Percentile Latency')], max_length=64, null=True)),
                 ('dbms', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.DBMSCatalog')),
                 ('hardware', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.Hardware')),
                 ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.Project')),
@@ -203,11 +197,26 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
+        migrations.CreateModel(
+            name='SessionKnob',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('minval', models.CharField(max_length=32, null=True, verbose_name='minimum value')),
+                ('maxval', models.CharField(max_length=32, null=True, verbose_name='maximum value')),
+                ('tunable', models.BooleanField(verbose_name='tunable')),
+                ('knob', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.KnobCatalog')),
+                ('session', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.Session')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
         migrations.CreateModel(
             name='Workload',
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('name', models.CharField(max_length=128, verbose_name=b'workload name')),
+                ('name', models.CharField(max_length=128, verbose_name='workload name')),
+                ('status', models.IntegerField(choices=[(1, 'MODIFIED'), (2, 'PROCESSING'), (3, 'PROCESSED')], default=1, editable=False)),
                 ('dbms', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.DBMSCatalog')),
                 ('hardware', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.Hardware')),
             ],
@@ -215,7 +224,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='result',
             name='session',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.Session', verbose_name=b'session name'),
+            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.Session', verbose_name='session name'),
         ),
         migrations.AddField(
             model_name='result',
@@ -225,7 +234,7 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='pipelinedata',
             name='pipeline_run',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.PipelineRun'),
+            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.PipelineRun', verbose_name='group'),
         ),
         migrations.AddField(
             model_name='pipelinedata',
@@ -242,6 +251,10 @@ class Migration(migrations.Migration):
             name='session',
             field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.Session'),
         ),
+        migrations.AlterUniqueTogether(
+            name='hardware',
+            unique_together=set([('cpu', 'memory', 'storage', 'storage_type')]),
+        ),
         migrations.AddField(
             model_name='backupdata',
             name='result',
@@ -1,21 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.10.1 on 2018-08-02 07:58
-from __future__ import unicode_literals
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('website', '0002_enable_compression'),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name='workload',
-            name='status',
-            field=models.IntegerField(choices=[(1, 'MODIFIED'), (2, 'PROCESSING'), (3, 'PROCESSED')], default=1, editable=False),
-        )
-    ]
@@ -43,7 +43,7 @@ def unload_initial_data(apps, schema_editor):
 class Migration(migrations.Migration):

     dependencies = [
-        ('website', '0003_background_task_optimization'),
+        ('website', '0002_enable_compression'),
     ]

     operations = [
@@ -1,138 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.10.1 on 2019-08-07 18:18
-from __future__ import unicode_literals
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('website', '0004_load_initial_data'),
-    ]
-
-    operations = [
-        migrations.CreateModel(
-            name='SessionKnob',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('minval', models.CharField(max_length=32, null=True, verbose_name='minimum value')),
-                ('maxval', models.CharField(max_length=32, null=True, verbose_name='maximum value')),
-                ('tunable', models.BooleanField(verbose_name='tunable')),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
-        migrations.AlterField(
-            model_name='dbmscatalog',
-            name='type',
-            field=models.IntegerField(choices=[(1, 'MySQL'), (2, 'Postgres'), (3, 'Db2'), (4, 'Oracle'), (6, 'SQLite'), (7, 'HStore'), (8, 'Vector'), (5, 'SQL Server'), (9, 'MyRocks')]),
-        ),
-        migrations.AlterField(
-            model_name='knobcatalog',
-            name='default',
-            field=models.TextField(verbose_name='default value'),
-        ),
-        migrations.AlterField(
-            model_name='knobcatalog',
-            name='enumvals',
-            field=models.TextField(null=True, verbose_name='valid values'),
-        ),
-        migrations.AlterField(
-            model_name='knobcatalog',
-            name='maxval',
-            field=models.CharField(max_length=32, null=True, verbose_name='maximum value'),
-        ),
-        migrations.AlterField(
-            model_name='knobcatalog',
-            name='minval',
-            field=models.CharField(max_length=32, null=True, verbose_name='minimum value'),
-        ),
-        migrations.AlterField(
-            model_name='knobcatalog',
-            name='resource',
-            field=models.IntegerField(choices=[(1, 'Memory'), (2, 'CPU'), (3, 'Storage'), (4, 'Other')], default=4),
-        ),
-        migrations.AlterField(
-            model_name='knobcatalog',
-            name='summary',
-            field=models.TextField(null=True, verbose_name='description'),
-        ),
-        migrations.AlterField(
-            model_name='knobcatalog',
-            name='tunable',
-            field=models.BooleanField(verbose_name='tunable'),
-        ),
-        migrations.AlterField(
-            model_name='knobcatalog',
-            name='unit',
-            field=models.IntegerField(choices=[(1, 'bytes'), (2, 'milliseconds'), (3, 'other')]),
-        ),
-        migrations.AlterField(
-            model_name='knobcatalog',
-            name='vartype',
-            field=models.IntegerField(choices=[(1, 'STRING'), (2, 'INTEGER'), (3, 'REAL'), (4, 'BOOL'), (5, 'ENUM'), (6, 'TIMESTAMP')], verbose_name='variable type'),
-        ),
-        migrations.AlterField(
-            model_name='metriccatalog',
-            name='metric_type',
-            field=models.IntegerField(choices=[(1, 'COUNTER'), (2, 'INFO'), (3, 'STATISTICS')]),
-        ),
-        migrations.AlterField(
-            model_name='metriccatalog',
-            name='summary',
-            field=models.TextField(null=True, verbose_name='description'),
-        ),
-        migrations.AlterField(
-            model_name='metriccatalog',
-            name='vartype',
-            field=models.IntegerField(choices=[(1, 'STRING'), (2, 'INTEGER'), (3, 'REAL'), (4, 'BOOL'), (5, 'ENUM'), (6, 'TIMESTAMP')]),
-        ),
-        migrations.AlterField(
-            model_name='pipelinedata',
-            name='task_type',
-            field=models.IntegerField(choices=[(1, 'Pruned Metrics'), (2, 'Ranked Knobs'), (3, 'Knob Data'), (4, 'Metric Data')]),
-        ),
-        migrations.AlterField(
-            model_name='project',
-            name='name',
-            field=models.CharField(max_length=64, verbose_name='project name'),
-        ),
-        migrations.AlterField(
-            model_name='result',
-            name='session',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.Session', verbose_name='session name'),
-        ),
-        migrations.AlterField(
-            model_name='session',
-            name='name',
-            field=models.CharField(max_length=64, verbose_name='session name'),
-        ),
-        migrations.AlterField(
-            model_name='session',
-            name='target_objective',
-            field=models.CharField(choices=[('throughput_txn_per_sec', 'Throughput'), ('99th_lat_ms', '99 Percentile Latency')], max_length=64, null=True),
-        ),
-        migrations.AlterField(
-            model_name='session',
-            name='tuning_session',
-            field=models.CharField(choices=[('tuning_session', 'Tuning Session'), ('no_tuning_session', 'No Tuning'), ('randomly_generate', 'Randomly Generate')], default='tuning_session', max_length=64),
-        ),
-        migrations.AlterField(
-            model_name='workload',
-            name='name',
-            field=models.CharField(max_length=128, verbose_name='workload name'),
-        ),
-        migrations.AddField(
-            model_name='sessionknob',
-            name='knob',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.KnobCatalog'),
-        ),
-        migrations.AddField(
-            model_name='sessionknob',
-            name='session',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.Session'),
-        ),
-    ]
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by Django 1.10.1 on 2019-09-10 04:25
-from __future__ import unicode_literals
-
-from django.db import migrations, models
-import django.db.models.deletion
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('website', '0005_adding_session_knob'),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name='session',
-            name='algorithm',
-            field=models.IntegerField(choices=[(1, 'Ottertune Default'),
-                                               (2, 'Deep Deterministic Policy Gradients'),
-                                               (3, 'Deep Neural Network')], default=1),
-        ),
-        migrations.AlterField(
-            model_name='pipelinedata',
-            name='pipeline_run',
-            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='website.PipelineRun', verbose_name='group'),
-        ),
-    ]
@@ -6,13 +6,12 @@
 from collections import namedtuple, OrderedDict

 from django.contrib.auth.models import User
-from django.core.validators import validate_comma_separated_integer_list
 from django.db import models, DEFAULT_DB_ALIAS
 from django.utils.timezone import now

 from .types import (DBMSType, LabelStyleType, MetricType, KnobUnitType,
                     PipelineTaskType, VarType, KnobResourceType,
-                    WorkloadStatusType, AlgorithmType)
+                    WorkloadStatusType, AlgorithmType, StorageType)


 class BaseModel(models.Model):
@@ -167,17 +166,21 @@ class Project(BaseModel):


 class Hardware(BaseModel):
-    type = models.IntegerField()
-    name = models.CharField(max_length=32)
-    cpu = models.IntegerField()
-    memory = models.FloatField()
-    storage = models.CharField(
-        max_length=64, validators=[validate_comma_separated_integer_list])
-    storage_type = models.CharField(max_length=16)
-    additional_specs = models.TextField(null=True)

-    def __unicode__(self):
-        return 'CPU:{}, RAM:{}, Storage:{}'.format(self.cpu, self.memory, self.storage)
+    @property
+    def name(self):
+        return '{} CPUs, {}GB RAM, {}GB {}'.format(
+            self.cpu, self.memory, self.storage, StorageType.name(self.storage_type))
+
+    cpu = models.IntegerField(default=4, verbose_name='Number of CPUs')
+    memory = models.IntegerField(default=16, verbose_name='Memory (GB)')
+    storage = models.IntegerField(default=32, verbose_name='Storage (GB)')
+    storage_type = models.IntegerField(choices=StorageType.choices(),
+                                       default=StorageType.SSD, verbose_name='Storage Type')
+    additional_specs = models.TextField(null=True, default=None)
+
+    class Meta:  # pylint: disable=old-style-class,no-init
+        unique_together = ('cpu', 'memory', 'storage', 'storage_type')


 class Session(BaseModel):
@@ -187,7 +190,7 @@ class Session(BaseModel):
     dbms = models.ForeignKey(DBMSCatalog)
     hardware = models.ForeignKey(Hardware)
     algorithm = models.IntegerField(choices=AlgorithmType.choices(),
-                                    default=AlgorithmType.OTTERTUNE)
+                                    default=AlgorithmType.GPR)
     ddpg_actor_model = models.BinaryField(null=True, blank=True)
     ddpg_critic_model = models.BinaryField(null=True, blank=True)
     ddpg_reply_memory = models.BinaryField(null=True, blank=True)
@@ -217,11 +220,11 @@ class Session(BaseModel):
             self.target_objective = MetricManager.get_default_objective_function()

     def delete(self, using=DEFAULT_DB_ALIAS, keep_parents=False):
-        targets = KnobData.objects.filter(session=self)
+        SessionKnob.objects.get(session=self).delete()
         results = Result.objects.filter(session=self)
-        for t in targets:
-            t.delete()
         for r in results:
+            r.knob_data.delete()
+            r.metric_data.delete()
             r.delete()
         super(Session, self).delete(using=DEFAULT_DB_ALIAS, keep_parents=False)

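Since Hardware.name is now a derived property rather than a stored column, a row renders its own description. A sketch, assuming StorageType.name(StorageType.SSD) returns 'SSD':

hw = Hardware(cpu=4, memory=16, storage=32, storage_type=StorageType.SSD)
print(hw.name)  # -> '4 CPUs, 16GB RAM, 32GB SSD'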
@@ -18,6 +18,10 @@ class MyRocksParser(BaseParser):
     def transactions_counter(self):
         return 'session_status.questions'

+    @property
+    def latency_timer(self):
+        raise NotImplementedError()
+
     def convert_integer(self, int_value, metadata):
         converted = None
         try:
@@ -17,3 +17,7 @@ class OracleParser(BaseParser):
     @property
     def transactions_counter(self):
         return 'global.user commits'
+
+    @property
+    def latency_timer(self):
+        raise NotImplementedError()
@@ -22,6 +22,10 @@ class PostgresParser(BaseParser):
     def transactions_counter(self):
         return 'pg_stat_database.xact_commit'

+    @property
+    def latency_timer(self):
+        raise NotImplementedError()
+
     def convert_integer(self, int_value, metadata):
         converted = None
         try:
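Each of the three DBMS parsers gains the same latency_timer property that raises NotImplementedError, presumably to satisfy a new abstract property on BaseParser while making it explicit that latency metrics are not yet wired up for these systems. (This rationale is inferred from the pattern; the commit does not state it.)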
@@ -3,50 +3,80 @@
 #
 # Copyright (c) 2017-18, Carnegie Mellon University Database Group
 #
+import logging
 
 from .models import KnobCatalog, SessionKnob
-from .types import DBMSType
+from .types import DBMSType, KnobResourceType, VarType
 
+LOG = logging.getLogger(__name__)
 
-def turn_knobs_off(session, knob_names):
-    for knob_name in knob_names:
-        knob = KnobCatalog.objects.filter(dbms=session.dbms, name=knob_name).first()
-        SessionKnob.objects.create(session=session,
-                                   knob=knob,
-                                   minval=knob.minval,
-                                   maxval=knob.maxval,
-                                   tunable=False)
+GB = 1024 ** 3
 
+DEFAULT_TUNABLE_KNOBS = {
+    DBMSType.POSTGRES: {
+        "global.checkpoint_completion_target",
+        "global.default_statistics_target",
+        "global.effective_cache_size",
+        "global.maintenance_work_mem",
+        "global.max_wal_size",
+        "global.max_worker_processes",
+        "global.shared_buffers",
+        "global.temp_buffers",
+        "global.wal_buffers",
+        "global.work_mem",
+    },
+}
 
-def set_knob_tuning_range(session, knob_name, minval, maxval):
-    knob = KnobCatalog.objects.filter(dbms=session.dbms, name=knob_name).first()
-    SessionKnob.objects.create(session=session,
-                               knob=knob,
-                               minval=minval,
-                               maxval=maxval,
-                               tunable=True)
 
 def set_default_knobs(session):
-    if session.dbms.type == DBMSType.POSTGRES and session.dbms.version == '9.6':
-        turn_knobs_off(session, ["global.backend_flush_after", "global.bgwriter_delay",
-                                 "global.bgwriter_flush_after", "global.bgwriter_lru_multiplier",
-                                 "global.checkpoint_flush_after", "global.commit_delay",
-                                 "global.commit_siblings", "global.deadlock_timeout",
-                                 "global.effective_io_concurrency", "global.from_collapse_limit",
-                                 "global.join_collapse_limit", "global.maintenance_work_mem",
-                                 "global.max_worker_processes",
-                                 "global.min_parallel_relation_size", "global.min_wal_size",
-                                 "global.seq_page_cost", "global.wal_buffers",
-                                 "global.wal_sync_method", "global.wal_writer_delay",
-                                 "global.wal_writer_flush_after"])
-
-        set_knob_tuning_range(session, "global.checkpoint_completion_target", 0.1, 0.9)
-        set_knob_tuning_range(session, "global.checkpoint_timeout", 60000, 1800000)
-        set_knob_tuning_range(session, "global.default_statistics_target", 100, 2048)
-        set_knob_tuning_range(session, "global.effective_cache_size", 4294967296, 17179869184)
-        set_knob_tuning_range(session, "global.max_parallel_workers_per_gather", 0, 8)
-        set_knob_tuning_range(session, "global.max_wal_size", 268435456, 17179869184)
-        set_knob_tuning_range(session, "global.random_page_cost", 1, 10)
-        set_knob_tuning_range(session, "global.shared_buffers", 134217728, 12884901888)
-        set_knob_tuning_range(session, "global.temp_buffers", 8388608, 1073741824)
-        set_knob_tuning_range(session, "global.work_mem", 4194304, 1073741824)
+    dbtype = session.dbms.type
+    default_tunable_knobs = DEFAULT_TUNABLE_KNOBS.get(dbtype)
+
+    if not default_tunable_knobs:
+        default_tunable_knobs = set(KnobCatalog.objects.filter(
+            dbms=session.dbms, tunable=True).values_list('name', flat=True))
+
+    for knob in KnobCatalog.objects.filter(dbms=session.dbms):
+        tunable = knob.name in default_tunable_knobs
+        minval = knob.minval
+
+        if knob.vartype in (VarType.INTEGER, VarType.REAL):
+            vtype = int if knob.vartype == VarType.INTEGER else float
+
+            minval = vtype(minval)
+            knob_maxval = vtype(knob.maxval)
+
+            if knob.resource == KnobResourceType.CPU:
+                maxval = session.hardware.cpu * 2
+            elif knob.resource == KnobResourceType.MEMORY:
+                maxval = session.hardware.memory * GB
+            elif knob.resource == KnobResourceType.STORAGE:
+                maxval = session.hardware.storage * GB
+            else:
+                maxval = knob_maxval
+
+            # Special cases
+            if dbtype == DBMSType.POSTGRES:
+                if knob.name == 'global.work_mem':
+                    maxval /= 50.0
+
+            if maxval > knob_maxval:
+                maxval = knob_maxval
+
+            if maxval < minval:
+                LOG.warning(("Invalid range for session knob '%s': maxval <= minval "
+                             "(minval: %s, maxval: %s). Setting maxval to the vendor setting: %s."),
+                            knob.name, minval, maxval, knob_maxval)
+                maxval = knob_maxval
+
+            maxval = vtype(maxval)
+
+        else:
+            assert knob.resource == KnobResourceType.OTHER
+            maxval = knob.maxval
+
+        SessionKnob.objects.create(session=session,
+                                   knob=knob,
+                                   minval=minval,
+                                   maxval=maxval,
+                                   tunable=tunable)
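To make the new range logic concrete, here is a hedged walk-through for one MEMORY-typed Postgres knob, global.work_mem, assuming hardware with 16 GB of memory and a vendor maxval of 8 GiB; none of these numbers come from the diff.

# Illustrative trace of the range calculation above (values are made up).
GB = 1024 ** 3

hardware_memory = 16                  # hypothetical session.hardware.memory
vendor_maxval = 8 * GB                # hypothetical KnobCatalog maxval

maxval = hardware_memory * GB         # MEMORY resource cap: 16 GiB
maxval /= 50.0                        # Postgres special case for global.work_mem
maxval = min(maxval, vendor_maxval)   # never exceed the vendor setting
print(int(maxval))                    # 343597383 bytes, roughly 328 MiB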
@@ -148,9 +148,10 @@ def aggregate_target_results(result_id, algorithm):
     # implement a sampling technique to generate new training data).
     newest_result = Result.objects.get(pk=result_id)
     has_pipeline_data = PipelineData.objects.filter(workload=newest_result.workload).exists()
-    if not has_pipeline_data:
-        LOG.debug("Background tasks haven't ran for this workload yet, picking random data.")
     if not has_pipeline_data or newest_result.session.tuning_session == 'randomly_generate':
+        if not has_pipeline_data and newest_result.session.tuning_session == 'tuning_session':
+            LOG.debug("Background tasks haven't ran for this workload yet, picking random data.")
+
         result = Result.objects.filter(pk=result_id)
         knobs = SessionKnob.objects.get_knobs_for_session(newest_result.session)
@@ -160,25 +161,30 @@ def aggregate_target_results(result_id, algorithm):
         agg_data['newest_result_id'] = result_id
         agg_data['bad'] = True
         agg_data['config_recommend'] = random_knob_result
-        return agg_data, algorithm
+        LOG.debug('%s: Finished generating a random config.\n\ndata=%s\n',
+                  AlgorithmType.name(algorithm), JSONUtil.dumps(agg_data, pprint=True))
 
-    # Aggregate all knob config results tried by the target so far in this
-    # tuning session and this tuning workload.
-    target_results = Result.objects.filter(session=newest_result.session,
-                                           dbms=newest_result.dbms,
-                                           workload=newest_result.workload)
-    if len(target_results) == 0:
-        raise Exception('Cannot find any results for session_id={}, dbms_id={}'
-                        .format(newest_result.session, newest_result.dbms))
-    agg_data = DataUtil.aggregate_data(target_results)
-    agg_data['newest_result_id'] = result_id
-    agg_data['bad'] = False
-
-    # Clean knob data
-    cleaned_agg_data = clean_knob_data(agg_data['X_matrix'], agg_data['X_columnlabels'],
-                                       newest_result.session)
-    agg_data['X_matrix'] = np.array(cleaned_agg_data[0])
-    agg_data['X_columnlabels'] = np.array(cleaned_agg_data[1])
+    else:
+        # Aggregate all knob config results tried by the target so far in this
+        # tuning session and this tuning workload.
+        target_results = Result.objects.filter(session=newest_result.session,
+                                               dbms=newest_result.dbms,
+                                               workload=newest_result.workload)
+        if len(target_results) == 0:
+            raise Exception('Cannot find any results for session_id={}, dbms_id={}'
+                            .format(newest_result.session, newest_result.dbms))
+        agg_data = DataUtil.aggregate_data(target_results)
+        agg_data['newest_result_id'] = result_id
+        agg_data['bad'] = False
+
+        # Clean knob data
+        cleaned_agg_data = clean_knob_data(agg_data['X_matrix'], agg_data['X_columnlabels'],
+                                           newest_result.session)
+        agg_data['X_matrix'] = np.array(cleaned_agg_data[0])
+        agg_data['X_columnlabels'] = np.array(cleaned_agg_data[1])
+
+        LOG.debug('%s: Finished aggregating target results.\n\ndata=%s\n',
+                  AlgorithmType.name(algorithm), JSONUtil.dumps(agg_data, pprint=True))
 
     return agg_data, algorithm
@@ -336,19 +342,22 @@ def configuration_recommendation_ddpg(result_info):  # pylint: disable=invalid-name
 def configuration_recommendation(recommendation_input):
     target_data, algorithm = recommendation_input
     LOG.info('configuration_recommendation called')
-    latest_pipeline_run = PipelineRun.objects.get_latest()
 
     if target_data['bad'] is True:
-        target_data_res = {}
-        target_data_res['status'] = 'bad'
-        target_data_res['result_id'] = target_data['newest_result_id']
-        target_data_res['info'] = 'WARNING: no training data, the config is generated randomly'
-        target_data_res['recommendation'] = target_data['config_recommend']
+        target_data_res = dict(
+            status='bad',
+            result_id=target_data['newest_result_id'],
+            info='WARNING: no training data, the config is generated randomly',
+            recommendation=target_data['config_recommend'],
+            pipeline_run=target_data['pipeline_run'])
+        LOG.debug('%s: Skipping configuration recommendation.\n\ndata=%s\n',
+                  AlgorithmType.name(algorithm), JSONUtil.dumps(target_data, pprint=True))
         return target_data_res
 
     # Load mapped workload data
     mapped_workload_id = target_data['mapped_workload'][0]
+
+    latest_pipeline_run = PipelineRun.objects.get(pk=target_data['pipeline_run'])
     mapped_workload = Workload.objects.get(pk=mapped_workload_id)
     workload_knob_data = PipelineData.objects.get(
         pipeline_run=latest_pipeline_run,
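The notable change above: configuration_recommendation no longer queries PipelineRun.objects.get_latest() itself but reuses the run id recorded earlier in target_data, so the whole task chain reads one consistent pipeline snapshot even if a new run lands mid-chain. A simplified sketch of that hand-off, with hypothetical stand-in functions rather than the real Celery tasks:

# Simplified stand-ins: the run id chosen by the mapping step is pinned
# and reused by the recommendation step instead of being re-queried.
def map_workload_sketch(target_data, latest_run_pk):
    target_data['pipeline_run'] = latest_run_pk   # pin the snapshot
    return target_data

def configuration_recommendation_sketch(target_data):
    run_pk = target_data['pipeline_run']          # reuse, don't re-query
    return {'status': 'good', 'pipeline_run': run_pk}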
@@ -563,7 +572,7 @@ def configuration_recommendation(recommendation_input):
         session.dnn_model = model_nn.get_weights_bin()
         session.save()
 
-    elif algorithm == AlgorithmType.OTTERTUNE:
+    elif algorithm == AlgorithmType.GPR:
         # default gpr model
         model = GPRGD(length_scale=DEFAULT_LENGTH_SCALE,
                       magnitude=DEFAULT_MAGNITUDE,
|
@ -595,11 +604,15 @@ def configuration_recommendation(recommendation_input):
|
||||||
best_config = np.maximum(best_config, X_min_inv)
|
best_config = np.maximum(best_config, X_min_inv)
|
||||||
|
|
||||||
conf_map = {k: best_config[i] for i, k in enumerate(X_columnlabels)}
|
conf_map = {k: best_config[i] for i, k in enumerate(X_columnlabels)}
|
||||||
conf_map_res = {}
|
conf_map_res = dict(
|
||||||
conf_map_res['status'] = 'good'
|
status='good',
|
||||||
conf_map_res['result_id'] = target_data['newest_result_id']
|
result_id=target_data['newest_result_id'],
|
||||||
conf_map_res['recommendation'] = conf_map
|
recommendation=conf_map,
|
||||||
conf_map_res['info'] = 'INFO: training data size is {}'.format(X_scaled.shape[0])
|
info='INFO: training data size is {}'.format(X_scaled.shape[0]),
|
||||||
|
pipeline_run=latest_pipeline_run.pk)
|
||||||
|
LOG.debug('%s: Finished selecting the next config.\n\ndata=%s\n',
|
||||||
|
AlgorithmType.name(algorithm), JSONUtil.dumps(conf_map_res, pprint=True))
|
||||||
|
|
||||||
return conf_map_res
|
return conf_map_res
|
||||||
|
|
||||||
|
|
||||||
|
@ -613,12 +626,19 @@ def load_data_helper(filtered_pipeline_data, workload, task_type):
|
||||||
@task(base=MapWorkload, name='map_workload')
|
@task(base=MapWorkload, name='map_workload')
|
||||||
def map_workload(map_workload_input):
|
def map_workload(map_workload_input):
|
||||||
target_data, algorithm = map_workload_input
|
target_data, algorithm = map_workload_input
|
||||||
# Get the latest version of pipeline data that's been computed so far.
|
|
||||||
latest_pipeline_run = PipelineRun.objects.get_latest()
|
|
||||||
if target_data['bad']:
|
if target_data['bad']:
|
||||||
assert target_data is not None
|
assert target_data is not None
|
||||||
|
target_data['pipeline_run'] = None
|
||||||
|
LOG.debug('%s: Skipping workload mapping.\n\ndata=%s\n',
|
||||||
|
AlgorithmType.name(algorithm), JSONUtil.dumps(target_data, pprint=True))
|
||||||
|
|
||||||
return target_data, algorithm
|
return target_data, algorithm
|
||||||
|
|
||||||
|
# Get the latest version of pipeline data that's been computed so far.
|
||||||
|
latest_pipeline_run = PipelineRun.objects.get_latest()
|
||||||
assert latest_pipeline_run is not None
|
assert latest_pipeline_run is not None
|
||||||
|
target_data['pipeline_run'] = latest_pipeline_run.pk
|
||||||
|
|
||||||
newest_result = Result.objects.get(pk=target_data['newest_result_id'])
|
newest_result = Result.objects.get(pk=target_data['newest_result_id'])
|
||||||
target_workload = newest_result.workload
|
target_workload = newest_result.workload
|
||||||
|
@ -752,7 +772,7 @@ def map_workload(map_workload_input):
|
||||||
# Find the best (minimum) score
|
# Find the best (minimum) score
|
||||||
best_score = np.inf
|
best_score = np.inf
|
||||||
best_workload_id = None
|
best_workload_id = None
|
||||||
# scores_info = {workload_id: (workload_name, score)}
|
best_workload_name = None
|
||||||
scores_info = {}
|
scores_info = {}
|
||||||
for workload_id, similarity_score in list(scores.items()):
|
for workload_id, similarity_score in list(scores.items()):
|
||||||
workload_name = Workload.objects.get(pk=workload_id).name
|
workload_name = Workload.objects.get(pk=workload_id).name
|
||||||
|
@ -761,7 +781,9 @@ def map_workload(map_workload_input):
|
||||||
best_workload_id = workload_id
|
best_workload_id = workload_id
|
||||||
best_workload_name = workload_name
|
best_workload_name = workload_name
|
||||||
scores_info[workload_id] = (workload_name, similarity_score)
|
scores_info[workload_id] = (workload_name, similarity_score)
|
||||||
target_data['mapped_workload'] = (best_workload_id, best_workload_name, best_score)
|
target_data.update(mapped_workload=(best_workload_id, best_workload_name, best_score),
|
||||||
|
scores=scores_info)
|
||||||
|
LOG.debug('%s: Finished mapping the workload.\n\ndata=%s\n',
|
||||||
|
AlgorithmType.name(algorithm), JSONUtil.dumps(target_data, pprint=True))
|
||||||
|
|
||||||
target_data['scores'] = scores_info
|
|
||||||
return target_data, algorithm
|
return target_data, algorithm
|
||||||
|
|
|
@@ -33,6 +33,10 @@
             <td>{{ form.storage.label_tag }}</td>
             <td>{{ form.storage }}</td>
         </tr>
+        <tr id="storage_type_row">
+            <td>{{ form.storage_type.label_tag }}</td>
+            <td>{{ form.storage_type }}</td>
+        </tr>
         <tr id="algorithm_row">
             <td>{{ form.algorithm.label_tag }}</td>
             <td>{{ form.algorithm }}</td>
@@ -71,6 +75,7 @@ $(function() {
         $("#cpu_row").hide();
         $("#memory_row").hide();
         $("#storage_row").hide();
+        $("#storage_type_row").hide();
         $("#algorithm_row").hide();
     } else {
         $("#upload_code_row").hide();
@@ -173,12 +173,22 @@ class LabelStyleType(BaseType):
 
 
 class AlgorithmType(BaseType):
-    OTTERTUNE = 1
+    GPR = 1
     DDPG = 2
     DNN = 3
 
     TYPE_NAMES = {
-        OTTERTUNE: 'Ottertune Default',
+        GPR: 'Gaussian Process Bandits',
         DDPG: 'Deep Deterministic Policy Gradients',
         DNN: 'Deep Neural Network',
     }
+
+
+class StorageType(BaseType):
+    SSD = 5
+    HDD = 10
+
+    TYPE_NAMES = {
+        SSD: 'SSD',
+        HDD: 'HDD',
+    }
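The OTTERTUNE-to-GPR rename ripples through every call site later in this diff. A small standalone sketch of the expected behavior, assuming BaseType exposes the name() helper that the diff's own LOG.debug calls use (this mini re-implementation is hypothetical, not the repo's BaseType):

# Hypothetical mini re-implementation to show the renamed constants.
class AlgorithmType:
    GPR, DDPG, DNN = 1, 2, 3
    TYPE_NAMES = {GPR: 'Gaussian Process Bandits',
                  DDPG: 'Deep Deterministic Policy Gradients',
                  DNN: 'Deep Neural Network'}

    @classmethod
    def name(cls, value):
        return cls.TYPE_NAMES[value]  # int -> display string

assert AlgorithmType.name(AlgorithmType.GPR) == 'Gaussian Process Bandits'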
@@ -314,7 +314,7 @@ def create_or_edit_session(request, project_id, session_id=''):
                 initial={
                     'dbms': DBMSCatalog.objects.get(
                         type=DBMSType.POSTGRES, version='9.6'),
-                    'algorithm': AlgorithmType.OTTERTUNE,
+                    'algorithm': AlgorithmType.GPR,
                     'target_objective': 'throughput_txn_per_sec',
                 })
         context = {
@@ -342,15 +342,12 @@ def edit_knobs(request, project_id, session_id):
             instance.save()
             return HttpResponse(status=204)
     else:
-        knobs = KnobCatalog.objects.filter(dbms=session.dbms).order_by('-tunable')
+        knobs = SessionKnob.objects.filter(session=session).order_by('-tunable', 'knob__name')
         forms = []
         for knob in knobs:
             knob_values = model_to_dict(knob)
-            if SessionKnob.objects.filter(session=session, knob=knob).exists():
-                new_knob = SessionKnob.objects.filter(session=session, knob=knob)[0]
-                knob_values["minval"] = new_knob.minval
-                knob_values["maxval"] = new_knob.maxval
-                knob_values["tunable"] = new_knob.tunable
+            knob_values['session'] = session
+            knob_values['name'] = KnobCatalog.objects.get(pk=knob.knob.pk).name
             forms.append(SessionKnobForm(initial=knob_values))
         context = {
             'project': project,
@@ -526,7 +523,7 @@ def handle_result_files(session, files):
 
     result_id = result.pk
     response = None
-    if session.algorithm == AlgorithmType.OTTERTUNE:
+    if session.algorithm == AlgorithmType.GPR:
         response = chain(aggregate_target_results.s(result.pk, session.algorithm),
                          map_workload.s(),
                          configuration_recommendation.s()).apply_async()
@@ -967,13 +964,17 @@ def give_result(request, upload_code):  # pylint: disable=unused-argument
         res = Result.objects.get(pk=lastest_result.pk)
         response = HttpResponse(JSONUtil.dumps(res.next_configuration),
                                 content_type='application/json')
-    elif overall_status in ('PENDING', 'RECEIVED', 'STARTED'):
+    elif overall_status in ('FAILURE', 'REVOKED', 'RETRY'):
+        msg = "STATUS: {}\nRESULT ID: {}\n".format(overall_status, lastest_result)
+        if tasks:
+            failed_task_idx = min(len(tasks) - 1, num_completed + 1)
+            failed_task = tasks[failed_task_idx]
+            msg += "TRACEBACK: {}".format(failed_task.traceback)
+        response = HttpResponse(msg, status=400)
+
+    else:  # overall_status in ('PENDING', 'RECEIVED', 'STARTED'):
         response = HttpResponse("{}: Result not ready".format(overall_status), status=202)
-    else:  # overall_status in ('FAILURE', 'REVOKED', 'RETRY'):
-        failed_task_idx = min(len(tasks) - 1, num_completed + 1)
-        failed_task = tasks[failed_task_idx]
-        response = HttpResponse(
-            "{}: {}".format(overall_status, failed_task.traceback), status=400)
 
     return response
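give_result now yields three distinct outcomes: 200 with the next configuration, 400 carrying a STATUS/RESULT ID/TRACEBACK message when a task failed, and 202 while tasks are still pending. A hedged client-side polling sketch (not part of the repo; requests is an assumed dependency, and the endpoint URL is supplied by the caller):

import time

import requests  # assumed dependency for this illustration


def poll_result(url, interval=5, retries=60):
    for _ in range(retries):
        resp = requests.get(url)
        if resp.status_code == 200:
            return resp.json()             # next configuration is ready
        if resp.status_code == 400:
            raise RuntimeError(resp.text)  # failed task traceback message
        time.sleep(interval)               # 202: tasks still PENDING/STARTED
    raise TimeoutError('result never became ready')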