Added additional Oracle views to metric collection (v$sys_time_model, v$system_event).

Dana Van Aken 2019-10-07 19:34:03 -04:00
parent 162dc48c53
commit 40c75de3ce
13 changed files with 46337 additions and 13484 deletions

View File

@@ -18,7 +18,7 @@ repositories {
dependencies {
testCompile group: 'junit', name: 'junit', version: '4.11'
runtime fileTree(dir: 'lib', include: '*.jar')
runtime fileTree(dir: 'libs', include: '*.jar')
compile group: 'net.sourceforge.collections', name: 'collections-generic', version: '4.01'
compile group: 'commons-lang', name: 'commons-lang', version: '2.6'
compile group: 'log4j', name: 'log4j', version: '1.2.17'
@@ -51,8 +51,8 @@ dependencies {
// https://mvnrepository.com/artifact/org.postgresql/postgresql
compile group: 'org.postgresql', name: 'postgresql', version: '9.4-1201-jdbc41'
// This lib has to be manually downloaded from Oracle
dependencies {compile files('lib/ojdbc8.jar')}
// For Oracle, create the directory client/controller/libs and copy the driver
// (e.g., ojdbc8.jar) into it. The driver must be manually downloaded from Oracle.
}
run {

View File

@@ -1,6 +0,0 @@
#Thu Nov 30 15:45:05 EST 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-3.1-bin.zip

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -13,6 +13,7 @@ import com.controller.util.json.JSONStringer;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Map;
@@ -32,6 +33,10 @@ public class OracleCollector extends DBCollector {
private static final String METRICS_SQL = "select name, value from v$sysstat";
private static final String METRICS_SQL2 = "select stat_name, value from v$sys_time_model";
private static final String METRICS_SQL3 = "select * from v$system_event";
public OracleCollector(String oriDBUrl, String username, String password) {
try {
Connection conn = DriverManager.getConnection(oriDBUrl, username, password);
@@ -43,7 +48,7 @@
}
// Collect DBMS parameters
out = statement.executeQuery(PARAMETERS_SQL_WITH_HIDDEN);
out = statement.executeQuery(PARAMETERS_SQL);
while (out.next()) {
dbParameters.put(out.getString(1).toLowerCase(), out.getString(2));
}
@@ -53,6 +58,28 @@
while (out.next()) {
dbMetrics.put(out.getString(1).toLowerCase(), out.getString(2));
}
out = statement.executeQuery(METRICS_SQL2);
while (out.next()) {
dbMetrics.put(out.getString(1).toLowerCase(), out.getString(2));
}
out = statement.executeQuery(METRICS_SQL3);
ResultSetMetaData meta = out.getMetaData();
int columnCount = meta.getColumnCount();
String[] columnNames = new String[columnCount];
for (int i = 0; i < columnCount; ++i) {
columnNames[i] = meta.getColumnName(i + 1).toLowerCase();
}
while (out.next()) {
String eventName = out.getString(1).toLowerCase();
for (int i = 2; i <= columnCount; ++i) {
String name = eventName + "." + columnNames[i - 1];
Object value = out.getObject(i);
dbMetrics.put(name, String.valueOf(value));
}
}
conn.close();
} catch (SQLException e) {
LOG.error("Error while collecting DB parameters: " + e.getMessage());
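
The v$system_event handling added above fans each row out into one metric per column, keyed as "<event name>.<column name>". A standalone Python sketch of the same naming scheme, with invented sample rows (not part of this commit):

columns = ['event', 'total_waits', 'time_waited_micro']   # hypothetical column subset
rows = [
    ('log file sync', 18234, 5120000),                    # made-up values
    ('db file sequential read', 99021, 731000),
]

db_metrics = {}
for row in rows:
    event_name = str(row[0]).lower()
    for i, column in enumerate(columns[1:], start=1):
        # Mirrors the Java loop: skip the first column (the event name itself)
        # and store every remaining column as its own metric.
        db_metrics['{}.{}'.format(event_name, column)] = str(row[i])

print(db_metrics['log file sync.total_waits'])            # prints: 18234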

View File

@@ -228,7 +228,8 @@ def run_controller():
@task
def signal_controller():
pidfile = os.path.join(CONF['controller_home'], 'pid.txt')
pid = int(open(pidfile).read())
with open(pidfile, 'r') as f:
pid = int(f.read())
cmd = 'sudo kill -2 {}'.format(pid)
with lcd(CONF['controller_home']): # pylint: disable=not-context-manager
local(cmd)
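
The change above only switches the pid-file read to a context manager; the signal is still sent by shelling out to `sudo kill -2`. For reference, a sketch of an equivalent pure-Python approach (not part of this commit, and only valid when the driver user owns the controller process):

import os
import signal

def _signal_controller(pidfile):
    # Send SIGINT (the same signal as `kill -2`) directly to the controller.
    with open(pidfile, 'r') as f:
        pid = int(f.read())
    os.kill(pid, signal.SIGINT)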
@@ -288,6 +289,9 @@ def upload_result(result_dir=None, prefix=None):
raise Exception('Error uploading result.\nStatus: {}\nMessage: {}\n'.format(
response.status_code, response.content))
for f in files.values(): # pylint: disable=not-an-iterable
f.close()
LOG.info(response.content)
return response
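
The hunk above closes the uploaded file handles only after a successful response (the earlier raise skips them). A sketch of a variant that always closes them, reusing the names from upload_result(); the requests.post() arguments shown are illustrative, not the function's actual call:

try:
    response = requests.post(upload_url, files=files)   # upload_url is hypothetical
finally:
    for f in files.values():
        f.close()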
@@ -372,7 +376,8 @@ def add_udf():
@task
def upload_batch(result_dir, sort=True):
def upload_batch(result_dir=None, sort=True):
result_dir = result_dir or CONF['save_path']
sort = _parse_bool(sort)
results = glob.glob(os.path.join(result_dir, '*__summary.json'))
if sort:
@@ -381,7 +386,9 @@ def upload_batch(result_dir, sort=True):
LOG.info('Uploading %d samples from %s...', count, result_dir)
for i, result in enumerate(results):
prefix = os.path.basename(result).split('__')[0]
prefix = os.path.basename(result)
prefix_len = os.path.basename(result).find('_') + 2
prefix = prefix[:prefix_len]
upload_result(result_dir=result_dir, prefix=prefix)
LOG.info('Uploaded result %d/%d: %s__*.json', i + 1, count, prefix)
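
For reference, a worked example of the new prefix computation above against the old split('__')[0] logic, using a hypothetical summary filename:

result = '1570000000__summary.json'   # hypothetical basename
old_prefix = result.split('__')[0]    # '1570000000'
prefix_len = result.find('_') + 2     # index just past the double underscore
new_prefix = result[:prefix_len]      # '1570000000__'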
@@ -430,19 +437,31 @@ def restore_database():
def _ready_to_start_oltpbench():
return os.path.exists(CONF['controller_log']) and \
'Output the process pid to' in open(CONF['controller_log']).read()
ready = False
if os.path.exists(CONF['controller_log']):
with open(CONF['controller_log'], 'r') as f:
content = f.read()
ready = 'Output the process pid to' in content
return ready
def _ready_to_start_controller():
return os.path.exists(CONF['oltpbench_log']) and \
'Warmup complete, starting measurements' in open(CONF['oltpbench_log']).read()
ready = False
if os.path.exists(CONF['oltpbench_log']):
with open(CONF['oltpbench_log'], 'r') as f:
content = f.read()
ready = 'Warmup complete, starting measurements' in content
return ready
def _ready_to_shut_down_controller():
pid_file_path = os.path.join(CONF['controller_home'], 'pid.txt')
return os.path.exists(pid_file_path) and os.path.exists(CONF['oltpbench_log']) and \
'Output throughput samples into file' in open(CONF['oltpbench_log']).read()
pidfile = os.path.join(CONF['controller_home'], 'pid.txt')
ready = False
if os.path.exists(pidfile) and os.path.exists(CONF['oltpbench_log']):
with open(CONF['oltpbench_log'], 'r') as f:
content = f.read()
ready = 'Output throughput samples into file' in content
return ready
def clean_logs():
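
The three readiness probes rewritten above all follow the same pattern: the log file must exist and already contain a marker string. A possible shared helper, sketched here but not part of this commit:

import os

def _log_contains(path, marker):
    # True only if the log file exists and its contents include the marker.
    if not os.path.exists(path):
        return False
    with open(path, 'r') as f:
        return marker in f.read()

# e.g. _ready_to_start_controller() would reduce to:
#     return _log_contains(CONF['oltpbench_log'], 'Warmup complete, starting measurements')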

View File

@@ -67,8 +67,8 @@ EXCLUDE_DIRECTORIES = [
# Django manage.py extensions
os.path.join(OTTERTUNE_DIR, "server/website/website/management"),
# Old management scripts
os.path.join(OTTERTUNE_DIR, "server/website/script/management"),
# Stand-alone scripts
os.path.join(OTTERTUNE_DIR, "server/website/script"),
]
# Files that should NOT be checked

View File

@@ -13,6 +13,7 @@ numpy==1.13.1
requests==2.18.4
pycodestyle==2.3.1
astroid==1.5.1
psycopg2>=2.5.4
pylint==1.5.2
pyDOE==0.3.8
mysqlclient==1.3.12

View File

@@ -4,39 +4,138 @@
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
import json
import shutil
import os
import sys
from collections import OrderedDict
ROOT = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
while os.path.basename(ROOT) != 'website':
ROOT = os.path.dirname(ROOT)
print('WEBSITE ROOT: {}'.format(ROOT))
sys.path.insert(0, ROOT)
from website.types import MetricType, VarType
# Metric catalog fields:
# dbms
# name
# vartype
# summary
# scope
# metric_type
# Constants
MODEL = 'website.MetricCatalog'
SCOPE = 'global'
VERSIONS = (12, 19)
# def main():
# final_metrics = []
# with open('oracle12.txt', 'r') as f:
# odd = 0
# entry = {}
# fields = {}
# lines = f.readlines()
# for line in lines:
# line = line.strip().replace("\n", "")
# if not line:
# continue
# if line == 'NAME' or line.startswith('-'):
# continue
# if odd == 0:
# entry = {}
# entry['model'] = 'website.MetricCatalog'
# fields = {}
# fields['name'] = "global." + line
# fields['summary'] = line
# fields['vartype'] = 2 # int
# fields['scope'] = 'global'
# fields['metric_type'] = 3 # stat
# if fields['name'] == "global.user commits":
# fields['metric_type'] = 1 # counter
# fields['dbms'] = 12 # oracle
# entry['fields'] = fields
# final_metrics.append(entry)
# with open('oracle-12_metrics.json', 'w') as f:
# json.dump(final_metrics, f, indent=4)
# shutil.copy('oracle-12_metrics.json', '../../../../website/fixtures/oracle-12_metrics.json')
def check_type(value):
# if value is not None:
try:
value = int(value)
except ValueError:
try:
value = float(value)
except ValueError:
pass
if isinstance(value, int):
vtype = VarType.INTEGER
elif isinstance(value, float):
vtype = VarType.REAL
else:
vtype = VarType.STRING
return vtype
def create_settings(metric_data, dbms):
metrics = []
for name, value in metric_data.items():
vartype = check_type(value)
if vartype in (VarType.INTEGER, VarType.REAL):
if 'average' in name or name.endswith('current') or \
name.startswith('session pga memory'):
mettype = MetricType.STATISTICS
else:
mettype = MetricType.COUNTER # Most int/float metrics are counters
else:
mettype = MetricType.INFO
summary = '{}: {}'.format(name, value)
if name == 'user commits':
assert vartype == VarType.INTEGER and mettype == MetricType.COUNTER
entry = OrderedDict([
('dbms', dbms),
('name', 'global.{}'.format(name)),
('vartype', vartype),
('summary', summary),
('scope', 'global'),
('metric_type', mettype),
])
metrics.append(OrderedDict([('fields', entry), ('model', MODEL)]))
return metrics
def usage():
print('python3 create_metric_settings.py [version] (valid versions: 12, 19)')
sys.exit(1)
def main():
final_metrics = []
with open('oracle12.txt', 'r') as f:
odd = 0
entry = {}
fields = {}
lines = f.readlines()
for line in lines:
line = line.strip().replace("\n", "")
if not line:
continue
if line == 'NAME' or line.startswith('-'):
continue
if odd == 0:
entry = {}
entry['model'] = 'website.MetricCatalog'
fields = {}
fields['name'] = "global." + line
fields['summary'] = line
fields['vartype'] = 2 # int
fields['scope'] = 'global'
fields['metric_type'] = 3 # stat
if fields['name'] == "global.user commits":
fields['metric_type'] = 1 # counter
fields['dbms'] = 12 # oracle
entry['fields'] = fields
final_metrics.append(entry)
with open('oracle-12_metrics.json', 'w') as f:
json.dump(final_metrics, f, indent=4)
shutil.copy('oracle-12_metrics.json', '../../../../website/fixtures/oracle-12_metrics.json')
if len(sys.argv) == 1:
versions = VERSIONS
else:
version = int(sys.argv[1])
if version not in VERSIONS:
usage()
versions = (version,)
for version in versions:
with open('oracle{}.json'.format(version), 'r') as f:
metrics = json.load(f, object_pairs_hook=OrderedDict)
metrics = metrics['global']['global']
meta = create_settings(metrics, version)
savepath = os.path.join(
ROOT, 'website', 'fixtures', 'oracle-{}_metrics.json'.format(version))
with open(savepath, 'w') as f:
json.dump(meta, f, indent=4)
if __name__ == '__main__':
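
A small usage sketch of create_settings() defined above; the metric names and values are invented, and the script's own imports (json, OrderedDict) are assumed to be in scope:

sample = OrderedDict([
    ('user commits', '4053'),            # integer value -> VarType.INTEGER, MetricType.COUNTER
    ('average active sessions', '1.7'),  # real value, name contains 'average' -> MetricType.STATISTICS
    ('example status metric', 'ENABLED'),  # non-numeric value -> MetricType.INFO
])
fixture = create_settings(sample, 12)    # dbms version, as in main()
print(json.dumps(fixture, indent=4))     # same shape as the oracle-12_metrics.json fixture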

File diff suppressed because it is too large

File diff suppressed because it is too large