Added additional Oracle views to metric collection (v$sys_time_model, v$system_event).

This commit is contained in:
Dana Van Aken
2019-10-07 19:34:03 -04:00
parent 162dc48c53
commit 40c75de3ce
13 changed files with 46337 additions and 13484 deletions

View File

@@ -18,7 +18,7 @@ repositories {
dependencies {
testCompile group: 'junit', name: 'junit', version: '4.11'
runtime fileTree(dir: 'lib', include: '*.jar')
runtime fileTree(dir: 'libs', include: '*.jar')
compile group: 'net.sourceforge.collections', name: 'collections-generic', version: '4.01'
compile group: 'commons-lang', name: 'commons-lang', version: '2.6'
compile group: 'log4j', name: 'log4j', version: '1.2.17'
@@ -51,8 +51,8 @@ dependencies {
// https://mvnrepository.com/artifact/org.postgresql/postgresql
compile group: 'org.postgresql', name: 'postgresql', version: '9.4-1201-jdbc41'
// This lib has to be manually downloaded from Oracle
dependencies {compile files('lib/ojdbc8.jar')}
// For Oracle, create the directory client/controller/libs and copy the driver
// (e.g., ojdbc8.jar) into it. The driver must be manually downloaded from Oracle.
}
run {

View File

@@ -1,6 +0,0 @@
#Thu Nov 30 15:45:05 EST 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-3.1-bin.zip

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -13,6 +13,7 @@ import com.controller.util.json.JSONStringer;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Map;
@@ -32,6 +33,10 @@ public class OracleCollector extends DBCollector {
private static final String METRICS_SQL = "select name, value from v$sysstat";
private static final String METRICS_SQL2 = "select stat_name, value from v$sys_time_model";
private static final String METRICS_SQL3 = "select * from v$system_event";
public OracleCollector(String oriDBUrl, String username, String password) {
try {
Connection conn = DriverManager.getConnection(oriDBUrl, username, password);
@@ -43,7 +48,7 @@ public class OracleCollector extends DBCollector {
}
// Collect DBMS parameters
out = statement.executeQuery(PARAMETERS_SQL_WITH_HIDDEN);
out = statement.executeQuery(PARAMETERS_SQL);
while (out.next()) {
dbParameters.put(out.getString(1).toLowerCase(), out.getString(2));
}
@@ -53,6 +58,28 @@ public class OracleCollector extends DBCollector {
while (out.next()) {
dbMetrics.put(out.getString(1).toLowerCase(), out.getString(2));
}
out = statement.executeQuery(METRICS_SQL2);
while (out.next()) {
dbMetrics.put(out.getString(1).toLowerCase(), out.getString(2));
}
out = statement.executeQuery(METRICS_SQL3);
ResultSetMetaData meta = out.getMetaData();
int columnCount = meta.getColumnCount();
String[] columnNames = new String[columnCount];
for (int i = 0; i < columnCount; ++i) {
columnNames[i] = meta.getColumnName(i + 1).toLowerCase();
}
while (out.next()) {
String eventName = out.getString(1).toLowerCase();
for (int i = 2; i <= columnCount; ++i) {
String name = eventName + "." + columnNames[i - 1];
Object value = out.getObject(i);
dbMetrics.put(name, String.valueOf(value));
}
}
conn.close();
} catch (SQLException e) {
LOG.error("Error while collecting DB parameters: " + e.getMessage());

View File

@@ -228,7 +228,8 @@ def run_controller():
@task
def signal_controller():
pidfile = os.path.join(CONF['controller_home'], 'pid.txt')
pid = int(open(pidfile).read())
with open(pidfile, 'r') as f:
pid = int(f.read())
cmd = 'sudo kill -2 {}'.format(pid)
with lcd(CONF['controller_home']): # pylint: disable=not-context-manager
local(cmd)
@@ -288,6 +289,9 @@ def upload_result(result_dir=None, prefix=None):
raise Exception('Error uploading result.\nStatus: {}\nMessage: {}\n'.format(
response.status_code, response.content))
for f in files.values(): # pylint: disable=not-an-iterable
f.close()
LOG.info(response.content)
return response
@@ -372,7 +376,8 @@ def add_udf():
@task
def upload_batch(result_dir, sort=True):
def upload_batch(result_dir=None, sort=True):
result_dir = result_dir or CONF['save_path']
sort = _parse_bool(sort)
results = glob.glob(os.path.join(result_dir, '*__summary.json'))
if sort:
@@ -381,7 +386,9 @@ def upload_batch(result_dir, sort=True):
LOG.info('Uploading %d samples from %s...', count, result_dir)
for i, result in enumerate(results):
prefix = os.path.basename(result).split('__')[0]
prefix = os.path.basename(result)
prefix_len = os.path.basename(result).find('_') + 2
prefix = prefix[:prefix_len]
upload_result(result_dir=result_dir, prefix=prefix)
LOG.info('Uploaded result %d/%d: %s__*.json', i + 1, count, prefix)
@@ -430,19 +437,31 @@ def restore_database():
def _ready_to_start_oltpbench():
return os.path.exists(CONF['controller_log']) and \
'Output the process pid to' in open(CONF['controller_log']).read()
ready = False
if os.path.exists(CONF['controller_log']):
with open(CONF['controller_log'], 'r') as f:
content = f.read()
ready = 'Output the process pid to' in content
return ready
def _ready_to_start_controller():
return os.path.exists(CONF['oltpbench_log']) and \
'Warmup complete, starting measurements' in open(CONF['oltpbench_log']).read()
ready = False
if os.path.exists(CONF['oltpbench_log']):
with open(CONF['oltpbench_log'], 'r') as f:
content = f.read()
ready = 'Warmup complete, starting measurements' in content
return ready
def _ready_to_shut_down_controller():
pid_file_path = os.path.join(CONF['controller_home'], 'pid.txt')
return os.path.exists(pid_file_path) and os.path.exists(CONF['oltpbench_log']) and \
'Output throughput samples into file' in open(CONF['oltpbench_log']).read()
pidfile = os.path.join(CONF['controller_home'], 'pid.txt')
ready = False
if os.path.exists(pidfile) and os.path.exists(CONF['oltpbench_log']):
with open(CONF['oltpbench_log'], 'r') as f:
content = f.read()
ready = 'Output throughput samples into file' in content
return ready
def clean_logs():