Initial commit with BSL

This commit is contained in:
Andy Pavlo
2019-08-23 11:47:19 -04:00
commit 3e564ce922
286 changed files with 177642 additions and 0 deletions

View File

@@ -0,0 +1,5 @@
#
# OtterTune - __init__.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#

View File

@@ -0,0 +1,22 @@
#
# OtterTune - runner.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
'''
Created on Jan 29, 2018
@author: dvanaken
'''
import logging
from django.test.runner import DiscoverRunner
class BaseRunner(DiscoverRunner):
    """Django test runner that silences log output for the whole test run."""

    def run_tests(self, test_labels, extra_tests=None, **kwargs):
        """Run the suite with logging muted.

        Disables every log record at or below CRITICAL before delegating to
        ``DiscoverRunner.run_tests``; logging is intentionally left disabled
        afterwards so teardown noise is suppressed as well.
        """
        # Mute all loggers so test output stays readable.
        logging.disable(logging.CRITICAL)
        return super().run_tests(test_labels, extra_tests, **kwargs)

View File

@@ -0,0 +1,277 @@
{
"global": {
"global": {
"client_min_messages": "notice",
"autovacuum_vacuum_scale_factor": "0.2",
"bonjour_name": "",
"vacuum_freeze_table_age": "150000000",
"max_worker_processes": "8",
"syslog_facility": "local0",
"transaction_read_only": "off",
"wal_compression": "off",
"log_temp_files": "-1",
"authentication_timeout": "1min",
"ssl_dh_params_file": "",
"log_lock_waits": "off",
"integer_datetimes": "on",
"archive_mode": "off",
"superuser_reserved_connections": "3",
"track_functions": "none",
"synchronous_standby_names": "",
"max_replication_slots": "10",
"enable_hashjoin": "on",
"huge_pages": "try",
"bgwriter_flush_after": "0",
"lc_monetary": "en_US.UTF-8",
"geqo_seed": "0",
"DateStyle": "ISO, MDY",
"autovacuum_analyze_threshold": "50",
"dynamic_shared_memory_type": "posix",
"autovacuum_naptime": "1min",
"cluster_name": "",
"checkpoint_completion_target": "0.5",
"log_connections": "off",
"local_preload_libraries": "",
"effective_io_concurrency": "7",
"quote_all_identifiers": "off",
"log_checkpoints": "off",
"log_statement_stats": "off",
"wal_block_size": "8192",
"max_wal_size": "1GB",
"archive_timeout": "0",
"log_filename": "postgresql-%Y-%m-%d_%H%M%S.log",
"deadlock_timeout": "1s",
"shared_preload_libraries": "",
"statement_timeout": "0",
"dynamic_library_path": "$libdir",
"force_parallel_mode": "off",
"log_rotation_age": "1d",
"ssl": "off",
"max_function_args": "100",
"checkpoint_warning": "30s",
"log_hostname": "off",
"log_truncate_on_rotation": "off",
"cursor_tuple_fraction": "0.1",
"geqo_pool_size": "0",
"parallel_tuple_cost": "0.1",
"log_parser_stats": "off",
"autovacuum_max_workers": "3",
"fsync": "on",
"min_parallel_index_scan_size": "512kB",
"post_auth_delay": "0",
"server_version_num": "100001",
"cpu_index_tuple_cost": "0.005",
"lc_ctype": "en_US.UTF-8",
"ssl_ciphers": "HIGH:MEDIUM:+3DES:!aNULL",
"cpu_operator_cost": "0.0025",
"default_with_oids": "off",
"config_file": "/Users/MacadamiaKitten/Desktop/psql_db/postgresql.conf",
"escape_string_warning": "on",
"enable_bitmapscan": "on",
"from_collapse_limit": "8",
"max_logical_replication_workers": "4",
"vacuum_cost_page_hit": "1",
"backend_flush_after": "0",
"checkpoint_timeout": "5min",
"replacement_sort_tuples": "150000",
"lc_collate": "en_US.UTF-8",
"max_stack_depth": "2MB",
"standard_conforming_strings": "on",
"syslog_sequence_numbers": "on",
"pre_auth_delay": "0",
"gin_pending_list_limit": "4MB",
"debug_print_parse": "off",
"max_pred_locks_per_page": "2",
"enable_material": "on",
"port": "5432",
"maintenance_work_mem": "64MB",
"checkpoint_flush_after": "0",
"wal_keep_segments": "0",
"operator_precedence_warning": "off",
"stats_temp_directory": "pg_stat_tmp",
"wal_receiver_status_interval": "10s",
"wal_log_hints": "off",
"max_wal_senders": "10",
"extra_float_digits": "3",
"enable_hashagg": "on",
"ssl_ecdh_curve": "prime256v1",
"log_error_verbosity": "default",
"data_checksums": "off",
"shared_buffers": "7GB",
"min_wal_size": "80MB",
"zero_damaged_pages": "off",
"logging_collector": "off",
"enable_mergejoin": "on",
"lc_numeric": "en_US.UTF-8",
"commit_siblings": "5",
"log_min_error_statement": "error",
"krb_server_keyfile": "FILE:/usr/local/etc/postgresql/krb5.keytab",
"wal_level": "replica",
"vacuum_multixact_freeze_table_age": "150000000",
"vacuum_multixact_freeze_min_age": "5000000",
"wal_retrieve_retry_interval": "5s",
"hba_file": "/Users/MacadamiaKitten/Desktop/psql_db/pg_hba.conf",
"event_source": "PostgreSQL",
"lc_messages": "en_US.UTF-8",
"autovacuum": "on",
"enable_nestloop": "on",
"log_statement": "none",
"log_replication_commands": "off",
"trace_sort": "off",
"unix_socket_group": "",
"geqo_threshold": "12",
"max_pred_locks_per_relation": "-2",
"tcp_keepalives_count": "8",
"idle_in_transaction_session_timeout": "0",
"max_files_per_process": "1000",
"log_planner_stats": "off",
"allow_system_table_mods": "off",
"debug_print_plan": "off",
"log_min_messages": "warning",
"max_parallel_workers": "8",
"log_disconnections": "off",
"db_user_namespace": "off",
"trace_recovery_messages": "log",
"row_security": "on",
"enable_gathermerge": "on",
"log_duration": "off",
"autovacuum_vacuum_threshold": "50",
"xmloption": "content",
"syslog_split_messages": "on",
"wal_sender_timeout": "1min",
"password_encryption": "md5",
"ssl_cert_file": "server.crt",
"block_size": "8192",
"vacuum_cost_delay": "0",
"log_file_mode": "0600",
"max_connections": "100",
"hot_standby": "on",
"max_sync_workers_per_subscription": "2",
"timezone_abbreviations": "Default",
"log_line_prefix": "%m [%p] ",
"transaction_deferrable": "off",
"bgwriter_lru_maxpages": "100",
"archive_command": "(disabled)",
"default_text_search_config": "pg_catalog.english",
"min_parallel_table_scan_size": "8MB",
"data_directory": "/Users/MacadamiaKitten/Desktop/psql_db",
"autovacuum_analyze_scale_factor": "0.1",
"ident_file": "/Users/MacadamiaKitten/Desktop/psql_db/pg_ident.conf",
"default_transaction_deferrable": "off",
"lo_compat_privileges": "off",
"tcp_keepalives_idle": "7200",
"session_replication_role": "origin",
"log_timezone": "US/Eastern",
"log_directory": "log",
"listen_addresses": "localhost",
"server_encoding": "UTF8",
"xmlbinary": "base64",
"unix_socket_directories": "/tmp",
"search_path": "\"$user\", public",
"temp_buffers": "8MB",
"constraint_exclusion": "partition",
"wal_consistency_checking": "",
"autovacuum_vacuum_cost_limit": "-1",
"track_activity_query_size": "1024",
"geqo_selection_bias": "2",
"work_mem": "10GB",
"geqo_generations": "0",
"bonjour": "off",
"vacuum_freeze_min_age": "50000000",
"default_tablespace": "",
"vacuum_defer_cleanup_age": "0",
"default_statistics_target": "100",
"track_activities": "on",
"geqo": "on",
"external_pid_file": "",
"synchronous_commit": "on",
"restart_after_crash": "on",
"ssl_prefer_server_ciphers": "on",
"segment_size": "1GB",
"old_snapshot_threshold": "-1",
"effective_cache_size": "4GB",
"ssl_ca_file": "",
"application_name": "",
"debug_print_rewritten": "off",
"enable_tidscan": "on",
"lock_timeout": "0",
"tcp_keepalives_interval": "75",
"bytea_output": "hex",
"log_min_duration_statement": "-1",
"max_prepared_transactions": "0",
"wal_receiver_timeout": "1min",
"parallel_setup_cost": "1000",
"default_transaction_read_only": "off",
"autovacuum_multixact_freeze_max_age": "400000000",
"log_rotation_size": "10MB",
"krb_caseins_users": "off",
"IntervalStyle": "postgres",
"track_commit_timestamp": "off",
"TimeZone": "America/New_York",
"vacuum_cost_page_dirty": "20",
"log_executor_stats": "off",
"track_io_timing": "off",
"vacuum_cost_page_miss": "10",
"enable_seqscan": "on",
"full_page_writes": "on",
"temp_tablespaces": "",
"array_nulls": "on",
"log_destination": "stderr",
"wal_writer_delay": "200ms",
"enable_indexonlyscan": "on",
"exit_on_error": "off",
"debug_assertions": "off",
"max_parallel_workers_per_gather": "2",
"check_function_bodies": "on",
"cpu_tuple_cost": "0.01",
"random_page_cost": "4",
"wal_writer_flush_after": "1MB",
"autovacuum_work_mem": "-1",
"max_standby_archive_delay": "30s",
"bgwriter_lru_multiplier": "2",
"track_counts": "on",
"trace_notify": "off",
"wal_buffers": "4MB",
"max_standby_streaming_delay": "30s",
"commit_delay": "0",
"gin_fuzzy_search_limit": "0",
"ignore_checksum_failure": "off",
"max_index_keys": "32",
"wal_sync_method": "open_datasync",
"session_preload_libraries": "",
"vacuum_cost_limit": "200",
"default_transaction_isolation": "read committed",
"hot_standby_feedback": "off",
"unix_socket_permissions": "0777",
"max_pred_locks_per_transaction": "64",
"synchronize_seqscans": "on",
"checkpoint_timing": "3min",
"backslash_quote": "safe_encoding",
"wal_segment_size": "16MB",
"max_locks_per_transaction": "64",
"ssl_key_file": "server.key",
"transform_null_equals": "off",
"ssl_crl_file": "",
"lc_time": "en_US.UTF-8",
"server_version": "10.1",
"temp_file_limit": "-1",
"debug_pretty_print": "on",
"max_identifier_length": "63",
"client_encoding": "UTF8",
"seq_page_cost": "1",
"transaction_isolation": "read committed",
"autovacuum_freeze_max_age": "200000000",
"update_process_title": "on",
"ignore_system_indexes": "off",
"log_autovacuum_min_duration": "-1",
"bgwriter_delay": "200ms",
"join_collapse_limit": "8",
"autovacuum_vacuum_cost_delay": "20ms",
"geqo_effort": "5",
"enable_sort": "on",
"syslog_ident": "postgres",
"enable_indexscan": "on"
}
},
"local": null
}

View File

@@ -0,0 +1,582 @@
{
"global": {
"pg_stat_archiver": {
"archived_count": "0",
"stats_reset": "2017-11-10 10:59:47.397075-05",
"failed_count": "0"
},
"pg_stat_bgwriter": {
"buffers_backend": "81032",
"checkpoints_timed": "1277",
"buffers_alloc": 7477,
"buffers_clean": "49590",
"buffers_backend_fsync": "0",
"checkpoint_sync_time": "19",
"checkpoints_req": "2",
"checkpoint_write_time": "597851",
"maxwritten_clean": "325",
"buffers_checkpoint": "33250",
"stats_reset": "2017-11-10 10:59:47.397075-05"
}
},
"local": {
"table": {
"pg_stat_user_tables": {
"customer": {
"last_autoanalyze": "2017-11-20 15:59:18.824212-05",
"n_live_tup": "60000",
"vacuum_count": "0",
"n_tup_ins": 93806,
"n_tup_hot_upd": "262",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "1510",
"n_mod_since_analyze": "1594",
"relname": "customer",
"analyze_count": "0",
"idx_scan": "125261",
"idx_tup_fetch": "85299628",
"autovacuum_count": "0",
"n_tup_upd": "1594",
"schemaname": "public",
"seq_scan": "3",
"seq_tup_read": "0",
"relid": "16540"
},
"district": {
"last_autoanalyze": "2017-11-20 19:23:34.201509-05",
"n_live_tup": "20",
"vacuum_count": "0",
"n_tup_ins": 94452,
"n_tup_hot_upd": "1754",
"autoanalyze_count": "2",
"n_tup_del": "0",
"n_dead_tup": "33",
"n_mod_since_analyze": "0",
"relname": "district",
"analyze_count": "0",
"idx_scan": "122234",
"idx_tup_fetch": "122234",
"autovacuum_count": "0",
"n_tup_upd": "1754",
"schemaname": "public",
"seq_scan": "2221",
"seq_tup_read": "41522",
"relid": "16549"
},
"order_line": {
"last_autoanalyze": "2017-11-20 16:00:11.017507-05",
"n_live_tup": "608373",
"vacuum_count": "0",
"n_tup_ins": 95862,
"n_tup_hot_upd": "5393",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "2550",
"n_mod_since_analyze": "16230",
"relname": "order_line",
"analyze_count": "0",
"idx_scan": "1655",
"idx_tup_fetch": "33762",
"autovacuum_count": "0",
"n_tup_upd": "7329",
"schemaname": "public",
"seq_scan": "3",
"seq_tup_read": "0",
"relid": "16513"
},
"new_order": {
"last_autoanalyze": "2017-11-20 16:00:11.217111-05",
"n_live_tup": "16964",
"vacuum_count": "0",
"n_tup_ins": 94900,
"n_tup_hot_upd": "0",
"autoanalyze_count": "1",
"n_tup_del": "740",
"n_dead_tup": "751",
"n_mod_since_analyze": "1629",
"relname": "new_order",
"analyze_count": "0",
"idx_scan": "1481",
"idx_tup_fetch": "1480",
"autovacuum_count": "0",
"n_tup_upd": "0",
"schemaname": "public",
"seq_scan": "1",
"seq_tup_read": "0",
"relid": "16518"
},
"item": {
"last_autoanalyze": "2017-11-20 15:59:26.613728-05",
"n_live_tup": "102000",
"vacuum_count": "0",
"n_tup_ins": 99887,
"n_tup_hot_upd": "0",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "0",
"n_mod_since_analyze": "2000",
"relname": "item",
"analyze_count": "0",
"idx_scan": "209020",
"idx_tup_fetch": "209009",
"autovacuum_count": "0",
"n_tup_upd": "0",
"schemaname": "public",
"seq_scan": "1",
"seq_tup_read": "0",
"relid": "16554"
},
"oorder": {
"last_autoanalyze": "2017-11-20 15:59:54.690984-05",
"n_live_tup": "60889",
"vacuum_count": "0",
"n_tup_ins": 93463,
"n_tup_hot_upd": "662",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "117",
"n_mod_since_analyze": "1629",
"relname": "oorder",
"analyze_count": "0",
"idx_scan": "627652",
"idx_tup_fetch": "627652",
"autovacuum_count": "0",
"n_tup_upd": "740",
"schemaname": "public",
"seq_scan": "4",
"seq_tup_read": "0",
"relid": "16528"
},
"warehouse": {
"last_autoanalyze": "2017-11-20 19:23:34.236294-05",
"n_live_tup": "2",
"vacuum_count": "0",
"n_tup_ins": 81744,
"n_tup_hot_upd": "854",
"autoanalyze_count": "2",
"n_tup_del": "0",
"n_dead_tup": "0",
"n_mod_since_analyze": "0",
"relname": "warehouse",
"last_autovacuum": "2017-11-20 19:23:34.235793-05",
"analyze_count": "0",
"idx_scan": "202634",
"idx_tup_fetch": "202634",
"autovacuum_count": "2",
"n_tup_upd": "854",
"schemaname": "public",
"seq_scan": "1",
"seq_tup_read": "0",
"relid": "16559"
},
"stock": {
"last_autoanalyze": "2017-11-20 15:59:01.368483-05",
"n_live_tup": "200000",
"vacuum_count": "0",
"n_tup_ins": 82611,
"n_tup_hot_upd": "5305",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "4364",
"n_mod_since_analyze": "8901",
"relname": "stock",
"analyze_count": "0",
"idx_scan": "644561",
"idx_tup_fetch": "644561",
"autovacuum_count": "0",
"n_tup_upd": "8901",
"schemaname": "public",
"seq_scan": "3",
"seq_tup_read": "0",
"relid": "16523"
},
"history": {
"last_autoanalyze": "2017-11-20 15:59:02.567618-05",
"n_live_tup": "60854",
"vacuum_count": "0",
"n_tup_ins": 83824,
"n_tup_hot_upd": "0",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "0",
"relname": "history",
"analyze_count": "0",
"n_mod_since_analyze": "854",
"autovacuum_count": "0",
"n_tup_upd": "0",
"schemaname": "public",
"seq_scan": "2",
"seq_tup_read": "0",
"relid": "16536"
}
},
"pg_statio_user_tables": {
"customer": {
"relid": "16540",
"idx_blks_read": "2716",
"relname": "customer",
"tidx_blks_read": "0",
"toast_blks_hit": "0",
"idx_blks_hit": "1411491",
"tidx_blks_hit": "0",
"toast_blks_read": "0",
"heap_blks_hit": "70136669",
"schemaname": "public",
"heap_blks_read": "13826"
},
"district": {
"relid": "16549",
"heap_blks_hit": "249754",
"idx_blks_hit": "122259",
"relname": "district",
"idx_blks_read": "5",
"schemaname": "public",
"heap_blks_read": "3"
},
"order_line": {
"relid": "16513",
"heap_blks_hit": "1869417",
"idx_blks_hit": "1788651",
"relname": "order_line",
"idx_blks_read": "3708",
"schemaname": "public",
"heap_blks_read": "12419"
},
"new_order": {
"relid": "16518",
"heap_blks_hit": "37856",
"idx_blks_hit": "38225",
"relname": "new_order",
"idx_blks_read": "134",
"schemaname": "public",
"heap_blks_read": "192"
},
"item": {
"relid": "16554",
"heap_blks_hit": "509702",
"idx_blks_hit": "617914",
"relname": "item",
"idx_blks_read": "877",
"schemaname": "public",
"heap_blks_read": "4542"
},
"oorder": {
"relid": "16528",
"heap_blks_hit": "1378399",
"idx_blks_hit": "3979052",
"relname": "oorder",
"idx_blks_read": "1881",
"schemaname": "public",
"heap_blks_read": "928"
},
"warehouse": {
"relid": "16559",
"heap_blks_hit": "404486",
"idx_blks_hit": "202643",
"relname": "warehouse",
"idx_blks_read": "6",
"schemaname": "public",
"heap_blks_read": "80"
},
"stock": {
"relid": "16523",
"heap_blks_hit": "1920817",
"idx_blks_hit": "2447522",
"relname": "stock",
"idx_blks_read": "1530",
"schemaname": "public",
"heap_blks_read": "11757"
},
"history": {
"schemaname": "public",
"relname": "history",
"heap_blks_hit": "184380",
"heap_blks_read": "746",
"relid": "16536"
}
}
},
"database": {
"pg_stat_database_conflicts": {
"tpcc": {
"datname": "tpcc",
"confl_deadlock": "0",
"confl_bufferpin": "0",
"datid": "16384",
"confl_tablespace": "0",
"confl_lock": "0",
"confl_snapshot": "0"
},
"template0": {
"datname": "template0",
"confl_deadlock": "0",
"confl_bufferpin": "0",
"datid": "12557",
"confl_tablespace": "0",
"confl_lock": "0",
"confl_snapshot": "0"
},
"postgres": {
"datname": "postgres",
"confl_deadlock": "0",
"confl_bufferpin": "0",
"datid": "12558",
"confl_tablespace": "0",
"confl_lock": "0",
"confl_snapshot": "0"
},
"template1": {
"datname": "template1",
"confl_deadlock": "0",
"confl_bufferpin": "0",
"datid": "1",
"confl_tablespace": "0",
"confl_lock": "0",
"confl_snapshot": "0"
}
},
"pg_stat_database": {
"tpcc": {
"numbackends": "0",
"datname": "tpcc",
"blks_read": "0",
"deadlocks": "0",
"temp_files": "0",
"blks_hit": "0",
"temp_bytes": "0",
"blk_write_time": "0",
"datid": "16384",
"xact_commit": 72957,
"blk_read_time": "0",
"xact_rollback": "0",
"conflicts": "0",
"tup_inserted": "0",
"tup_returned": "0",
"tup_updated": "0",
"tup_deleted": "0",
"tup_fetched": "0"
},
"template0": {
"numbackends": "0",
"datname": "template0",
"blks_read": "0",
"deadlocks": "0",
"temp_files": "0",
"blks_hit": "0",
"temp_bytes": "0",
"blk_write_time": "0",
"datid": "12557",
"xact_commit": 95353,
"blk_read_time": "0",
"xact_rollback": "0",
"conflicts": "0",
"tup_inserted": "0",
"tup_returned": "0",
"tup_updated": "0",
"tup_deleted": "0",
"tup_fetched": "0"
},
"postgres": {
"numbackends": "1",
"datname": "postgres",
"blks_read": "104188",
"deadlocks": "0",
"temp_files": "0",
"stats_reset": "2017-11-10 11:14:57.116228-05",
"blks_hit": "115229324",
"temp_bytes": "0",
"blk_write_time": "0",
"datid": "12558",
"xact_commit": 80454,
"blk_read_time": "0",
"xact_rollback": "17",
"conflicts": "0",
"tup_inserted": "2210752",
"tup_returned": "110741743",
"tup_updated": "32675",
"tup_deleted": "1818",
"tup_fetched": "103355344"
},
"template1": {
"numbackends": "0",
"datname": "template1",
"blks_read": "0",
"deadlocks": "0",
"temp_files": "0",
"blks_hit": "0",
"temp_bytes": "0",
"blk_write_time": "0",
"datid": "1",
"xact_commit": 85045,
"blk_read_time": "0",
"xact_rollback": "0",
"conflicts": "0",
"tup_inserted": "0",
"tup_returned": "0",
"tup_updated": "0",
"tup_deleted": "0",
"tup_fetched": "0"
}
}
},
"indexes": {
"pg_stat_user_indexes": {
"customer": {
"indexrelid": "16564",
"relid": "16540",
"indexrelname": "idx_customer_name",
"relname": "customer",
"idx_tup_fetch": "85256809",
"idx_tup_read": "85256841",
"idx_scan": "82442",
"schemaname": "public"
},
"district": {
"indexrelid": "16552",
"relid": "16549",
"indexrelname": "district_pkey",
"relname": "district",
"idx_tup_fetch": "122234",
"idx_tup_read": "122234",
"idx_scan": "122234",
"schemaname": "public"
},
"order_line": {
"indexrelid": "16516",
"relid": "16513",
"indexrelname": "order_line_pkey",
"relname": "order_line",
"idx_tup_fetch": "33762",
"idx_tup_read": "35698",
"idx_scan": "1655",
"schemaname": "public"
},
"new_order": {
"indexrelid": "16521",
"relid": "16518",
"indexrelname": "new_order_pkey",
"relname": "new_order",
"idx_tup_fetch": "1480",
"idx_tup_read": "2200",
"idx_scan": "1481",
"schemaname": "public"
},
"item": {
"indexrelid": "16557",
"relid": "16554",
"indexrelname": "item_pkey",
"relname": "item",
"idx_tup_fetch": "209009",
"idx_tup_read": "209009",
"idx_scan": "209020",
"schemaname": "public"
},
"oorder": {
"indexrelid": "16565",
"relid": "16528",
"indexrelname": "idx_order",
"relname": "oorder",
"idx_tup_fetch": "616371",
"idx_tup_read": "616371",
"idx_scan": "616371",
"schemaname": "public"
},
"warehouse": {
"indexrelid": "16562",
"relid": "16559",
"indexrelname": "warehouse_pkey",
"relname": "warehouse",
"idx_tup_fetch": "201331",
"idx_tup_read": "202634",
"idx_scan": "202634",
"schemaname": "public"
},
"stock": {
"indexrelid": "16526",
"relid": "16523",
"indexrelname": "stock_pkey",
"relname": "stock",
"idx_tup_fetch": "644561",
"idx_tup_read": "647319",
"idx_scan": "644561",
"schemaname": "public"
}
},
"pg_statio_user_indexes": {
"customer": {
"indexrelid": "16564",
"relid": "16540",
"indexrelname": "idx_customer_name",
"idx_blks_hit": 81021,
"relname": "customer",
"idx_blks_read": "1589",
"schemaname": "public"
},
"district": {
"indexrelid": "16552",
"relid": "16549",
"indexrelname": "district_pkey",
"idx_blks_hit": 76868,
"relname": "district",
"idx_blks_read": "5",
"schemaname": "public"
},
"order_line": {
"indexrelid": "16516",
"relid": "16513",
"indexrelname": "order_line_pkey",
"idx_blks_hit": 73690,
"relname": "order_line",
"idx_blks_read": "3708",
"schemaname": "public"
},
"new_order": {
"indexrelid": "16521",
"relid": "16518",
"indexrelname": "new_order_pkey",
"idx_blks_hit": 98309,
"relname": "new_order",
"idx_blks_read": "134",
"schemaname": "public"
},
"item": {
"indexrelid": "16557",
"relid": "16554",
"indexrelname": "item_pkey",
"idx_blks_hit": 90212,
"relname": "item",
"idx_blks_read": "877",
"schemaname": "public"
},
"oorder": {
"indexrelid": "16565",
"relid": "16528",
"indexrelname": "idx_order",
"idx_blks_hit": 78961,
"relname": "oorder",
"idx_blks_read": "733",
"schemaname": "public"
},
"warehouse": {
"indexrelid": "16562",
"relid": "16559",
"indexrelname": "warehouse_pkey",
"idx_blks_hit": 96942,
"relname": "warehouse",
"idx_blks_read": "6",
"schemaname": "public"
},
"stock": {
"indexrelid": "16526",
"relid": "16523",
"indexrelname": "stock_pkey",
"idx_blks_hit": 85457,
"relname": "stock",
"idx_blks_read": "1530",
"schemaname": "public"
}
}
}
}
}

View File

@@ -0,0 +1,582 @@
{
"global": {
"pg_stat_archiver": {
"archived_count": "0",
"stats_reset": "2017-11-10 10:59:47.397075-05",
"failed_count": "0"
},
"pg_stat_bgwriter": {
"buffers_backend": "81032",
"checkpoints_timed": "1277",
"buffers_alloc": 4914,
"buffers_clean": "49590",
"buffers_backend_fsync": "0",
"checkpoint_sync_time": "19",
"checkpoints_req": "2",
"checkpoint_write_time": "597851",
"maxwritten_clean": "325",
"buffers_checkpoint": "33250",
"stats_reset": "2017-11-10 10:59:47.397075-05"
}
},
"local": {
"table": {
"pg_stat_user_tables": {
"customer": {
"last_autoanalyze": "2017-11-20 15:59:18.824212-05",
"n_live_tup": "60000",
"vacuum_count": "0",
"n_tup_ins": 46752,
"n_tup_hot_upd": "262",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "1510",
"n_mod_since_analyze": "1594",
"relname": "customer",
"analyze_count": "0",
"idx_scan": "125261",
"idx_tup_fetch": "85299628",
"autovacuum_count": "0",
"n_tup_upd": "1594",
"schemaname": "public",
"seq_scan": "3",
"seq_tup_read": "0",
"relid": "16540"
},
"district": {
"last_autoanalyze": "2017-11-20 19:23:34.201509-05",
"n_live_tup": "20",
"vacuum_count": "0",
"n_tup_ins": 31239,
"n_tup_hot_upd": "1754",
"autoanalyze_count": "2",
"n_tup_del": "0",
"n_dead_tup": "33",
"n_mod_since_analyze": "0",
"relname": "district",
"analyze_count": "0",
"idx_scan": "122234",
"idx_tup_fetch": "122234",
"autovacuum_count": "0",
"n_tup_upd": "1754",
"schemaname": "public",
"seq_scan": "2221",
"seq_tup_read": "41522",
"relid": "16549"
},
"order_line": {
"last_autoanalyze": "2017-11-20 16:00:11.017507-05",
"n_live_tup": "608373",
"vacuum_count": "0",
"n_tup_ins": 38861,
"n_tup_hot_upd": "5393",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "2550",
"n_mod_since_analyze": "16230",
"relname": "order_line",
"analyze_count": "0",
"idx_scan": "1655",
"idx_tup_fetch": "33762",
"autovacuum_count": "0",
"n_tup_upd": "7329",
"schemaname": "public",
"seq_scan": "3",
"seq_tup_read": "0",
"relid": "16513"
},
"new_order": {
"last_autoanalyze": "2017-11-20 16:00:11.217111-05",
"n_live_tup": "16964",
"vacuum_count": "0",
"n_tup_ins": 38698,
"n_tup_hot_upd": "0",
"autoanalyze_count": "1",
"n_tup_del": "740",
"n_dead_tup": "751",
"n_mod_since_analyze": "1629",
"relname": "new_order",
"analyze_count": "0",
"idx_scan": "1481",
"idx_tup_fetch": "1480",
"autovacuum_count": "0",
"n_tup_upd": "0",
"schemaname": "public",
"seq_scan": "1",
"seq_tup_read": "0",
"relid": "16518"
},
"item": {
"last_autoanalyze": "2017-11-20 15:59:26.613728-05",
"n_live_tup": "102000",
"vacuum_count": "0",
"n_tup_ins": 50065,
"n_tup_hot_upd": "0",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "0",
"n_mod_since_analyze": "2000",
"relname": "item",
"analyze_count": "0",
"idx_scan": "209020",
"idx_tup_fetch": "209009",
"autovacuum_count": "0",
"n_tup_upd": "0",
"schemaname": "public",
"seq_scan": "1",
"seq_tup_read": "0",
"relid": "16554"
},
"oorder": {
"last_autoanalyze": "2017-11-20 15:59:54.690984-05",
"n_live_tup": "60889",
"vacuum_count": "0",
"n_tup_ins": 68200,
"n_tup_hot_upd": "662",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "117",
"n_mod_since_analyze": "1629",
"relname": "oorder",
"analyze_count": "0",
"idx_scan": "627652",
"idx_tup_fetch": "627652",
"autovacuum_count": "0",
"n_tup_upd": "740",
"schemaname": "public",
"seq_scan": "4",
"seq_tup_read": "0",
"relid": "16528"
},
"warehouse": {
"last_autoanalyze": "2017-11-20 19:23:34.236294-05",
"n_live_tup": "2",
"vacuum_count": "0",
"n_tup_ins": 66112,
"n_tup_hot_upd": "854",
"autoanalyze_count": "2",
"n_tup_del": "0",
"n_dead_tup": "0",
"n_mod_since_analyze": "0",
"relname": "warehouse",
"last_autovacuum": "2017-11-20 19:23:34.235793-05",
"analyze_count": "0",
"idx_scan": "202634",
"idx_tup_fetch": "202634",
"autovacuum_count": "2",
"n_tup_upd": "854",
"schemaname": "public",
"seq_scan": "1",
"seq_tup_read": "0",
"relid": "16559"
},
"stock": {
"last_autoanalyze": "2017-11-20 15:59:01.368483-05",
"n_live_tup": "200000",
"vacuum_count": "0",
"n_tup_ins": 66271,
"n_tup_hot_upd": "5305",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "4364",
"n_mod_since_analyze": "8901",
"relname": "stock",
"analyze_count": "0",
"idx_scan": "644561",
"idx_tup_fetch": "644561",
"autovacuum_count": "0",
"n_tup_upd": "8901",
"schemaname": "public",
"seq_scan": "3",
"seq_tup_read": "0",
"relid": "16523"
},
"history": {
"last_autoanalyze": "2017-11-20 15:59:02.567618-05",
"n_live_tup": "60854",
"vacuum_count": "0",
"n_tup_ins": 67987,
"n_tup_hot_upd": "0",
"autoanalyze_count": "1",
"n_tup_del": "0",
"n_dead_tup": "0",
"relname": "history",
"analyze_count": "0",
"n_mod_since_analyze": "854",
"autovacuum_count": "0",
"n_tup_upd": "0",
"schemaname": "public",
"seq_scan": "2",
"seq_tup_read": "0",
"relid": "16536"
}
},
"pg_statio_user_tables": {
"customer": {
"relid": "16540",
"idx_blks_read": "2716",
"relname": "customer",
"tidx_blks_read": "0",
"toast_blks_hit": "0",
"idx_blks_hit": "1411491",
"tidx_blks_hit": "0",
"toast_blks_read": "0",
"heap_blks_hit": "70136669",
"schemaname": "public",
"heap_blks_read": "13826"
},
"district": {
"relid": "16549",
"heap_blks_hit": "249754",
"idx_blks_hit": "122259",
"relname": "district",
"idx_blks_read": "5",
"schemaname": "public",
"heap_blks_read": "3"
},
"order_line": {
"relid": "16513",
"heap_blks_hit": "1869417",
"idx_blks_hit": "1788651",
"relname": "order_line",
"idx_blks_read": "3708",
"schemaname": "public",
"heap_blks_read": "12419"
},
"new_order": {
"relid": "16518",
"heap_blks_hit": "37856",
"idx_blks_hit": "38225",
"relname": "new_order",
"idx_blks_read": "134",
"schemaname": "public",
"heap_blks_read": "192"
},
"item": {
"relid": "16554",
"heap_blks_hit": "509702",
"idx_blks_hit": "617914",
"relname": "item",
"idx_blks_read": "877",
"schemaname": "public",
"heap_blks_read": "4542"
},
"oorder": {
"relid": "16528",
"heap_blks_hit": "1378399",
"idx_blks_hit": "3979052",
"relname": "oorder",
"idx_blks_read": "1881",
"schemaname": "public",
"heap_blks_read": "928"
},
"warehouse": {
"relid": "16559",
"heap_blks_hit": "404486",
"idx_blks_hit": "202643",
"relname": "warehouse",
"idx_blks_read": "6",
"schemaname": "public",
"heap_blks_read": "80"
},
"stock": {
"relid": "16523",
"heap_blks_hit": "1920817",
"idx_blks_hit": "2447522",
"relname": "stock",
"idx_blks_read": "1530",
"schemaname": "public",
"heap_blks_read": "11757"
},
"history": {
"schemaname": "public",
"relname": "history",
"heap_blks_hit": "184380",
"heap_blks_read": "746",
"relid": "16536"
}
}
},
"database": {
"pg_stat_database_conflicts": {
"tpcc": {
"datname": "tpcc",
"confl_deadlock": "0",
"confl_bufferpin": "0",
"datid": "16384",
"confl_tablespace": "0",
"confl_lock": "0",
"confl_snapshot": "0"
},
"template0": {
"datname": "template0",
"confl_deadlock": "0",
"confl_bufferpin": "0",
"datid": "12557",
"confl_tablespace": "0",
"confl_lock": "0",
"confl_snapshot": "0"
},
"postgres": {
"datname": "postgres",
"confl_deadlock": "0",
"confl_bufferpin": "0",
"datid": "12558",
"confl_tablespace": "0",
"confl_lock": "0",
"confl_snapshot": "0"
},
"template1": {
"datname": "template1",
"confl_deadlock": "0",
"confl_bufferpin": "0",
"datid": "1",
"confl_tablespace": "0",
"confl_lock": "0",
"confl_snapshot": "0"
}
},
"pg_stat_database": {
"tpcc": {
"numbackends": "0",
"datname": "tpcc",
"blks_read": "0",
"deadlocks": "0",
"temp_files": "0",
"blks_hit": "0",
"temp_bytes": "0",
"blk_write_time": "0",
"datid": "16384",
"xact_commit": 37612,
"blk_read_time": "0",
"xact_rollback": "0",
"conflicts": "0",
"tup_inserted": "0",
"tup_returned": "0",
"tup_updated": "0",
"tup_deleted": "0",
"tup_fetched": "0"
},
"template0": {
"numbackends": "0",
"datname": "template0",
"blks_read": "0",
"deadlocks": "0",
"temp_files": "0",
"blks_hit": "0",
"temp_bytes": "0",
"blk_write_time": "0",
"datid": "12557",
"xact_commit": 49929,
"blk_read_time": "0",
"xact_rollback": "0",
"conflicts": "0",
"tup_inserted": "0",
"tup_returned": "0",
"tup_updated": "0",
"tup_deleted": "0",
"tup_fetched": "0"
},
"postgres": {
"numbackends": "1",
"datname": "postgres",
"blks_read": "104188",
"deadlocks": "0",
"temp_files": "0",
"stats_reset": "2017-11-10 11:14:57.116228-05",
"blks_hit": "115229324",
"temp_bytes": "0",
"blk_write_time": "0",
"datid": "12558",
"xact_commit": 52595,
"blk_read_time": "0",
"xact_rollback": "17",
"conflicts": "0",
"tup_inserted": "2210752",
"tup_returned": "110741743",
"tup_updated": "32675",
"tup_deleted": "1818",
"tup_fetched": "103355344"
},
"template1": {
"numbackends": "0",
"datname": "template1",
"blks_read": "0",
"deadlocks": "0",
"temp_files": "0",
"blks_hit": "0",
"temp_bytes": "0",
"blk_write_time": "0",
"datid": "1",
"xact_commit": 39030,
"blk_read_time": "0",
"xact_rollback": "0",
"conflicts": "0",
"tup_inserted": "0",
"tup_returned": "0",
"tup_updated": "0",
"tup_deleted": "0",
"tup_fetched": "0"
}
}
},
"indexes": {
"pg_stat_user_indexes": {
"customer": {
"indexrelid": "16564",
"relid": "16540",
"indexrelname": "idx_customer_name",
"relname": "customer",
"idx_tup_fetch": "85256809",
"idx_tup_read": "85256841",
"idx_scan": "82442",
"schemaname": "public"
},
"district": {
"indexrelid": "16552",
"relid": "16549",
"indexrelname": "district_pkey",
"relname": "district",
"idx_tup_fetch": "122234",
"idx_tup_read": "122234",
"idx_scan": "122234",
"schemaname": "public"
},
"order_line": {
"indexrelid": "16516",
"relid": "16513",
"indexrelname": "order_line_pkey",
"relname": "order_line",
"idx_tup_fetch": "33762",
"idx_tup_read": "35698",
"idx_scan": "1655",
"schemaname": "public"
},
"new_order": {
"indexrelid": "16521",
"relid": "16518",
"indexrelname": "new_order_pkey",
"relname": "new_order",
"idx_tup_fetch": "1480",
"idx_tup_read": "2200",
"idx_scan": "1481",
"schemaname": "public"
},
"item": {
"indexrelid": "16557",
"relid": "16554",
"indexrelname": "item_pkey",
"relname": "item",
"idx_tup_fetch": "209009",
"idx_tup_read": "209009",
"idx_scan": "209020",
"schemaname": "public"
},
"oorder": {
"indexrelid": "16565",
"relid": "16528",
"indexrelname": "idx_order",
"relname": "oorder",
"idx_tup_fetch": "616371",
"idx_tup_read": "616371",
"idx_scan": "616371",
"schemaname": "public"
},
"warehouse": {
"indexrelid": "16562",
"relid": "16559",
"indexrelname": "warehouse_pkey",
"relname": "warehouse",
"idx_tup_fetch": "201331",
"idx_tup_read": "202634",
"idx_scan": "202634",
"schemaname": "public"
},
"stock": {
"indexrelid": "16526",
"relid": "16523",
"indexrelname": "stock_pkey",
"relname": "stock",
"idx_tup_fetch": "644561",
"idx_tup_read": "647319",
"idx_scan": "644561",
"schemaname": "public"
}
},
"pg_statio_user_indexes": {
"customer": {
"indexrelid": "16564",
"relid": "16540",
"indexrelname": "idx_customer_name",
"idx_blks_hit": 43641,
"relname": "customer",
"idx_blks_read": "1589",
"schemaname": "public"
},
"district": {
"indexrelid": "16552",
"relid": "16549",
"indexrelname": "district_pkey",
"idx_blks_hit": 69722,
"relname": "district",
"idx_blks_read": "5",
"schemaname": "public"
},
"order_line": {
"indexrelid": "16516",
"relid": "16513",
"indexrelname": "order_line_pkey",
"idx_blks_hit": 34427,
"relname": "order_line",
"idx_blks_read": "3708",
"schemaname": "public"
},
"new_order": {
"indexrelid": "16521",
"relid": "16518",
"indexrelname": "new_order_pkey",
"idx_blks_hit": 41934,
"relname": "new_order",
"idx_blks_read": "134",
"schemaname": "public"
},
"item": {
"indexrelid": "16557",
"relid": "16554",
"indexrelname": "item_pkey",
"idx_blks_hit": 68779,
"relname": "item",
"idx_blks_read": "877",
"schemaname": "public"
},
"oorder": {
"indexrelid": "16565",
"relid": "16528",
"indexrelname": "idx_order",
"idx_blks_hit": 48553,
"relname": "oorder",
"idx_blks_read": "733",
"schemaname": "public"
},
"warehouse": {
"indexrelid": "16562",
"relid": "16559",
"indexrelname": "warehouse_pkey",
"idx_blks_hit": 69018,
"relname": "warehouse",
"idx_blks_read": "6",
"schemaname": "public"
},
"stock": {
"indexrelid": "16526",
"relid": "16523",
"indexrelname": "stock_pkey",
"idx_blks_hit": 48945,
"relname": "stock",
"idx_blks_read": "1530",
"schemaname": "public"
}
}
}
}
}

View File

@@ -0,0 +1,8 @@
{
"workload_name": "workload-0",
"observation_time": 300,
"database_type": "postgres",
"end_time": 1513113439011,
"start_time": 1513113139011,
"database_version": "9.6"
}

View File

@@ -0,0 +1,636 @@
#
# OtterTune - test_parser.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
from abc import ABCMeta, abstractmethod
import mock
from django.test import TestCase
from website.parser.postgres import PostgresParser, Postgres96Parser
from website.types import BooleanType, VarType, KnobUnitType, MetricType
from website.models import KnobCatalog
class BaseParserTests(object, metaclass=ABCMeta):
    """Abstract test mixin exercising the parser interface shared by all DBMSs.

    Concrete subclasses (e.g. ``Postgres96ParserTests``) must assign a real
    parser instance to ``self.test_dbms`` in ``setUp`` and implement the
    abstract test methods covering DBMS-specific behavior.
    """

    def setUp(self):
        # Subclasses must replace this with a concrete parser instance.
        self.test_dbms = None

    def test_convert_bool(self):
        """All accepted true/false spellings convert; anything else raises."""
        mock_bool_knob = mock.Mock(spec=KnobCatalog)
        for bool_val in self.test_dbms.valid_true_val:
            self.assertEqual(BooleanType.TRUE,
                             self.test_dbms.convert_bool(bool_val, mock_bool_knob))
        for bool_val in self.test_dbms.valid_false_val:
            self.assertEqual(BooleanType.FALSE,
                             self.test_dbms.convert_bool(bool_val, mock_bool_knob))
        with self.assertRaises(Exception):
            self.test_dbms.convert_bool('ThisShouldNeverBeABool', mock_bool_knob)

    def test_convert_enum(self):
        """Enum values map to their 0-based index in ``enumvals``; unknown values raise."""
        mock_enum_knob = mock.Mock(spec=KnobCatalog)
        mock_enum_knob.vartype = VarType.ENUM
        mock_enum_knob.enumvals = 'apples,oranges,cake'
        mock_enum_knob.name = 'Test'
        self.assertEqual(self.test_dbms.convert_enum('apples', mock_enum_knob), 0)
        self.assertEqual(self.test_dbms.convert_enum('oranges', mock_enum_knob), 1)
        self.assertEqual(self.test_dbms.convert_enum('cake', mock_enum_knob), 2)
        with self.assertRaises(Exception):
            self.test_dbms.convert_enum('jackyl', mock_enum_knob)

    def test_convert_integer(self):
        """Numeric strings convert to int (fractions truncate); junk raises."""
        mock_int_knob = mock.Mock(spec=KnobCatalog)
        mock_int_knob.vartype = VarType.INTEGER
        mock_int_knob.name = 'Test'
        # Distinct names for the input list, expected list, and loop variables
        # (the original shadowed 'test_int'/'test_int_ans' inside the loop).
        test_ints = ['42', '-1', '0', '1', '42.0', '42.5', '42.7']
        expected_ints = [42, -1, 0, 1, 42, 42, 42]
        for int_str, expected in zip(test_ints, expected_ints):
            test_int_actual = self.test_dbms.convert_integer(int_str, mock_int_knob)
            self.assertEqual(test_int_actual, expected)
        with self.assertRaises(Exception):
            self.test_dbms.convert_integer('notInt', mock_int_knob)

    def test_convert_real(self):
        """Numeric strings convert to float; junk raises."""
        mock_real_knob = mock.Mock(spec=KnobCatalog)
        mock_real_knob.vartype = VarType.REAL
        mock_real_knob.name = 'Test'
        test_reals = ['42.0', '42.2', '42.5', '42.7', '-1', '0', '1']
        expected_reals = [42.0, 42.2, 42.5, 42.7, -1.0, 0.0, 1.0]
        for real_str, expected in zip(test_reals, expected_reals):
            test_real_actual = self.test_dbms.convert_real(real_str, mock_real_knob)
            self.assertEqual(test_real_actual, expected)
        with self.assertRaises(Exception):
            self.test_dbms.convert_real('notReal', mock_real_knob)

    def test_convert_string(self):
        # NOTE: Hasn't been used in any currently supported database
        pass

    def test_convert_timestamp(self):
        # NOTE: Hasn't been used in any currently supported database
        pass

    @abstractmethod
    def test_convert_dbms_knobs(self):
        pass

    @abstractmethod
    def test_convert_dbms_metrics(self):
        pass

    @abstractmethod
    def test_extract_valid_variables(self):
        pass

    def test_parse_helper(self):
        """parse_helper flattens a view->knob mapping into 'view.knob' keys."""
        test_view_vars = {'local': {'FAKE_KNOB': 'FAKE'}}
        test_scope = 'global'
        valid_vars = {}
        test_parse = self.test_dbms.parse_helper(test_scope, valid_vars, test_view_vars)
        self.assertEqual(len(list(test_parse.keys())), 1)
        self.assertEqual(test_parse.get('local.FAKE_KNOB'), ['FAKE'])

    def test_parse_dbms_variables(self):
        """Global/local scopes flatten to view-qualified names; unknown scopes raise."""
        test_dbms_vars = {'global': {'GlobalView1':
                                     {'cpu_tuple_cost': 0.01,
                                      'random_page_cost': 0.22},
                                     'GlobalView2':
                                     {'cpu_tuple_cost': 0.05,
                                      'random_page_cost': 0.25}},
                          'local': {'CustomerTable':
                                    {'LocalView1':
                                     {'LocalObj1':
                                      {'cpu_tuple_cost': 0.5,
                                       'random_page_cost': 0.3}}}},
                          'fakeScope': None}
        # NOTE: For local objects, method will not distinguish
        # local objects or tables, might overwrite the variables
        test_parse = self.test_dbms.parse_dbms_variables(test_dbms_vars)
        self.assertEqual(len(list(test_parse.keys())), 6)
        self.assertEqual(test_parse.get('GlobalView1.cpu_tuple_cost'), [0.01])
        self.assertEqual(test_parse.get('GlobalView1.random_page_cost'), [0.22])
        self.assertEqual(test_parse.get('GlobalView2.cpu_tuple_cost'), [0.05])
        self.assertEqual(test_parse.get('GlobalView2.random_page_cost'), [0.25])
        self.assertEqual(test_parse.get('LocalView1.cpu_tuple_cost'), [0.5])
        self.assertEqual(test_parse.get('LocalView1.random_page_cost'), [0.3])
        test_scope = {'unknownScope': {'GlobalView1':
                                       {'cpu_tuple_cost': 0.01,
                                        'random_page_cost': 0.22},
                                       'GlobalView2':
                                       {'cpu_tuple_cost': 0.05,
                                        'random_page_cost': 0.25}}}
        with self.assertRaises(Exception):
            self.test_dbms.parse_dbms_variables(test_scope)

    @abstractmethod
    def test_parse_dbms_knobs(self):
        pass

    @abstractmethod
    def test_parse_dbms_metrics(self):
        pass

    def test_calculate_change_in_metrics(self):
        """Empty metric snapshots yield an empty delta."""
        self.assertEqual(self.test_dbms.calculate_change_in_metrics({}, {}), {})

    @abstractmethod
    def test_create_knob_configuration(self):
        pass

    def test_get_nondefault_knob_settings(self):
        """No settings in means no non-default settings out."""
        self.assertEqual(self.test_dbms.get_nondefault_knob_settings({}), {})

    def test_format_bool(self):
        """Boolean values format to the 'on'/'off' strings."""
        mock_other_knob = mock.Mock(spec=KnobCatalog)
        mock_other_knob.unit = KnobUnitType.OTHER
        self.assertEqual(self.test_dbms.format_bool(BooleanType.TRUE, mock_other_knob), 'on')
        self.assertEqual(self.test_dbms.format_bool(BooleanType.FALSE, mock_other_knob), 'off')

    def test_format_enum(self):
        """Enum indices format back to their string values."""
        mock_enum_knob = mock.Mock(spec=KnobCatalog)
        mock_enum_knob.enumvals = 'apple,oranges,cake'
        self.assertEqual(self.test_dbms.format_enum(0, mock_enum_knob), "apple")
        self.assertEqual(self.test_dbms.format_enum(1, mock_enum_knob), "oranges")
        self.assertEqual(self.test_dbms.format_enum(2, mock_enum_knob), "cake")

    def test_format_integer(self):
        """Fractional inputs format to integers (0.5 -> 1, 42.5 -> 43, 42.7 -> 43)."""
        mock_other_knob = mock.Mock(spec=KnobCatalog)
        mock_other_knob.unit = KnobUnitType.OTHER
        # Distinct names avoid the original's loop-variable shadowing.
        test_ints = [42, -1, 0, 0.5, 1, 42.0, 42.5, 42.7]
        expected_ints = [42, -1, 0, 1, 1, 42, 43, 43]
        for int_val, expected in zip(test_ints, expected_ints):
            self.assertEqual(
                self.test_dbms.format_integer(int_val, mock_other_knob), expected)

    def test_format_real(self):
        """Real values format to floats unchanged."""
        mock_other_knob = mock.Mock(spec=KnobCatalog)
        mock_other_knob.unit = KnobUnitType.OTHER
        test_reals = [42, -1, 0, 0.5, 1, 42.0, 42.5, 42.7]
        expected_reals = [42.0, -1.0, 0.0, 0.5, 1.0, 42.0, 42.5, 42.7]
        for real_val, expected in zip(test_reals, expected_reals):
            self.assertEqual(
                self.test_dbms.format_real(real_val, mock_other_knob), expected)

    def test_format_string(self):
        pass

    def test_format_timestamp(self):
        pass

    def test_format_dbms_knobs(self):
        """Empty input yields empty output; unknown knob names raise."""
        self.assertEqual(self.test_dbms.format_dbms_knobs({}), {})
        test_exceptions = {'global.FAKE_KNOB': "20"}
        with self.assertRaises(Exception):
            self.test_dbms.format_dbms_knobs(test_exceptions)

    @abstractmethod
    def test_filter_numeric_metrics(self):
        pass

    @abstractmethod
    def test_filter_tunable_knobs(self):
        pass
class Postgres96ParserTests(BaseParserTests, TestCase):
    """Concrete parser tests for PostgreSQL 9.6 (``Postgres96Parser``)."""

    def setUp(self):
        self.test_dbms = Postgres96Parser(9.6)

    def test_convert_dbms_knobs(self):
        """Only tunable knobs are converted; invalid enum values raise."""
        super(Postgres96ParserTests, self).test_convert_dbms_knobs()
        test_knobs = {'global.wal_sync_method': 'open_sync',  # Enum
                      'global.random_page_cost': 0.22,  # Real
                      'global.archive_command': 'archive',  # String
                      'global.cpu_tuple_cost': 0.55,  # Real
                      'global.force_parallel_mode': 'regress',  # Enum
                      'global.enable_hashjoin': 'on',  # Bool
                      'global.geqo_effort': 5,  # Int
                      'global.wal_buffers': 1024,  # Int
                      'global.FAKE_KNOB': 20}
        test_convert_knobs = self.test_dbms.convert_dbms_knobs(test_knobs)
        self.assertEqual(len(list(test_convert_knobs.keys())), 3)
        self.assertEqual(test_convert_knobs['global.random_page_cost'], 0.22)
        self.assertEqual(test_convert_knobs['global.wal_sync_method'], 2)
        self.assertEqual(test_convert_knobs['global.wal_buffers'], 1024)
        test_except_knobs = {'global.wal_sync_method': '3'}
        with self.assertRaises(Exception):
            self.test_dbms.convert_dbms_knobs(test_except_knobs)
        test_nontune_knobs = {'global.enable_hashjoin': 'on'}
        self.assertEqual(self.test_dbms.convert_dbms_knobs(test_nontune_knobs), {})

    def test_convert_dbms_metrics(self):
        """Counters scale by 1/observation_time; statistics pass through."""
        super(Postgres96ParserTests, self).test_convert_dbms_metrics()
        test_metrics = {}
        for key in list(self.test_dbms.numeric_metric_catalog_.keys()):
            test_metrics[key] = 2
        test_metrics['pg_stat_database.xact_commit'] = 10
        test_metrics['pg_FAKE_METRIC'] = 0
        self.assertEqual(test_metrics.get('throughput_txn_per_sec'), None)
        test_convert_metrics = self.test_dbms.convert_dbms_metrics(test_metrics, 0.1)
        for key, metadata in list(self.test_dbms.numeric_metric_catalog_.items()):
            if (key == self.test_dbms.transactions_counter):
                self.assertEqual(test_convert_metrics[key], 10 / 0.1)
                continue
            if metadata.metric_type == MetricType.COUNTER:
                self.assertEqual(test_convert_metrics[key], 2 / 0.1)
            else:  # MetricType.STATISTICS
                self.assertEqual(test_convert_metrics[key], 2)
        self.assertEqual(test_convert_metrics['throughput_txn_per_sec'], 100)
        self.assertEqual(test_convert_metrics.get('pg_FAKE_METRIC'), None)

    def test_properties(self):
        """Spot-check base configuration settings and parser constants."""
        base_config = self.test_dbms.base_configuration_settings
        base_config_set = set(base_config)
        self.assertTrue('global.data_directory' in base_config_set)
        self.assertTrue('global.hba_file' in base_config_set)
        self.assertTrue('global.ident_file' in base_config_set)
        self.assertTrue('global.external_pid_file' in base_config_set)
        self.assertTrue('global.listen_addresses' in base_config_set)
        self.assertTrue('global.port' in base_config_set)
        self.assertTrue('global.max_connections' in base_config_set)
        self.assertTrue('global.unix_socket_directories' in base_config_set)
        self.assertTrue('global.log_line_prefix' in base_config_set)
        self.assertTrue('global.track_counts' in base_config_set)
        self.assertTrue('global.track_io_timing' in base_config_set)
        self.assertTrue('global.autovacuum' in base_config_set)
        self.assertTrue('global.default_text_search_config' in base_config_set)
        self.assertEqual(self.test_dbms
                         .knob_configuration_filename, 'postgresql.conf')
        self.assertEqual(self.test_dbms
                         .transactions_counter, 'pg_stat_database.xact_commit')

    def test_parse_version_string(self):
        """Version strings reduce to 'major.minor'; malformed versions raise."""
        # BUGFIX: these used assertTrue(value, msg), which always passed
        # because a non-empty string is truthy and the second argument is
        # just the failure message; assertEqual performs the intended check.
        self.assertEqual(self.test_dbms.parse_version_string("9.6.1"), "9.6")
        self.assertEqual(self.test_dbms.parse_version_string("9.6.3"), "9.6")
        self.assertEqual(self.test_dbms.parse_version_string("10.2.1"), "10.2")
        self.assertEqual(self.test_dbms.parse_version_string("0.0.0"), "0.0")
        with self.assertRaises(Exception):
            self.test_dbms.parse_version_string("postgres")
        with self.assertRaises(Exception):
            self.test_dbms.parse_version_string("1.0")

    def test_extract_valid_variables(self):
        """Extraction fills defaults and reports missing/extra/miscapitalized knobs."""
        num_tunable_knobs = len(list(self.test_dbms.tunable_knob_catalog_.keys()))
        test_empty, test_empty_diff = self.test_dbms.extract_valid_variables(
            {}, self.test_dbms.tunable_knob_catalog_)
        self.assertEqual(len(list(test_empty.keys())), num_tunable_knobs)
        self.assertEqual(len(test_empty_diff), num_tunable_knobs)
        test_vars = {'global.wal_sync_method': 'fsync',
                     'global.random_page_cost': 0.22,
                     'global.Wal_buffers': 1024,
                     'global.archive_command': 'archive',
                     'global.GEQO_EFFORT': 5,
                     'global.enable_hashjoin': 'on',
                     'global.cpu_tuple_cost': 0.55,
                     'global.force_parallel_mode': 'regress',
                     'global.FAKE_KNOB': 'fake'}
        tune_extract, tune_diff = self.test_dbms.extract_valid_variables(
            test_vars, self.test_dbms.tunable_knob_catalog_)
        self.assertTrue(('miscapitalized', 'global.wal_buffers',
                         'global.Wal_buffers', 1024) in tune_diff)
        self.assertTrue(('extra', None, 'global.GEQO_EFFORT', 5) in tune_diff)
        self.assertTrue(('extra', None, 'global.enable_hashjoin', 'on') in tune_diff)
        self.assertTrue(('missing', 'global.deadlock_timeout', None, None) in tune_diff)
        self.assertTrue(('missing', 'global.temp_buffers', None, None) in tune_diff)
        self.assertTrue(tune_extract.get('global.temp_buffers') is not None)
        self.assertTrue(tune_extract.get('global.deadlock_timeout') is not None)
        self.assertEqual(tune_extract.get('global.wal_buffers'), 1024)
        self.assertEqual(tune_extract.get('global.Wal_buffers'), None)
        self.assertEqual(len(tune_extract), len(self.test_dbms.tunable_knob_catalog_))
        nontune_extract, nontune_diff = self.test_dbms.extract_valid_variables(
            test_vars, self.test_dbms.knob_catalog_)
        self.assertTrue(('miscapitalized', 'global.wal_buffers',
                         'global.Wal_buffers', 1024) in nontune_diff)
        self.assertTrue(('miscapitalized', 'global.geqo_effort',
                         'global.GEQO_EFFORT', 5) in nontune_diff)
        self.assertTrue(('extra', None, 'global.FAKE_KNOB', 'fake') in nontune_diff)
        self.assertTrue(('missing', 'global.lc_ctype', None, None) in nontune_diff)
        self.assertTrue(('missing', 'global.full_page_writes', None, None) in nontune_diff)
        self.assertEqual(nontune_extract.get('global.wal_buffers'), 1024)
        self.assertEqual(nontune_extract.get('global.geqo_effort'), 5)
        self.assertEqual(nontune_extract.get('global.Wal_buffers'), None)
        self.assertEqual(nontune_extract.get('global.GEQO_EFFORT'), None)

    def test_convert_integer(self):
        """Suffixed sizes convert to bytes and suffixed times to milliseconds."""
        super(Postgres96ParserTests, self).test_convert_integer()
        # Convert Integer
        knob_unit_bytes = KnobUnitType()
        knob_unit_bytes.unit = 1
        knob_unit_time = KnobUnitType()
        knob_unit_time.unit = 2
        knob_unit_other = KnobUnitType()
        knob_unit_other.unit = 3
        self.assertEqual(self.test_dbms.convert_integer('5', knob_unit_other), 5)
        self.assertEqual(self.test_dbms.convert_integer('0', knob_unit_other), 0)
        self.assertEqual(self.test_dbms.convert_integer('0.0', knob_unit_other), 0)
        self.assertEqual(self.test_dbms.convert_integer('0.5', knob_unit_other), 0)
        self.assertEqual(self.test_dbms
                         .convert_integer('5kB', knob_unit_bytes), 5 * 1024)
        self.assertEqual(self.test_dbms
                         .convert_integer('4MB', knob_unit_bytes), 4 * 1024 ** 2)
        self.assertEqual(self.test_dbms.convert_integer('1d', knob_unit_time), 86400000)
        self.assertEqual(self.test_dbms
                         .convert_integer('20h', knob_unit_time), 72000000)
        self.assertEqual(self.test_dbms
                         .convert_integer('10min', knob_unit_time), 600000)
        self.assertEqual(self.test_dbms.convert_integer('1s', knob_unit_time), 1000)
        # Unparseable values and wrong-case suffixes must raise.
        test_exceptions = [('A', knob_unit_other),
                           ('', knob_unit_other),
                           ('', knob_unit_bytes),
                           ('', knob_unit_time),
                           ('1S', knob_unit_time),
                           ('1mb', knob_unit_bytes)]
        for failure_case, knob_unit in test_exceptions:
            with self.assertRaises(Exception):
                self.test_dbms.convert_integer(failure_case, knob_unit)

    def test_calculate_change_in_metrics(self):
        """Counters report deltas; info/timestamp metrics keep the end value."""
        super(Postgres96ParserTests, self).test_calculate_change_in_metrics()
        test_metric_start = {'pg_stat_bgwriter.buffers_alloc': 256,
                             'pg_stat_archiver.last_failed_wal': "today",
                             'pg_stat_archiver.last_failed_time': "2018-01-10 11:24:30",
                             'pg_stat_user_tables.n_tup_upd': 123,
                             'pg_stat_user_tables.relname': "Customers",
                             'pg_stat_user_tables.relid': 2,
                             'pg_stat_user_tables.last_vacuum': "2018-01-09 12:00:00",
                             'pg_stat_database.tup_fetched': 156,
                             'pg_stat_database.datname': "testOttertune",
                             'pg_stat_database.datid': 1,
                             'pg_stat_database.stats_reset': "2018-01-09 13:00:00",
                             'pg_stat_user_indexes.idx_scan': 23,
                             'pg_stat_user_indexes.relname': "Managers",
                             'pg_stat_user_indexes.relid': 20}
        test_metric_end = {'pg_stat_bgwriter.buffers_alloc': 300,
                           'pg_stat_archiver.last_failed_wal': "today",
                           'pg_stat_archiver.last_failed_time': "2018-01-11 11:24:30",
                           'pg_stat_user_tables.n_tup_upd': 150,
                           'pg_stat_user_tables.relname': "Customers",
                           'pg_stat_user_tables.relid': 2,
                           'pg_stat_user_tables.last_vacuum': "2018-01-10 12:00:00",
                           'pg_stat_database.tup_fetched': 260,
                           'pg_stat_database.datname': "testOttertune",
                           'pg_stat_database.datid': 1,
                           'pg_stat_database.stats_reset': "2018-01-10 13:00:00",
                           'pg_stat_user_indexes.idx_scan': 23,
                           'pg_stat_user_indexes.relname': "Managers",
                           'pg_stat_user_indexes.relid': 20}
        test_adj_metrics = self.test_dbms.calculate_change_in_metrics(
            test_metric_start, test_metric_end)
        self.assertEqual(test_adj_metrics['pg_stat_bgwriter.buffers_alloc'], 44)
        self.assertEqual(test_adj_metrics['pg_stat_archiver.last_failed_wal'], "today")
        self.assertEqual(
            test_adj_metrics['pg_stat_archiver.last_failed_time'], "2018-01-11 11:24:30")
        self.assertEqual(test_adj_metrics['pg_stat_user_tables.n_tup_upd'], 27)
        self.assertEqual(test_adj_metrics['pg_stat_user_tables.relname'], "Customers")
        self.assertEqual(test_adj_metrics['pg_stat_user_tables.relid'], 2)  # MetricType.INFO
        self.assertEqual(test_adj_metrics['pg_stat_user_tables.last_vacuum'], "2018-01-10 12:00:00")
        self.assertEqual(test_adj_metrics['pg_stat_database.tup_fetched'], 104)
        self.assertEqual(test_adj_metrics['pg_stat_database.datname'], "testOttertune")
        self.assertEqual(test_adj_metrics['pg_stat_database.datid'], 1)  # MetricType.INFO
        self.assertEqual(test_adj_metrics['pg_stat_database.stats_reset'], "2018-01-10 13:00:00")
        self.assertEqual(test_adj_metrics['pg_stat_user_indexes.idx_scan'], 0)
        self.assertEqual(test_adj_metrics['pg_stat_user_indexes.relid'], 20)  # MetricType.INFO

    def test_create_knob_configuration(self):
        """Knob names lose their scope prefix; expected output has 4 entries."""
        empty_config = self.test_dbms.create_knob_configuration({})
        self.assertEqual(empty_config, {})
        tuning_knobs = {"global.autovacuum": "on",
                        "global.log_planner_stats": "on",
                        "global.cpu_tuple_cost": 0.5,
                        "global.FAKE_KNOB": 20,
                        "pg_stat_archiver.last_failed_wal": "today"}
        test_config = self.test_dbms.create_knob_configuration(tuning_knobs)
        actual_keys = [("autovacuum", "on"),
                       ("log_planner_stats", "on"),
                       ("cpu_tuple_cost", 0.5),
                       ("FAKE_KNOB", 20)]
        # BUGFIX: was assertTrue(len(...), 4), which can never fail because a
        # non-zero length is truthy and 4 is just the message; use assertEqual.
        self.assertEqual(len(list(test_config.keys())), 4)
        for k, v in actual_keys:
            self.assertEqual(test_config.get(k), v)

    def test_format_integer(self):
        """Byte values format with kB/MB suffixes and times with ms/s/min/h/d."""
        test_dbms = PostgresParser(2)
        knob_unit_bytes = KnobUnitType()
        knob_unit_bytes.unit = 1
        knob_unit_time = KnobUnitType()
        knob_unit_time.unit = 2
        knob_unit_other = KnobUnitType()
        knob_unit_other.unit = 3
        self.assertEqual(test_dbms.format_integer(5, knob_unit_other), 5)
        self.assertEqual(test_dbms.format_integer(0, knob_unit_other), 0)
        self.assertEqual(test_dbms.format_integer(-1, knob_unit_other), -1)
        self.assertEqual(test_dbms.format_integer(5120, knob_unit_bytes), '5kB')
        self.assertEqual(test_dbms.format_integer(4194304, knob_unit_bytes), '4MB')
        # Non-exact byte counts round down to the nearest representable unit.
        self.assertEqual(test_dbms.format_integer(4194500, knob_unit_bytes), '4MB')
        self.assertEqual(test_dbms.format_integer(86400000, knob_unit_time), '1d')
        self.assertEqual(test_dbms.format_integer(72000000, knob_unit_time), '20h')
        self.assertEqual(test_dbms.format_integer(600000, knob_unit_time), '10min')
        self.assertEqual(test_dbms.format_integer(1000, knob_unit_time), '1s')
        self.assertEqual(test_dbms.format_integer(500, knob_unit_time), '500ms')

    def test_format_dbms_knobs(self):
        """Converted knob values format back to their DBMS representations."""
        super(Postgres96ParserTests, self).test_format_dbms_knobs()
        test_knobs = {'global.wal_sync_method': 2,  # Enum
                      'global.random_page_cost': 0.22,  # Real
                      'global.archive_command': "archive",  # String
                      'global.cpu_tuple_cost': 0.55,  # Real
                      'global.force_parallel_mode': 2,  # Enum
                      'global.enable_hashjoin': BooleanType.TRUE,  # Bool
                      'global.geqo_effort': 5,  # Int
                      'global.wal_buffers': 1024}  # Int
        test_formatted_knobs = self.test_dbms.format_dbms_knobs(test_knobs)
        self.assertEqual(test_formatted_knobs.get('global.wal_sync_method'), 'open_sync')
        self.assertEqual(test_formatted_knobs.get('global.random_page_cost'), 0.22)
        self.assertEqual(test_formatted_knobs.get('global.archive_command'), "archive")
        self.assertEqual(test_formatted_knobs.get('global.cpu_tuple_cost'), 0.55)
        self.assertEqual(test_formatted_knobs.get('global.force_parallel_mode'), 'regress')
        self.assertEqual(test_formatted_knobs.get('global.enable_hashjoin'), 'on')
        self.assertEqual(test_formatted_knobs.get('global.geqo_effort'), 5)
        self.assertEqual(test_formatted_knobs.get('global.wal_buffers'), '1kB')

    def test_filter_numeric_metrics(self):
        """Only numeric metrics survive filtering; text/timestamp ones are dropped."""
        super(Postgres96ParserTests, self).test_filter_numeric_metrics()
        test_metrics = {'pg_stat_bgwriter.checkpoints_req': (2, 'global'),
                        'pg_stat_archiver.last_failed_wal': (1, 'global'),
                        'pg_stat_database.stats_reset': (6, 'database'),
                        'pg_statio_user_indexes.indexrelname': (1, 'index'),
                        'pg_stat_bgwriter.maxwritten_clean': (2, 'global'),
                        'pg_stat_database.tup_fetched': (2, 'database'),
                        'pg_statio_user_tables.heap_blks_read': (2, 'table'),
                        'pg_FAKE_METRIC': (2, 'database')}
        filtered_metrics = self.test_dbms.filter_numeric_metrics(test_metrics)
        self.assertEqual(len(list(filtered_metrics.keys())), 4)
        self.assertEqual(filtered_metrics.get('pg_stat_bgwriter.checkpoints_req'),
                         (2, 'global'))
        self.assertEqual(filtered_metrics.get('pg_stat_archiver.last_failed_wal'), None)
        self.assertEqual(filtered_metrics.get('pg_stat_database.stats_reset'), None)
        self.assertEqual(filtered_metrics.get('pg_statio_user_indexes.indexrelname'),
                         None)
        self.assertEqual(filtered_metrics.get('pg_stat_bgwriter.maxwritten_clean'),
                         (2, 'global'))
        self.assertEqual(filtered_metrics.get('pg_stat_database.tup_fetched'),
                         (2, 'database'))
        self.assertEqual(filtered_metrics.get('pg_statio_user_tables.heap_blks_read'),
                         (2, 'table'))
        # BUGFIX: was 'pg_FAKE_KNOB', a key never present in the input, which
        # made this assertion vacuous; check the fake metric actually inserted.
        self.assertEqual(filtered_metrics.get('pg_FAKE_METRIC'), None)

    def test_filter_tunable_knobs(self):
        """Only catalog-tunable knobs survive filtering."""
        super(Postgres96ParserTests, self).test_filter_tunable_knobs()
        test_knobs = {'global.wal_sync_method': 5,
                      'global.random_page_cost': 3,
                      'global.archive_command': 1,
                      'global.cpu_tuple_cost': 3,
                      'global.force_parallel_mode': 5,
                      'global.enable_hashjoin': 3,
                      'global.geqo_effort': 2,
                      'global.wal_buffers': 2,
                      'global.FAKE_KNOB': 2}
        filtered_knobs = self.test_dbms.filter_tunable_knobs(test_knobs)
        self.assertEqual(len(list(filtered_knobs.keys())), 3)
        self.assertEqual(filtered_knobs.get('global.wal_sync_method'), 5)
        self.assertEqual(filtered_knobs.get('global.wal_buffers'), 2)
        self.assertEqual(filtered_knobs.get('global.random_page_cost'), 3)
        self.assertEqual(filtered_knobs.get('global.cpu_tuple_cost'), None)
        self.assertEqual(filtered_knobs.get('global.FAKE_KNOB'), None)

    def test_parse_helper(self):
        """All scopes in the view mapping flatten into one result dict."""
        super(Postgres96ParserTests, self).test_parse_helper()
        test_view_vars = {'global': {'wal_sync_method': 'open_sync',
                                     'random_page_cost': 0.22},
                          'local': {'FAKE_KNOB': 'FAKE'}}
        valid_vars = {}
        test_scope = 'global'
        test_parse = self.test_dbms.parse_helper(test_scope, valid_vars, test_view_vars)
        self.assertEqual(len(list(test_parse.keys())), 3)
        self.assertEqual(test_parse.get('global.wal_sync_method'), ['open_sync'])
        self.assertEqual(test_parse.get('global.random_page_cost'), [0.22])
        self.assertEqual(test_parse.get('local.FAKE_KNOB'), ['FAKE'])

    def test_parse_dbms_knobs(self):
        """Known knobs are parsed, unknown ones logged, missing ones defaulted."""
        test_knobs = {'global': {'global':
                                 {'wal_sync_method': 'fsync',
                                  'random_page_cost': 0.22,
                                  'wal_buffers': 1024,
                                  'archive_command': 'archive',
                                  'geqo_effort': 5,
                                  'enable_hashjoin': 'on',
                                  'cpu_tuple_cost': 0.55,
                                  'force_parallel_mode': 'regress',
                                  'FAKE_KNOB': 'fake'}}}
        (test_parse_dict, test_parse_log) = self.test_dbms.parse_dbms_knobs(test_knobs)
        # 8 valid knobs supplied, 1 extra logged => catalog size - 7 log entries.
        self.assertEqual(len(test_parse_log), len(list(self.test_dbms.knob_catalog_.keys())) - 7)
        self.assertTrue(('extra', None, 'global.FAKE_KNOB', 'fake') in test_parse_log)
        self.assertEqual(len(list(test_parse_dict.keys())),
                         len(list(self.test_dbms.knob_catalog_.keys())))
        self.assertEqual(test_parse_dict['global.wal_sync_method'], 'fsync')
        self.assertEqual(test_parse_dict['global.random_page_cost'], 0.22)

    def test_parse_dbms_metrics(self):
        """Table/index metric scopes are unsupported and must raise."""
        test_metrics = {'global':
                        {'pg_stat_archiver.last_failed_wal': "today",
                         'pg_stat_bgwriter.buffers_alloc': 256,
                         'pg_stat_archiver.last_failed_time': "2018-01-10 11:24:30"},
                        'database':
                        {'pg_stat_database.tup_fetched': 156,
                         'pg_stat_database.datid': 1,
                         'pg_stat_database.datname': "testOttertune",
                         'pg_stat_database.stats_reset': "2018-01-09 13:00:00"},
                        'table':
                        {'pg_stat_user_tables.last_vacuum': "2018-01-09 12:00:00",
                         'pg_stat_user_tables.relid': 20,
                         'pg_stat_user_tables.relname': "Managers",
                         'pg_stat_user_tables.n_tup_upd': 123},
                        'index':
                        {'pg_stat_user_indexes.idx_scan': 23,
                         'pg_stat_user_indexes.relname': "Customers",
                         'pg_stat_user_indexes.relid': 2}}
        # Doesn't support table or index scope
        with self.assertRaises(Exception):
            test_parse_dict, test_parse_log = self.test_dbms.parse_dbms_metrics(test_metrics)
            # NOTE(review): the assertions below are unreachable -- the call
            # above is expected to raise, so control leaves the with-block
            # before they run. Kept for reference; consider removing.
            self.assertEqual(len(list(test_parse_dict.keys())),
                             len(list(self.test_dbms.metric_catalog_.keys())))
            self.assertEqual(len(test_parse_log),
                             len(list(self.test_dbms.metric_catalog_.keys())) - 14)

View File

@@ -0,0 +1,189 @@
#
# OtterTune - test_tasks.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
import copy
import numpy as np
from django.test import TestCase, override_settings
from django.db import transaction
from website.models import (Workload, PipelineRun, PipelineData,
Result, Session, DBMSCatalog, Hardware)
from website.tasks.periodic_tasks import (run_background_tasks,
aggregate_data,
run_workload_characterization,
run_knob_identification)
from website.types import PipelineTaskType, WorkloadStatusType
# Dotted path to the djcelery test runner; used via override_settings below
# so celery tasks invoked with .delay() execute eagerly during these tests.
CELERY_TEST_RUNNER = 'djcelery.contrib.test_runner.CeleryTestSuiteRunner'
@override_settings(CELERY_ALWAYS_EAGER=True, TEST_RUNNER=CELERY_TEST_RUNNER)
class BackgroundTestCase(TestCase):
    """End-to-end tests for the periodic background pipeline task.

    CELERY_ALWAYS_EAGER makes every ``.delay()`` call below run
    synchronously in-process, so assertions can follow immediately.
    """
    fixtures = ['test_website.json']
    # Re-serialize fixture data so it survives transaction rollbacks.
    serialized_rollback = True

    def testNoError(self):
        # The background task should run to completion without raising.
        result = run_background_tasks.delay()
        self.assertTrue(result.successful())

    def testProcessedWorkloadStatus(self):
        # NOTE(review): this queryset is lazy -- it is evaluated in the loop
        # below, i.e. after run_background_tasks() has already run. If the
        # task moves workloads out of MODIFIED, the filter matches nothing
        # and the loop body never executes; confirm this is intended.
        before_workloads = Workload.objects.filter(status=WorkloadStatusType.MODIFIED)
        run_background_tasks.delay()
        for w in before_workloads:
            self.assertEqual(w.status, WorkloadStatusType.PROCESSED)

    def testNoModifiedWorkload(self):
        # First Execution of Modified Workloads
        run_background_tasks.delay()
        first_pipeline_run = PipelineRun.objects.get_latest()
        # Second Execution with no modified workloads
        run_background_tasks.delay()
        second_pipeline_run = PipelineRun.objects.get_latest()
        # Check that the BG task has not run
        self.assertEqual(first_pipeline_run.start_time, second_pipeline_run.start_time)

    # Test that an empty workload is ignored by the BG task
    def testEmptyWorkload(self):
        with transaction.atomic():
            # Create empty workload
            empty_workload = Workload.objects.create_workload(dbms=DBMSCatalog.objects.get(pk=1),
                                                              hardware=Hardware.objects.get(pk=1),
                                                              name="empty_workload")
            result = run_background_tasks.delay()
            # Check that BG task successfully finished
            self.assertTrue(result.successful())
            # Check that the empty workload is still in MODIFIED Status
            self.assertEqual(empty_workload.status, 1)
            pipeline_data = PipelineData.objects.filter(pipeline_run=PipelineRun.objects.get_latest())
            # Check that the empty workload is not in the pipeline datas
            self.assertNotIn(empty_workload.pk, pipeline_data.values_list('workload_id', flat=True))

    # Test that a workload that contain only one knob configuration will be ignored by the BG task
    def testUniqueKnobConfigurationWorkload(self):
        # Get workload to copy data from
        origin_workload = Workload.objects.get(pk=1)
        origin_session = Session.objects.get(pk=1)
        # Create empty workload
        fix_workload = Workload.objects.create_workload(dbms=origin_workload.dbms,
                                                        hardware=origin_workload.hardware,
                                                        name="fixed_knob_workload")
        fix_knob_data = Result.objects.filter(workload=origin_workload,
                                              session=origin_session)[0].knob_data
        # Add 5 Result with the same Knob Configuration
        for res in Result.objects.filter(workload=origin_workload, session=origin_session)[:4]:
            Result.objects.create_result(res.session, res.dbms, fix_workload,
                                         fix_knob_data, res.metric_data,
                                         res.observation_start_time,
                                         res.observation_end_time,
                                         res.observation_time)
        result = run_background_tasks.delay()
        # Check that BG task successfully finished
        self.assertTrue(result.successful())
        # Check that the empty workload is still in MODIFIED Status
        self.assertEqual(fix_workload.status, 1)
        pipeline_data = PipelineData.objects.filter(pipeline_run=PipelineRun.objects.get_latest())
        # Check that the empty workload is not in the pipeline datas
        self.assertNotIn(fix_workload.pk, pipeline_data.values_list('workload_id', flat=True))

    def testNoWorkloads(self):
        # delete any existing workloads
        workloads = Workload.objects.all()
        workloads.delete()
        # background task should not fail
        result = run_background_tasks.delay()
        self.assertTrue(result.successful())

    def testNewPipelineRun(self):
        # this test currently relies on the fixture data so that
        # it actually tests anything
        workloads = Workload.objects.all()
        if len(workloads) > 0:
            runs_before = len(PipelineRun.objects.all())
            run_background_tasks.delay()
            runs_after = len(PipelineRun.objects.all())
            self.assertEqual(runs_before + 1, runs_after)

    def checkNewTask(self, task_type):
        # Helper: one BG run should add exactly one PipelineData entry of
        # the given task_type per workload.
        workloads = Workload.objects.all()
        pruned_before = [len(PipelineData.objects.filter(
            workload=workload, task_type=task_type)) for workload in workloads]
        run_background_tasks.delay()
        pruned_after = [len(PipelineData.objects.filter(
            workload=workload, task_type=task_type)) for workload in workloads]
        for before, after in zip(pruned_before, pruned_after):
            self.assertEqual(before + 1, after)

    def testNewPrunedMetrics(self):
        self.checkNewTask(PipelineTaskType.PRUNED_METRICS)

    def testNewRankedKnobs(self):
        self.checkNewTask(PipelineTaskType.RANKED_KNOBS)
class AggregateTestCase(TestCase):
    """Sanity checks for the structure of aggregate_data() output."""
    fixtures = ['test_website.json']

    def testValidWorkload(self):
        """Each aggregated dict must expose data/rowlabels/columnlabels."""
        valid_workload = Workload.objects.all()[0]
        wkld_results = Result.objects.filter(workload=valid_workload)
        required_keys = ('data', 'rowlabels', 'columnlabels')
        for aggregate in aggregate_data(wkld_results):
            for required in required_keys:
                self.assertIn(required, aggregate)
class PrunedMetricTestCase(TestCase):
    """Checks that metric pruning only returns known metric labels."""
    fixtures = ['test_website.json']

    def testValidPrunedMetrics(self):
        """Every pruned metric must come from the aggregated column labels."""
        first_workload = Workload.objects.all()[0]
        results = Result.objects.filter(workload=first_workload)
        metric_data = aggregate_data(results)[1]
        known_labels = metric_data['columnlabels']
        for metric in run_workload_characterization(metric_data):
            self.assertIn(metric, known_labels)
class RankedKnobTestCase(TestCase):
    """Checks that knob ranking only returns known knob labels."""
    fixtures = ['test_website.json']

    def testValidImportantKnobs(self):
        workloads = Workload.objects.all()
        wkld_results = Result.objects.filter(workload=workloads[0])
        knob_data, metric_data = aggregate_data(wkld_results)
        # instead of doing actual metric pruning by factor analysis / clustering,
        # just randomly select 5 nonconstant metrics
        nonconst_metric_columnlabels = []
        for col, cl in zip(metric_data['data'].T, metric_data['columnlabels']):
            # A metric column is nonconstant if any value differs from its first.
            if np.any(col != col[0]):
                nonconst_metric_columnlabels.append(cl)
        num_metrics = min(5, len(nonconst_metric_columnlabels))
        selected_columnlabels = np.random.choice(nonconst_metric_columnlabels,
                                                 num_metrics, replace=False)
        # Column indices of the selected metrics, in original label order.
        pruned_metric_idxs = [i for i, metric_name in
                              enumerate(metric_data['columnlabels'])
                              if metric_name in selected_columnlabels]
        pruned_metric_data = {
            'data': metric_data['data'][:, pruned_metric_idxs],
            'rowlabels': copy.deepcopy(metric_data['rowlabels']),
            'columnlabels': [metric_data['columnlabels'][i] for i in pruned_metric_idxs]
        }
        # run knob_identification using knob_data and fake pruned metrics
        ranked_knobs = run_knob_identification(knob_data, pruned_metric_data,
                                               workloads[0].dbms)
        for k in ranked_knobs:
            self.assertIn(k, knob_data['columnlabels'])

View File

@@ -0,0 +1,106 @@
#
# OtterTune - test_upload.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
import os
from django.core.urlresolvers import reverse
from django.test import TestCase
from website.models import Result, Workload
from website.settings import PROJECT_ROOT
from .utils import (TEST_BASIC_SESSION_ID, TEST_BASIC_SESSION_UPLOAD_CODE,
TEST_PASSWORD, TEST_TUNING_SESSION_ID, TEST_TUNING_SESSION_UPLOAD_CODE,
TEST_USERNAME, TEST_WORKLOAD_ID)
class UploadResultsTests(TestCase):
    """Tests for uploading benchmark results through the 'new_result' form.

    Covers the success path for both basic and tuning sessions, form
    validation failures, invalid upload codes, non-POST requests, and the
    workload-status reset triggered by a new upload.
    """

    fixtures = ['test_website.json']

    def setUp(self):
        # All upload views require an authenticated user.
        self.client.login(username=TEST_USERNAME, password=TEST_PASSWORD)
        test_files_dir = os.path.join(PROJECT_ROOT, 'tests', 'test_files')
        # Sample result files posted as multipart form data by each test.
        self.upload_files = {
            'metrics_before': os.path.join(test_files_dir, 'sample_metrics_start.json'),
            'metrics_after': os.path.join(test_files_dir, 'sample_metrics_end.json'),
            'knobs': os.path.join(test_files_dir, 'sample_knobs.json'),
            'summary': os.path.join(test_files_dir, 'sample_summary.json')
        }

    @staticmethod
    def open_files(file_info):
        """Open every path in file_info and return {field name: file object}.

        If any open() fails, close the handles opened so far before
        re-raising so no file descriptors leak.
        """
        files = {}
        try:
            for name, path in list(file_info.items()):
                files[name] = open(path)
        except OSError:
            for fp in files.values():
                fp.close()
            raise
        return files

    @staticmethod
    def close_files(files):
        """Close the file objects in a post-data dict.

        The dict may also hold the 'upload_code' string, which has no
        close() method, so that key is skipped.
        """
        for name, fp in list(files.items()):
            if name != 'upload_code':
                fp.close()

    def upload_to_session_ok(self, session_id, upload_code):
        """Upload a valid result set and check exactly one Result is added."""
        num_initial_results = Result.objects.filter(session__id=session_id).count()
        form_addr = reverse('new_result')
        post_data = self.open_files(self.upload_files)
        post_data['upload_code'] = upload_code
        try:
            response = self.client.post(form_addr, post_data)
        finally:
            # Close handles even if the request raises.
            self.close_files(post_data)
        self.assertEqual(response.status_code, 200)
        num_final_results = Result.objects.filter(session__id=session_id).count()
        self.assertEqual(num_final_results - num_initial_results, 1)

    def upload_to_session_fail_invalidation(self, session_id, upload_code):
        """Post only the upload code; all four file fields should be required."""
        form_addr = reverse('new_result')
        post_data = {'upload_code': upload_code}
        response = self.client.post(form_addr, post_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "New result form is not valid:")
        # One "required" error per missing file field.
        self.assertContains(response, "This field is required", 4)

    def upload_to_session_invalid_upload_code(self, session_id):
        """Post valid files with a bogus upload code and expect rejection."""
        form_addr = reverse('new_result')
        post_data = self.open_files(self.upload_files)
        post_data['upload_code'] = "invalid_upload_code"
        try:
            response = self.client.post(form_addr, post_data)
        finally:
            self.close_files(post_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "Invalid upload code")

    def test_upload_form_not_post(self):
        """GET on the upload endpoint is rejected with an explanatory message."""
        form_addr = reverse('new_result')
        response = self.client.get(form_addr)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "Request type was not POST")

    def test_set_modified_workload_on_upload(self):
        """Uploading a result resets the workload's status from 3 back to 1."""
        workload0 = Workload.objects.get(pk=TEST_WORKLOAD_ID)
        workload0.status = 3
        workload0.save()
        self.upload_to_session_ok(TEST_BASIC_SESSION_ID, TEST_BASIC_SESSION_UPLOAD_CODE)
        status = Workload.objects.get(pk=TEST_WORKLOAD_ID).status
        self.assertEqual(status, 1)

    def test_upload_to_basic_session_ok(self):
        self.upload_to_session_ok(TEST_BASIC_SESSION_ID, TEST_BASIC_SESSION_UPLOAD_CODE)

    def test_upload_to_tuning_session_ok(self):
        self.upload_to_session_ok(TEST_TUNING_SESSION_ID, TEST_TUNING_SESSION_UPLOAD_CODE)

    def test_upload_to_basic_session_fail_invalidation(self):
        self.upload_to_session_fail_invalidation(TEST_BASIC_SESSION_ID,
                                                 TEST_BASIC_SESSION_UPLOAD_CODE)

    def test_upload_to_tuning_session_fail_invalidation(self):
        self.upload_to_session_fail_invalidation(TEST_TUNING_SESSION_ID,
                                                 TEST_TUNING_SESSION_UPLOAD_CODE)

    def test_upload_to_basic_session_invalid_upload_code(self):
        self.upload_to_session_invalid_upload_code(TEST_BASIC_SESSION_ID)

    def test_upload_to_tuning_session_invalid_upload_code(self):
        self.upload_to_session_invalid_upload_code(TEST_TUNING_SESSION_ID)

View File

@@ -0,0 +1,333 @@
#
# OtterTune - test_utils.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
import string
import numpy as np
from django.test import TestCase
from website.utils import JSONUtil, MediaUtil, DataUtil, ConversionUtil, LabelUtil, TaskUtil
from website.parser.postgres import PostgresParser
from website.types import LabelStyleType, VarType
from website.models import Result, DBMSCatalog
class JSONUtilTest(TestCase):
    """Round-trip test for JSONUtil.loads/dumps."""

    def test_util(self):
        json_str = \
            """{
                "glossary": {
                    "title": "example glossary",
            "GlossDiv": {
                        "title": "S",
                "GlossList": {
                            "GlossEntry": {
                                "ID": "SGML",
                        "SortAs": "SGML",
                        "GlossTerm": "Standard Generalized Markup Language",
                        "Acronym": "SGML",
                        "Abbrev": "ISO 8879:1986",
                "GlossDef": {
                                    "para": "A meta-markup language",
                            "GlossSeeAlso": ["GML", "XML"]
                        },
                        "GlossSee": "markup"
                    }
                }
            }
        }
    }"""
        compress_str = """{"glossary": {"title": "example glossary",
        "GlossDiv": {"title": "S", "GlossList": {"GlossEntry": {"ID": "SGML",
        "SortAs": "SGML", "GlossTerm": "Standard Generalized Markup
        Language", "Acronym": "SGML", "Abbrev": "ISO 8879:1986", "GlossDef":
        {"para": "A meta-markup language", "GlossSeeAlso": ["GML", "XML"]}, "GlossSee":
        "markup"}}}}}"""
        # Parse the pretty-printed document and spot-check its structure.
        parsed = JSONUtil.loads(json_str)
        self.assertEqual(next(iter(parsed)), "glossary")
        glossary = parsed["glossary"]
        self.assertTrue("title" in glossary)
        self.assertTrue("GlossDiv" in glossary)
        entry = glossary["GlossDiv"]["GlossList"]["GlossEntry"]
        self.assertEqual(entry["ID"], "SGML")
        self.assertEqual(entry["GlossSee"], "markup")
        # Serializing the parsed data must reproduce the same document
        # (compared with all whitespace stripped).
        roundtrip = "".join(JSONUtil.dumps(parsed).split())
        self.assertEqual(roundtrip, "".join(compress_str.split()))
class MediaUtilTest(TestCase):
    """Tests for MediaUtil.upload_code_generator."""

    def test_codegen(self):
        # Default alphabet: alphanumeric codes of the requested length.
        for length in (20, 40):
            code = MediaUtil.upload_code_generator(length)
            self.assertEqual(len(code), length)
            self.assertTrue(code.isalnum())
        # A restricted alphabet restricts the generated characters.
        numeric_code = MediaUtil.upload_code_generator(40, string.digits)
        self.assertEqual(len(numeric_code), 40)
        self.assertTrue(numeric_code.isdigit())
        alpha_code = MediaUtil.upload_code_generator(60,
                                                     string.ascii_uppercase)
        self.assertEqual(len(alpha_code), 60)
        self.assertTrue(alpha_code.isalpha())
class TaskUtilTest(TestCase):
    """Tests for TaskUtil.get_task_status aggregation over task lists."""

    def test_get_task_status(self):
        # FIXME: Actually setup celery tasks instead of a dummy class?
        # Empty task list: no status and nothing complete.
        self.assertEqual(TaskUtil.get_task_status([]), (None, 0))
        # All successful: SUCCESS with the full count.
        tasks = [VarType() for _ in range(5)]
        for task in tasks:
            task.status = "SUCCESS"
        self.assertEqual(TaskUtil.get_task_status(tasks), ("SUCCESS", 5))
        # Terminal / transient states take precedence and cap the count.
        tasks[3].status = "FAILURE"
        self.assertEqual(TaskUtil.get_task_status(tasks), ("FAILURE", 3))
        tasks[2].status = "REVOKED"
        self.assertEqual(TaskUtil.get_task_status(tasks), ("REVOKED", 2))
        tasks[1].status = "RETRY"
        self.assertEqual(TaskUtil.get_task_status(tasks), ("RETRY", 1))
        # Mixed pending/success: PENDING wins, successes still counted.
        tasks = [VarType() for _ in range(10)]
        for i, task in enumerate(tasks):
            task.status = "PENDING" if i % 2 == 0 else "SUCCESS"
        self.assertEqual(TaskUtil.get_task_status(tasks), ("PENDING", 5))
        tasks[9].status = "STARTED"
        self.assertEqual(TaskUtil.get_task_status(tasks), ("STARTED", 4))
        tasks[9].status = "RECEIVED"
        self.assertEqual(TaskUtil.get_task_status(tasks), ("RECEIVED", 4))
        # An unrecognized status string should raise.
        with self.assertRaises(Exception):
            bad_tasks = [VarType() for _ in range(1)]
            bad_tasks[0].status = "attemped"
            TaskUtil.get_task_status(bad_tasks)
class DataUtilTest(TestCase):
    """Tests for DataUtil: result aggregation, duplicate-row merging, and
    dummy-encoding of categorical knobs."""

    fixtures = ['test_website.json', 'postgres-96_knobs.json']

    def test_aggregate(self):
        """aggregate_data turns a Result queryset into X/y matrices whose
        shapes and column labels match the knobs/metrics in the fixtures."""
        workload2 = Result.objects.filter(workload=2)
        num_results = Result.objects.filter(workload=2).count()
        # Knob and metric names from the first result define the columns.
        knobs = list(JSONUtil.loads(workload2[0].knob_data.data).keys())
        metrics = list(JSONUtil.loads(workload2[0].metric_data.data).keys())
        num_knobs = len(knobs)
        num_metrics = len(metrics)
        test_result = DataUtil.aggregate_data(workload2)
        self.assertTrue('X_matrix' in list(test_result.keys()))
        self.assertTrue('y_matrix' in list(test_result.keys()))
        self.assertTrue('rowlabels' in list(test_result.keys()))
        self.assertTrue('X_columnlabels' in list(test_result.keys()))
        self.assertTrue('y_columnlabels' in list(test_result.keys()))
        self.assertEqual(test_result['X_columnlabels'], knobs)
        self.assertEqual(test_result['y_columnlabels'], metrics)
        # One row per result; one column per knob (X) / metric (y).
        self.assertEqual(test_result['X_matrix'].shape[0], num_results)
        self.assertEqual(test_result['y_matrix'].shape[0], num_results)
        self.assertEqual(test_result['X_matrix'].shape[1], num_knobs)
        self.assertEqual(test_result['y_matrix'].shape[1], num_metrics)

    def test_combine(self):
        """combine_duplicate_rows merges rows with identical X values and
        groups their row labels into tuples."""
        # Case 1: no duplicates -- everything passes through unchanged,
        # with each row label wrapped in a 1-tuple.
        test_dedup_row_labels = np.array(["Workload-0", "Workload-1"])
        test_dedup_x = np.matrix([[0.22, 5, "string", "11:11", "fsync", True],
                                  [0.21, 6, "string", "11:12", "fsync", True]])
        test_dedup_y = np.matrix([[30, 30, 40],
                                  [10, 10, 40]])
        test_x, test_y, row_labels = DataUtil.combine_duplicate_rows(
            test_dedup_x, test_dedup_y, test_dedup_row_labels)
        self.assertEqual(len(test_x), len(test_y))
        self.assertEqual(len(test_x), len(row_labels))
        self.assertEqual(row_labels[0], tuple([test_dedup_row_labels[0]]))
        self.assertEqual(row_labels[1], tuple([test_dedup_row_labels[1]]))
        self.assertTrue((test_x[0] == test_dedup_x[0]).all())
        self.assertTrue((test_x[1] == test_dedup_x[1]).all())
        self.assertTrue((test_y[0] == test_dedup_y[0]).all())
        self.assertTrue((test_y[1] == test_dedup_y[1]).all())
        # Case 2: rows 0 and 2 are identical and must be merged; their
        # labels end up together in one tuple.
        test_row_labels = np.array(["Workload-0",
                                    "Workload-1",
                                    "Workload-2",
                                    "Workload-3"])
        test_x_matrix = np.matrix([[0.22, 5, "string", "timestamp", "enum", True],
                                   [0.3, 5, "rstring", "timestamp2", "enum", False],
                                   [0.22, 5, "string", "timestamp", "enum", True],
                                   [0.3, 5, "r", "timestamp2", "enum", False]])
        test_y_matrix = np.matrix([[20, 30, 40],
                                   [30, 30, 40],
                                   [20, 30, 40],
                                   [32, 30, 40]])
        test_x, test_y, row_labels = DataUtil.combine_duplicate_rows(
            test_x_matrix, test_y_matrix, test_row_labels)
        self.assertTrue(len(test_x) <= len(test_x_matrix))
        self.assertTrue(len(test_y) <= len(test_y_matrix))
        self.assertEqual(len(test_x), len(test_y))
        self.assertEqual(len(test_x), len(row_labels))
        row_labels_set = set(row_labels)
        self.assertTrue(tuple(["Workload-0", "Workload-2"]) in row_labels_set)
        self.assertTrue(("Workload-1",) in row_labels_set)
        self.assertTrue(("Workload-3",) in row_labels_set)
        # Output rows must be unique and drawn from the input matrices.
        rows = set()
        for i in test_x:
            self.assertTrue(tuple(i) not in rows)
            self.assertTrue(i in test_x_matrix)
            rows.add(tuple(i))
        rowys = set()
        for i in test_y:
            self.assertTrue(tuple(i) not in rowys)
            self.assertTrue(i in test_y_matrix)
            rowys.add(tuple(i))

    def test_no_featured_categorical(self):
        """With only numeric knobs, the encoder reports no categorical info."""
        featured_knobs = ['global.backend_flush_after',
                          'global.bgwriter_delay',
                          'global.wal_writer_delay',
                          'global.work_mem']
        postgres96 = DBMSCatalog.objects.get(pk=1)
        categorical_info = DataUtil.dummy_encoder_helper(featured_knobs,
                                                         dbms=postgres96)
        self.assertEqual(len(categorical_info['n_values']), 0)
        self.assertEqual(len(categorical_info['categorical_features']), 0)
        self.assertEqual(categorical_info['cat_columnlabels'], [])
        self.assertEqual(categorical_info['noncat_columnlabels'], featured_knobs)

    def test_featured_categorical(self):
        """A categorical knob (wal_sync_method, 4 values per the Postgres 9.6
        knob fixture) is split out from the numeric ones."""
        featured_knobs = ['global.backend_flush_after',
                          'global.bgwriter_delay',
                          'global.wal_writer_delay',
                          'global.work_mem',
                          'global.wal_sync_method']  # last knob categorical
        postgres96 = DBMSCatalog.objects.get(pk=1)
        categorical_info = DataUtil.dummy_encoder_helper(featured_knobs,
                                                         dbms=postgres96)
        self.assertEqual(len(categorical_info['n_values']), 1)
        self.assertEqual(categorical_info['n_values'][0], 4)
        self.assertEqual(len(categorical_info['categorical_features']), 1)
        # The categorical knob sits at index 4 of the featured list.
        self.assertEqual(categorical_info['categorical_features'][0], 4)
        self.assertEqual(categorical_info['cat_columnlabels'], ['global.wal_sync_method'])
        self.assertEqual(categorical_info['noncat_columnlabels'], featured_knobs[:-1])
class ConversionUtilTest(TestCase):
    """Tests for ConversionUtil conversions between human-readable strings
    and raw byte / millisecond values."""

    def test_get_raw_size(self):
        # Byte strings -> raw byte counts.
        byte_cases = [('1PB', 1024**5), ('2TB', 2 * 1024**4), ('3GB', 3 * 1024**3),
                      ('4MB', 4 * 1024**2), ('5kB', 5 * 1024**1), ('6B', 6)]
        for text, expected in byte_cases:
            converted = ConversionUtil.get_raw_size(
                text, system=PostgresParser.POSTGRES_BYTES_SYSTEM)
            self.assertEqual(converted, expected)
        # Time strings -> milliseconds.
        time_cases = [('1000ms', 1000), ('1s', 1000), ('10min', 600000),
                      ('20h', 72000000), ('1d', 86400000)]
        for text, expected in time_cases:
            converted = ConversionUtil.get_raw_size(
                text, system=PostgresParser.POSTGRES_TIME_SYSTEM)
            self.assertEqual(converted, expected)

    def test_get_human_readable(self):
        # Raw byte counts -> human-readable strings.
        byte_cases = [(1024**5, '1PB'), (2 * 1024**4, '2TB'), (3 * 1024**3, '3GB'),
                      (4 * 1024**2, '4MB'), (5 * 1024**1, '5kB'), (6, '6B')]
        for raw, expected in byte_cases:
            readable = ConversionUtil.get_human_readable(
                raw, system=PostgresParser.POSTGRES_BYTES_SYSTEM)
            self.assertEqual(readable, expected)
        # Milliseconds -> human-readable strings.
        time_cases = [(500, '500ms'), (1000, '1s'), (55000, '55s'),
                      (600000, '10min'), (72000000, '20h'), (86400000, '1d')]
        for raw, expected in time_cases:
            readable = ConversionUtil.get_human_readable(
                raw, system=PostgresParser.POSTGRES_TIME_SYSTEM)
            self.assertEqual(readable, expected)
class LabelUtilTest(TestCase):
    """Tests for LabelUtil.style_labels across the supported label styles."""

    def test_style_labels(self):
        label_style = LabelStyleType()
        test_label_map = {"Name": "Postgres",
                          "Test": "LabelUtils",
                          "DBMS": "dbms",
                          "??": "Dbms",
                          "???": "DBms",
                          "CapF": "random Word"}
        keys = ["Name", "Test", "DBMS", "??", "???", "CapF"]
        # TITLE style: title-cases each word, special-casing "dbms" variants.
        styled = LabelUtil.style_labels(test_label_map,
                                        style=label_style.TITLE)
        expected = ["Postgres", "Labelutils", "DBMS", "DBMS", "DBMS",
                    "Random Word"]
        for key, want in zip(keys, expected):
            self.assertEqual(styled[key], want)
        # CAPFIRST style: capitalizes the first letter only.
        styled = LabelUtil.style_labels(test_label_map,
                                        style=label_style.CAPFIRST)
        expected = ["Postgres", "LabelUtils", "DBMS", "DBMS", "DBMS",
                    "Random Word"]
        for key, want in zip(keys, expected):
            if key == "???":  # DBms -> DBMS or DBms?
                continue
            self.assertEqual(styled[key], want)
        # LOWER style: everything lower-cased.
        styled = LabelUtil.style_labels(test_label_map,
                                        style=label_style.LOWER)
        expected = ["postgres", "labelutils", "dbms", "dbms", "dbms",
                    "random word"]
        for key, want in zip(keys, expected):
            self.assertEqual(styled[key], want)
        # An unknown style attribute must raise.
        with self.assertRaises(Exception):
            LabelUtil.style_labels(test_label_map,
                                   style=label_style.Invalid)

View File

@@ -0,0 +1,274 @@
#
# OtterTune - test_views.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
'''
Created on Dec 13, 2017
@author: dvanaken
'''
from django.contrib.auth import get_user
from django.core.urlresolvers import reverse
from django.test import TestCase
from .utils import (TEST_BASIC_SESSION_ID, TEST_PASSWORD, TEST_PROJECT_ID, TEST_USERNAME)
class UserAuthViewTests(TestCase):
    """Tests for the login, signup, and logout views."""

    fixtures = ['test_user.json', 'test_user_sessions.json']

    def setUp(self):
        # No per-test setup required; fixtures supply the test user.
        pass

    def test_valid_login(self):
        credentials = {
            'username': TEST_USERNAME,
            'password': TEST_PASSWORD
        }
        response = self.client.post(reverse('login'), data=credentials)
        # A successful login lands on the projects home page.
        self.assertRedirects(response, reverse('home_projects'))
        self.assertTrue(get_user(self.client).is_authenticated())

    def test_invalid_login(self):
        credentials = {
            'username': 'invalid_user',
            'password': 'invalid_password'
        }
        response = self.client.post(reverse('login'), data=credentials)
        # Bad credentials re-render the login page without authenticating.
        self.assertEqual(response.status_code, 200)
        self.assertFalse(get_user(self.client).is_authenticated())

    def test_login_view(self):
        response = self.client.get(reverse('login'))
        self.assertEqual(response.status_code, 200)

    def test_new_signup(self):
        response = self.client.get(reverse('signup'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "Create Your Account")

    def test_logout_view(self):
        self.client.logout()
        self.assertFalse(get_user(self.client).is_authenticated())
class ProjectViewsTests(TestCase):
    """Tests for creating, editing, and deleting projects."""

    fixtures = ['test_website.json']

    def setUp(self):
        # Project views require an authenticated user.
        self.client.login(username=TEST_USERNAME, password=TEST_PASSWORD)

    def test_new_project_form(self):
        response = self.client.get(reverse('new_project'))
        self.assertEqual(response.status_code, 200)

    def test_create_project_fail_invalidation(self):
        # An empty form must fail validation on the required 'name' field.
        response = self.client.post(reverse('new_project'), {})
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'name', "This field is required.")

    def test_create_project_ok(self):
        payload = {
            'name': 'test_create_project',
            'description': 'testing create project...'
        }
        response = self.client.post(reverse('new_project'), payload, follow=True)
        self.assertEqual(response.status_code, 200)
        # Creation redirects to the new project's sessions page.
        new_project_id = response.context['project'].pk
        self.assertRedirects(response, reverse('project_sessions',
                                               kwargs={'project_id': new_project_id}))

    def test_edit_project_fail_invalidation(self):
        url = reverse('edit_project', kwargs={'project_id': TEST_PROJECT_ID})
        response = self.client.post(url, {})
        self.assertFormError(response, 'form', 'name', "This field is required.")

    def test_edit_project_ok(self):
        url = reverse('edit_project', kwargs={'project_id': TEST_PROJECT_ID})
        response = self.client.post(url, {'name': 'new_project_name'}, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, reverse('project_sessions',
                                               kwargs={'project_id': TEST_PROJECT_ID}))

    def test_delete_zero_project(self):
        # Deleting an empty selection is a no-op that redirects home.
        response = self.client.post(reverse('delete_project'),
                                    {'projects': []}, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, reverse('home_projects'))

    def test_delete_one_project(self):
        response = self.client.post(reverse('delete_project'),
                                    {'projects': [TEST_PROJECT_ID]}, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, reverse('home_projects'))

    def test_delete_multiple_projects(self):
        # Create several projects, then delete them all in one request.
        created_ids = []
        for i in range(5):
            payload = {
                'name': 'project_{}'.format(i),
                'description': ""
            }
            response = self.client.post(reverse('new_project'), payload, follow=True)
            self.assertEqual(response.status_code, 200)
            created_ids.append(response.context['project'].pk)
        response = self.client.post(reverse('delete_project'),
                                    {'projects': created_ids}, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, reverse('home_projects'))
class SessionViewsTests(TestCase):
    """Tests for creating, editing, and deleting sessions, and for the
    per-session knob-editing views."""

    fixtures = ['test_website.json']

    def setUp(self):
        # Session views require an authenticated user.
        self.client.login(username=TEST_USERNAME, password=TEST_PASSWORD)

    def test_new_session_form(self):
        """The new-session form renders for an existing project."""
        response = self.client.get(reverse('new_session', kwargs={'project_id': TEST_PROJECT_ID}))
        self.assertEqual(response.status_code, 200)

    def test_create_session_fail_invalidation(self):
        """An empty form fails validation on the required 'name' field."""
        form_addr = reverse('new_session', kwargs={'project_id': TEST_PROJECT_ID})
        post_data = {}
        response = self.client.post(form_addr, post_data)
        self.assertEqual(response.status_code, 200)
        self.assertFormError(response, 'form', 'name', "This field is required.")

    def test_create_basic_session_ok(self):
        """Creating a non-tuning session redirects to the session page."""
        form_addr = reverse('new_session', kwargs={'project_id': TEST_PROJECT_ID})
        # NOTE(review): no 'target_objective' here, unlike the tuning-session
        # payload below -- presumably optional for non-tuning sessions.
        post_data = {
            'name': 'test_create_basic_session',
            'description': 'testing create basic session...',
            'tuning_session': 'no_tuning_session',
            'cpu': '2',
            'memory': '16.0',
            'storage': '32',
            'dbms': 1
        }
        response = self.client.post(form_addr, post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        session_id = response.context['session'].pk
        self.assertRedirects(response, reverse('session',
                                               kwargs={'project_id': TEST_PROJECT_ID,
                                                       'session_id': session_id}))

    def test_create_tuning_session_ok(self):
        """Creating a tuning session (with a target objective) succeeds."""
        form_addr = reverse('new_session', kwargs={'project_id': TEST_PROJECT_ID})
        post_data = {
            'name': 'test_create_basic_session',
            'description': 'testing create basic session...',
            'tuning_session': 'tuning_session',
            'cpu': '2',
            'memory': '16.0',
            'storage': '32',
            'dbms': 1,
            'target_objective': 'throughput_txn_per_sec'
        }
        response = self.client.post(form_addr, post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        session_id = response.context['session'].pk
        self.assertRedirects(response, reverse('session',
                                               kwargs={'project_id': TEST_PROJECT_ID,
                                                       'session_id': session_id}))

    def test_edit_session_fail_invalidation(self):
        """Editing with an empty form fails on the required 'name' field."""
        form_addr = reverse('edit_session', kwargs={'project_id': TEST_PROJECT_ID,
                                                    'session_id': TEST_BASIC_SESSION_ID})
        post_data = {}
        response = self.client.post(form_addr, post_data)
        self.assertFormError(response, 'form', 'name', "This field is required.")

    def test_edit_basic_session_ok(self):
        """A valid edit redirects back to the session page."""
        form_addr = reverse('edit_session', kwargs={'project_id': TEST_PROJECT_ID,
                                                    'session_id': TEST_BASIC_SESSION_ID})
        post_data = {
            'name': 'new_session_name',
            'description': 'testing edit basic session...',
            'tuning_session': 'tuning_session',
            'cpu': '2',
            'memory': '16.0',
            'storage': '32',
            'dbms': 1,
            'target_objective': 'throughput_txn_per_sec'
        }
        response = self.client.post(form_addr, post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, reverse('session',
                                               kwargs={'project_id': TEST_PROJECT_ID,
                                                       'session_id': TEST_BASIC_SESSION_ID}))

    def test_edit_all_knobs_ok(self):
        """The knob-editing page renders for a session."""
        response = self.client.get(reverse('edit_knobs',
                                           kwargs={'project_id': TEST_PROJECT_ID,
                                                   'session_id': TEST_BASIC_SESSION_ID}))
        self.assertEqual(response.status_code, 200)

    def test_edit_knob_ok(self):
        """Updating a single knob's tuning range succeeds."""
        form_addr = reverse('edit_knobs', kwargs={'project_id': TEST_PROJECT_ID,
                                                  'session_id': TEST_BASIC_SESSION_ID})
        post_data = {
            'name': 'global.wal_writer_delay',
            'minval': '1',
            'maxval': '1000',
            'tunable': 'on'
        }
        response = self.client.post(form_addr, post_data, follow=True)
        # The knob-update endpoint responds 204 (no content) on success.
        self.assertEqual(response.status_code, 204)

    def test_delete_zero_sessions(self):
        """Deleting an empty selection is a no-op redirect."""
        form_addr = reverse('delete_session', kwargs={'project_id': TEST_PROJECT_ID})
        post_data = {'sessions': []}
        response = self.client.post(form_addr, post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, reverse('project_sessions',
                                               kwargs={'project_id': TEST_PROJECT_ID}))

    def test_delete_one_session(self):
        """Deleting a single session redirects to the project's sessions."""
        form_addr = reverse('delete_session', kwargs={'project_id': TEST_PROJECT_ID})
        post_data = {'sessions': [TEST_BASIC_SESSION_ID]}
        response = self.client.post(form_addr, post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, reverse('project_sessions',
                                               kwargs={'project_id': TEST_PROJECT_ID}))

    def test_delete_multiple_sessions(self):
        """Create several sessions, then delete them all in one request."""
        create_form_addr = reverse('new_session', kwargs={'project_id': TEST_PROJECT_ID})
        session_ids = []
        for i in range(5):
            post_data = {
                'name': 'session_{}'.format(i),
                'description': "",
                'tuning_session': 'no_tuning_session',
                'cpu': '2',
                'memory': '16.0',
                'storage': '32',
                'dbms': 1,
                'target_objective': 'throughput_txn_per_sec'
            }
            response = self.client.post(create_form_addr, post_data, follow=True)
            self.assertEqual(response.status_code, 200)
            session_ids.append(response.context['session'].pk)
        delete_form_addr = reverse('delete_session', kwargs={'project_id': TEST_PROJECT_ID})
        post_data = {'sessions': session_ids}
        response = self.client.post(delete_form_addr, post_data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertRedirects(response, reverse('project_sessions',
                                               kwargs={'project_id': TEST_PROJECT_ID}))

View File

@@ -0,0 +1,20 @@
#
# OtterTune - utils.py
#
# Copyright (c) 2017-18, Carnegie Mellon University Database Group
#
# Credentials of the fixture user used to authenticate the test client.
TEST_USERNAME = 'user'
TEST_PASSWORD = 'abcd123'
# Primary keys of objects loaded from the test fixtures
# (presumably test_website.json / test_user.json -- verify against fixtures).
TEST_PROJECT_ID = 1
TEST_BASIC_SESSION_ID = 1
TEST_TUNING_SESSION_ID = 2
TEST_WORKLOAD_ID = 1
# Upload codes for posting results to the two fixture sessions.
TEST_BASIC_SESSION_UPLOAD_CODE = '1234567890'
TEST_TUNING_SESSION_UPLOAD_CODE = '0987654321'