Fixed celery task logging and added filehandler

Dana Van Aken 2019-10-01 01:40:07 -04:00
parent 863d6619ba
commit e5edd2a7f4
3 changed files with 73 additions and 44 deletions

View File

@@ -2,9 +2,7 @@
 import logging
 
-from django.conf import settings
-#from django.core.exceptions import ProgrammingError
-from django.db import connection, migrations, ProgrammingError
+from django.db import migrations, ProgrammingError
 
 LOG = logging.getLogger(__name__)
@@ -15,6 +13,58 @@ TABLES_TO_COMPRESS = [
     "website_pipelinedata",
 ]
 
+MYSQL_MIN_VERSION = (5, 7, 0)
+
+ALTER_SQL = "ALTER TABLE %s COMPRESSION='%s'"
+OPTIMIZE_SQL = "OPTIMIZE TABLE %s"
+
+
+def compression_supported(schema_editor):
+    supported = False
+    dbms = schema_editor.connection.vendor
+    if dbms == 'mysql':
+        with schema_editor.connection.cursor() as cursor:
+            cursor.execute('SELECT VERSION()')
+            res = cursor.fetchone()[0]
+        version_str = res.split('-')[0]
+        version = tuple(int(v) for v in version_str.split('.'))
+        assert len(version) == len(MYSQL_MIN_VERSION), \
+            'MySQL - current version: {}, min version: {}'.format(version, MYSQL_MIN_VERSION)
+        if version >= MYSQL_MIN_VERSION:
+            supported = True
+            LOG.debug("%s %s: table compression supported.", dbms.upper(), version_str)
+        else:
+            LOG.debug("%s %s: table compression NOT supported.", dbms.upper(), version_str)
+    else:
+        LOG.debug("%s: table compression NOT supported.", dbms.upper())
+    return supported
+
+
+def enable_compression(apps, schema_editor):
+    # try:
+    if compression_supported(schema_editor):
+        for table in TABLES_TO_COMPRESS:
+            schema_editor.execute(ALTER_SQL % (table, 'zlib'))
+            schema_editor.execute(OPTIMIZE_SQL % table)
+    # except ProgrammingError:
+    #     LOG.warning("Error applying forward migration '0002_enable_compression'... Skipping.")
+
+
+def disable_compression(apps, schema_editor):
+    try:
+        if compression_supported(schema_editor):
+            for table in TABLES_TO_COMPRESS:
+                schema_editor.execute(ALTER_SQL % (table, 'none'))
+                schema_editor.execute(OPTIMIZE_SQL % table)
+    except ProgrammingError:
+        LOG.warning("Error applying reverse migration '0002_enable_compression'... Skipping.")
+
+
 class Migration(migrations.Migration):
@@ -22,37 +72,6 @@ class Migration(migrations.Migration):
         ('website', '0001_initial'),
     ]
 
-    try:
-        if connection.vendor == 'mysql':
-            version = (0, 0, 0)
-            with connection.cursor() as cursor:
-                cursor.execute('SELECT VERSION()')
-                version = cursor.fetchone()[0]
-            version_str = version.split('-')[0]
-            version = version_str.split('.')
-            version = tuple(int(v) for v in version)
-            if version >= (5, 7, 0):
-                operations = [
-                    migrations.RunSQL(["ALTER TABLE " + table_name + " COMPRESSION='zlib';",
-                                       "OPTIMIZE TABLE " + table_name + ";"],
-                                      ["ALTER TABLE " + table_name + " COMPRESSION='none';",
-                                       "OPTIMIZE TABLE " + table_name + ";"])
-                    for table_name in TABLES_TO_COMPRESS
-                ]
-                LOG.debug("Enabled compression for '%s %s'", connection.vendor, version_str)
-            else:
-                operations = []
-                LOG.debug("Disabled compression for '%s %s': version not supported",
-                          connection.vendor, version_str)
-        else:
-            LOG.debug("Disabled compression for '%s': vendor not supported", connection.vendor)
-    except ProgrammingError as err:
-        LOG.warning("Error applying migration '0002_enable_compression'... Skipping")
-        operations = []
+    operations = [migrations.RunPython(enable_compression, disable_compression)]
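
The rewritten migration moves the MySQL version check from import time into RunPython callables, so it runs against the schema_editor connection when the migration is actually applied. As a rough way to confirm the forward migration took effect, one could inspect CREATE_OPTIONS in information_schema; a minimal sketch below, assuming a configured Django project whose default database is MySQL 5.7+ (InnoDB page compression typically reports COMPRESSION="zlib" there, though the exact wording can vary by server version).

# Sketch only (not part of the commit): report the CREATE_OPTIONS of one of
# the tables touched by the migration. Expect something like COMPRESSION="zlib"
# after the forward migration on MySQL >= 5.7.
from django.db import connection

def create_options(table_name):
    with connection.cursor() as cursor:
        cursor.execute(
            "SELECT CREATE_OPTIONS FROM information_schema.TABLES "
            "WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = %s",
            [table_name])
        row = cursor.fetchone()
        return row[0] if row else None

print(create_options('website_pipelinedata'))  # table name taken from TABLES_TO_COMPRESS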

View File

@@ -123,8 +123,8 @@ class MetricManager(models.Manager):
     @staticmethod
     def get_metric_meta(dbms, target_objective=None):
-        numeric_metric_names = MetricCatalog.objects.filter(
-            dbms=dbms, metric_type=MetricType.COUNTER).values_list('name', flat=True)
+        numeric_metric_names = MetricCatalog.objects.filter(dbms=dbms).exclude(
+            metric_type=MetricType.INFO).values_list('name', flat=True)
         numeric_metrics = {}
         for metname in numeric_metric_names:
             numeric_metrics[metname] = MetricMeta(
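
The filter change broadens which metrics get_metric_meta treats as numeric: instead of keeping only COUNTER metrics, it now keeps every metric whose type is not INFO. A minimal before/after sketch, reusing only the names visible in the diff (MetricCatalog, MetricType) and assuming nothing else about the model:

# Before: numeric metadata was built only for counter metrics.
counters_only = MetricCatalog.objects.filter(
    dbms=dbms, metric_type=MetricType.COUNTER).values_list('name', flat=True)

# After: any metric that is not purely informational is included, so counters
# plus other numeric metric types are covered.
non_info = MetricCatalog.objects.filter(dbms=dbms).exclude(
    metric_type=MetricType.INFO).values_list('name', flat=True)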

View File

@@ -33,10 +33,6 @@ CONFIG_DIR = join(PROJECT_ROOT, 'config')
 # Where the log files are stored
 LOG_DIR = join(PROJECT_ROOT, 'log')
 
-# File/directory upload permissions
-FILE_UPLOAD_DIRECTORY_PERMISSIONS = 0o664
-FILE_UPLOAD_PERMISSIONS = 0o664
-
 # Path to OtterTune's website and ML modules
 OTTERTUNE_LIBS = dirname(PROJECT_ROOT)
@@ -111,7 +107,7 @@ MEDIA_URL = '/media/'
 # Don't put anything in this directory yourself; store your static files
 # in apps' "static/" subdirectories and in STATICFILES_DIRS.
 # Example: "/var/www/example.com/static/"
-STATIC_ROOT = join(PROJECT_ROOT, 'website', 'static')
+STATIC_ROOT = join(PROJECT_ROOT, 'static')
 
 # URL prefix for static files.
 # Example: "http://example.com/static/", "http://static.example.com/"
@@ -218,6 +214,8 @@ CELERYD_MAX_TASKS_PER_CHILD = 50
 # Number of concurrent workers.
 CELERYD_CONCURRENCY = 8
 
+CELERYD_HIJACK_ROOT_LOGGER = False
+
 djcelery.setup_loader()
 
 # ==============================================
@@ -247,8 +245,16 @@ LOGGING = {
             'backupCount': 2,
             'formatter': 'standard',
         },
+        'celery': {
+            'level': 'DEBUG',
+            'class': 'logging.handlers.RotatingFileHandler',
+            'filename': join(LOG_DIR, 'celery.log'),
+            'maxBytes': 50000,
+            'backupCount': 2,
+            'formatter': 'standard',
+        },
         'console': {
-            'level': 'INFO',
+            'level': 'DEBUG',
             'class': 'logging.StreamHandler',
             'formatter': 'standard'
         },
@@ -276,7 +282,6 @@ LOGGING = {
         },
         'website': {
            'handlers': ['console', 'logfile'],
-            'propagate': False,
             'level': 'DEBUG',
         },
         'django.request': {
@@ -284,6 +289,11 @@ LOGGING = {
             'level': 'DEBUG',
             'propagate': False,
         },
+        'celery': {
+            'handlers': ['console', 'celery'],
+            'level': 'DEBUG',
+            'propagate': True,
+        },
         # Uncomment to email admins after encountering an error (and debug=False)
         # 'django.request': {
         #     'handlers': ['mail_admins'],
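
Taken together, the Celery changes stop Celery from hijacking the root logger (CELERYD_HIJACK_ROOT_LOGGER = False) and route everything logged under the 'celery' namespace to the console and to a rotating log/celery.log file. A minimal sketch of a task that this configuration would capture, assuming Celery's standard get_task_logger helper; the task itself is illustrative and not part of the commit.

from celery import shared_task
from celery.utils.log import get_task_logger

# get_task_logger() parents the logger under the 'celery' hierarchy, so its
# records propagate to the 'console' and 'celery' handlers configured above.
LOG = get_task_logger(__name__)

@shared_task
def add(x, y):  # illustrative task, not from the commit
    LOG.debug("adding %s and %s", x, y)  # -> console + log/celery.log
    return x + y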