Speedup travis-ci tests

dvanaken 2019-12-05 09:56:27 -05:00 committed by Dana Van Aken
parent 9e9c9c7510
commit 783b94cd4d
4 changed files with 50 additions and 9 deletions

View File

@@ -58,7 +58,7 @@ matrix:
         repo: cmu-db/ottertune
         branch: master
-    - name: Tests (MySQL v5.7)
+    - name: Unit Tests (MySQL v5.7)
       env:
         - BACKEND=mysql
       services:
@@ -76,8 +76,9 @@ matrix:
       before_install:
         - mysql -e "CREATE DATABASE IF NOT EXISTS ${DB_NAME}"
         - sed -i '/psycopg2/d' $WEB/requirements.txt
+        - echo "codecov" >> $WEB/requirements.txt
-    - name: Tests (PostgreSQL v9.6)
+    - name: Unit Tests (PostgreSQL v9.6)
       env:
         - BACKEND=postgresql
       addons:
@@ -92,9 +93,49 @@ matrix:
       before_install:
         - psql -U postgres -c "CREATE DATABASE ${DB_NAME}"
         - sed -i '/mysqlclient/d' $WEB/requirements.txt
+        - echo "codecov" >> $WEB/requirements.txt
+    - name: Integration Tests (MySQL v5.7)
+      env:
+        - BACKEND=mysql
+      services:
+        - mysql
+      addons:
+        apt:
+          update: true
+          packages:
+            - mysql-server
+            - python-mysqldb
+            - rabbitmq-server
+      before_install:
+        - mysql -e "CREATE DATABASE IF NOT EXISTS ${DB_NAME}"
+        - sed -i '/psycopg2/d' $WEB/requirements.txt
+        - sed -i 's/^RUN_EVERY =.*$/RUN_EVERY = 60/' $WEB/website/settings/constants.py
+      script:
+        - cd $WEB && python manage.py runserver 0.0.0.0:8000 &
+        - sleep 10 && cd $DRIVER && fab integration_tests
+      after_success: []
+    - name: Integration Tests (PostgreSQL v9.6)
+      env:
+        - BACKEND=postgresql
+      addons:
+        postgresql: "9.6"
+        apt:
+          update: true
+          packages:
+            - rabbitmq-server
+      before_install:
+        - psql -U postgres -c "CREATE DATABASE ${DB_NAME}"
+        - sed -i '/mysqlclient/d' $WEB/requirements.txt
+        - sed -i 's/^RUN_EVERY =.*$/RUN_EVERY = 60/' $WEB/website/settings/constants.py
+      script:
+        - cd $WEB && python manage.py runserver 0.0.0.0:8000 &
+        - sleep 10 && cd $DRIVER && fab integration_tests
+      after_success: []
 install:
-  - pip install codecov -r $WEB/requirements.txt
+  - pip install -r $WEB/requirements.txt
   - pip freeze
 before_script:
   - env | sort
@@ -110,9 +151,6 @@ script:
   - cd $ROOT/server && coverage run --omit="*/tests/*" -m unittest discover -s analysis/tests -v
   - cd $WEB && coverage run manage.py test --noinput -v 2
   - cd $CONTROLLER && gradle build
-  - cd $WEB
-  - python manage.py runserver 0.0.0.0:8000 &
-  - sleep 10 && cd $DRIVER && fab integration_tests
   - cd $ROOT && git reset --soft ${TRAVIS_COMMIT_RANGE%...*} && git status && git log | head -n 1 && git lint
 after_success:
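
Note: the two new integration-test jobs shorten the background-task interval before starting the web server by rewriting RUN_EVERY in $WEB/website/settings/constants.py with sed, so the pipeline tasks fire every 60 seconds instead of the 300-second default while fab integration_tests runs. A minimal sketch of the same override checked locally (the relative path and the subprocess/importlib usage are illustrative assumptions, not part of the repository):

    import importlib
    import subprocess

    # Rewrite the constant exactly as the Travis integration jobs do
    # (path assumed relative to the repo's server/website directory).
    subprocess.run(
        ["sed", "-i", "s/^RUN_EVERY =.*$/RUN_EVERY = 60/",
         "website/settings/constants.py"],
        check=True,
    )

    # Re-import and confirm the background tasks will now be scheduled every minute.
    import website.settings.constants as constants
    importlib.reload(constants)
    assert constants.RUN_EVERY == 60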

View File

@@ -5,6 +5,9 @@
 #
 # ---PIPELINE CONSTANTS---
+# how often to run the background tests, in seconds
+RUN_EVERY = 300
+
 # the number of samples (staring points) in gradient descent
 NUM_SAMPLES = 30
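
The new constant is defined in website/settings/constants.py, while the task code further below imports it as "from website.settings import RUN_EVERY"; that import only resolves if the settings package re-exports its constants. A minimal sketch of one common way such a re-export is wired (the star import is an assumption about this repo's website/settings/__init__.py, which is not shown in this diff):

    # website/settings/__init__.py (sketch, assumed layout)
    # Re-export everything from constants.py so callers can write:
    #     from website.settings import RUN_EVERY
    from .constants import *  # noqa: F401,F403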

View File

@@ -107,7 +107,6 @@ class ConfigurationRecommendation(UpdateTask):  # pylint: disable=abstract-method
     def on_success(self, retval, task_id, args, kwargs):
         super(ConfigurationRecommendation, self).on_success(retval, task_id, args, kwargs)
-        LOG.info("NEXT CONFIG: %s", retval['recommendation'])
         result_id = retval['result_id']
         result = Result.objects.get(pk=result_id)

View File

@@ -18,6 +18,7 @@ from analysis.preprocessing import (Bin, get_shuffle_indices,
                                     DummyEncoder,
                                     consolidate_columnlabels)
 from website.models import PipelineData, PipelineRun, Result, Workload
+from website.settings import RUN_EVERY
 from website.types import PipelineTaskType, WorkloadStatusType
 from website.utils import DataUtil, JSONUtil
@@ -27,8 +28,8 @@ LOG = get_task_logger(__name__)
 MIN_WORKLOAD_RESULTS_COUNT = 5

-# Run the background tasks every 5 minutes
-@periodic_task(run_every=300, name="run_background_tasks")
+# Run the background tasks every 'RUN_EVERY' seconds
+@periodic_task(run_every=RUN_EVERY, name="run_background_tasks")
 def run_background_tasks():
     LOG.debug("Starting background tasks")
     # Find modified and not modified workloads, we only have to calculate for the
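
For context, periodic_task here is Celery's older decorator-based shortcut for beat-scheduled tasks, so run_every=RUN_EVERY simply tells celery beat to enqueue run_background_tasks every RUN_EVERY seconds. A minimal sketch of the equivalent explicit beat configuration (the app name, broker URL, and task body are illustrative, not taken from the repo):

    from celery import Celery

    RUN_EVERY = 60  # seconds; CI rewrites the 300-second default down to this

    app = Celery("website", broker="amqp://localhost//")

    # celery beat enqueues the task every RUN_EVERY seconds; a worker executes it.
    app.conf.beat_schedule = {
        "run_background_tasks": {
            "task": "run_background_tasks",
            "schedule": RUN_EVERY,
        },
    }

    @app.task(name="run_background_tasks")
    def run_background_tasks():
        print("Starting background tasks")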