Mirror of https://github.com/mozilla/treeherder.git

Commit 1168c6c821
Merge branch 'master' of ssh://github.com/mozilla/treeherder-service into performance-schema

@@ -1,6 +1,21 @@
 #!/bin/bash
+
 curr_dir=$( dirname "${BASH_SOURCE[0]}" )
 cd $( dirname $curr_dir)
-source /etc/profile.d/treeherder.sh
-source ../venv/bin/activate
-exec ../venv/bin/python manage.py celerymon
+
+LOGFILE=/var/log/celery/celerymon.log
+
+if [ ! -f $LOGFILE ]; then
+    touch $LOGFILE
+fi
+
+source /etc/profile.d/treeherder.sh
+
+if [ -f ../venv/bin/python ]; then
+    source ../venv/bin/activate
+    PYTHON=../venv/bin/python
+else
+    PYTHON=python
+fi
+
+exec $PYTHON manage.py celerymon -f $LOGFILE

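Note: the service-wrapper hunks in this commit all repeat the same two changes: the log file is created up front and handed to the daemon explicitly, and the virtualenv interpreter is used only when it actually exists. A minimal Python sketch of that logic (file names taken from the hunk above; everything else is assumed, not part of the commit):

    import os

    LOGFILE = "/var/log/celery/celerymon.log"

    # Same effect as `touch $LOGFILE`: make sure the file exists before the
    # daemon tries to append to it.
    if not os.path.isfile(LOGFILE):
        open(LOGFILE, "a").close()

    # Prefer the virtualenv interpreter, fall back to whatever `python`
    # resolves to on PATH -- the if/else fallback the script adds.
    python = "../venv/bin/python" if os.path.isfile("../venv/bin/python") else "python"

    # os.execvp replaces the current process, like the shell's `exec`.
    os.execvp(python, [python, "manage.py", "celerymon", "-f", LOGFILE])
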
@@ -1,12 +1,24 @@
 #!/bin/bash
 curr_dir=$( dirname "${BASH_SOURCE[0]}" )
 cd $( dirname $curr_dir)
 
 source /etc/profile.d/treeherder.sh
-source ../venv/bin/activate
 
 if [[ ${NEW_RELIC_LICENSE_KEY+isset} = isset ]]; then
     NEWRELIC_ADMIN="newrelic-admin run-program"
 fi
 
-exec $NEWRELIC_ADMIN ../venv/bin/python manage.py celeryd -c 3 -Q default -E --maxtasksperchild=500 --logfile=/var/log/celery/treeherder_worker.log
+if [ -f ../venv/bin/python ]; then
+    source ../venv/bin/activate
+    PYTHON=../venv/bin/python
+else
+    PYTHON=python
+fi
+
+LOGFILE=/var/log/celery/celery_worker.log
+
+if [ ! -f $LOGFILE ]; then
+    touch $LOGFILE
+fi
+
+exec $NEWRELIC_ADMIN $PYTHON manage.py celeryd -c 3 -Q default -E --maxtasksperchild=500 --logfile=$LOGFILE

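Note: `${NEW_RELIC_LICENSE_KEY+isset}` is bash parameter expansion; it yields `isset` whenever the variable is set at all, even to an empty string, so the worker is wrapped with `newrelic-admin run-program` only on hosts that define the key. A rough Python equivalent of the same decision (the command list is illustrative, not the script's exact arguments):

    import os
    import subprocess

    cmd = ["python", "manage.py", "celeryd", "-c", "3", "-Q", "default"]

    # `in os.environ` mirrors ${VAR+word}: true for set-but-empty too.
    if "NEW_RELIC_LICENSE_KEY" in os.environ:
        cmd = ["newrelic-admin", "run-program"] + cmd

    subprocess.call(cmd)
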
@@ -1,11 +1,25 @@
 #!/bin/bash
 curr_dir=$( dirname "${BASH_SOURCE[0]}" )
 cd $( dirname $curr_dir)
 source /etc/profile.d/treeherder.sh
-source ../venv/bin/activate
 
 if [[ ${NEW_RELIC_LICENSE_KEY+isset} = isset ]]; then
     NEWRELIC_ADMIN="newrelic-admin run-program"
 fi
 
-exec $NEWRELIC_ADMIN ../venv/bin/python manage.py celeryd -Q log_parser_fail,log_parser -P gevent --concurrency=10 -E --logfile=/var/log/celery/treeherder_worker_gevent.log -n gevent_worker.%h -l debug --maxtasksperchild=500
+if [ -f ../venv/bin/python ]; then
+    source ../venv/bin/activate
+    PYTHON=../venv/bin/python
+else
+    PYTHON=python
+fi
+
+LOGFILE=/var/log/celery/celery_worker_gevent.log
+
+if [ ! -f $LOGFILE ]; then
+    touch $LOGFILE
+fi
+
+exec $NEWRELIC_ADMIN $PYTHON manage.py celeryd -Q log_parser_fail,log_parser -P gevent \
+    --concurrency=10 -E --logfile=$LOGFILE -n gevent_worker.%h -l debug \
+    --maxtasksperchild=500

@@ -1,6 +1,21 @@
 #!/bin/bash
+
 curr_dir=$( dirname "${BASH_SOURCE[0]}" )
 cd $( dirname $curr_dir)
-source /etc/profile.d/treeherder.sh
-source ../venv/bin/activate
-exec ../venv/bin/python manage.py celerybeat
+
+LOGFILE=/var/log/celery/celerybeat.log
+
+if [ ! -f $LOGFILE ]; then
+    touch $LOGFILE
+fi
+
+source /etc/profile.d/treeherder.sh
+
+if [ -f ../venv/bin/python ]; then
+    source ../venv/bin/activate
+    PYTHON=../venv/bin/python
+else
+    PYTHON=python
+fi
+
+exec $PYTHON manage.py celerybeat -f $LOGFILE

@@ -1,19 +1,33 @@
 #!/bin/bash
 set -e
 
 curr_dir=$( dirname "${BASH_SOURCE[0]}" )
 cd $( dirname $curr_dir)
 
 LOGDIR=/var/log/gunicorn
 ACCESS_LOGFILE=$LOGDIR/treeherder_access.log
 ERROR_LOGFILE=$LOGDIR/treeherder_error.log
 
-NUM_WORKERS=5
+if [ ! -f $ACCESS_LOGFILE ]; then
+    touch $ACCESS_LOGFILE
+fi
 
-source /etc/profile.d/treeherder.sh
-source ../venv/bin/activate
+if [ ! -f $ERROR_LOGFILE ]; then
+    touch $ERROR_LOGFILE
+fi
 
-exec ../venv/bin/gunicorn -w $NUM_WORKERS \
+NUM_WORKERS=5
+
+source /etc/profile.d/treeherder.sh
+
+if [ -f ../venv/bin/gunicorn ]; then
+    source ../venv/bin/activate
+    GUNICORN=../venv/bin/gunicorn
+else
+    GUNICORN=/usr/bin/gunicorn
+fi
+
+exec $GUNICORN -w $NUM_WORKERS \
     --max-requests=2000 \
     --access-logfile=$ACCESS_LOGFILE \
     --error-logfile=$ERROR_LOGFILE treeherder.webapp.wsgi:application

@@ -1,4 +1,18 @@
 #!/bin/bash
+
+LOGDIR=/var/log/pulse_consumer/
+
+if [ ! -d $LOGDIR ]; then
+    mkdir -p $LOGDIR
+fi
+
 source /etc/profile.d/treeherder.sh
-source ../venv/bin/activate
-exec ../venv/bin/python manage.py start_pulse_consumer --start
+
+if [ -f ../venv/bin/python ]; then
+    source ../venv/bin/activate
+    PYTHON=../venv/bin/python
+else
+    PYTHON=python
+fi
+
+exec ../venv/bin/python manage.py start_pulse_consumer --start --logdir $LOGDIR

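Note: this hunk computes $PYTHON with the same venv fallback as its siblings, but the final line still hard-codes `../venv/bin/python`, so the fallback branch is never exercised here. A sketch of what the fallback presumably intended (not what the commit actually ships):

    import os

    python = "../venv/bin/python" if os.path.isfile("../venv/bin/python") else "python"
    os.execvp(python, [python, "manage.py", "start_pulse_consumer",
                       "--start", "--logdir", "/var/log/pulse_consumer/"])
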
@@ -1,21 +1,32 @@
 #!/bin/bash
 set -e
 
 curr_dir=$( dirname "${BASH_SOURCE[0]}" )
 cd $( dirname $curr_dir)
 
 LOGFILE=/var/log/socketio/treeherder.log
 
+if [ ! -f $LOGFILE ]; then
+    touch $LOGFILE
+fi
+
+if [ -f ../venv/bin/python ]; then
+    source ../venv/bin/activate
+    PYTHON=../venv/bin/python
+else
+    PYTHON=python
+fi
+
 source /etc/profile.d/treeherder.sh
-source ../venv/bin/activate
+
 USER=$TREEHERDER_RABBITMQ_USER
 PASS=$TREEHERDER_RABBITMQ_PASSWORD
 HOST=$TREEHERDER_RABBITMQ_HOST
 PORT=$TREEHERDER_RABBITMQ_PORT
 VHOST=$TREEHERDER_RABBITMQ_VHOST
 
 
 exec python treeherder/events/run_socketio.py \
     --broker-url amqp://$USER:$PASS@$HOST:$PORT/$VHOST \
-    --log-file LOGFILE
+    --log-file $LOGFILE

@@ -1,5 +1,6 @@
 SRC_DIR = ''
-BIN_DIR = '/usr/sbin'
+BIN_DIR = '/usr/bin'
+SBIN_DIR = '/sbin'
 
 DEPLOY_SCRIPT = ''
 REMOTE_UPDATE_SCRIPT = ''

@@ -14,8 +14,9 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
 from commander.deploy import task, hostgroups
 import commander_settings as settings
 
-th_service_src = "{0}/treeherder-service/".format(settings.SRC_DIR)
-th_ui_src = "{0}/treeherder-ui/".format(settings.SRC_DIR)
+th_service_src = os.path.join(settings.SRC_DIR, 'treeherder-service')
+th_ui_src = os.path.join(settings.SRC_DIR, 'treeherder-ui')
 
 
 @task
 def update_code(ctx, tag):

@@ -34,12 +35,16 @@ def update_code(ctx, tag):
     ctx.local('git submodule update --init --recursive')
     ctx.local("find . -type f -name '*.pyc' -delete")
 
 
 def update_assets(ctx):
-    ctx.remote("{0}grunt build".format(th_ui_src))
+    grunt_path = os.path.join(th_ui_src, 'grunt')
+    ctx.local("{0} build".format(grunt_path))
 
 
 def update_oauth_credentials(ctx):
     ctx.local("python2.6 manage.py export_project_credentials")
 
 
 @task
 def update_db(ctx):
     """Update the database schema, if necessary."""

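Note: the switch from string formatting to os.path.join changes more than style: the formatted version kept a trailing slash, which update_assets relied on when it glued `grunt` directly onto the path. With os.path.join the separator is inserted explicitly and is never doubled. A small demonstration:

    import os

    SRC_DIR = '/data/src'   # illustrative value, not from the commit

    "{0}/treeherder-ui/".format(SRC_DIR)     # '/data/src/treeherder-ui/' (trailing slash)
    os.path.join(SRC_DIR, 'treeherder-ui')   # '/data/src/treeherder-ui'  (no trailing slash)

    # Hence the paired change in update_assets: "{0}grunt build" only worked
    # because of the trailing slash; os.path.join(th_ui_src, 'grunt') is explicit.
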
@@ -48,13 +53,15 @@ def update_db(ctx):
     ctx.local('python2.6 manage.py syncdb')
     ctx.local('python2.6 manage.py migrate')
 
 
 @task
 def checkin_changes(ctx):
     """Use the local, IT-written deploy script to check in changes."""
     ctx.local(settings.DEPLOY_SCRIPT)
 
 
-@hostgroups(settings.WEB_HOSTGROUP, remote_kwargs={'ssh_key': settings.SSH_KEY})
+@hostgroups(
+    settings.WEB_HOSTGROUP, remote_kwargs={'ssh_key': settings.SSH_KEY})
 def deploy_web_app(ctx):
     """Call the remote update script to push changes to webheads."""
     ctx.remote(settings.REMOTE_UPDATE_SCRIPT)

@@ -65,23 +72,29 @@ def deploy_web_app(ctx):
     # this is primarely for the persona ui
     ctx.remote("python2.6 manage.py collectstatic --noinput")
 
-    ctx.remote( '{0}/supervisorctl graceful httpd'.format(settings.BIN_DIR) )
+    ctx.remote( '{0}/service httpd graceful'.format(settings.SBIN_DIR) )
     ctx.remote( '{0}/supervisorctl restart gunicorn'.format(settings.BIN_DIR) )
 
 
-@hostgroups(settings.CELERY_HOSTGROUP, remote_kwargs={'ssh_key': settings.SSH_KEY})
+@hostgroups(
+    settings.CELERY_HOSTGROUP, remote_kwargs={'ssh_key': settings.SSH_KEY})
 def deploy_workers(ctx):
     """Call the remote update script to push changes to workers."""
     ctx.remote(settings.REMOTE_UPDATE_SCRIPT)
 
     # Restarts celery worker on the celery hostgroup to listen to the
     # celery queues: log_parser_fail,log_parser
-    ctx.remote( '{0}/supervisorctl restart celery_gevent'.format(settings.BIN_DIR) )
+    ctx.remote(
+        '{0}/supervisorctl restart celery_gevent'.format(settings.BIN_DIR))
 
 
 def deploy_admin_node(ctx):
 
     # Restarts celery worker on the admin node listening to the
     # celery queues: default
-    ctx.remote( '{0}/supervisorctl restart run_celery_worker'.format(settings.BIN_DIR) )
+    ctx.remote(
+        '{0}/supervisorctl restart run_celery_worker'.format(settings.BIN_DIR))
 
 
 @task
 def update_info(ctx):

@@ -104,9 +117,9 @@ def pre_update(ctx, ref=settings.UPDATE_REF):
 
 @task
 def update(ctx):
-    update_assets()
-    update_db()
-    update_oauth_credentials()
+    update_assets(ctx)
+    update_db(ctx)
+    update_oauth_credentials(ctx)
 
 
 @task

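Note: update() previously called its helpers with no arguments, which fails because update_assets, update_db and update_oauth_credentials each take a context parameter. The assumed calling convention (not confirmed by the commit itself): commander supplies ctx when a @task runs from the command line, but a plain Python call from another task must forward it by hand. Sketch:

    def update_db(ctx):
        ctx.local('python2.6 manage.py syncdb')

    def update(ctx):
        update_db(ctx)    # direct call: ctx must be passed explicitly
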
@@ -116,4 +129,3 @@ def deploy(ctx):
     deploy_workers()
     deploy_admin_node()
     update_info()
-

@@ -103,8 +103,8 @@ def initial_data():
 
     call_command('load_initial_data')
 
-@pytest.fixture()
-def jm():
+@pytest.fixture(scope='function')
+def jm(request):
     """ Give a test access to a JobsModel instance. """
     from django.conf import settings
     from treeherder.model.derived.jobs import JobsModel

@@ -125,6 +125,8 @@ def jm():
     def fin():
         model.disconnect()
 
+    request.addfinalizer(fin)
+
     return model
 
 

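Note: this is the standard pytest 2.x teardown idiom: a function-scoped fixture takes the built-in `request` object and registers a finalizer, which pytest runs after the test whether it passed or failed, so each test gets a fresh, properly closed connection. Generic sketch (connect()/disconnect() are placeholders, not Treeherder APIs):

    import pytest

    @pytest.fixture(scope='function')
    def conn(request):
        c = connect()              # hypothetical resource setup

        def fin():
            c.disconnect()         # guaranteed cleanup, pass or fail

        request.addfinalizer(fin)
        return c
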
@@ -238,8 +240,8 @@ def mock_get_resultset(monkeypatch, result_set_stored):
 
     monkeypatch.setattr(common, 'lookup_revisions', _get_resultset)
 
-@pytest.fixture()
-def refdata():
+@pytest.fixture(scope='function')
+def refdata(request):
     """returns a patched RefDataManager for testing purpose"""
 
     import os

@@ -254,9 +256,15 @@ def refdata():
     )
 
     add_test_procs_file(refdata.dhub, 'reference', proc_path)
 
+    def fin():
+        refdata.disconnect()
+
+    request.addfinalizer(fin)
+
+
     return refdata
 
 @pytest.fixture
 def mock_message_broker(monkeypatch):
     from django.conf import settings
     monkeypatch.setattr(settings, 'BROKER_URL', 'memory://')

@@ -86,11 +86,11 @@ def completed_jobs_stored(
 
     test_utils.post_collection(jm.project, tjc)
 
 
 @pytest.fixture
 def completed_jobs_loaded(jm, completed_jobs_stored):
     jm.process_objects(1, raise_errors=True)
 
-
+    jm.disconnect()
 
 @pytest.fixture
 def mock_send_request(monkeypatch, jm):

@@ -110,7 +110,8 @@ def mock_send_request(monkeypatch, jm):
             str(signed_uri), params=th_collection.get_collection_data()
         )
 
+
         response.getcode = lambda: response.status_int
         return response
 
     monkeypatch.setattr(TreeherderRequest, 'send', _send)

@@ -35,6 +35,9 @@ def test_bz_api_process(mock_extract, refdata):
         proc='refdata_test.selects.test_bugscache',
         return_type='tuple'
     )
 
+    refdata.disconnect()
+
+
     # the number of rows inserted should equal to the number of bugs
     assert len(row_data) == 10

@@ -58,5 +58,7 @@ def test_ingest_pending_jobs(jm, initial_data,
     stored_obj = jm.get_jobs_dhub().execute(
         proc="jobs_test.selects.jobs")
 
+    jm.disconnect()
+
     assert len(stored_obj) == 1

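Note: the jm.disconnect() calls added throughout these tests follow one pattern, visible in the hunk above: run the queries, release the fixture's database connection, then assert on rows already fetched. The assertions only touch in-memory data, so disconnecting first keeps a failing assert from leaking an open connection. Schematic form:

    def test_something(jm):
        rows = jm.get_jobs_dhub().execute(proc="jobs_test.selects.jobs")

        jm.disconnect()          # release the connection before asserting

        assert len(rows) == 1    # operates on already-fetched data only
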
@@ -2,7 +2,7 @@
 from django.conf import settings
 
 
-def test_get_revision_hash(jm, initial_data,
+def test_get_revision_hash(initial_data,
                            result_set_stored, mock_get_remote_content):
     """That the correct revision_hash is retrieved is the revision exists"""
     from treeherder.etl import common

@@ -12,7 +12,7 @@ def test_get_revision_hash(jm, initial_data,
     assert resultset[project][revision]['revision_hash'] == result_set_stored[0]['revision_hash']
 
 
-def test_get_revision_hash_none(jm, mock_get_remote_content,
+def test_get_revision_hash_none(mock_get_remote_content,
                                 initial_data, result_set_stored):
     """Test that none is returned if the revision doesn't exist"""
     from treeherder.etl import common

@@ -62,6 +62,8 @@ def test_load_data(sample_data, jm, mock_post_json_data,
     stored_obj = jm.get_os_dhub().execute(
         proc="objectstore_test.selects.all")
 
+    jm.disconnect()
+
     assert len(stored_obj) == 1

@@ -91,4 +93,6 @@ def test_load_data_missing_attribute(sample_data, jm, mock_post_json_data, initi
     stored_obj = jm.get_os_dhub().execute(
         proc="objectstore_test.selects.all")
 
+    jm.disconnect()
+
     assert len(stored_obj) == 0

@@ -24,3 +24,5 @@ def test_ingest_hg_pushlog(jm, initial_data, test_base_dir,
     )
 
     assert len(revisions_stored) == 15
+
+    jm.disconnect()

@@ -36,48 +36,47 @@ def do_test(log):
     # assert act == exp, json.dumps(act, indent=4)
 
 
-def test_crashtest_passing(jm, initial_data):
+def test_crashtest_passing(initial_data):
     """Process a job with a single log reference."""
 
     do_test("mozilla-central_fedora-b2g_test-crashtest-1-bm54-tests1-linux-build50")
 
-def test_opt_test_failing(jm, initial_data):
+
+def test_opt_test_failing(initial_data):
     """Process log with printlines and errors"""
     do_test("mozilla-central_mountainlion_test-mochitest-2-bm80-tests1-macosx-build138")
 
 
-def test_build_failing(jm, initial_data):
+def test_build_failing(initial_data):
     """Process a job with a single log reference."""
 
     do_test("mozilla-central-macosx64-debug-bm65-build1-build15")
 
 
-def test_mochitest_debug_passing(jm, initial_data):
+def test_mochitest_debug_passing(initial_data):
     """Process a job with a single log reference."""
 
     do_test("mozilla-central_mountainlion-debug_test-mochitest-2-bm80-tests1-macosx-build93")
 
 
-def test_mochitest_pass(jm, initial_data):
+def test_mochitest_pass(initial_data):
     """Process a job with a single log reference."""
 
     do_test("mozilla-central_mountainlion_test-mochitest-2-bm77-tests1-macosx-build141")
 
 
-def test_mochitest_fail(jm, initial_data):
+def test_mochitest_fail(initial_data):
     """Process a job with a single log reference."""
 
     do_test("mozilla-esr17_xp_test_pgo-mochitest-browser-chrome-bm74-tests1-windows-build12")
 
 
-def test_mochitest_process_crash(jm, initial_data):
+def test_mochitest_process_crash(initial_data):
     """Test a mochitest log that has PROCESS-CRASH """
 
     do_test("mozilla-inbound_ubuntu64_vm-debug_test-mochitest-other-bm53-tests1-linux-build122")
 
 
-def test_jetpack_fail(jm, initial_data):
+def test_jetpack_fail(initial_data):
     """Process a job with a single log reference."""
 
     do_test("ux_ubuntu32_vm_test-jetpack-bm67-tests1-linux-build16")

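Note: the other recurring edit in the test files is dropping the unused `jm` fixture argument. Requesting a fixture a test never uses still triggers its setup, so each of these tests was opening a JobsModel connection it never touched; trimming the signature lets pytest skip that work entirely:

    # before: pytest builds (and leaks) a JobsModel just to ignore it
    def test_crashtest_passing(jm, initial_data):
        do_test("...")

    # after: only the fixture the test actually needs
    def test_crashtest_passing(initial_data):
        do_test("...")
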
@@ -52,14 +52,14 @@ def do_test(log, check_errors=True):
     # assert act == exp, json.dumps(act, indent=4)
 
 
-def test_crashtest_passing(jm, initial_data):
+def test_crashtest_passing(initial_data):
     """Process a job with a single log reference."""
 
     do_test(
         "mozilla-central_fedora-b2g_test-crashtest-1-bm54-tests1-linux-build50"
     )
 
-def test_mochitest_pass(jm, initial_data):
+def test_mochitest_pass(initial_data):
     """Process a job with a single log reference."""
 
     do_test(

@@ -68,7 +68,7 @@ def test_mochitest_pass(jm, initial_data):
 
 
 @slow
-def test_mochitest_fail(jm, initial_data):
+def test_mochitest_fail(initial_data):
     """Process a job with a single log reference."""
 
     do_test(

@@ -76,7 +76,7 @@ def test_mochitest_fail(jm, initial_data):
     )
 
 
-def test_mochitest_process_crash(jm, initial_data):
+def test_mochitest_process_crash(initial_data):
     """Test a mochitest log that has PROCESS-CRASH """
 
     do_test(

@@ -84,7 +84,7 @@ def test_mochitest_process_crash(jm, initial_data):
     )
 
 @slow
-def test_jetpack_fail(jm, initial_data):
+def test_jetpack_fail(initial_data):
     """Process a job with a single log reference."""
 
     do_test(

@@ -93,7 +93,7 @@ def test_jetpack_fail(jm, initial_data):
 
 
 @slow
-def test_crash_1(jm, initial_data):
+def test_crash_1(initial_data):
     """Test from old log parser"""
     do_test(
         "crash-1"

|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_crash_2(jm, initial_data):
|
def test_crash_2(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"crash-2"
|
"crash-2"
|
||||||
|
@ -109,7 +109,7 @@ def test_crash_2(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_crash_mac_1(jm, initial_data):
|
def test_crash_mac_1(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"crash-mac-1"
|
"crash-mac-1"
|
||||||
|
@ -117,7 +117,7 @@ def test_crash_mac_1(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_crashtest_timeout(jm, initial_data):
|
def test_crashtest_timeout(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"crashtest-timeout"
|
"crashtest-timeout"
|
||||||
|
@ -125,7 +125,7 @@ def test_crashtest_timeout(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_jsreftest_fail(jm, initial_data):
|
def test_jsreftest_fail(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"jsreftest-fail"
|
"jsreftest-fail"
|
||||||
|
@ -133,7 +133,7 @@ def test_jsreftest_fail(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_jsreftest_timeout_crash(jm, initial_data):
|
def test_jsreftest_timeout_crash(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"jsreftest-timeout-crash"
|
"jsreftest-timeout-crash"
|
||||||
|
@ -141,7 +141,7 @@ def test_jsreftest_timeout_crash(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_leaks_1(jm, initial_data):
|
def test_leaks_1(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"leaks-1"
|
"leaks-1"
|
||||||
|
@ -149,7 +149,7 @@ def test_leaks_1(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_mochitest_test_end(jm, initial_data):
|
def test_mochitest_test_end(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"mochitest-test-end"
|
"mochitest-test-end"
|
||||||
|
@ -157,7 +157,7 @@ def test_mochitest_test_end(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_multiple_timeouts(jm, initial_data):
|
def test_multiple_timeouts(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"multiple-timeouts"
|
"multiple-timeouts"
|
||||||
|
@ -165,7 +165,7 @@ def test_multiple_timeouts(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_opt_objc_exception(jm, initial_data):
|
def test_opt_objc_exception(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"opt-objc-exception"
|
"opt-objc-exception"
|
||||||
|
@ -173,7 +173,7 @@ def test_opt_objc_exception(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_reftest_fail_crash(jm, initial_data):
|
def test_reftest_fail_crash(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"reftest-fail-crash"
|
"reftest-fail-crash"
|
||||||
|
@ -181,7 +181,7 @@ def test_reftest_fail_crash(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_reftest_jserror(jm, initial_data):
|
def test_reftest_jserror(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"reftest-jserror"
|
"reftest-jserror"
|
||||||
|
@ -189,7 +189,7 @@ def test_reftest_jserror(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_reftest_opt_fail(jm, initial_data):
|
def test_reftest_opt_fail(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"reftest-opt-fail"
|
"reftest-opt-fail"
|
||||||
|
@ -197,7 +197,7 @@ def test_reftest_opt_fail(jm, initial_data):
|
||||||
|
|
||||||
|
|
||||||
@slow
|
@slow
|
||||||
def test_reftest_timeout(jm, initial_data):
|
def test_reftest_timeout(initial_data):
|
||||||
"""Test from old log parser"""
|
"""Test from old log parser"""
|
||||||
do_test(
|
do_test(
|
||||||
"reftest-timeout"
|
"reftest-timeout"
|
||||||
|
@@ -205,35 +205,35 @@ def test_reftest_timeout(jm, initial_data):
 
 
 @slow
-def test_tinderbox_exception(jm, initial_data):
+def test_tinderbox_exception(initial_data):
     """Test from old log parser"""
     do_test(
         "tinderbox-exception"
     )
 
 
-def test_xpcshell_crash(jm, initial_data):
+def test_xpcshell_crash(initial_data):
     """Test from old log parser"""
     do_test(
         "xpcshell-crash"
     )
 
 
-def test_xpcshell_multiple(jm, initial_data):
+def test_xpcshell_multiple(initial_data):
     """Test from old log parser"""
     do_test(
         "xpcshell-multiple"
     )
 
 
-def test_xpcshell_timeout(jm, initial_data):
+def test_xpcshell_timeout(initial_data):
     """Test from old log parser"""
     do_test(
         "xpcshell-timeout"
     )
 
 @slow
-def test_check_errors_false(jm, initial_data, monkeypatch):
+def test_check_errors_false(initial_data, monkeypatch):
     """ensure that parse_line is not called on the error parser."""
 
     mock_pl = MagicMock(name="parse_line")

@@ -245,7 +245,7 @@ def test_check_errors_false(jm, initial_data, monkeypatch):
     )
     assert mock_pl.called is False
 
-def test_check_errors_true(jm, initial_data, monkeypatch):
+def test_check_errors_true(initial_data, monkeypatch):
     """ensure that parse_line is called on the error parser."""
 
     mock_pl = MagicMock(name="parse_line")

@@ -8,7 +8,7 @@ from treeherder.log_parser.parsers import ErrorParser
 
 
 @pytest.fixture
-def jobs_with_local_log(jm, initial_data):
+def jobs_with_local_log(initial_data):
     log = "mozilla-central_fedora-b2g_test-crashtest-1-bm54-tests1-linux-build50"
     sample_data = SampleData()
     url = "file://{0}".format(

@@ -48,6 +48,9 @@ def test_parse_log(jm, initial_data, jobs_with_local_log, sample_resultset, mock
         placeholders=[job_id]
     )
 
+    jm.disconnect()
+
     # we must have at least 2 artifacts: one for the log viewer and another one
     # for the job artifact panel
 
     assert len(job_artifacts) >= 2

@@ -17,6 +17,8 @@ def test_unicode(jm):
     """Unicode representation of a ``JobModel`` is the project name."""
     assert unicode(jm) == unicode(jm.project)
 
+    jm.disconnect()
+
 
 def test_disconnect(jm):
     """test that your model disconnects"""

@@ -45,6 +47,7 @@ def test_ingest_single_sample_job(jm, refdata, sample_data, initial_data,
     test_utils.do_job_ingestion(jm, refdata, job_data, sample_resultset)
 
     jm.disconnect()
+    refdata.disconnect()
 
 def test_ingest_all_sample_jobs(jm, refdata, sample_data, initial_data, sample_resultset, mock_log_parser):
     """

@@ -58,6 +61,7 @@ def test_ingest_all_sample_jobs(jm, refdata, sample_data, initial_data, sample_r
     test_utils.do_job_ingestion(jm, refdata, job_data, sample_resultset)
 
     jm.disconnect()
+    refdata.disconnect()
 
 def test_cycle_all_data(jm, refdata, sample_data, initial_data, sample_resultset, mock_log_parser):
     """

@@ -88,6 +92,7 @@ def test_cycle_all_data(jm, refdata, sample_data, initial_data, sample_resultset
     jobs_after = jm.get_dhub(jm.CT_JOBS).execute(proc="jobs_test.selects.jobs")
 
     jm.disconnect()
+    refdata.disconnect()
 
     assert len(jobs_before) == job_count

@@ -141,6 +146,7 @@ def test_cycle_one_job(jm, refdata, sample_data, initial_data, sample_resultset,
     assert len(jobs_count_after_delete) == 0
 
     jm.disconnect()
+    refdata.disconnect()
 
 def test_bad_date_value_ingestion(jm, initial_data, mock_log_parser):
     """

@@ -28,6 +28,9 @@ def test_claim_objects(jm, sample_data):
     loading_rows = jm.get_dhub(jm.CT_OBJECTSTORE).execute(
         proc="objectstore_test.counts.loading")[0]["loading_count"]
 
+    jm.disconnect()
+    jm2.disconnect()
+
     assert len(rows1) == 2
     # second worker asked for two rows but only got one that was left
     assert len(rows2) == 1

@@ -57,6 +60,8 @@ def test_mark_object_complete(jm):
     row_data = jm.get_dhub(jm.CT_OBJECTSTORE).execute(
         proc="objectstore_test.selects.row", placeholders=[row_id])[0]
 
+    jm.disconnect()
+
     assert row_data["revision_hash"] == revision_hash
     assert row_data["processed_state"] == "complete"

@@ -93,6 +98,8 @@ def test_process_objects(jm, initial_data, mock_log_parser):
     loading_count = jm.get_dhub(jm.CT_OBJECTSTORE).execute(
         proc="objectstore_test.counts.loading")[0]["loading_count"]
 
+    jm.disconnect()
+
     assert complete_count == 2
     assert loading_count == 0
     assert date_set.issubset(expected_dates)

@@ -107,6 +114,8 @@ def test_process_objects_unknown_error(jm):
 
     row_id = jm._get_last_insert_id("objectstore")
 
+    jm.disconnect()
+
     assert row_id == 0
     assert response == exp_resp

@@ -136,6 +145,8 @@ def test_ingest_sample_data(jm, sample_data, sample_resultset, mock_log_parser):
     loading_count = jm.get_os_dhub().execute(
         proc="objectstore_test.counts.loading")[0]["loading_count"]
 
+    jm.disconnect()
+
     assert complete_count == resultset_count
     assert loading_count == 0
     assert len(job_rows) == resultset_count

@@ -159,6 +170,8 @@ def test_objectstore_update_content(jm, sample_data):
         placeholders=[obj_updated["job"]["job_guid"]]
     )
 
+    jm.disconnect()
+
     # check that it didn't create a new object
     assert len(stored_objs) == 1

@@ -215,6 +215,8 @@ def test_refdata_manager(refdata, params):
     expected = getattr(refdata, params['func'])(params['input'])
     assert expected == params['expected']
 
+    refdata.disconnect()
+
 
 # some tests don't fit into a standard layout
 def test_reference_data_signatures(refdata):

@@ -243,6 +245,8 @@ def test_reference_data_signatures(refdata):
         proc='refdata_test.selects.test_reference_data_signatures'
     )
 
+    refdata.disconnect()
+
     for row, expected_signature in zip(row_data, expected_signatures):
         assert row['signature'] == expected_signature

@@ -292,6 +296,8 @@ def test_add_job_type(refdata):
         proc='refdata_test.selects.test_all_job_group_ids'
     )
 
+    refdata.disconnect()
+
     assert row_data == expected
 
 def test_get_or_create_repository_version(refdata, repository_id):

@@ -305,6 +311,8 @@ def test_get_or_create_repository_version(refdata, repository_id):
         return_type='iter'
     )
 
+    refdata.disconnect()
+
     assert row_data.get_column_data('repository_id') == repository_id
     assert row_data.get_column_data('version') == 'v1.0'
     assert row_data.get_column_data('version_timestamp') == 1367248930

@@ -325,12 +333,18 @@ def test_get_repository_info(refdata, repository_id):
         "repository_group_id": 1,
         "description": ""
     }
 
+    refdata.disconnect()
+
     for k, v in expected.items():
         assert info[k] == v
 
 
 def test_get_hg_repository_version(refdata, mock_urllib):
     version = refdata.get_hg_repository_version("https://hg.mozilla.org/mozilla-central")
 
+    refdata.disconnect()
+
     assert version == 'latest version'

@@ -344,6 +358,8 @@ def test_update_repo_version_if_old(refdata, old_version_repository, mock_urllib
 
     updated_version = refdata.get_repository_version_id(repo_id)
 
+    refdata.disconnect()
+
     assert old_version != updated_version

@@ -363,10 +379,13 @@ def test_update_repo_version_unchanged(refdata, latest_version_repository, mock_
         return_type='iter'
     )
 
+    refdata.disconnect()
+
     assert row_data.get_column_data('version') == 'latest version'
     assert row_data.get_column_data('version_timestamp') >= long(time_now)
 
 
 
 def test_update_repo_version_command(refdata, old_version_repository, initial_data, mock_urllib):
     """Test the django command extension
     update_repository_version without using filters"""

@@ -377,6 +396,8 @@ def test_update_repo_version_command(refdata, old_version_repository, initial_da
 
     updated_version = refdata.get_repository_version_id(repo_id)
 
+    refdata.disconnect()
+
     assert old_version < updated_version

@@ -393,6 +414,8 @@ def test_update_repo_version_command_with_filters(refdata, old_version_repositor
 
     updated_version = refdata.get_repository_version_id(repo_id)
 
+    refdata.disconnect()
+
     assert old_version < updated_version

@@ -430,6 +453,8 @@ def test_update_bugscache(refdata, sample_bugs):
         return_type='tuple'
     )
 
+    refdata.disconnect()
+
     assert len(bug_list) == len(row_data)

@@ -453,3 +478,5 @@ def test_get_bugscache(refdata, sample_bugs):
     for search_term in search_terms:
         suggestions = refdata.get_bug_suggestions(search_term)
         assert len(suggestions) >= 0
+
+    refdata.disconnect()

@@ -304,6 +304,7 @@ def verify_artifacts(jm, artifacts_ref):
         assert artifacts[key]['type'] == artifacts_ref[key]['type']
         assert json.loads(artifacts[key]['blob']) == artifacts_ref[key]['blob']
 
+
 def verify_coalesced(jm, coalesced_job_guids, coalesced_replacements):
 
     coalesced_job_guid_list = coalesced_job_guids.keys()

@@ -344,183 +345,6 @@ def load_exp(filename):
     return {}
 
 
-class SourceDictBuilder(object):
-    """Given a ``job_id``, rebuild the dictionary the source came from."""
-
-    def __init__(self, jm, job_guid):
-        self.jm = jm
-        self.job_guid = job_guid
-        job_data = self.jm.get_jobs_dhub().execute(
-            proc="jobs_test.selects.row_by_guid",
-            placeholders=[self.job_guid],
-            return_type="iter"
-        ).next()
-        self.job_id = job_data['id']
-
-    def as_dict(self):
-
-        source = self.jm.get_jobs_dhub().execute(
-            proc="jobs_test.selects.job_source",
-            placeholders=[self.job_id],
-            return_type="iter"
-        ).next()
-
-        source["repository"] = self._get_repository(
-            source["repository_id"])
-        del(source["repository_id"])
-
-        return unicode_keys(source)
-
-    def _get_repository(self, obj_id):
-        obj = self.jm.refdata_model.get_row_by_id(
-            "repository",
-            obj_id,
-        ).get_column_data("name")
-        return obj
-
-
-class JobDictBuilder(object):
-    """Given a ``job_id``, rebuild the dictionary the job came from."""
-
-    def __init__(self, jm, job_guid):
-        self.jm = jm
-        self.job_guid = job_guid
-        job_data = self.jm.get_jobs_dhub().execute(
-            proc="jobs_test.selects.row_by_guid",
-            placeholders=[self.job_guid],
-            return_type="iter"
-        ).next()
-        self.job_id = job_data['id']
-
-    def as_dict(self):
-        job = self.jm.get_job(self.job_id)
-
-        job["artifact"] = self._get_artifact()
-        job["log_references"] = self._get_logs()
-
-        job["option_collection"] = self._get_option_collection(
-            job["option_collection_hash"])
-        del(job["option_collection_hash"])
-
-        job["machine_platform"] = self._get_machine_platform(
-            job["machine_platform_id"])
-        del(job["machine_platform_id"])
-
-        job["build_platform"] = self._get_build_platform(
-            job["build_platform_id"])
-        del(job["build_platform_id"])
-
-        job["machine"] = self._get_machine(
-            job["machine_id"])
-        del(job["machine_id"])
-        del(job["machine_name"])
-
-        job["product_name"] = self._get_product(
-            job["product_id"])
-        del(job["product_id"])
-
-        job["name"] = self._get_name(
-            job["job_type_id"])
-        del(job["job_type_id"])
-
-        del(job["id"])
-        del(job["active_status"])
-        del(job["result_set_id"])
-
-        if not job["job_coalesced_to_guid"]:
-            del(job["job_coalesced_to_guid"])
-
-        return unicode_keys(job)
-
-    def _get_option_collection(self, option_collection_hash):
-        """
-        Needs to work with hash. Get row by id won't work anymore.
-        probably need to a new getter where it gets the option id
-        but the hash means there's possibly more than one option.
-        maybe I need mauro to make a splitter get method?
-        """
-        option_iter = self.jm.refdata_model.get_option_names(
-            option_collection_hash)
-        options = {}
-        for name_dict in option_iter:
-            options[name_dict["name"]] = True
-
-        return options
-
-    def _get_machine_platform(self, obj_id):
-        obj = self.jm.refdata_model.get_row_by_id(
-            "machine_platform",
-            obj_id,
-        ).next()
-        del(obj["active_status"])
-        del(obj["id"])
-        return unicode_keys(obj)
-
-    def _get_build_platform(self, obj_id):
-        obj = self.jm.refdata_model.get_row_by_id(
-            "build_platform",
-            obj_id,
-        ).next()
-        del(obj["active_status"])
-        del(obj["id"])
-        return unicode_keys(obj)
-
-    def _get_machine(self, obj_id):
-        obj = self.jm.refdata_model.get_row_by_id(
-            "machine",
-            obj_id,
-        ).get_column_data("name")
-        return obj
-
-    def _get_product(self, obj_id):
-        obj = self.jm.refdata_model.get_row_by_id(
-            "product",
-            obj_id,
-        ).get_column_data("name")
-        return obj
-
-    def _get_name(self, obj_id):
-        job_type = self.jm.refdata_model.get_row_by_id(
-            "job_type",
-            obj_id,
-        ).next()
-        return job_type['name']
-
-    def _get_logs(self):
-        logs = self.jm.get_jobs_dhub().execute(
-            proc="jobs_test.selects.job_log_urls",
-            placeholders=[self.job_id],
-            key_column="id",
-            return_type='dict',
-        )
-
-        log_values = []
-        for log in logs.values():
-            del(log["active_status"])
-            del(log["id"])
-            del(log["job_id"])
-            log_values.append(unicode_keys(log))
-
-        return log_values
-
-    def _get_artifact(self):
-        artifact = self.jm.get_jobs_dhub().execute(
-            proc="jobs_test.selects.job_artifact",
-            placeholders=[self.job_id],
-            key_column="id",
-            return_type='dict',
-        )
-        if not len(artifact):
-            artifact = {}
-        else:
-            artifact = artifact[self.job_id]
-            del(artifact["active_status"])
-            del(artifact["id"])
-            del(artifact["job_id"])
-
-        return unicode_keys(artifact)
-
-
 def unicode_keys(d):
     return dict([(unicode(k), v) for k, v in d.items()])

@@ -31,6 +31,8 @@ def test_artifact_detail(webapp, eleven_jobs_processed, sample_artifacts, jm):
         "name"
     ])
 
+    jm.disconnect()
+
 
 def test_artifact_detail_not_found(webapp, jm):
     """

@@ -44,6 +46,7 @@ def test_artifact_detail_not_found(webapp, jm):
     )
     assert resp.status_int == 404
 
+    jm.disconnect()
 
 def test_artifact_detail_bad_project(webapp, jm):
     """

@@ -58,3 +61,5 @@ def test_artifact_detail_bad_project(webapp, jm):
     assert resp.status_int == 404
     assert resp.json == {"detail": "No project with name foo"}
 
+    jm.disconnect()
+

@@ -25,6 +25,8 @@ def test_create_bug_job_map_no_auth(eleven_jobs_processed, jm):
 
     assert resp.status_code == 403
 
+    jm.disconnect()
+
 
 def test_create_bug_job_map(eleven_jobs_processed, mock_message_broker, jm):
     """

@@ -52,6 +54,8 @@ def test_create_bug_job_map(eleven_jobs_processed, mock_message_broker, jm):
 
     assert (bug_job_map_obj,) == jm.get_bug_job_map_list(0, 1)
 
+    jm.disconnect()
+
 
 def test_create_bug_job_map_dup(eleven_jobs_processed, mock_message_broker, jm):
     """

@@ -84,6 +88,7 @@ def test_create_bug_job_map_dup(eleven_jobs_processed, mock_message_broker, jm):
 
     assert (bug_job_map_obj,) == jm.get_bug_job_map_list(0, 1)
 
+    jm.disconnect()
 
 def test_bug_job_map_list(webapp, jm, eleven_jobs_processed):
     """

@@ -107,6 +112,8 @@ def test_bug_job_map_list(webapp, jm, eleven_jobs_processed):
     for i, v in enumerate(expected):
         assert v == resp.json[i]
 
+    jm.disconnect()
+
 def test_bug_job_map_detail(webapp, jm, eleven_jobs_processed):
     """
     test retrieving a list of bug_job_map

@@ -129,6 +136,7 @@ def test_bug_job_map_detail(webapp, jm, eleven_jobs_processed):
 
     assert resp.json == {"job_id": job_id, "bug_id": bug_id, "type": "manual"}
 
+    jm.disconnect()
 
 def test_bug_job_map_delete(webapp, eleven_jobs_processed,
                             jm, mock_message_broker):

@@ -160,6 +168,7 @@ def test_bug_job_map_delete(webapp, eleven_jobs_processed,
     content = json.loads(resp.content)
     assert content == {"message": "Bug job map deleted"}
 
+    jm.disconnect()
 
 def test_bug_job_map_delete_no_auth(jm, eleven_jobs_processed):
     """

@@ -184,3 +193,5 @@ def test_bug_job_map_delete_no_auth(jm, eleven_jobs_processed):
     )
 
     assert resp.status_code == 403
+
+    jm.disconnect()

@@ -104,6 +104,8 @@ def test_job_detail(webapp, eleven_jobs_processed, sample_artifacts, jm):
     assert isinstance(resp.json, dict)
     assert resp.json["id"] == job["id"]
 
+    jm.disconnect()
+
 
 def test_job_detail_bad_project(webapp, eleven_jobs_processed, jm):
     """

@@ -117,6 +119,7 @@ def test_job_detail_bad_project(webapp, eleven_jobs_processed, jm):
 
     webapp.get(badurl, status=404)
 
+    jm.disconnect()
 
 def test_job_detail_not_found(webapp, jm):
     """
@@ -17,6 +17,7 @@ def test_note_list(webapp, sample_notes, jm):
     assert isinstance(resp.json, list)
     note_list = resp.json
 
+
     assert set(note_list[0].keys()) == set([
         'note_timestamp',
         'job_id',
@@ -56,6 +57,8 @@ def test_note_list(webapp, sample_notes, jm):
         "act": note_list
     })
 
+    jm.disconnect()
+
 
 def test_note_detail(webapp, sample_notes, jm):
     """
@@ -83,6 +86,8 @@ def test_note_detail(webapp, sample_notes, jm):
         'id'
     ])
 
+    jm.disconnect()
+
 
 def test_note_detail_not_found(webapp, jm):
     """
@@ -96,6 +101,7 @@ def test_note_detail_not_found(webapp, jm):
     )
     assert resp.status_int == 404
 
+    jm.disconnect()
 
 def test_note_detail_bad_project(webapp, jm):
     """
@@ -110,6 +116,7 @@ def test_note_detail_bad_project(webapp, jm):
     assert resp.status_int == 404
     assert resp.json == {"detail": "No project with name foo"}
 
+    jm.disconnect()
 
 def test_create_note(webapp, eleven_jobs_processed, mock_message_broker, jm):
     """
@@ -149,6 +156,7 @@ def test_create_note(webapp, eleven_jobs_processed, mock_message_broker, jm):
         u'id': 1
     }
 
+    jm.disconnect()
 
 def test_create_note_no_auth(eleven_jobs_processed, jm):
     """
@@ -169,6 +177,8 @@ def test_create_note_no_auth(eleven_jobs_processed, jm):
 
     assert resp.status_code == 403
 
+    jm.disconnect()
+
 
 def test_delete_note(webapp, sample_notes, mock_message_broker, jm):
     """
     test creating a single note via endpoint
@@ -189,3 +199,5 @@ def test_delete_note(webapp, sample_notes, mock_message_broker, jm):
     assert resp.status_code == 200, resp
 
     assert len(new_notes) == len(notes)-1
+
+    jm.disconnect()
@@ -32,6 +32,8 @@ def test_objectstore_create(job_sample, jm):
 
     assert stored_objs[0]['job_guid'] == job_sample["job"]["job_guid"]
 
+    jm.disconnect()
+
 
 def test_objectstore_list(webapp, eleven_jobs_stored, jm):
     """
@@ -115,6 +115,7 @@ def test_resultset_list_empty_rs_still_show(webapp, initial_data,
     assert resp.status_int == 200
     assert len(resp.json['results']) == 10
 
+    jm.disconnect()
 
 def test_resultset_list_filter_by_revision(webapp, eleven_jobs_processed, jm):
     """
@@ -179,6 +180,8 @@ def test_resultset_list_filter_by_date(webapp, initial_data,
         u'startdate': u'2013-08-10'}
     )
 
+    jm.disconnect()
+
 
 def test_resultset_list_without_jobs(webapp, initial_data,
                                      sample_resultset, jm):
@@ -207,6 +210,7 @@ def test_resultset_list_without_jobs(webapp, initial_data,
         u'repository': u'test_treeherder'
     }
 
+    jm.disconnect()
 
 def test_resultset_detail(webapp, eleven_jobs_processed, jm):
     """
@@ -281,6 +285,8 @@ def test_resultset_create(sample_resultset, jm, initial_data):
     assert len(stored_objs) == 1
     assert stored_objs[0]['revision_hash'] == sample_resultset[0]['revision_hash']
 
+    jm.disconnect()
+
 
 def test_resultset_with_bad_secret(sample_resultset, jm, initial_data):
 
     trsc = TreeherderResultSetCollection()
@@ -73,6 +73,7 @@ def sample_artifacts(jm, sample_data):
 
     jm.load_job_data(jobs)
 
+
 @pytest.fixture
 def sample_notes(jm, sample_data, eleven_jobs_processed):
     """provide 11 jobs with job notes."""
@@ -87,3 +88,4 @@ def sample_notes(jm, sample_data, eleven_jobs_processed):
                 "kellyclarkson",
                 "you look like a man-o-lantern"
             )
+
@@ -418,6 +418,7 @@ class Datasource(models.Model):
             user=DB_USER,
             passwd=DB_PASS,
         )
+
         cur = conn.cursor()
         cur.execute("DROP DATABASE {0}".format(self.name))
         conn.close()
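This last hunk only inserts a blank line before the cursor work in Datasource, but the code it exposes is worth a note: conn.close() runs unconditionally after cur.execute(), so an exception from the DROP DATABASE statement would skip the close and leak the connection. A defensive variant of the same method body, sketched under the assumption that MySQLdb is the driver (the user/passwd arguments come from the hunk; the try/finally restructuring is illustrative, not part of this commit):

import MySQLdb

conn = MySQLdb.connect(
    user=DB_USER,
    passwd=DB_PASS,
)
try:
    cur = conn.cursor()
    # DDL such as DROP DATABASE cannot be parameterized, so self.name is
    # interpolated directly and must come from a trusted source.
    cur.execute("DROP DATABASE {0}".format(self.name))
finally:
    # Close the connection even if the DROP statement raises.
    conn.close()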