import copy
import datetime
import json
import os

import kombu
import pytest
import responses
from django.conf import settings
from requests import Request
from requests_hawk import HawkAuth
from webtest.app import TestApp

from treeherder.client import TreeherderClient
from treeherder.config.wsgi import application
from treeherder.model.derived.jobs import JobsModel


def pytest_addoption(parser):
    parser.addoption(
        "--runslow",
        action="store_true",
        help="run slow tests",
    )


def pytest_runtest_setup(item):
    """
    Per-test setup.

    - Add an option to run those tests marked as 'slow'
    - Provide cache isolation incrementing the cache key prefix
    - Drop and recreate tables in the master db
    """
    if 'slow' in item.keywords and not item.config.getoption("--runslow"):
        pytest.skip("need --runslow option to run")

    increment_cache_key_prefix()


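# Usage note (illustrative sketch, not used elsewhere in this file): a test opts
# in to the slow-test handling above by carrying the 'slow' keyword, e.g.
#
#     @pytest.mark.slow
#     def test_expensive_ingestion():
#         ...
#
# Such a test is skipped by pytest_runtest_setup() unless the suite is run with
# the --runslow option defined in pytest_addoption().

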
def increment_cache_key_prefix():
    """Increment a cache prefix to effectively clear the cache."""
    from django.core.cache import cache
    cache.key_prefix = ""
    prefix_counter_cache_key = "treeherder-tests-key-prefix-counter"
    try:
        key_prefix_counter = cache.incr(prefix_counter_cache_key)
    except ValueError:
        key_prefix_counter = 0
        cache.set(prefix_counter_cache_key, key_prefix_counter)
    cache.key_prefix = "t{0}".format(key_prefix_counter)


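# Note on the isolation this provides (descriptive sketch, assuming Django's
# default key function is in use): cache.set("foo", 1) stores the value under a
# key built from (key_prefix, version, "foo"), so once the prefix is bumped from
# e.g. "t3" to "t4", lookups in the next test no longer see entries written by
# the previous one; the old entries simply become unreachable rather than being
# deleted.

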
@pytest.fixture
def jobs_ds(request, transactional_db):
    from treeherder.model.models import Datasource
    ds = Datasource.objects.create(project=settings.TREEHERDER_TEST_PROJECT)

    def fin():
        ds.delete()
    request.addfinalizer(fin)

    return ds


@pytest.fixture
def jm(request, test_repository, jobs_ds):
    """Give a test access to a JobsModel instance."""
    model = JobsModel(jobs_ds.project)

    # patch in additional test-only procs on the datasources
    add_test_procs_file(
        model.get_dhub(),
        model.get_datasource().key,
        "jobs_test.json",
    )

    def fin():
        model.disconnect()
    request.addfinalizer(fin)

    return model


def add_test_procs_file(dhub, key, filename):
    """Add an extra procs file in for testing purposes."""
    test_proc_file = os.path.join(
        os.path.abspath(os.path.dirname(__file__)),
        filename,
    )
    del dhub.procs[key]
    proclist = dhub.data_sources[key]["procs"]
    if test_proc_file not in proclist:
        proclist.append(test_proc_file)
    dhub.data_sources[key]["procs"] = proclist
    dhub.load_procs(key)


@pytest.fixture(scope='session')
def sample_data():
    """Returns a SampleData() object"""
    from .sampledata import SampleData
    return SampleData()


@pytest.fixture(scope='session')
def test_base_dir():
    return os.path.dirname(__file__)


@pytest.fixture
def sample_resultset(sample_data):
    return copy.deepcopy(sample_data.resultset_data)


@pytest.fixture
def test_project(jm):
    return jm.project


@pytest.fixture
def test_repository(transactional_db):
    from treeherder.model.models import Repository, RepositoryGroup

    RepositoryGroup.objects.create(
        name="development",
        description=""
    )

    r = Repository.objects.create(
        dvcs_type="hg",
        name=settings.TREEHERDER_TEST_PROJECT,
        url="https://hg.mozilla.org/mozilla-central",
        active_status="active",
        codebase="gecko",
        repository_group_id=1,
        description="",
        performance_alerts_enabled=True
    )
    return r


@pytest.fixture
def mock_log_parser(monkeypatch):
    from celery import task

    from treeherder.log_parser import tasks

    @task
    def task_mock(*args, **kwargs):
        pass

    monkeypatch.setattr(tasks, 'parse_log', task_mock)


@pytest.fixture
def result_set_stored(jm, sample_resultset):
    jm.store_result_set_data(sample_resultset)

    return sample_resultset


@pytest.fixture(scope='function')
def mock_get_resultset(monkeypatch, result_set_stored):
    from treeherder.etl import common

    def _get_resultset(params):
        for k in params:
            rev = params[k][0]
            params[k] = {
                rev: {
                    'id': 1,
                    'revision': result_set_stored[0]['revision']
                }
            }
        return params

    monkeypatch.setattr(common, 'lookup_revisions', _get_resultset)


@pytest.fixture
def mock_message_broker(monkeypatch):
    from django.conf import settings
    monkeypatch.setattr(settings, 'BROKER_URL', 'memory://')


@pytest.fixture
def resultset_with_three_jobs(jm, sample_data, sample_resultset, test_repository):
    """Stores three jobs against the same resultset."""
    num_jobs = 3
    resultset = sample_resultset[0]
    jobs = sample_data.job_data[0:num_jobs]

    # Only store data for the first resultset.
    resultset_creation = jm.store_result_set_data([resultset])

    blobs = []
    for blob in jobs:
        # Modify the job structure to sync with the resultset sample data.
        if 'sources' in blob:
            del blob['sources']

        # Skip log references since they do not work correctly in pending state.
        if 'log_references' in blob['job']:
            del blob['job']['log_references']

        blob['revision'] = resultset['revision']
        blob['job']['state'] = 'pending'
        blobs.append(blob)

    # Store and process the jobs so they are present in the tables.
    jm.store_job_data(blobs)
    return resultset_creation['inserted_result_set_ids'][0]


@pytest.fixture
def eleven_jobs_stored(jm, sample_data, sample_resultset, test_repository, mock_log_parser):
    """Stores a list of 11 job samples."""
    jm.store_result_set_data(sample_resultset)

    num_jobs = 11
    jobs = sample_data.job_data[0:num_jobs]

    max_index = len(sample_resultset) - 1
    resultset_index = 0

    blobs = []
    for blob in jobs:
        if resultset_index > max_index:
            resultset_index = 0

        # Modify the job structure to sync with the resultset sample data.
        if 'sources' in blob:
            del blob['sources']

        blob['revision'] = sample_resultset[resultset_index]['revision']

        blobs.append(blob)

        resultset_index += 1

    jm.store_job_data(blobs)


@pytest.fixture
def mock_post_json(monkeypatch, client_credentials):
    def _post_json(th_client, project, endpoint, data):
        auth = th_client.session.auth
        if not auth:
            auth = HawkAuth(id=client_credentials.client_id,
                            key=str(client_credentials.secret))
        app = TestApp(application)
        url = th_client._get_endpoint_url(endpoint, project=project)
        req = Request('POST', url, json=data, auth=auth)
        prepped_request = req.prepare()

        return app.post(
            prepped_request.url,
            params=json.dumps(data),
            content_type='application/json',
            extra_environ={
                'HTTP_AUTHORIZATION': str(prepped_request.headers['Authorization'])
            }
        )

    monkeypatch.setattr(TreeherderClient, '_post_json', _post_json)


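# Note (illustrative, not part of the fixture itself): because the patch above
# replaces TreeherderClient._post_json at the class level, any higher-level
# client call a test makes that reaches _post_json(project, endpoint, data) is
# served by the in-process WSGI TestApp rather than by a live server, with Hawk
# credentials taken from the client_credentials fixture when the client has
# none of its own.

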
@pytest.fixture
def mock_fetch_json(monkeypatch):
    def _fetch_json(url, params=None):
        response = TestApp(application).get(url, params=params, status=200)
        return response.json

    import treeherder.etl.common
    monkeypatch.setattr(treeherder.etl.common, 'fetch_json', _fetch_json)


@pytest.fixture
def activate_responses(request):
    responses.start()

    def fin():
        responses.reset()
        responses.stop()

    request.addfinalizer(fin)


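# Illustrative sketch (the URL and payload are assumptions, not taken from this
# file): a test depending on activate_responses can register canned HTTP
# responses before exercising code that performs real requests, e.g.
#
#     def test_fetch(activate_responses):
#         responses.add(responses.GET, "http://example.com/api",
#                       json={"result": "ok"}, status=200)
#         ...  # the code under test now receives the canned payload

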
def pulse_consumer(exchange, request):
    exchange_name = 'exchange/{}/v1/{}'.format(
        settings.PULSE_EXCHANGE_NAMESPACE,
        exchange
    )

    connection = kombu.Connection(settings.PULSE_URI)

    exchange = kombu.Exchange(
        name=exchange_name,
        type='topic'
    )

    queue = kombu.Queue(
        no_ack=True,
        exchange=exchange,    # Bind to this exchange
        routing_key='#',      # Receive all messages
        auto_delete=True,     # Delete the queue after each test
        exclusive=False)      # Allow multiple consumers

    simpleQueue = connection.SimpleQueue(
        name=queue,
        channel=connection,
        no_ack=True)

    def fin():
        connection.release()
    request.addfinalizer(fin)
    return simpleQueue


@pytest.fixture
def pulse_action_consumer(request):
    return pulse_consumer('job-actions', request)


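# Illustrative sketch (the timeout value is an assumption): a test can drain the
# Pulse queue returned by pulse_action_consumer using kombu's SimpleQueue API,
# e.g.
#
#     def test_job_action_published(pulse_action_consumer):
#         ...  # trigger the action under test
#         message = pulse_action_consumer.get(block=True, timeout=2)
#         payload = message.payload  # body of the published job-action message

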
@pytest.fixture
def mock_error_summary(monkeypatch):
    bs_obj = ["foo", "bar"]

    from treeherder.model import error_summary

    def _get_error_summary(params):
        return bs_obj

    monkeypatch.setattr(error_summary, "get_error_summary", _get_error_summary)

    return bs_obj


@pytest.fixture
def failure_lines(jm, test_repository, eleven_jobs_stored):
    from tests.autoclassify.utils import test_line, create_failure_lines

    test_repository.save()

    job = jm.get_job(1)[0]
    return create_failure_lines(test_repository,
                                job["job_guid"],
                                [(test_line, {}),
                                 (test_line, {"subtest": "subtest2"})])


@pytest.fixture
def failure_classifications():
    from treeherder.model.models import FailureClassification
    for name in ["not classified", "fixed by commit", "expected fail",
                 "intermittent", "infra", "intermittent needs filing",
                 "autoclassified intermittent"]:
        FailureClassification(name=name).save()


@pytest.fixture
def test_matcher(request):
    from treeherder.autoclassify import detectors
    from treeherder.model.models import MatcherManager

    class TreeherderUnitTestDetector(detectors.Detector):
        def __call__(self, failure_lines):
            return True

    MatcherManager._detector_funcs = {}
    MatcherManager._matcher_funcs = {}
    test_matcher = MatcherManager.register_detector(TreeherderUnitTestDetector)

    def finalize():
        MatcherManager._detector_funcs = {}
        MatcherManager._matcher_funcs = {}
    request.addfinalizer(finalize)

    return test_matcher


@pytest.fixture
def classified_failures(request, jm, eleven_jobs_stored, failure_lines,
                        test_matcher, failure_classifications):
    from treeherder.model.models import ClassifiedFailure

    job_1 = jm.get_job(1)[0]

    classified_failures = []

    for failure_line in failure_lines:
        if failure_line.job_guid == job_1["job_guid"]:
            classified_failure = ClassifiedFailure()
            classified_failure.save()
            failure_line.set_classification(test_matcher.db_object, classified_failure,
                                            mark_best=True)
            classified_failures.append(classified_failure)

    return classified_failures


@pytest.fixture
def retriggers(jm, eleven_jobs_stored):
    original = jm.get_job(2)[0]
    retrigger = original.copy()
    retrigger['job_guid'] = "f1c75261017c7c5ce3000931dce4c442fe0a1298"

    jm.execute(proc="jobs_test.inserts.duplicate_job",
               placeholders=[retrigger['job_guid'], original['job_guid']])

    return [retrigger]


@pytest.fixture
def test_user(request, transactional_db):
    # A user *without* sheriff/staff permissions.
    from django.contrib.auth.models import User
    user = User.objects.create(username="testuser1",
                               email='user@foo.com',
                               is_staff=False)

    def fin():
        user.delete()
    request.addfinalizer(fin)

    return user


@pytest.fixture
def test_sheriff(request, transactional_db):
    # A user *with* sheriff/staff permissions.
    from django.contrib.auth.models import User

    user = User.objects.create(username="testsheriff1",
                               email='sheriff@foo.com',
                               is_staff=True)

    def fin():
        user.delete()
    request.addfinalizer(fin)

    return user


@pytest.fixture
def client_credentials(request, test_user):
    from django.conf import settings

    from treeherder.credentials.models import Credentials

    # We need to use get_or_create here because of bug 1133273.
    # It can be a straight create once that bug is solved.
    client_credentials, _ = Credentials.objects.get_or_create(
        client_id=settings.ETL_CLIENT_ID,
        defaults={'owner': test_user, 'authorized': True}
    )

    def fin():
        client_credentials.delete()
    request.addfinalizer(fin)

    return client_credentials


@pytest.fixture
def test_perf_framework(transactional_db):
    from treeherder.perf.models import PerformanceFramework
    return PerformanceFramework.objects.create(
        name='test_talos')


@pytest.fixture
def test_perf_signature(test_repository, test_perf_framework):
    from treeherder.model.models import (MachinePlatform,
                                         Option,
                                         OptionCollection)
    from treeherder.perf.models import PerformanceSignature

    option = Option.objects.create(name='opt')
    option_collection = OptionCollection.objects.create(
        option_collection_hash='my_option_hash',
        option=option)
    platform = MachinePlatform.objects.create(
        os_name='win',
        platform='win7',
        architecture='x86',
        active_status='active')

    signature = PerformanceSignature.objects.create(
        repository=test_repository,
        signature_hash=(40 * 't'),  # placeholder 40-character hash
        framework=test_perf_framework,
        platform=platform,
        option_collection=option_collection,
        suite='mysuite',
        test='mytest',
        has_subtests=False,
        last_updated=datetime.datetime.now()
    )
    return signature


@pytest.fixture
def mock_autoclassify_jobs_true(monkeypatch):
    from django.conf import settings
    monkeypatch.setattr(settings, 'AUTOCLASSIFY_JOBS', True)


@pytest.fixture
def mock_bugzilla_api_request(monkeypatch):
    """Mock fetch_json() used by Bugzilla ETL to return a local sample file."""
    import treeherder.etl.bugzilla

    def _fetch_json(url, params=None):
        tests_folder = os.path.dirname(__file__)
        bug_list_path = os.path.join(
            tests_folder,
            "sample_data",
            "bug_list.json"
        )
        with open(bug_list_path) as f:
            return json.load(f)

    monkeypatch.setattr(treeherder.etl.bugzilla,
                        'fetch_json',
                        _fetch_json)


@pytest.fixture
def bugs(mock_bugzilla_api_request):
    from treeherder.etl.bugzilla import BzApiBugProcess
    from treeherder.model.models import Bugscache

    process = BzApiBugProcess()
    process.run()

    return Bugscache.objects.all()


@pytest.fixture
def artifacts(jm, failure_lines, test_repository):
    from treeherder.model.models import FailureLine

    from autoclassify.utils import create_summary_lines_failures, create_bug_suggestions_failures

    job = jm.get_job(1)[0]

    lines = [(item, {}) for item in FailureLine.objects.filter(job_guid=job["job_guid"]).values()]

    summary_lines = create_summary_lines_failures(test_repository.name, job, lines)
    bug_suggestions = create_bug_suggestions_failures(test_repository.name, job, lines)

    return summary_lines, bug_suggestions


@pytest.fixture
def text_summary_lines(jm, failure_lines, test_repository, artifacts):
    from treeherder.model.models import TextLogSummary, TextLogSummaryLine

    job = jm.get_job(1)[0]

    summary = TextLogSummary(
        job_guid=job["job_guid"],
        repository=test_repository,
        text_log_summary_artifact_id=artifacts[0]["id"],
        bug_suggestions_artifact_id=artifacts[1]["id"]
    )
    summary.save()

    summary_lines = []
    for line in failure_lines:
        summary_line = TextLogSummaryLine(
            summary=summary,
            line_number=line.line,
            failure_line=line)
        summary_line.save()
        summary_lines.append(summary_line)

    return summary_lines


@pytest.fixture
def test_perf_alert_summary(test_repository, test_perf_framework):
    from treeherder.perf.models import PerformanceAlertSummary
    return PerformanceAlertSummary.objects.create(
        repository=test_repository,
        framework=test_perf_framework,
        prev_result_set_id=0,
        result_set_id=1,
        manually_created=False,
        last_updated=datetime.datetime.now())


@pytest.fixture
def test_perf_alert(test_perf_signature, test_perf_alert_summary):
    from treeherder.perf.models import PerformanceAlert
    return PerformanceAlert.objects.create(
        summary=test_perf_alert_summary,
        series_signature=test_perf_signature,
        is_regression=True,
        amount_pct=0.5,
        amount_abs=50.0,
        prev_value=100.0,
        new_value=150.0,
        t_value=20.0)