2016-03-03 19:46:47 +03:00
|
|
|
import copy
|
2016-03-09 23:07:13 +03:00
|
|
|
import datetime
|
2015-08-10 01:02:37 +03:00
|
|
|
import json
|
2013-03-12 00:01:17 +04:00
|
|
|
import os
|
2020-02-05 22:55:03 +03:00
|
|
|
import platform
|
2020-05-20 18:15:48 +03:00
|
|
|
from os.path import join, dirname
|
2021-03-08 22:46:07 +03:00
|
|
|
from unittest.mock import MagicMock
|
2014-07-03 17:56:45 +04:00
|
|
|
|
2015-02-14 12:27:13 +03:00
|
|
|
import kombu
|
2013-03-19 23:03:26 +04:00
|
|
|
import pytest
|
2015-08-10 01:02:37 +03:00
|
|
|
import responses
|
2017-06-09 20:28:30 +03:00
|
|
|
from _pytest.monkeypatch import MonkeyPatch
|
2015-09-15 19:13:08 +03:00
|
|
|
from django.conf import settings
|
2021-09-13 13:43:45 +03:00
|
|
|
from django.core.management import call_command
|
2018-04-12 20:06:53 +03:00
|
|
|
from rest_framework.test import APIClient
|
2014-07-03 17:56:45 +04:00
|
|
|
|
2022-08-08 17:40:05 +03:00
|
|
|
from tests.autoclassify.utils import test_line, create_failure_lines, create_text_log_errors
|
|
|
|
import treeherder.etl.bugzilla
|
2017-01-06 01:55:33 +03:00
|
|
|
from treeherder.etl.jobs import store_job_data
|
2017-08-04 19:38:57 +03:00
|
|
|
from treeherder.etl.push import store_push_data
|
2022-08-08 17:40:05 +03:00
|
|
|
from treeherder.model import models as th_models
|
|
|
|
from treeherder.perf import models as perf_models
|
2021-03-08 22:46:07 +03:00
|
|
|
from treeherder.services import taskcluster
|
2018-08-17 10:43:06 +03:00
|
|
|
from treeherder.services.pulse.exchange import get_exchange
|
2022-08-08 17:40:05 +03:00
|
|
|
from treeherder.webapp.api import perfcompare_utils
|
2014-07-03 17:56:45 +04:00
|
|
|
|
2020-02-05 22:55:03 +03:00
|
|
|
# True when the test suite itself is running on Windows; used to skip
# platform-specific tests.
IS_WINDOWS = "windows" in platform.system().lower()

# Absolute path to the bundled fixture files (loaddata JSON, sample jobs, etc.).
SAMPLE_DATA_PATH = join(dirname(__file__), 'sample_data')
|
2020-02-05 22:55:03 +03:00
|
|
|
|
2013-03-12 00:01:17 +04:00
|
|
|
|
2013-04-26 22:18:18 +04:00
|
|
|
def pytest_addoption(parser):
    """Register the treeherder-specific ``--runslow`` command line option."""
    parser.addoption("--runslow", action="store_true", help="run slow tests")
|
|
|
|
|
2013-03-20 17:59:48 +04:00
|
|
|
|
2013-03-12 00:01:17 +04:00
|
|
|
def pytest_runtest_setup(item):
    """
    Per-test setup.

    - Add an option to run those tests marked as 'slow'
    - Clear the django cache between runs
    """
    if 'slow' in item.keywords and not item.config.getoption("--runslow"):
        pytest.skip("need --runslow option to run")

    # Imported lazily so django settings are configured before first access.
    from django.core.cache import cache

    cache.clear()
|
2013-04-19 03:23:58 +04:00
|
|
|
|
2013-05-01 14:50:55 +04:00
|
|
|
|
2021-09-13 13:43:45 +03:00
|
|
|
@pytest.fixture
def setup_repository_data(django_db_setup, django_db_blocker):
    """Load the repository group + repository fixtures into the test database."""
    for fixture_file in ('repository_group.json', 'repository.json'):
        # Each loaddata call gets its own unblock, mirroring the original
        # two-step setup (group must exist before the repository rows).
        with django_db_blocker.unblock():
            call_command('loaddata', join(SAMPLE_DATA_PATH, fixture_file))
|
|
|
|
|
|
|
|
|
2017-06-09 20:28:30 +03:00
|
|
|
@pytest.fixture(scope="session", autouse=True)
def block_unmocked_requests():
    """
    Prevents requests from being made unless they are mocked.

    Helps avoid inadvertent dependencies on external resources during the test run.
    """

    def raise_on_send(*args, **kwargs):
        raise RuntimeError('Tests must mock all HTTP requests!')

    # The standard monkeypatch fixture cannot be used with session scope:
    # https://github.com/pytest-dev/pytest/issues/363
    patcher = MonkeyPatch()
    # Monkeypatching here since any higher level would break responses:
    # https://github.com/getsentry/responses/blob/0.5.1/responses.py#L295
    patcher.setattr('requests.adapters.HTTPAdapter.send', raise_on_send)

    yield patcher

    patcher.undo()
|
|
|
|
|
|
|
|
|
2017-01-21 00:08:04 +03:00
|
|
|
@pytest.fixture
def sample_data():
    """Return a fresh SampleData() object for each test."""
    # Imported locally to avoid import-time side effects at collection.
    from .sampledata import SampleData

    return SampleData()
|
2013-09-04 18:38:59 +04:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope='session')
def test_base_dir():
    """Absolute path of the directory containing the test suite (session-wide)."""
    return os.path.dirname(__file__)
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def sample_push(sample_data):
    """A deep copy of the sample push data, safe for per-test mutation."""
    push_data = sample_data.push_data
    return copy.deepcopy(push_data)
|
2013-09-16 17:58:03 +04:00
|
|
|
|
|
|
|
|
2018-04-06 18:49:17 +03:00
|
|
|
@pytest.fixture(name='create_push')
def fixture_create_push():
    """Return a function to create a push"""

    def create(
        repository,
        revision='4c45a777949168d16c03a4cba167678b7ab65f76',
        author='foo@bar.com',
        time=None,
        explicit_id=None,
    ):
        # Timestamp defaults to "now" so successive pushes are time-ordered.
        return th_models.Push.objects.create(
            id=explicit_id,
            repository=repository,
            revision=revision,
            author=author,
            time=time or datetime.datetime.now(),
        )

    return create
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(name='create_commit')
def fixture_create_commit():
    """Return a function to create a commit"""

    def create(push, comments='Bug 12345 - This is a message'):
        # Revision/author mirror the push so the commit matches its push.
        return th_models.Commit.objects.create(
            push=push,
            revision=push.revision,
            author=push.author,
            comments=comments,
        )

    return create
|
|
|
|
|
|
|
|
|
2022-07-07 17:57:30 +03:00
|
|
|
@pytest.fixture(name='create_signature')
def fixture_create_signature():
    """Returns a function to create a signature"""

    def create(
        signature_hash,
        extra_options,
        platform,
        measurement_unit,
        suite,
        test,
        test_perf_signature,
        repository,
    ):
        # Framework, option collection and has_subtests are inherited from
        # the reference signature; everything else comes from the caller.
        return perf_models.PerformanceSignature.objects.create(
            repository=repository,
            signature_hash=signature_hash,
            framework=test_perf_signature.framework,
            option_collection=test_perf_signature.option_collection,
            has_subtests=test_perf_signature.has_subtests,
            platform=platform,
            suite=suite,
            test=test,
            extra_options=extra_options,
            measurement_unit=measurement_unit,
            last_updated=datetime.datetime.now(),
        )

    return create
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(name='create_perf_datum')
def fixture_create_perf_datum():
    """Returns a function to create a performance datum"""

    def create(index, job, push, sig, sig_values):
        # Attach the job to the requested push before recording the datum.
        job.push = push
        job.save()
        datum = perf_models.PerformanceDatum.objects.create(
            value=sig_values[index],
            push_timestamp=job.push.time,
            job=job,
            push=job.push,
            repository=job.repository,
            signature=sig,
        )
        # Keep the push's timestamp in sync with the job's push time.
        datum.push.time = job.push.time
        datum.push.save()
        return datum

    return create
|
|
|
|
|
|
|
|
|
2013-09-16 17:58:03 +04:00
|
|
|
@pytest.fixture
def test_repository(django_db_reset_sequences):
    """The main mozilla-central-like repository used by most tests."""
    th_models.RepositoryGroup.objects.get_or_create(name="development", description="")

    repo_fields = dict(
        dvcs_type="hg",
        name=settings.TREEHERDER_TEST_REPOSITORY_NAME,
        url="https://hg.mozilla.org/mozilla-central",
        active_status="active",
        codebase="gecko",
        repository_group_id=1,
        description="",
        performance_alerts_enabled=True,
        tc_root_url="https://firefox-ci-tc.services.mozilla.com",
    )
    return th_models.Repository.objects.create(**repo_fields)
|
2013-09-16 17:58:03 +04:00
|
|
|
|
|
|
|
|
2020-10-16 03:14:47 +03:00
|
|
|
@pytest.fixture
def try_repository(transactional_db):
    """A repository flagged as a try repo (``is_try_repo=True``)."""
    repo_group, _ = th_models.RepositoryGroup.objects.get_or_create(
        name="development", description=""
    )
    return th_models.Repository.objects.create(
        id=4,
        dvcs_type="hg",
        name="try",
        url="https://hg.mozilla.org/try",
        active_status="active",
        codebase="gecko",
        repository_group_id=repo_group.id,
        description="",
        is_try_repo=True,
        tc_root_url="https://firefox-ci-tc.services.mozilla.com",
    )
|
|
|
|
|
|
|
|
|
2020-10-28 09:40:42 +03:00
|
|
|
@pytest.fixture
def relevant_repository(transactional_db):
    """A second hg repository used by "relevant repository" test scenarios."""
    repo_group, _ = th_models.RepositoryGroup.objects.get_or_create(
        name="development", description=""
    )
    return th_models.Repository.objects.create(
        dvcs_type="hg",
        name="relevant_repository",
        url="https://hg.mozilla.org/try",
        active_status="active",
        codebase="gecko",
        repository_group_id=repo_group.id,
        description="",
        tc_root_url="https://firefox-ci-tc.services.mozilla.com",
    )
|
|
|
|
|
|
|
|
|
2018-05-23 18:13:51 +03:00
|
|
|
@pytest.fixture
def test_issue_tracker(transactional_db):
    """A Bugzilla issue tracker row for perf alert tests."""
    return perf_models.IssueTracker.objects.create(
        name="Bugzilla",
        task_base_url="https://bugzilla.mozilla.org/show_bug.cgi?id=",
    )
|
|
|
|
|
|
|
|
|
2016-12-29 22:34:04 +03:00
|
|
|
@pytest.fixture
def test_repository_2(test_repository):
    """A sibling repository cloned from test_repository with '_2'-suffixed name/url."""
    base = test_repository
    return th_models.Repository.objects.create(
        repository_group=base.repository_group,
        name=base.name + '_2',
        dvcs_type=base.dvcs_type,
        url=base.url + '_2',
        codebase=base.codebase,
    )
|
2016-12-29 22:34:04 +03:00
|
|
|
|
|
|
|
|
2018-01-05 16:50:12 +03:00
|
|
|
@pytest.fixture
def test_push(create_push, test_repository):
    """A push on the default test repository (default revision/author)."""
    return create_push(test_repository)
|
2018-01-05 16:50:12 +03:00
|
|
|
|
|
|
|
|
2022-08-08 17:40:05 +03:00
|
|
|
@pytest.fixture
def test_perfcomp_push(create_push, test_repository):
    """Base push used by perfcompare tests (fixed short revision)."""
    return create_push(test_repository, '1377267c6dc1')
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def test_perfcomp_push_2(create_push, test_repository):
    """Comparison push used by perfcompare tests (fixed short revision)."""
    return create_push(test_repository, '08038e535f58')
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def test_linux_platform():
    """A Linux 18.04 shippable-qr machine platform row."""
    return th_models.MachinePlatform.objects.create(
        os_name='-',
        platform='linux1804-64-shippable-qr',
        architecture='-',
    )
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def test_macosx_platform():
    """A macOS 10.15 shippable-qr machine platform row."""
    return th_models.MachinePlatform.objects.create(
        os_name='',
        platform='macosx1015-64-shippable-qr',
        architecture='',
    )
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def test_option_collection():
    """The option-collection-hash -> option-names map used by perfcompare."""
    return perfcompare_utils.get_option_collection_map()
|
|
|
|
|
|
|
|
|
2018-01-05 16:50:12 +03:00
|
|
|
@pytest.fixture
def test_commit(create_commit, test_push):
    """A commit attached to test_push (default comment text)."""
    return create_commit(test_push)
|
2018-01-05 16:50:12 +03:00
|
|
|
|
|
|
|
|
2018-04-06 20:26:01 +03:00
|
|
|
@pytest.fixture(name='create_jobs')
def fixture_create_jobs(test_repository, failure_classifications):
    """Return a function to create jobs"""

    def create(jobs):
        store_job_data(test_repository, jobs)
        # Jobs receive sequential ids starting at 1; fetch them in insert order.
        job_ids = range(1, len(jobs) + 1)
        return [th_models.Job.objects.get(id=job_id) for job_id in job_ids]

    return create
|
2016-09-23 21:51:57 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def test_job(eleven_job_blobs, create_jobs):
    """A single stored job with fixed taskcluster metadata."""
    blob = eleven_job_blobs[0]
    blob['job']['taskcluster_task_id'] = 'V3SVuxO8TFy37En_6HcXLs'
    blob['job']['taskcluster_retry_id'] = '0'
    return create_jobs([blob])[0]
|
2016-09-23 21:51:57 +03:00
|
|
|
|
|
|
|
|
2022-03-23 19:20:44 +03:00
|
|
|
@pytest.fixture
def test_two_jobs_tc_metadata(eleven_job_blobs, create_jobs):
    """Two stored jobs, each with a distinct taskcluster task id."""
    task_ids = ('V3SVuxO8TFy37En_6HcXLs', 'FJtjczXfTAGClIl6wNBo9g')
    blobs = eleven_job_blobs[0:2]
    for blob, task_id in zip(blobs, task_ids):
        blob['job'].update(
            {'taskcluster_task_id': task_id, 'taskcluster_retry_id': '0'}
        )
    return create_jobs(blobs)
|
|
|
|
|
|
|
|
|
2018-04-06 20:26:01 +03:00
|
|
|
@pytest.fixture
def test_job_2(eleven_job_blobs, create_jobs):
    """The second of the first two stored sample jobs."""
    return create_jobs(eleven_job_blobs[0:2])[1]
|
2016-09-23 21:51:57 +03:00
|
|
|
|
|
|
|
|
2013-09-16 17:58:03 +04:00
|
|
|
@pytest.fixture
def mock_log_parser(monkeypatch):
    """Replace the celery log-parsing task with a no-op so jobs store instantly."""
    from celery import shared_task

    from treeherder.log_parser import tasks

    @shared_task
    def task_mock(*args, **kwargs):
        # Intentionally does nothing — log parsing is out of scope here.
        pass

    monkeypatch.setattr(tasks, 'parse_logs', task_mock)
|
2013-09-16 17:58:03 +04:00
|
|
|
|
|
|
|
|
2020-12-09 16:28:51 +03:00
|
|
|
@pytest.fixture
def taskcluster_notify_mock(monkeypatch):
    """Replace the taskcluster notify client factory with a shared MagicMock."""
    notify_mock = MagicMock()

    def fake_factory(*arg, **kwargs):
        # Every factory call yields the same mock so tests can assert on it.
        return notify_mock

    monkeypatch.setattr(taskcluster, 'notify_client_factory', fake_factory)
    return notify_mock
|
2020-12-09 16:28:51 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def mock_tc_prod_backfill_credentials(monkeypatch):
    """Provide fake production credentials for the perf sheriff bot."""
    fake_credentials = {
        'PERF_SHERIFF_BOT_CLIENT_ID': "client_id",
        'PERF_SHERIFF_BOT_ACCESS_TOKEN': "access_token",
    }
    for setting_name, value in fake_credentials.items():
        monkeypatch.setattr(settings, setting_name, value)
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def mock_tc_prod_notify_credentials(monkeypatch):
    """Provide fake production credentials for the taskcluster notify client."""
    fake_credentials = {
        'NOTIFY_CLIENT_ID': "client_id",
        'NOTIFY_ACCESS_TOKEN': "access_token",
    }
    for setting_name, value in fake_credentials.items():
        monkeypatch.setattr(settings, setting_name, value)
|
|
|
|
|
|
|
|
|
2013-09-16 17:58:03 +04:00
|
|
|
@pytest.fixture
def push_stored(test_repository, sample_push):
    """Store the sample pushes in the test repository; returns the push data."""
    store_push_data(test_repository, sample_push)
    return sample_push
|
2013-09-16 17:58:03 +04:00
|
|
|
|
|
|
|
|
2020-10-16 03:14:47 +03:00
|
|
|
@pytest.fixture
def try_push_stored(try_repository, sample_push):
    """Store the sample pushes in the try repository; returns the push data."""
    store_push_data(try_repository, sample_push)
    return sample_push
|
|
|
|
|
|
|
|
|
2014-06-03 20:46:20 +04:00
|
|
|
@pytest.fixture
def eleven_job_blobs(sample_data, sample_push, test_repository, mock_log_parser):
    """Eleven sample job blobs wired up to the stored sample pushes."""
    store_push_data(test_repository, sample_push)

    num_jobs = 11
    jobs = sample_data.job_data[0:num_jobs]

    push_count = len(sample_push)

    blobs = []
    for task_id_index, blob in enumerate(jobs):
        # Cycle through the available pushes (equivalent to the old manual
        # wrap-around counter).
        push_index = task_id_index % push_count

        # Modify job structure to sync with the push sample data
        if 'sources' in blob:
            del blob['sources']

        blob['revision'] = sample_push[push_index]['revision']
        blob['taskcluster_task_id'] = 'V3SVuxO8TFy37En_6HcXL{}'.format(task_id_index)
        blob['taskcluster_retry_id'] = '0'
        blobs.append(blob)

    return blobs
|
2014-06-03 20:46:20 +04:00
|
|
|
|
2016-07-06 00:15:52 +03:00
|
|
|
|
|
|
|
@pytest.fixture
def eleven_jobs_stored(test_repository, failure_classifications, eleven_job_blobs):
    """stores a list of 11 job samples"""
    store_job_data(test_repository, eleven_job_blobs)
|
2014-06-03 20:46:20 +04:00
|
|
|
|
|
|
|
|
2017-01-06 20:37:27 +03:00
|
|
|
@pytest.fixture
def taskcluster_jobs_stored(test_repository, sample_data):
    """stores a list of TaskCluster job samples"""
    # The pulse-derived payloads are already transformed into storage format.
    store_job_data(test_repository, sample_data.transformed_pulse_jobs)
|
|
|
|
|
|
|
|
|
2016-12-02 18:49:06 +03:00
|
|
|
@pytest.fixture
def test_job_with_notes(test_job, test_user):
    """test job with job notes."""
    for failure_classification_id in (2, 3):
        th_models.JobNote.objects.create(
            job=test_job,
            user=test_user,
            failure_classification_id=failure_classification_id,
            text="you look like a man-o-lantern",
        )

    # Pick up the classification changes made by the notes.
    test_job.refresh_from_db()
    return test_job
|
2016-12-02 18:49:06 +03:00
|
|
|
|
|
|
|
|
2014-09-26 20:08:28 +04:00
|
|
|
@pytest.fixture
def activate_responses(request):
    """Activate the `responses` HTTP mocking library for the test's duration."""
    responses.start()

    def deactivate():
        responses.reset()
        responses.stop()

    request.addfinalizer(deactivate)
|
2015-02-14 12:27:13 +03:00
|
|
|
|
Bug 1059814 - Non-whitespace pep8 fixes using autopep8 aggressive mode
Generated using:
autopep8 --in-place --recursive --aggressive --aggressive
--max-line-length 999 --exclude='.git,__pycache__,.vagrant,build,vendor,
0001_initial.py,models.py,test_note_api.py,test_bug_job_map_api.py' .
autopep8's aggressive mode, unlike standard mode, makes non-whitespace
changes. It also uses lib2to3 to correct deprecated code (W690), some of
which aren't pep8 failures. Some of these changes are more dubious, but
rather than disable W690 completely, I've just excluded the files where
the unwanted changes would have been made, so we can benefit from the
rest.
2015-02-26 18:16:02 +03:00
|
|
|
|
2018-08-17 13:57:32 +03:00
|
|
|
@pytest.fixture
def pulse_connection():
    """
    Build a Pulse connection with the Kombu library

    This is a non-lazy mirror of our Pulse service's build_connection as
    explained in: https://bugzilla.mozilla.org/show_bug.cgi?id=1484196
    """
    broker_url = settings.CELERY_BROKER_URL
    return kombu.Connection(broker_url)
|
2018-08-17 13:57:32 +03:00
|
|
|
|
|
|
|
|
2018-08-17 10:43:06 +03:00
|
|
|
@pytest.fixture
def pulse_exchange(pulse_connection, request):
    """Return a factory that fetches (and optionally creates) a pulse exchange."""

    def build_exchange(name, create_exchange):
        return get_exchange(pulse_connection, name, create=create_exchange)

    return build_exchange
|
|
|
|
|
|
|
|
|
2015-08-17 20:01:39 +03:00
|
|
|
@pytest.fixture
def failure_lines(test_job):
    """Two failure lines for test_job: one top-level, one with a subtest."""
    return create_failure_lines(test_job, [(test_line, {}), (test_line, {"subtest": "subtest2"})])
|
2015-08-17 20:01:39 +03:00
|
|
|
|
|
|
|
|
2019-12-12 23:24:22 +03:00
|
|
|
@pytest.fixture
def failure_line_logs(test_job):
    """Failure lines including a 'log' action line without a test name."""
    line_specs = [
        (test_line, {'action': 'log', 'test': None}),
        (test_line, {'subtest': 'subtest2'}),
    ]
    return create_failure_lines(test_job, line_specs)
|
2019-12-12 23:24:22 +03:00
|
|
|
|
|
|
|
|
2015-08-17 20:01:39 +03:00
|
|
|
@pytest.fixture
def failure_classifications(transactional_db):
    """Create the canonical set of failure classification rows."""
    classification_names = (
        "not classified",
        "fixed by commit",
        "expected fail",
        "intermittent",
        "infra",
        "intermittent needs filing",
        "autoclassified intermittent",
    )
    for name in classification_names:
        th_models.FailureClassification(name=name).save()
|
2016-01-07 19:30:43 +03:00
|
|
|
|
|
|
|
|
2017-02-28 17:26:50 +03:00
|
|
|
@pytest.fixture
def text_log_errors_failure_lines(test_job, failure_lines):
    """Matching text log errors and failure lines, linked via metadata rows."""
    line_specs = [(test_line, {}), (test_line, {"subtest": "subtest2"})]
    text_log_errors = create_text_log_errors(test_job, line_specs)

    # Link each error to its corresponding failure line.
    for text_log_error, failure_line in zip(text_log_errors, failure_lines):
        th_models.TextLogErrorMetadata.objects.create(
            text_log_error=text_log_error, failure_line=failure_line
        )

    return text_log_errors, failure_lines
|
|
|
|
|
|
|
|
|
2016-01-07 19:30:43 +03:00
|
|
|
@pytest.fixture
def test_matcher(request):
    # Name of the matcher used when creating classification matches in tests.
    return "TreeherderUnitTestDetector"
|
2016-04-29 16:53:50 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def classified_failures(
    test_job, text_log_errors_failure_lines, test_matcher, failure_classifications
):
    """One ClassifiedFailure per failure line belonging to test_job."""
    _, failure_lines = text_log_errors_failure_lines

    created = []
    for failure_line in failure_lines:
        # Only classify lines that belong to the job under test.
        if failure_line.job_guid != test_job.guid:
            continue
        classified_failure = th_models.ClassifiedFailure.objects.create()
        failure_line.error.create_match(test_matcher, classified_failure)
        created.append(classified_failure)

    return created
|
|
|
|
|
|
|
|
|
2015-09-30 14:01:38 +03:00
|
|
|
@pytest.fixture
def test_user(db):
    # a user *without* sheriff/staff permissions
    return th_models.User.objects.create(
        username="testuser1", email='user@foo.com', is_staff=False
    )
|
|
|
|
|
|
|
|
|
2017-08-18 23:22:56 +03:00
|
|
|
@pytest.fixture
def test_ldap_user(db):
    """
    A user whose username matches those generated for LDAP SSO logins,
    and who does not have `is_staff` permissions.
    """
    return th_models.User.objects.create(
        username="mozilla-ldap/user@foo.com", email='user@foo.com', is_staff=False
    )
|
|
|
|
|
|
|
|
|
2015-09-30 14:01:38 +03:00
|
|
|
@pytest.fixture
def test_sheriff(db):
    # a user *with* sheriff/staff permissions
    return th_models.User.objects.create(
        username="testsheriff1", email='sheriff@foo.com', is_staff=True
    )
|
|
|
|
|
|
|
|
|
2015-12-04 00:59:21 +03:00
|
|
|
@pytest.fixture
def test_perf_framework(transactional_db):
    """An enabled performance framework named 'test_talos'."""
    return perf_models.PerformanceFramework.objects.create(name='test_talos', enabled=True)
|
2016-06-01 02:54:49 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def test_perf_signature(test_repository, test_perf_framework) -> perf_models.PerformanceSignature:
    """A fully-populated performance signature on a Windows 7 platform."""
    win7_platform = th_models.MachinePlatform.objects.create(
        os_name='win', platform='win7', architecture='x86'
    )
    return create_perf_signature(test_perf_framework, test_repository, win7_platform)
|
2015-12-04 00:59:21 +03:00
|
|
|
|
2021-03-08 22:46:07 +03:00
|
|
|
def create_perf_signature(
    perf_framework, repository, machine_platform: th_models.MachinePlatform
) -> perf_models.PerformanceSignature:
    """Create a fully-populated performance signature (plus its option collection)."""
    option = th_models.Option.objects.create(name='opt')
    option_collection = th_models.OptionCollection.objects.create(
        option_collection_hash='my_option_hash', option=option
    )

    signature_fields = dict(
        repository=repository,
        signature_hash=(40 * 't'),
        framework=perf_framework,
        platform=machine_platform,
        option_collection=option_collection,
        suite='mysuite',
        test='mytest',
        application='firefox',
        has_subtests=False,
        tags='warm pageload',
        extra_options='e10s opt',
        measurement_unit='ms',
        last_updated=datetime.datetime.now(),
    )
    return perf_models.PerformanceSignature.objects.create(**signature_fields)
|
2016-01-07 19:30:43 +03:00
|
|
|
|
|
|
|
|
2018-08-29 00:27:15 +03:00
|
|
|
@pytest.fixture
def test_perf_signature_2(test_perf_signature):
    """A second signature sharing everything with test_perf_signature except hash/suite/test."""
    base = test_perf_signature
    return perf_models.PerformanceSignature.objects.create(
        repository=base.repository,
        signature_hash=(20 * 't2'),
        framework=base.framework,
        platform=base.platform,
        option_collection=base.option_collection,
        suite='mysuite2',
        test='mytest2',
        has_subtests=base.has_subtests,
        extra_options=base.extra_options,
        last_updated=datetime.datetime.now(),
    )
|
|
|
|
|
|
|
|
|
2021-03-25 15:50:42 +03:00
|
|
|
@pytest.fixture
def test_stalled_data_signature(test_perf_signature):
    """A signature whose last update is 120 days old (i.e. stalled data)."""
    base = test_perf_signature
    stalled_data_timestamp = datetime.datetime.now() - datetime.timedelta(days=120)
    return perf_models.PerformanceSignature.objects.create(
        repository=base.repository,
        signature_hash=(20 * 't3'),
        framework=base.framework,
        platform=base.platform,
        option_collection=base.option_collection,
        suite='mysuite3',
        test='mytest3',
        has_subtests=base.has_subtests,
        extra_options=base.extra_options,
        last_updated=stalled_data_timestamp,
    )
|
|
|
|
|
|
|
|
|
2018-08-29 00:27:15 +03:00
|
|
|
@pytest.fixture
def test_perf_data(test_perf_signature, eleven_jobs_stored):
    """A PerformanceDatum per job 7-10, re-parented onto pushes 1-4."""
    # for making things easier, ids for jobs
    # and push should be the same;
    # also, we only need a subset of jobs
    perf_jobs = th_models.Job.objects.filter(pk__in=range(7, 11)).order_by('push__time').all()

    for index, job in enumerate(perf_jobs, start=1):
        # Reassign each job to push 1..4 so job and push indices line up.
        job.push_id = index
        job.save()

        perf_datum = perf_models.PerformanceDatum.objects.create(
            value=10,
            push_timestamp=job.push.time,
            job=job,
            push=job.push,
            repository=job.repository,
            signature=test_perf_signature,
        )
        # Keep the push timestamp consistent with the datum's job.
        perf_datum.push.time = job.push.time
        perf_datum.push.save()

    return perf_models.PerformanceDatum.objects.order_by('id').all()
|
2018-08-29 00:27:15 +03:00
|
|
|
|
|
|
|
|
2016-01-07 19:30:43 +03:00
|
|
|
@pytest.fixture
def mock_bugzilla_api_request(monkeypatch):
    """Mock fetch_json() used by Bugzilla ETL to return a local sample file."""

    def _fetch_json(url, params=None):
        tests_folder = os.path.dirname(__file__)
        bug_list_path = os.path.join(tests_folder, "sample_data", "bug_list.json")
        with open(bug_list_path) as f:
            # Rewrite last_change_time so the sample bugs always look recently
            # modified relative to the current test run.
            # datetime.utcnow() is deprecated since Python 3.12; an aware UTC
            # datetime produces identical strftime output for this format.
            last_change_time = (
                datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=30)
            ).strftime('%Y-%m-%dT%H:%M:%SZ')
            data = json.load(f)
            for bug in data["bugs"]:
                bug["last_change_time"] = last_change_time
            return data

    monkeypatch.setattr(treeherder.etl.bugzilla, 'fetch_json', _fetch_json)
|
2016-01-07 19:30:43 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def bugs(mock_bugzilla_api_request):
    """Run the Bugzilla ETL against the mocked API and return all cached bugs."""
    from treeherder.etl.bugzilla import BzApiBugProcess

    process = BzApiBugProcess()
    process.run()

    return th_models.Bugscache.objects.all()
|
2016-02-05 13:44:58 +03:00
|
|
|
|
|
|
|
|
2021-12-17 14:31:54 +03:00
|
|
|
@pytest.fixture
def mock_bugzilla_reopen_request(monkeypatch, request):
    """Mock reopen_request() used to reopen incomplete bugs."""

    def _reopen_request(url, method, headers, json):
        # NB: the `json` parameter shadows the json module, hence the alias.
        import json as json_module

        # Record each reopen payload in the pytest cache, keyed by URL,
        # so tests can assert on what would have been sent.
        reopened_bugs = request.config.cache.get('reopened_bugs', {})
        reopened_bugs[url] = json_module.dumps(json)
        request.config.cache.set('reopened_bugs', reopened_bugs)

    monkeypatch.setattr(treeherder.etl.bugzilla, 'reopen_request', _reopen_request)
|
|
|
|
|
|
|
|
|
2018-02-07 20:59:30 +03:00
|
|
|
@pytest.fixture
def client():
    """Return a fresh django-rest-framework APIClient.

    See: http://www.django-rest-framework.org/api-guide/testing/#apiclient
    """
    api_client = APIClient()
    return api_client
|
2018-02-07 20:59:30 +03:00
|
|
|
|
|
|
|
|
2019-03-01 20:51:27 +03:00
|
|
|
@pytest.fixture
def authorized_sheriff_client(client, test_sheriff):
    """An APIClient pre-authenticated as the sheriff test user."""
    client.force_authenticate(user=test_sheriff)
    return client
|
|
|
|
|
|
|
|
|
2021-09-13 13:43:45 +03:00
|
|
|
@pytest.fixture
def mock_file_bugzilla_map_request(monkeypatch):
    """
    Mock fetch_data() of the files_bugzilla_map ETL to return local sample
    files which map source files to Bugzilla components.
    """
    import treeherder.etl.files_bugzilla_map

    def _fetch_data(self, project):
        url = (
            'https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/gecko.v2.%s.latest.source.source-bugzilla-info/artifacts/public/components.json'
            % project
        )
        file_name = "files_bugzilla_map_%s_%s.json" % (project, self.run_id)
        files_bugzilla_data = None
        exception = None
        try:
            data_path = os.path.join(
                os.path.dirname(__file__), "sample_data", "files_bugzilla_map", file_name
            )
            with open(data_path) as f:
                files_bugzilla_data = json.load(f)
        except Exception as e:
            # Missing/broken sample files are reported back to the caller,
            # mirroring the real implementation's error handling.
            exception = e
        return {
            "url": url,
            "files_bugzilla_data": files_bugzilla_data,
            "exception": exception,
        }

    monkeypatch.setattr(
        treeherder.etl.files_bugzilla_map.FilesBugzillaMapProcess, 'fetch_data', _fetch_data
    )
|
|
|
|
|
|
|
|
|
2021-12-06 13:26:14 +03:00
|
|
|
@pytest.fixture
def mock_bugscache_bugzilla_request(monkeypatch):
    """
    Mock fetch_intermittent_bugs() used by bugzilla ETL to return local Bugzilla
    sample data.
    """

    def _fetch_intermittent_bugs(additional_params, limit, duplicate_chain_length):
        tests_folder = os.path.dirname(__file__)
        file_name = "run-%s.json" % str(duplicate_chain_length)
        data_path = os.path.join(tests_folder, "sample_data", "bugscache_population", file_name)
        with open(data_path) as f:
            bugzilla_data = json.load(f)
            # Refresh each bug's last_change_time to 20 days ago so recency
            # filters treat the sample bugs as current. The trailing 'Z'
            # designates UTC, so the timestamp must be derived from UTC —
            # the previous local-time value was mislabelled.
            for bug in bugzilla_data["bugs"]:
                bug["last_change_time"] = (
                    datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(20)
                ).strftime('%Y-%m-%dT%H:%M:%SZ')
            return bugzilla_data["bugs"]

    monkeypatch.setattr(
        treeherder.etl.bugzilla, 'fetch_intermittent_bugs', _fetch_intermittent_bugs
    )
|
|
|
|
|
|
|
|
|
2016-02-05 13:44:58 +03:00
|
|
|
@pytest.fixture
def text_log_error_lines(test_job, failure_lines):
    """Create TextLogError rows mirroring the stored FailureLines of test_job."""
    failure_line_values = th_models.FailureLine.objects.filter(
        job_guid=test_job.guid
    ).values()

    return create_text_log_errors(test_job, [(item, {}) for item in failure_line_values])
|
2016-02-05 13:44:58 +03:00
|
|
|
|
|
|
|
|
2020-06-11 08:58:17 +03:00
|
|
|
@pytest.fixture
def test_perf_tag():
    """A persisted PerformanceTag named 'first_tag'."""
    tag = perf_models.PerformanceTag.objects.create(name='first_tag')
    return tag
|
2020-06-11 08:58:17 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def test_perf_tag_2():
    """A second persisted PerformanceTag named 'second_tag'."""
    tag = perf_models.PerformanceTag.objects.create(name='second_tag')
    return tag
|
2020-06-11 08:58:17 +03:00
|
|
|
|
|
|
|
|
2016-04-23 00:13:29 +03:00
|
|
|
@pytest.fixture
def test_perf_alert_summary(test_repository, push_stored, test_perf_framework, test_issue_tracker):
    """An alert summary covering pushes 1 -> 2, tagged with a 'harness' tag."""
    # Local name chosen so it does not shadow the `test_perf_tag` fixture.
    harness_tag = perf_models.PerformanceTag.objects.create(name='harness')

    summary = perf_models.PerformanceAlertSummary.objects.create(
        repository=test_repository,
        framework=test_perf_framework,
        prev_push_id=1,
        push_id=2,
        manually_created=False,
        created=datetime.datetime.now(),
    )
    summary.performance_tags.add(harness_tag)

    return summary
|
2016-04-23 00:13:29 +03:00
|
|
|
|
|
|
|
|
2018-08-29 00:27:15 +03:00
|
|
|
@pytest.fixture
def test_perf_alert_summary_2(test_perf_alert_summary):
    """A second alert summary, one push further along than the first."""
    base = test_perf_alert_summary
    return perf_models.PerformanceAlertSummary.objects.create(
        repository=base.repository,
        framework=base.framework,
        prev_push_id=base.prev_push_id + 1,
        push_id=base.push_id + 1,
        manually_created=False,
        created=datetime.datetime.now(),
    )
|
2018-08-29 00:27:15 +03:00
|
|
|
|
|
|
|
|
2019-05-07 17:26:52 +03:00
|
|
|
@pytest.fixture
def test_perf_alert_summary_with_bug(
    test_repository, push_stored, test_perf_framework, test_issue_tracker
):
    """An alert summary that already has an associated, freshly-updated bug."""
    summary = perf_models.PerformanceAlertSummary.objects.create(
        repository=test_repository,
        framework=test_perf_framework,
        prev_push_id=1,
        push_id=2,
        manually_created=False,
        created=datetime.datetime.now(),
        bug_number=123456,
        bug_updated=datetime.datetime.now(),
    )
    return summary
|
2019-05-07 17:26:52 +03:00
|
|
|
|
|
|
|
|
2016-04-23 00:13:29 +03:00
|
|
|
@pytest.fixture
def test_perf_alert(test_perf_signature, test_perf_alert_summary) -> perf_models.PerformanceAlert:
    """A default regression alert attached to test_perf_alert_summary."""
    alert = create_perf_alert(
        summary=test_perf_alert_summary, series_signature=test_perf_signature
    )
    return alert
|
|
|
|
|
|
|
|
|
2022-08-08 17:40:05 +03:00
|
|
|
def create_perf_alert(**alert_properties) -> perf_models.PerformanceAlert:
    """Create a PerformanceAlert, filling any property the caller did not
    supply with a sensible regression-like default."""
    properties = {
        'amount_abs': 50.0,
        'amount_pct': 0.5,
        'is_regression': True,
        'new_value': 150.0,
        'prev_value': 100.0,
        't_value': 20.0,
    }
    # Caller-supplied values take precedence over the defaults.
    properties.update(alert_properties)
    return perf_models.PerformanceAlert.objects.create(**properties)
|
2016-11-24 22:07:32 +03:00
|
|
|
|
|
|
|
|
2019-03-01 20:51:27 +03:00
|
|
|
@pytest.fixture
def test_conflicting_perf_alert(
    test_perf_signature, test_perf_alert_summary_2
) -> perf_models.PerformanceAlert:
    """An alert on the same signature as test_perf_alert but a later summary."""
    conflicting = create_perf_alert(
        summary=test_perf_alert_summary_2, series_signature=test_perf_signature
    )
    return conflicting
|
2019-03-01 20:51:27 +03:00
|
|
|
|
|
|
|
|
2018-08-29 00:27:15 +03:00
|
|
|
@pytest.fixture
def test_perf_alert_2(
    test_perf_alert, test_perf_signature_2, test_perf_alert_summary_2
) -> perf_models.PerformanceAlert:
    """A second alert, on the second signature and the second summary."""
    alert = create_perf_alert(
        summary=test_perf_alert_summary_2, series_signature=test_perf_signature_2
    )
    return alert
|
2018-08-29 00:27:15 +03:00
|
|
|
|
|
|
|
|
2016-11-24 22:07:32 +03:00
|
|
|
@pytest.fixture
def generic_reference_data(test_repository):
    """
    Generic reference data (if you want to create a bunch of mock jobs)
    """

    class RefdataHolder:
        # Plain attribute bag for the created reference rows.
        pass

    refdata = RefdataHolder()

    refdata.option = th_models.Option.objects.create(name='my_option')
    refdata.option_collection = th_models.OptionCollection.objects.create(
        option_collection_hash='my_option_hash', option=refdata.option
    )
    refdata.option_collection_hash = refdata.option_collection.option_collection_hash
    refdata.machine_platform = th_models.MachinePlatform.objects.create(
        os_name="my_os", platform="my_platform", architecture="x86"
    )
    refdata.build_platform = th_models.BuildPlatform.objects.create(
        os_name="my_os", platform="my_platform", architecture="x86"
    )
    refdata.machine = th_models.Machine.objects.create(name='mymachine')
    refdata.job_group = th_models.JobGroup.objects.create(symbol='S', name='myjobgroup')
    refdata.job_type = th_models.JobType.objects.create(symbol='j', name='myjob')
    refdata.product = th_models.Product.objects.create(name='myproduct')
    refdata.signature = th_models.ReferenceDataSignatures.objects.create(
        name='myreferencedatasignaeture',
        signature='1234',
        build_os_name=refdata.build_platform.os_name,
        build_platform=refdata.build_platform.platform,
        build_architecture=refdata.build_platform.architecture,
        machine_os_name=refdata.machine_platform.os_name,
        machine_platform=refdata.machine_platform.platform,
        machine_architecture=refdata.machine_platform.architecture,
        job_group_name=refdata.job_group.name,
        job_group_symbol=refdata.job_group.symbol,
        job_type_name=refdata.job_type.name,
        job_type_symbol=refdata.job_type.symbol,
        option_collection_hash=refdata.option_collection_hash,
        build_system_type='buildbot',
        repository=test_repository.name,
        first_submission_timestamp=0,
    )

    return refdata
|
2018-05-14 12:58:26 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def bug_data(eleven_jobs_stored, test_repository, test_push, bugs):
    """Map the first stored bug onto the first stored job and expose the
    pieces needed by bug-frequency API tests."""
    jobs = th_models.Job.objects.all()
    first_job = jobs[0]
    first_bug_id = bugs[0].id
    th_models.BugJobMap.create(job_id=first_job.id, bug_id=first_bug_id)

    return {
        'tree': test_repository.name,
        'option': th_models.Option.objects.first(),
        'bug_id': first_bug_id,
        'job': first_job,
        'jobs': jobs,
        'query_string': '?startday=2012-05-09&endday=2018-05-10&tree={}'.format(
            test_repository.name
        ),
    }
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def test_run_data(bug_data):
    """Count how many pushes share the calendar date of the first push."""
    pushes = th_models.Push.objects.all()
    first_push_date = pushes[0].time.strftime('%Y-%m-%d')
    test_runs = sum(
        1 for push in pushes if push.time.strftime('%Y-%m-%d') == first_push_date
    )

    return {'test_runs': test_runs, 'push_time': first_push_date}
|
2019-03-01 20:51:27 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def generate_enough_perf_datum(test_repository, test_perf_signature):
    """Store 30 datapoints of value 1 (push 1) and 30 of value 2 (push 2):
    enough data on both sides for the alert machinery to generate a proper
    alert with stable before/after values."""
    # The original zipped two identical lists; push id and datum value
    # coincide by construction, so a single loop variable suffices.
    for value in [1] * 30 + [2] * 30:
        push = th_models.Push.objects.get(id=value)
        perf_models.PerformanceDatum.objects.create(
            repository=test_repository,
            push_id=value,
            signature=test_perf_signature,
            value=value,
            push_timestamp=push.time,
        )
|
2019-08-27 00:10:49 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def sample_option_collections(transactional_db):
    """Two option collections ('option_hash1'/'option_hash2'), one option each."""
    for collection_hash, option_name in (
        ('option_hash1', 'opt1'),
        ('option_hash2', 'opt2'),
    ):
        option = th_models.Option.objects.create(name=option_name)
        th_models.OptionCollection.objects.create(
            option_collection_hash=collection_hash, option=option
        )
|
2019-11-14 16:29:55 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def backfill_record_context():
    """Static sample context for a backfill record: five datapoints that
    the sheriff bot would retrigger."""
    fields = (
        "perf_datum_id",
        "value",
        "job_id",
        "push_id",
        "push_timestamp",
        "push__revision",
    )
    rows = [
        (
            933219901,
            0.8714208119774209,
            269034923,
            565159,
            "2019-10-02 02:22:28",
            "04e8766a29242d4deae31b5b04e6ac61ebf61ffd",
        ),
        (
            933219962,
            0.9160434865973892,
            269034920,
            565160,
            "2019-10-02 02:23:29",
            "9b42bdc4889fe7782df9b2a0aa990ed5e62cb04c",
        ),
        (
            931772364,
            0.9508247997807697,
            268828343,
            565161,
            "2019-10-02 02:24:35",
            "057b59fdadad75e888a739e85a683b2ff7bfc62e",
        ),
        (
            931924904,
            0.9829230628232519,
            268840223,
            565188,
            "2019-10-02 04:03:09",
            "49ef9afb62bb909389b105a1751e9b46e6f1688d",
        ),
        (
            931927300,
            0.9873498499464002,
            268840309,
            565193,
            "2019-10-02 04:08:06",
            "f5cce52461bac31945b083e51a085fb429a36f04",
        ),
    ]
    return {"data_points_to_retrigger": [dict(zip(fields, row)) for row in rows]}
|
2020-05-20 18:15:48 +03:00
|
|
|
|
|
|
|
|
|
|
|
class JSONFixtureLoader:
    """Callable that loads a JSON fixture from a fixed directory chain."""

    def __init__(self, *prior_dirs):
        # Path components prepended to every fixture filename.
        self._prior_dirs = prior_dirs

    def __call__(self, fixture_filename):
        """Return the parsed JSON content of *fixture_filename*."""
        full_path = join(*self._prior_dirs, fixture_filename)
        with open(full_path, 'r') as fixture_file:
            return json.load(fixture_file)
|
|
|
|
|
|
|
|
|
|
|
|
class SampleDataJSONLoader:
    """Loads JSON fixtures from sub-directories of the shared sample-data tree."""

    def __init__(self, *sub_dirs):
        # SAMPLE_DATA_PATH is module-level and only *read* here, so the
        # original `global SAMPLE_DATA_PATH` declaration was a no-op and
        # has been removed.
        self.load_json = JSONFixtureLoader(SAMPLE_DATA_PATH, *sub_dirs)

    def __call__(self, fixture_filename):
        """Return the parsed JSON for *fixture_filename* under the sub-dirs."""
        return self.load_json(fixture_filename)
|