Bug 1077136 - fix builds4hr tests and improve tasks per feedback

This commit is contained in:
Cameron Dawson 2014-10-08 12:25:05 -07:00
Родитель 9cf09d8372
Коммит 89639f96ae
15 изменённых файлов: 2112 добавлений и 5484 удалений

Просмотреть файл

@ -12,20 +12,7 @@ def mock_buildapi_pending_url(monkeypatch):
path = os.path.join( path = os.path.join(
tests_folder, tests_folder,
"sample_data", "sample_data",
"builds-pending.js" "builds-pending.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_PENDING_URL',
"file://{0}".format(path))
@pytest.fixture
def mock_buildapi_pending_missing1_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"builds-pending-missing1.js"
) )
monkeypatch.setattr(settings, monkeypatch.setattr(settings,
'BUILDAPI_PENDING_URL', 'BUILDAPI_PENDING_URL',
@ -38,7 +25,7 @@ def mock_buildapi_running_url(monkeypatch):
path = os.path.join( path = os.path.join(
tests_folder, tests_folder,
"sample_data", "sample_data",
"builds-running.js" "builds-running.json"
) )
monkeypatch.setattr(settings, monkeypatch.setattr(settings,
'BUILDAPI_RUNNING_URL', 'BUILDAPI_RUNNING_URL',
@ -51,7 +38,46 @@ def mock_buildapi_builds4h_url(monkeypatch):
path = os.path.join( path = os.path.join(
tests_folder, tests_folder,
"sample_data", "sample_data",
"builds-4h.js" "builds-4h.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_BUILDS4H_URL',
"file://{0}".format(path))
@pytest.fixture
def mock_buildapi_pending_missing1_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"builds-pending-missing1.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_PENDING_URL',
"file://{0}".format(path))
@pytest.fixture
def mock_buildapi_running_missing1_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"builds-running-missing1.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_RUNNING_URL',
"file://{0}".format(path))
@pytest.fixture
def mock_buildapi_builds4h_missing1_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"builds-4h-missing1.json"
) )
monkeypatch.setattr(settings, monkeypatch.setattr(settings,
'BUILDAPI_BUILDS4H_URL', 'BUILDAPI_BUILDS4H_URL',
@ -79,16 +105,112 @@ def test_ingest_pending_jobs(jm, initial_data,
assert len(stored_obj) == 1 assert len(stored_obj) == 1
def test_ingest_pending_jobs_1_missing_resultset(jm, initial_data, sample_resultset, def test_ingest_running_jobs(jm, initial_data,
test_repository, mock_buildapi_running_url,
mock_buildapi_pending_missing1_url,
mock_post_json_data, mock_post_json_data,
mock_log_parser,
mock_get_resultset, mock_get_resultset,
mock_get_remote_content, mock_get_remote_content):
activate_responses):
""" """
Ensure the job with the missing resultset is queued for refetching a new buildapi running job creates a new obj in the job table
""" """
from treeherder.etl.buildapi import RunningJobsProcess
etl_process = RunningJobsProcess()
etl_process.run()
stored_obj = jm.get_jobs_dhub().execute(
proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 1
def test_ingest_builds4h_jobs(jm, initial_data,
mock_buildapi_builds4h_url,
mock_post_json_data,
mock_log_parser,
mock_get_resultset,
mock_get_remote_content):
"""
a new buildapi completed job creates a new obj in the job table
"""
from treeherder.etl.buildapi import Builds4hJobsProcess
etl_process = Builds4hJobsProcess()
etl_process.run()
jm.process_objects(20)
stored_obj = jm.get_jobs_dhub().execute(
proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 20
def test_ingest_running_job_fields(jm, initial_data,
mock_buildapi_running_url,
mock_post_json_data,
mock_log_parser,
mock_get_resultset,
mock_get_remote_content):
"""
a new buildapi running job creates a new obj in the job table
"""
from treeherder.etl.buildapi import RunningJobsProcess
etl_process = RunningJobsProcess()
etl_process.run()
stored_obj = jm.get_jobs_dhub().execute(
proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 1
assert stored_obj[0]["start_timestamp"] is not 0
#####################
# MISSING RESULTSETS
#####################
def test_ingest_pending_jobs_1_missing_resultset(jm, initial_data,
sample_resultset, test_repository, mock_buildapi_pending_missing1_url,
mock_post_json_data, mock_get_resultset, mock_get_remote_content,
activate_responses):
"""
Ensure the pending job with the missing resultset is queued for refetching
"""
from treeherder.etl.buildapi import PendingJobsProcess
etl_process = PendingJobsProcess()
_do_missing_resultset_test(jm, etl_process)
def test_ingest_running_jobs_1_missing_resultset(jm, initial_data,
sample_resultset, test_repository, mock_buildapi_running_missing1_url,
mock_post_json_data, mock_get_resultset, mock_get_remote_content,
activate_responses):
"""
Ensure the running job with the missing resultset is queued for refetching
"""
from treeherder.etl.buildapi import RunningJobsProcess
etl_process = RunningJobsProcess()
_do_missing_resultset_test(jm, etl_process)
def test_ingest_builds4h_jobs_1_missing_resultset(jm, initial_data,
sample_resultset, test_repository, mock_buildapi_builds4h_missing1_url,
mock_post_json_data, mock_get_resultset, mock_get_remote_content,
activate_responses):
"""
Ensure the builds4h job with the missing resultset is queued for refetching
"""
from treeherder.etl.buildapi import Builds4hJobsProcess
etl_process = Builds4hJobsProcess()
_do_missing_resultset_test(jm, etl_process)
def _do_missing_resultset_test(jm, etl_process):
new_revision = '222222222222' new_revision = '222222222222'
pushlog_content = json.dumps( pushlog_content = json.dumps(
{"33270": { {"33270": {
@ -114,9 +236,8 @@ def test_ingest_pending_jobs_1_missing_resultset(jm, initial_data, sample_result
match_querystring=True, match_querystring=True,
content_type='application/json') content_type='application/json')
from treeherder.etl.buildapi import PendingJobsProcess
etl_process = PendingJobsProcess()
etl_process.run() etl_process.run()
jm.process_objects(2)
stored_obj = jm.get_jobs_dhub().execute( stored_obj = jm.get_jobs_dhub().execute(
proc="jobs_test.selects.jobs") proc="jobs_test.selects.jobs")
@ -135,48 +256,3 @@ def test_ingest_pending_jobs_1_missing_resultset(jm, initial_data, sample_result
was_stored = True was_stored = True
assert was_stored assert was_stored
jm.disconnect()
def test_ingest_running_jobs(jm, initial_data,
mock_buildapi_running_url,
mock_post_json_data,
mock_log_parser,
mock_get_resultset,
mock_get_remote_content):
"""
a new buildapi running job creates a new obj in the job table
"""
from treeherder.etl.buildapi import RunningJobsProcess
etl_process = RunningJobsProcess()
etl_process.run()
stored_obj = jm.get_jobs_dhub().execute(
proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 1
def test_ingest_running_job_fields(jm, initial_data,
mock_buildapi_running_url,
mock_post_json_data,
mock_log_parser,
mock_get_resultset,
mock_get_remote_content):
"""
a new buildapi running job creates a new obj in the job table
"""
from treeherder.etl.buildapi import RunningJobsProcess
etl_process = RunningJobsProcess()
etl_process.run()
stored_obj = jm.get_jobs_dhub().execute(
proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 1
assert stored_obj[0]["start_timestamp"] is not 0

Просмотреть файл

@ -0,0 +1,144 @@
{
"builds": [
{
"builder_id": 274259,
"buildnumber": 42,
"endtime": 1412776103,
"id": 49799397,
"master_id": 132,
"properties": {
"base_bundle_urls": [
"https://ftp-ssl.mozilla.org/pub/mozilla.org/firefox/bundles"
],
"base_mirror_urls": null,
"basedir": "/builds/slave/b2g_fx-team_emu-kk-d_nu-000000",
"branch": "test_treeherder",
"buildername": "b2g_fx-team_emulator-kk-debug_nonunified",
"buildid": "20141008023001",
"buildnumber": 42,
"builduid": "28d3ea4c5aa144b1b160670dc37d8d39",
"compare_locales_revision": "1fc4e9bc8287",
"gaia_revision": "0bc74ce502672cf0265b24cf3a25d117c3de5e71",
"gecko_revision": "9637293b166a",
"hgurl": "https://hg.mozilla.org/",
"log_url": "http://ftp.mozilla.org/pub/mozilla.org/b2g/tinderbox-builds/fx-team-emulator-kk-debug/1412760601/b2g_fx-team_emulator-kk-debug_nonunified-bm94-build1-build42.txt.gz",
"master": "http://buildbot-master94.srv.releng.use1.mozilla.com:8001/",
"mock_target": null,
"platform": "emulator-kk-debug",
"product": "b2g",
"project": "",
"repo_path": "integration/fx-team",
"repository": "",
"request_ids": [
52163682
],
"request_times": {
"52163682": 1412760601
},
"revision": "45f8637cb9f7",
"scheduler": "b2g_fx-team periodic",
"script_repo_revision": "1b5e35f7a0ec",
"slavename": "bld-linux64-spot-007",
"tools_revision": "2469042323a6",
"tooltool_url_list": [
"http://runtime-binaries.pvt.build.mozilla.org/tooltool"
],
"upload_ssh_key": "ffxbld_dsa",
"upload_ssh_server": "stage.mozilla.org",
"upload_ssh_user": "ffxbld"
},
"reason": "The Nightly scheduler named 'b2g_fx-team periodic' triggered this build",
"request_ids": [
52163682
],
"requesttime": 1412760601,
"result": 0,
"slave_id": 6971,
"starttime": 1412761403
},
{
"builder_id": 202754,
"buildnumber": 76,
"endtime": 1412775820,
"id": 49799157,
"master_id": 106,
"properties": {
"appName": "Firefox",
"appVersion": "35.0a1",
"basedir": "c:/builds/moz2_slave/m-cen-w32-ntly-000000000000000",
"branch": "test_treeherder",
"builddir": "m-cen-w32-ntly-000000000000000",
"buildername": "WINNT 5.2 mozilla-central nightly",
"buildid": "20141008030202",
"buildnumber": 76,
"builduid": "4c3bcb4280a146869a709cd9c588fb6e",
"comments": "",
"completeMarFilename": "firefox-35.0a1.en-US.win32.complete.mar",
"completeMarHash": "8ed585ec103aec121af4f81633b1201994c9d8e9bb797cbdad1323f84f176c8086cb522d801cbb7c8dc8e18b6339fbb80211d9d0a8b9118c3a92969f23f59501",
"completeMarSize": "48921988",
"completeMarUrl": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/2014/10/2014-10-08-03-02-02-mozilla-central/firefox-35.0a1.en-US.win32.complete.mar",
"completesnippetFilename": "build/obj-firefox/dist/update/complete.update.snippet",
"filepath": null,
"forced_clobber": false,
"hashType": "sha512",
"installerFilename": "firefox-35.0a1.en-US.win32.installer.exe",
"installerHash": "97d6149ff2649ac3ee63429376d4a0d0cfd6c4017071c16aedbaf1b73a6c181715e4b18779f19c5896de64d84b56cca7eac2f67b1c958b623707115ce9d10e74",
"installerSize": "40424984",
"jsshellUrl": "http://ftp.mozilla.org/pub/mozilla.org/firefox/tinderbox-builds/mozilla-central-win32/1412762522/jsshell-win32.zip",
"log_url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/2014/10/2014-10-08-03-02-02-mozilla-central/mozilla-central-win32-nightly-bm85-build1-build76.txt.gz",
"master": "http://buildbot-master85.srv.releng.scl3.mozilla.com:8001/",
"nightly_build": true,
"packageFilename": "firefox-35.0a1.en-US.win32.zip",
"packageHash": "9f3f0b59d33b719c9c6b8d029c557e396f120b471db31b523289eacfee7268a2c8f50aca937dd2faf20fa6af7ba562e5d9c0e724bd361648be40e8e39147e19b",
"packageSize": "50571473",
"packageUrl": "http://ftp.mozilla.org/pub/mozilla.org/firefox/tinderbox-builds/mozilla-central-win32/1412762522/firefox-35.0a1.en-US.win32.zip",
"partialInfo": [
{
"from_buildid": "20141007030202",
"hash": "97b74d6163fa92503dfeb4f6f116975b92ca815fa8e3bcedaef97f37868782062d801b5cdad656ff5c2eed08ee0739f4ad946036b92778b0edb3ef3f82beba55",
"size": "4203960",
"url": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/2014/10/2014-10-08-03-02-02-mozilla-central/firefox-35.0a1.en-US.win32.partial.20141007030202-20141008030202.mar"
}
],
"partialMarFilename": "firefox-35.0a1.en-US.win32.partial.20141007030202-20141008030202.mar",
"partialMarHash": "97b74d6163fa92503dfeb4f6f116975b92ca815fa8e3bcedaef97f37868782062d801b5cdad656ff5c2eed08ee0739f4ad946036b92778b0edb3ef3f82beba55",
"partialMarSize": "4203960",
"partialMarUrl": "http://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/2014/10/2014-10-08-03-02-02-mozilla-central/firefox-35.0a1.en-US.win32.partial.20141007030202-20141008030202.mar",
"partialsnippetFilename": "build/obj-firefox/dist/update/partial.update.snippet",
"periodic_clobber": false,
"platform": "win32",
"previousMarFilename": "firefox-35.0a1.en-US.win32.complete.mar",
"previous_buildid": "20141007030202",
"previous_inipath": "previous/application.ini",
"product": "firefox",
"project": "",
"purge_actual": "60.69GB",
"purge_target": "12GB",
"purged_clobber": true,
"repository": "",
"request_ids": [
52167028
],
"request_times": {
"52167028": 1412762523
},
"revision": "222222222222902057b6f698a5ca9f78aea25d06",
"scheduler": "mozilla-central nightly",
"slavebuilddir": "m-cen-w32-ntly-000000000000000",
"slavename": "b-2008-ix-0140",
"stage_platform": "win32",
"symbolsUrl": "http://ftp.mozilla.org/pub/mozilla.org/firefox/tinderbox-builds/mozilla-central-win32/1412762522/firefox-35.0a1.en-US.win32.crashreporter-symbols.zip",
"testsUrl": "http://ftp.mozilla.org/pub/mozilla.org/firefox/tinderbox-builds/mozilla-central-win32/1412762522/firefox-35.0a1.en-US.win32.tests.zip",
"toolsdir": "c:/builds/moz2_slave/m-cen-w32-ntly-000000000000000/tools"
},
"reason": "The Nightly scheduler named 'mozilla-central nightly' triggered this build",
"request_ids": [
52167028
],
"requesttime": 1412762523,
"result": 0,
"slave_id": 9671,
"starttime": 1412762525
}
]
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,36 @@
{
"running": {
"test_treeherder": {
"45f8637cb9f7": [
{
"submitted_at": 1369231311,
"buildername": "WINNT 5.2 profiling build",
"start_time": 1369231311,
"number": 3,
"claimed_by_name": "buildbot-master66.srv.releng.usw2.mozilla.com:/builds/buildbot/build1/master",
"request_ids": [
24526180
],
"last_heartbeat": 1369231939,
"id": 24767134,
"revision": "45f8637cb9f7"
}
],
"222222222222": [
{
"submitted_at": 1369231312,
"buildername": "WINNT 5.2 profiling build",
"start_time": 1369231312,
"number": 3,
"claimed_by_name": "buildbot-master66.srv.releng.usw2.mozilla.com:/builds/buildbot/build1/master",
"request_ids": [
24526181
],
"last_heartbeat": 1369231940,
"id": 24767134,
"revision": "222222222222aa71f463867b15d956a972ae7574"
}
]
}
}
}

Просмотреть файл

@ -12,7 +12,7 @@ from thclient import TreeherderRequest, TreeherderJobCollection
from treeherder.etl import common, buildbot from treeherder.etl import common, buildbot
from treeherder.etl.mixins import JsonExtractorMixin, OAuthLoaderMixin from treeherder.etl.mixins import JsonExtractorMixin, OAuthLoaderMixin
from treeherder.model.models import Datasource from treeherder.model.models import Datasource
from .cleanup_tasks import fetch_missing_push_logs from treeherder.etl.tasks.cleanup_tasks import fetch_missing_push_logs
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -110,8 +110,8 @@ class Builds4hTransformerMixin(object):
continue continue
prop['revision'] = prop.get('revision', prop['revision'] = prop.get('revision',
prop.get('got_revision', prop.get('got_revision',
prop.get('sourcestamp', None))) prop.get('sourcestamp', None)))
if not prop['revision']: if not prop['revision']:
logger.warning("property 'revision' not found in build4h") logger.warning("property 'revision' not found in build4h")
@ -127,24 +127,26 @@ class Builds4hTransformerMixin(object):
for build in data['builds']: for build in data['builds']:
prop = build['properties'] prop = build['properties']
project = prop['branch']
artifact_build = copy.deepcopy(build) artifact_build = copy.deepcopy(build)
try: try:
branch = revisions_lookup[prop['branch']] branch = revisions_lookup[project]
try: try:
resultset = branch[prop['revision']] resultset = branch[prop['revision']]
print "found " + prop['revision']
except KeyError: except KeyError:
# we don't have the resultset for this build/job yet # we don't have the resultset for this build/job yet
# we need to queue fetching that resultset # we need to queue fetching that resultset
missing_revisions[prop['branch']].append(prop['revision']) missing_revisions[project].append(prop['revision'])
print "missing " + prop['revision']
continue continue
except KeyError: except KeyError:
# this branch is not one of those we care about # this branch is not one of those we care about
continue continue
project = prop['branch']
treeherder_data = { treeherder_data = {
'revision_hash': resultset['revision_hash'], 'revision_hash': resultset['revision_hash'],
'resultset_id': resultset['id'], 'resultset_id': resultset['id'],
@ -385,7 +387,6 @@ class PendingTransformerMixin(object):
} }
treeherder_data['job'] = new_job treeherder_data['job'] = new_job
print project
if project not in th_collections: if project not in th_collections:
th_collections[project] = TreeherderJobCollection( th_collections[project] = TreeherderJobCollection(
job_type='update' job_type='update'

Просмотреть файл

@ -1,136 +0,0 @@
"""
This module contains
"""
import urllib
from celery import task, group
from treeherder.model.derived import RefDataManager
from .buildapi import (RunningJobsProcess,
PendingJobsProcess,
Builds4hJobsProcess,
Builds4hAnalyzer)
from .bugzilla import BzApiBugProcess
from .tbpl import OrangeFactorBugRequest, TbplBugRequest, BugzillaBugRequest
from .pushlog import HgPushlogProcess
@task(name='fetch-buildapi-pending', time_limit=3*60)
def fetch_buildapi_pending():
"""
Fetches the buildapi pending jobs api and load them to
the objectstore ingestion endpoint
"""
PendingJobsProcess().run()
@task(name='fetch-buildapi-running', time_limit=3*60)
def fetch_buildapi_running():
"""
Fetches the buildapi running jobs api and load them to
the objectstore ingestion endpoint
"""
RunningJobsProcess().run()
@task(name='fetch-buildapi-build4h', time_limit=3*60)
def fetch_buildapi_build4h():
"""
Fetches the buildapi running jobs api and load them to
the objectstore ingestion endpoint
"""
Builds4hJobsProcess().run()
@task(name='fetch-push-logs')
def fetch_push_logs():
"""
Run several fetch_hg_push_log subtasks, one per repository
"""
rdm = RefDataManager()
try:
repos = filter(lambda x: x['url'], rdm.get_all_repository_info())
for repo in repos:
if repo['dvcs_type'] == 'hg':
fetch_hg_push_log.apply_async(
args=(repo['name'], repo['url']),
routing_key='pushlog'
)
finally:
rdm.disconnect()
@task(name='fetch-hg-push-logs', time_limit=3*60)
def fetch_hg_push_log(repo_name, repo_url):
"""
Run a HgPushlog etl process
"""
process = HgPushlogProcess()
process.run(repo_url + '/json-pushes/?full=1', repo_name)
@task(name='fetch-bugs', time_limit=10 * 60)
def fetch_bugs():
"""
Run a BzApiBug process
"""
process = BzApiBugProcess()
process.run()
@task(name='run-builds4h-analyzer')
def run_builds4h_analyzer():
"""
Run a Builds4h Analysis process
"""
process = Builds4hAnalyzer()
process.run()
@task(name="submit-star-comment", max_retries=10, time_limit=30)
def submit_star_comment(project, job_id, bug_id, submit_timestamp, who):
"""
Send a post request to tbpl's starcomment.php containing a bug association.
starcomment.php proxies then the request to orange factor
"""
try:
req = OrangeFactorBugRequest(project, job_id, bug_id, submit_timestamp, who)
req.generate_request_body()
req.send_request()
except Exception, e:
submit_star_comment.retry(exc=e)
# this exception will be raised once the number of retries
# exceeds max_retries
raise
@task(name="submit-build-star", max_retries=10, time_limit=30)
def submit_build_star(project, job_id, who, bug_id=None, classification_id=None, note=None):
"""
Send a post request to tbpl's submitBuildStar.php to mirror sheriff's activity
from treeherder to tbpl. It can be used for both bug association and classification
"""
try:
req = TbplBugRequest(project, job_id, who, bug_id=bug_id, classification_id=classification_id, note=note)
req.generate_request_body()
req.send_request()
except Exception, e:
submit_build_star.retry(exc=e)
# this exception will be raised once the number of retries
# exceeds max_retries
raise
@task(name="submit-bug-comment", max_retries=10, time_limit=30)
def submit_bug_comment(project, job_id, bug_id):
"""
Send a post request to tbpl's submitBugzillaComment.php
to add a new comment to the associated bug on bugzilla.
"""
try:
req = BugzillaBugRequest(project, job_id, bug_id)
req.generate_request_body()
req.send_request()
except Exception, e:
submit_bug_comment.retry(exc=e)
# this exception will be raised once the number of retries
# exceeds max_retries
raise

Просмотреть файл

@ -0,0 +1,4 @@
from .buildapi_tasks import *
from .cleanup_tasks import *
from .tbpl_tasks import *
from .tasks import *

Просмотреть файл

@ -0,0 +1,76 @@
"""
This module contains
"""
from celery import task, group
from treeherder.model.derived import RefDataManager
from treeherder.etl.buildapi import (RunningJobsProcess,
PendingJobsProcess,
Builds4hJobsProcess,
Builds4hAnalyzer)
from treeherder.etl.pushlog import HgPushlogProcess
@task(name='fetch-buildapi-pending', time_limit=3*60)
def fetch_buildapi_pending():
    """
    Fetch the pending jobs from the buildapi and submit them to the
    objectstore ingestion endpoint.
    """
    process = PendingJobsProcess()
    process.run()
@task(name='fetch-buildapi-running', time_limit=3*60)
def fetch_buildapi_running():
    """
    Fetch the running jobs from the buildapi and submit them to the
    objectstore ingestion endpoint.
    """
    process = RunningJobsProcess()
    process.run()
@task(name='fetch-buildapi-build4h', time_limit=3*60)
def fetch_buildapi_build4h():
    """
    Fetches the buildapi builds-4hr (completed builds) api and loads
    the results to the objectstore ingestion endpoint
    """
    Builds4hJobsProcess().run()
@task(name='fetch-push-logs')
def fetch_push_logs():
    """
    Schedule one fetch_hg_push_log subtask for each hg repository
    that has a url configured.
    """
    rdm = RefDataManager()
    try:
        hg_repos = [repo for repo in rdm.get_all_repository_info()
                    if repo['url'] and repo['dvcs_type'] == 'hg']
        for repo in hg_repos:
            # pushlog fetches go to a dedicated queue via the routing key
            fetch_hg_push_log.apply_async(
                args=(repo['name'], repo['url']),
                routing_key='pushlog'
            )
    finally:
        rdm.disconnect()
@task(name='fetch-hg-push-logs', time_limit=3*60)
def fetch_hg_push_log(repo_name, repo_url):
    """
    Run an HgPushlogProcess against the json-pushes endpoint of the
    given repository.
    """
    pushlog_url = repo_url + '/json-pushes/?full=1'
    HgPushlogProcess().run(pushlog_url, repo_name)
@task(name='run-builds4h-analyzer')
def run_builds4h_analyzer():
    """
    Kick off the Builds4h analysis process.
    """
    analyzer = Builds4hAnalyzer()
    analyzer.run()

Просмотреть файл

@ -1,7 +1,7 @@
import urllib import urllib
from celery import task, group from celery import task, group
from treeherder.model.derived import RefDataManager from treeherder.model.derived import RefDataManager
from .pushlog import MissingHgPushlogProcess from treeherder.etl.pushlog import MissingHgPushlogProcess
@task(name='fetch-missing-push-logs') @task(name='fetch-missing-push-logs')

Просмотреть файл

@ -0,0 +1,14 @@
"""
This module contains
"""
from celery import task, group
from treeherder.etl.bugzilla import BzApiBugProcess
@task(name='fetch-bugs', time_limit=10 * 60)
def fetch_bugs():
    """
    Fetch bug data from the bugzilla API via a BzApiBugProcess.
    """
    bz_process = BzApiBugProcess()
    bz_process.run()

Просмотреть файл

@ -0,0 +1,56 @@
"""
This module contains
"""
from celery import task, group
from treeherder.etl.tbpl import OrangeFactorBugRequest, TbplBugRequest, BugzillaBugRequest
@task(name="submit-star-comment", max_retries=10, time_limit=30)
def submit_star_comment(project, job_id, bug_id, submit_timestamp, who):
"""
Send a post request to tbpl's starcomment.php containing a bug association.
starcomment.php proxies then the request to orange factor
"""
try:
req = OrangeFactorBugRequest(project, job_id, bug_id, submit_timestamp, who)
req.generate_request_body()
req.send_request()
except Exception, e:
submit_star_comment.retry(exc=e)
# this exception will be raised once the number of retries
# exceeds max_retries
raise
@task(name="submit-build-star", max_retries=10, time_limit=30)
def submit_build_star(project, job_id, who, bug_id=None, classification_id=None, note=None):
"""
Send a post request to tbpl's submitBuildStar.php to mirror sheriff's activity
from treeherder to tbpl. It can be used for both bug association and classification
"""
try:
req = TbplBugRequest(project, job_id, who, bug_id=bug_id, classification_id=classification_id, note=note)
req.generate_request_body()
req.send_request()
except Exception, e:
submit_build_star.retry(exc=e)
# this exception will be raised once the number of retries
# exceeds max_retries
raise
@task(name="submit-bug-comment", max_retries=10, time_limit=30)
def submit_bug_comment(project, job_id, bug_id):
"""
Send a post request to tbpl's submitBugzillaComment.php
to add a new comment to the associated bug on bugzilla.
"""
try:
req = BugzillaBugRequest(project, job_id, bug_id)
req.generate_request_body()
req.send_request()
except Exception, e:
submit_bug_comment.retry(exc=e)
# this exception will be raised once the number of retries
# exceeds max_retries
raise