diff --git a/schemas/resultset-message.json b/schemas/resultset-message.json
deleted file mode 100644
index a13f98107..000000000
--- a/schemas/resultset-message.json
+++ /dev/null
@@ -1,30 +0,0 @@
-{
-  "id": "https://treeherder.mozilla.org/schemas/v1/resultset-message.json#",
-  "$schema": "http://json-schema.org/draft-04/schema#",
-  "title": "New ResultSet Message",
-  "description": "Pulse message sent whenever a new result-set is created.",
-  "type": "object",
-  "properties": {
-    "version": {
-      "title": "Message-format version",
-      "enum": [1]
-    },
-    "project": {
-      "title": "Project Name",
-      "description": "Identifier for treeherder project, like `try` or `mozilla-central`.",
-      "type": "string"
-    },
-    "revision_hash": {
-      "title": "Revision Hash Identifier",
-      "description": "Identifier for the result-set that was created.",
-      "type": "string"
-    },
-    "repository_url": {
-      "title": "Repository URL",
-      "description": "URL for the repository for the revision.",
-      "type": "string"
-    }
-  },
-  "additionalProperties": true,
-  "required": ["version", "revision_hash"]
-}
diff --git a/tests/conftest.py b/tests/conftest.py
index 09d7b0e9d..661ede4dd 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -353,11 +353,6 @@ def pulse_consumer(exchange, request):
     return simpleQueue


-@pytest.fixture
-def pulse_resultset_consumer(request):
-    return pulse_consumer('new-result-set', request)
-
-
 @pytest.fixture
 def pulse_action_consumer(request):
     return pulse_consumer('job-actions', request)
diff --git a/tests/etl/test_pushlog.py b/tests/etl/test_pushlog.py
index 4030e602d..e73118e4e 100644
--- a/tests/etl/test_pushlog.py
+++ b/tests/etl/test_pushlog.py
@@ -11,7 +11,7 @@ from treeherder.etl.pushlog import (HgPushlogProcess,

 def test_ingest_hg_pushlog(jm, initial_data, test_base_dir,
                            test_repository, mock_post_json,
-                           activate_responses, pulse_resultset_consumer):
+                           activate_responses):
     """ingesting a number of pushes should populate result set and revisions"""
     pushlog_path = os.path.join(test_base_dir, 'sample_data',
                                 'hg_pushlog.json')
@@ -34,19 +34,6 @@ def test_ingest_hg_pushlog(jm, initial_data, test_base_dir,

     assert len(pushes_stored) == push_num

-    rev_to_push = set()
-    for push in json.loads(pushlog_content)['pushes'].values():
-        # Add each rev to the set remember we shorten them all down to 12 chars
-        rev_to_push.add(push['changesets'][-1]['node'][0:12])
-
-    # Ensure for each push we sent a pulse notification...
-    for _ in range(0, push_num):
-        message = pulse_resultset_consumer.get(block=True, timeout=2)
-        content = message.payload
-        assert content['revision'] in rev_to_push
-        # Ensure we don't match the same revision twice...
-        rev_to_push.remove(content['revision'])
-
     revisions_stored = jm.get_dhub().execute(
         proc="jobs_test.selects.revision_ids",
         return_type='tuple'
diff --git a/tests/model/derived/test_refdata.py b/tests/model/derived/test_refdata.py
index e225f3d27..95e6d159e 100644
--- a/tests/model/derived/test_refdata.py
+++ b/tests/model/derived/test_refdata.py
@@ -258,27 +258,6 @@ def test_add_job_type(refdata):
     assert row_data == expected


-def test_get_repository_info(refdata, repository_id):
-    """test get_repository_info retrieves the right informations"""
-
-    info = refdata.get_repository_info(repository_id)
-
-    expected = {
-        "dvcs_type": "hg",
-        "name": "mozilla-central",
-        "url": "https://hg.mozilla.org/mozilla-central",
-        "active_status": "active",
-        "codebase": "gecko",
-        "repository_group_id": 1,
-        "description": ""
-    }
-
-    refdata.disconnect()
-
-    for k, v in expected.items():
-        assert info[k] == v
-
-
 @pytest.fixture
 def sample_bugs(test_base_dir):
     filename = os.path.join(
diff --git a/treeherder/model/derived/jobs.py b/treeherder/model/derived/jobs.py
index d0fc48895..13ea56e44 100644
--- a/treeherder/model/derived/jobs.py
+++ b/treeherder/model/derived/jobs.py
@@ -17,7 +17,6 @@ from treeherder.model.models import (Datasource,
                                      Repository)
 from treeherder.model.tasks import (populate_error_summary,
                                     publish_job_action,
-                                    publish_resultset,
                                     publish_resultset_action)

 from .artifacts import ArtifactsModel
diff --git a/treeherder/model/derived/refdata.py b/treeherder/model/derived/refdata.py
index 94090ce39..7e188c51f 100644
--- a/treeherder/model/derived/refdata.py
+++ b/treeherder/model/derived/refdata.py
@@ -1161,18 +1161,6 @@ class RefDataManager(object):

         return id_iter.get_column_data('id')

-    def get_repository_info(self, repository_id):
-        """retrieves all the attributes of a repository"""
-
-        repo = self.execute(
-            proc='reference.selects.get_repository_info',
-            placeholders=[repository_id],
-            debug_show=self.DEBUG,
-            return_type='iter')
-        # retrieve the first elem from DataIterator
-        for r in repo:
-            return r
-
     def get_all_repository_info(self):
         return self.execute(
             proc='reference.selects.get_all_repository_info',
diff --git a/treeherder/model/exchanges.py b/treeherder/model/exchanges.py
index dbf7b259d..692b4af8a 100644
--- a/treeherder/model/exchanges.py
+++ b/treeherder/model/exchanges.py
@@ -10,26 +10,6 @@ class TreeherderPublisher(PulsePublisher):
     """
     exchange_prefix = "v1/"

-    new_result_set = Exchange(
-        exchange='new-result-set',
-        title="New Result-Set Messages",
-        description="""
-            Whenever a new result-set is created a message featuring the
-            `revision_hash` is published on this exchange.
-        """,
-        routing_keys=[
-            Key(
-                name='project',
-                summary="Project (or branch) that this result-set concerns"
-            ),
-            Key(
-                name='revision_hash',
-                summary="result-set identifier for the message"
-            )
-        ],
-        schema="https://treeherder.mozilla.org/schemas/v1/resultset-message.json#"
-    )
-
     resultset_action = Exchange(
         exchange="resultset-actions",
         title="Actions issued by resultset",
diff --git a/treeherder/model/sql/reference.json b/treeherder/model/sql/reference.json
index b0fcddb3a..5b9287fcf 100644
--- a/treeherder/model/sql/reference.json
+++ b/treeherder/model/sql/reference.json
@@ -203,19 +203,6 @@
                 WHERE `name` = ?",
       "host_type":"read_host"
     },
-    "get_repository_info":{
-      "sql": "SELECT *
-              FROM `repository`
-              WHERE `id` = ? AND `active_status` = 'active'",
-      "host_type":"read_host"
-    },
-    "get_all_repository_info":{
-      "sql": "SELECT *
-              FROM `repository`
-              WHERE
-              `active_status` = 'active'",
-      "host_type":"read_host"
-    },
     "get_all_option_collections":{
       "sql":"SELECT option_collection_hash,
              GROUP_CONCAT( name SEPARATOR ' ' ) as opt
diff --git a/treeherder/model/tasks.py b/treeherder/model/tasks.py
index 914821047..0c467ba72 100644
--- a/treeherder/model/tasks.py
+++ b/treeherder/model/tasks.py
@@ -124,52 +124,6 @@ def publish_resultset_runnable_job_action(project, resultset_id, requester,
     )


-@task(name='publish-resultset')
-def publish_resultset(project, ids):
-    # If we don't have a publisher (because of missing configs), then we can't
-    # publish any pulse messages. This is okay, local installs etc. doesn't
-    # need to publish on pulse, and requiring a pulse user is adding more
-    # overhead to an already large development setup process.
-    publisher = pulse_connection.get_publisher()
-    if not publisher:
-        return
-
-    from treeherder.model.derived.jobs import JobsModel
-
-    with JobsModel(project) as jm:
-        # Publish messages with new result-sets
-        for entry in jm.get_result_set_list_by_ids(ids):
-            repository = jm.refdata_model.get_repository_info(entry['repository_id'])
-
-            if repository is None:
-                return
-
-            entry['repository_url'] = repository['url']
-
-            # Don't expose these properties, they are internal, at least that's
-            # what I think without documentation I have no clue... what any of
-            # this is
-            del entry['revisions']  # Not really internal, but too big
-            del entry['repository_id']
-
-            # Set required properties
-            entry['version'] = 1
-            entry['project'] = project
-            # Property revision_hash should already be there, I suspect it is the
-            # result-set identifier...
-
-            # publish the data to pulse
-            publisher.new_result_set(**entry)
-
-    # Basically, I have no idea what context this runs and was inherently
-    # unable to make kombu with or without pyamqp, etc. confirm-publish,
-    # so we're stuck with this super ugly hack where we just close the
-    # connection so that if the process context is destroyed then at least
-    # messages will still get published... Well, assuming nothing goes
-    # wrong, because we're not using confirm channels for publishing...
-    publisher.connection.release()
-
-
 @task(name='populate-error-summary')
 def populate_error_summary(project, artifacts, job_id_lookup):
     """
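
For reviewers who want to see what downstream consumers lose with this change, below is a minimal, illustrative kombu sketch of a consumer bound to the removed `new-result-set` exchange. It is not part of the diff itself. The fully qualified exchange path, queue name, and broker URL/credentials are assumptions inferred from the deleted exchange definition (`exchange_prefix = "v1/"`, routing keys `<project>.<revision_hash>`) and common Mozilla Pulse conventions; adjust them for a real Pulse account.

# Illustrative only -- not part of the change above; assumed names are marked.
from kombu import Connection, Exchange, Queue

PULSE_URL = "amqp://pulseuser:password@pulse.mozilla.org:5671//"  # assumed placeholder

# The deleted definition used exchange_prefix "v1/" and exchange 'new-result-set';
# the fully qualified Pulse name below is an assumption following that pattern.
exchange = Exchange("exchange/treeherder/v1/new-result-set", type="topic", passive=True)
queue = Queue("queue/pulseuser/new-result-set-demo",  # assumed queue name
              exchange=exchange,
              routing_key="mozilla-central.#",  # routing keys were <project>.<revision_hash>
              exclusive=True)


def on_message(body, message):
    # Payloads were validated against the deleted schemas/resultset-message.json:
    # {"version": 1, "project": ..., "revision_hash": ..., "repository_url": ...}
    print(body.get("project"), body.get("revision_hash"))
    message.ack()


with Connection(PULSE_URL, ssl=True) as connection:
    with connection.Consumer(queue, callbacks=[on_message], accept=["json"]):
        connection.drain_events(timeout=30)  # wait up to 30s for a single message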