Jonathan Eads 2014-06-02 13:48:58 -07:00
Parent a8d0d10e50
Commit 5ee8880971
18 changed files with 101 additions and 216 deletions
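Every hunk below applies the same two-part cleanup: tests and fixtures that never touch the jm fixture drop it from their signatures, and those that do query through jm now call jm.disconnect() explicitly once the query is finished. A minimal sketch of the resulting shape, reassembled from the hunks with normal indentation (the pending_jobs_stored fixture name and the inline comments are illustrative assumptions; the other names and calls appear in the diff, and do_test and the jm fixture come from the surrounding test modules and conftest):

import pytest

@pytest.fixture
def completed_jobs_loaded(jm, completed_jobs_stored):
    # fixtures that hit the database now disconnect explicitly when done
    jm.process_objects(1, raise_errors=True)
    jm.disconnect()

def test_ingest_pending_jobs(jm, initial_data, pending_jobs_stored):
    stored_obj = jm.get_jobs_dhub().execute(
        proc="jobs_test.selects.jobs")
    jm.disconnect()  # release the connection before asserting
    assert len(stored_obj) == 1

def test_crashtest_passing(initial_data):  # unused ``jm`` argument removed
    """Process a job with a single log reference."""
    do_test("mozilla-central_fedora-b2g_test-crashtest-1-bm54-tests1-linux-build50")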

View File

@ -86,11 +86,11 @@ def completed_jobs_stored(
test_utils.post_collection(jm.project, tjc)
@pytest.fixture
def completed_jobs_loaded(jm, completed_jobs_stored):
jm.process_objects(1, raise_errors=True)
jm.disconnect()
@pytest.fixture
def mock_send_request(monkeypatch, jm):
@ -110,7 +110,8 @@ def mock_send_request(monkeypatch, jm):
str(signed_uri), params=th_collection.get_collection_data()
)
response.getcode = lambda: response.status_int
return response
monkeypatch.setattr(TreeherderRequest, 'send', _send)
monkeypatch.setattr(TreeherderRequest, 'send', _send)

View File

@ -35,6 +35,9 @@ def test_bz_api_process(mock_extract, refdata):
proc='refdata_test.selects.test_bugscache',
return_type='tuple'
)
refdata.disconnect()
# the number of rows inserted should equal the number of bugs
assert len(row_data) == 10

View File

@ -58,5 +58,7 @@ def test_ingest_pending_jobs(jm, initial_data,
stored_obj = jm.get_jobs_dhub().execute(
proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 1

View File

@ -2,7 +2,7 @@
from django.conf import settings
def test_get_revision_hash(jm, initial_data,
def test_get_revision_hash(initial_data,
result_set_stored, mock_get_remote_content):
"""That the correct revision_hash is retrieved is the revision exists"""
from treeherder.etl import common
@ -12,7 +12,7 @@ def test_get_revision_hash(jm, initial_data,
assert resultset[project][revision]['revision_hash'] == result_set_stored[0]['revision_hash']
def test_get_revision_hash_none(jm, mock_get_remote_content,
def test_get_revision_hash_none(mock_get_remote_content,
initial_data, result_set_stored):
"""Test that none is returned if the revision doesn't exist"""
from treeherder.etl import common

View File

@ -62,6 +62,8 @@ def test_load_data(sample_data, jm, mock_post_json_data,
stored_obj = jm.get_os_dhub().execute(
proc="objectstore_test.selects.all")
jm.disconnect()
assert len(stored_obj) == 1
@ -91,4 +93,6 @@ def test_load_data_missing_attribute(sample_data, jm, mock_post_json_data, initi
stored_obj = jm.get_os_dhub().execute(
proc="objectstore_test.selects.all")
jm.disconnect()
assert len(stored_obj) == 0

View File

@ -24,3 +24,5 @@ def test_ingest_hg_pushlog(jm, initial_data, test_base_dir,
)
assert len(revisions_stored) == 15
jm.disconnect()

View File

@ -36,48 +36,47 @@ def do_test(log):
# assert act == exp, json.dumps(act, indent=4)
def test_crashtest_passing(jm, initial_data):
def test_crashtest_passing(initial_data):
"""Process a job with a single log reference."""
do_test("mozilla-central_fedora-b2g_test-crashtest-1-bm54-tests1-linux-build50")
def test_opt_test_failing(jm, initial_data):
def test_opt_test_failing(initial_data):
"""Process log with printlines and errors"""
do_test("mozilla-central_mountainlion_test-mochitest-2-bm80-tests1-macosx-build138")
def test_build_failing(jm, initial_data):
def test_build_failing(initial_data):
"""Process a job with a single log reference."""
do_test("mozilla-central-macosx64-debug-bm65-build1-build15")
def test_mochitest_debug_passing(jm, initial_data):
def test_mochitest_debug_passing(initial_data):
"""Process a job with a single log reference."""
do_test("mozilla-central_mountainlion-debug_test-mochitest-2-bm80-tests1-macosx-build93")
def test_mochitest_pass(jm, initial_data):
def test_mochitest_pass(initial_data):
"""Process a job with a single log reference."""
do_test("mozilla-central_mountainlion_test-mochitest-2-bm77-tests1-macosx-build141")
def test_mochitest_fail(jm, initial_data):
def test_mochitest_fail(initial_data):
"""Process a job with a single log reference."""
do_test("mozilla-esr17_xp_test_pgo-mochitest-browser-chrome-bm74-tests1-windows-build12")
def test_mochitest_process_crash(jm, initial_data):
def test_mochitest_process_crash(initial_data):
"""Test a mochitest log that has PROCESS-CRASH """
do_test("mozilla-inbound_ubuntu64_vm-debug_test-mochitest-other-bm53-tests1-linux-build122")
def test_jetpack_fail(jm, initial_data):
def test_jetpack_fail(initial_data):
"""Process a job with a single log reference."""
do_test("ux_ubuntu32_vm_test-jetpack-bm67-tests1-linux-build16")

View File

@ -52,14 +52,14 @@ def do_test(log, check_errors=True):
# assert act == exp, json.dumps(act, indent=4)
def test_crashtest_passing(jm, initial_data):
def test_crashtest_passing(initial_data):
"""Process a job with a single log reference."""
do_test(
"mozilla-central_fedora-b2g_test-crashtest-1-bm54-tests1-linux-build50"
)
def test_mochitest_pass(jm, initial_data):
def test_mochitest_pass(initial_data):
"""Process a job with a single log reference."""
do_test(
@ -68,7 +68,7 @@ def test_mochitest_pass(jm, initial_data):
@slow
def test_mochitest_fail(jm, initial_data):
def test_mochitest_fail(initial_data):
"""Process a job with a single log reference."""
do_test(
@ -76,7 +76,7 @@ def test_mochitest_fail(jm, initial_data):
)
def test_mochitest_process_crash(jm, initial_data):
def test_mochitest_process_crash(initial_data):
"""Test a mochitest log that has PROCESS-CRASH """
do_test(
@ -84,7 +84,7 @@ def test_mochitest_process_crash(jm, initial_data):
)
@slow
def test_jetpack_fail(jm, initial_data):
def test_jetpack_fail(initial_data):
"""Process a job with a single log reference."""
do_test(
@ -93,7 +93,7 @@ def test_jetpack_fail(jm, initial_data):
@slow
def test_crash_1(jm, initial_data):
def test_crash_1(initial_data):
"""Test from old log parser"""
do_test(
"crash-1"
@ -101,7 +101,7 @@ def test_crash_1(jm, initial_data):
@slow
def test_crash_2(jm, initial_data):
def test_crash_2(initial_data):
"""Test from old log parser"""
do_test(
"crash-2"
@ -109,7 +109,7 @@ def test_crash_2(jm, initial_data):
@slow
def test_crash_mac_1(jm, initial_data):
def test_crash_mac_1(initial_data):
"""Test from old log parser"""
do_test(
"crash-mac-1"
@ -117,7 +117,7 @@ def test_crash_mac_1(jm, initial_data):
@slow
def test_crashtest_timeout(jm, initial_data):
def test_crashtest_timeout(initial_data):
"""Test from old log parser"""
do_test(
"crashtest-timeout"
@ -125,7 +125,7 @@ def test_crashtest_timeout(jm, initial_data):
@slow
def test_jsreftest_fail(jm, initial_data):
def test_jsreftest_fail(initial_data):
"""Test from old log parser"""
do_test(
"jsreftest-fail"
@ -133,7 +133,7 @@ def test_jsreftest_fail(jm, initial_data):
@slow
def test_jsreftest_timeout_crash(jm, initial_data):
def test_jsreftest_timeout_crash(initial_data):
"""Test from old log parser"""
do_test(
"jsreftest-timeout-crash"
@ -141,7 +141,7 @@ def test_jsreftest_timeout_crash(jm, initial_data):
@slow
def test_leaks_1(jm, initial_data):
def test_leaks_1(initial_data):
"""Test from old log parser"""
do_test(
"leaks-1"
@ -149,7 +149,7 @@ def test_leaks_1(jm, initial_data):
@slow
def test_mochitest_test_end(jm, initial_data):
def test_mochitest_test_end(initial_data):
"""Test from old log parser"""
do_test(
"mochitest-test-end"
@ -157,7 +157,7 @@ def test_mochitest_test_end(jm, initial_data):
@slow
def test_multiple_timeouts(jm, initial_data):
def test_multiple_timeouts(initial_data):
"""Test from old log parser"""
do_test(
"multiple-timeouts"
@ -165,7 +165,7 @@ def test_multiple_timeouts(jm, initial_data):
@slow
def test_opt_objc_exception(jm, initial_data):
def test_opt_objc_exception(initial_data):
"""Test from old log parser"""
do_test(
"opt-objc-exception"
@ -173,7 +173,7 @@ def test_opt_objc_exception(jm, initial_data):
@slow
def test_reftest_fail_crash(jm, initial_data):
def test_reftest_fail_crash(initial_data):
"""Test from old log parser"""
do_test(
"reftest-fail-crash"
@ -181,7 +181,7 @@ def test_reftest_fail_crash(jm, initial_data):
@slow
def test_reftest_jserror(jm, initial_data):
def test_reftest_jserror(initial_data):
"""Test from old log parser"""
do_test(
"reftest-jserror"
@ -189,7 +189,7 @@ def test_reftest_jserror(jm, initial_data):
@slow
def test_reftest_opt_fail(jm, initial_data):
def test_reftest_opt_fail(initial_data):
"""Test from old log parser"""
do_test(
"reftest-opt-fail"
@ -197,7 +197,7 @@ def test_reftest_opt_fail(jm, initial_data):
@slow
def test_reftest_timeout(jm, initial_data):
def test_reftest_timeout(initial_data):
"""Test from old log parser"""
do_test(
"reftest-timeout"
@ -205,35 +205,35 @@ def test_reftest_timeout(jm, initial_data):
@slow
def test_tinderbox_exception(jm, initial_data):
def test_tinderbox_exception(initial_data):
"""Test from old log parser"""
do_test(
"tinderbox-exception"
)
def test_xpcshell_crash(jm, initial_data):
def test_xpcshell_crash(initial_data):
"""Test from old log parser"""
do_test(
"xpcshell-crash"
)
def test_xpcshell_multiple(jm, initial_data):
def test_xpcshell_multiple(initial_data):
"""Test from old log parser"""
do_test(
"xpcshell-multiple"
)
def test_xpcshell_timeout(jm, initial_data):
def test_xpcshell_timeout(initial_data):
"""Test from old log parser"""
do_test(
"xpcshell-timeout"
)
@slow
def test_check_errors_false(jm, initial_data, monkeypatch):
def test_check_errors_false(initial_data, monkeypatch):
"""ensure that parse_line is not called on the error parser."""
mock_pl = MagicMock(name="parse_line")
@ -245,7 +245,7 @@ def test_check_errors_false(jm, initial_data, monkeypatch):
)
assert mock_pl.called is False
def test_check_errors_true(jm, initial_data, monkeypatch):
def test_check_errors_true(initial_data, monkeypatch):
"""ensure that parse_line is called on the error parser."""
mock_pl = MagicMock(name="parse_line")

View File

@ -8,7 +8,7 @@ from treeherder.log_parser.parsers import ErrorParser
@pytest.fixture
def jobs_with_local_log(jm, initial_data):
def jobs_with_local_log(initial_data):
log = "mozilla-central_fedora-b2g_test-crashtest-1-bm54-tests1-linux-build50"
sample_data = SampleData()
url = "file://{0}".format(
@ -48,6 +48,9 @@ def test_parse_log(jm, initial_data, jobs_with_local_log, sample_resultset, mock
placeholders=[job_id]
)
jm.disconnect()
# we must have at least 2 artifacts: one for the log viewer and another one
# for the job artifact panel
assert len(job_artifacts) >= 2

View File

@ -17,6 +17,8 @@ def test_unicode(jm):
"""Unicode representation of a ``JobModel`` is the project name."""
assert unicode(jm) == unicode(jm.project)
jm.disconnect()
def test_disconnect(jm):
"""test that your model disconnects"""
@ -45,6 +47,7 @@ def test_ingest_single_sample_job(jm, refdata, sample_data, initial_data,
test_utils.do_job_ingestion(jm, refdata, job_data, sample_resultset)
jm.disconnect()
refdata.disconnect()
def test_ingest_all_sample_jobs(jm, refdata, sample_data, initial_data, sample_resultset, mock_log_parser):
"""
@ -58,6 +61,7 @@ def test_ingest_all_sample_jobs(jm, refdata, sample_data, initial_data, sample_r
test_utils.do_job_ingestion(jm, refdata, job_data, sample_resultset)
jm.disconnect()
refdata.disconnect()
def test_cycle_all_data(jm, refdata, sample_data, initial_data, sample_resultset, mock_log_parser):
"""
@ -88,6 +92,7 @@ def test_cycle_all_data(jm, refdata, sample_data, initial_data, sample_resultset
jobs_after = jm.get_dhub(jm.CT_JOBS).execute(proc="jobs_test.selects.jobs")
jm.disconnect()
refdata.disconnect()
assert len(jobs_before) == job_count
@ -141,6 +146,7 @@ def test_cycle_one_job(jm, refdata, sample_data, initial_data, sample_resultset,
assert len(jobs_count_after_delete) == 0
jm.disconnect()
refdata.disconnect()
def test_bad_date_value_ingestion(jm, initial_data, mock_log_parser):
"""

View File

@ -304,6 +304,7 @@ def verify_artifacts(jm, artifacts_ref):
assert artifacts[key]['type'] == artifacts_ref[key]['type']
assert json.loads(artifacts[key]['blob']) == artifacts_ref[key]['blob']
def verify_coalesced(jm, coalesced_job_guids, coalesced_replacements):
coalesced_job_guid_list = coalesced_job_guids.keys()
@ -344,183 +345,6 @@ def load_exp(filename):
return {}
class SourceDictBuilder(object):
"""Given a ``job_id``, rebuild the dictionary the source came from."""
def __init__(self, jm, job_guid):
self.jm = jm
self.job_guid = job_guid
job_data = self.jm.get_jobs_dhub().execute(
proc="jobs_test.selects.row_by_guid",
placeholders=[self.job_guid],
return_type="iter"
).next()
self.job_id = job_data['id']
def as_dict(self):
source = self.jm.get_jobs_dhub().execute(
proc="jobs_test.selects.job_source",
placeholders=[self.job_id],
return_type="iter"
).next()
source["repository"] = self._get_repository(
source["repository_id"])
del(source["repository_id"])
return unicode_keys(source)
def _get_repository(self, obj_id):
obj = self.jm.refdata_model.get_row_by_id(
"repository",
obj_id,
).get_column_data("name")
return obj
class JobDictBuilder(object):
"""Given a ``job_id``, rebuild the dictionary the job came from."""
def __init__(self, jm, job_guid):
self.jm = jm
self.job_guid = job_guid
job_data = self.jm.get_jobs_dhub().execute(
proc="jobs_test.selects.row_by_guid",
placeholders=[self.job_guid],
return_type="iter"
).next()
self.job_id = job_data['id']
def as_dict(self):
job = self.jm.get_job(self.job_id)
job["artifact"] = self._get_artifact()
job["log_references"] = self._get_logs()
job["option_collection"] = self._get_option_collection(
job["option_collection_hash"])
del(job["option_collection_hash"])
job["machine_platform"] = self._get_machine_platform(
job["machine_platform_id"])
del(job["machine_platform_id"])
job["build_platform"] = self._get_build_platform(
job["build_platform_id"])
del(job["build_platform_id"])
job["machine"] = self._get_machine(
job["machine_id"])
del(job["machine_id"])
del(job["machine_name"])
job["product_name"] = self._get_product(
job["product_id"])
del(job["product_id"])
job["name"] = self._get_name(
job["job_type_id"])
del(job["job_type_id"])
del(job["id"])
del(job["active_status"])
del(job["result_set_id"])
if not job["job_coalesced_to_guid"]:
del(job["job_coalesced_to_guid"])
return unicode_keys(job)
def _get_option_collection(self, option_collection_hash):
"""
Needs to work with hash. Get row by id won't work anymore.
probably need a new getter where it gets the option id
but the hash means there's possibly more than one option.
maybe I need mauro to make a splitter get method?
"""
option_iter = self.jm.refdata_model.get_option_names(
option_collection_hash)
options = {}
for name_dict in option_iter:
options[name_dict["name"]] = True
return options
def _get_machine_platform(self, obj_id):
obj = self.jm.refdata_model.get_row_by_id(
"machine_platform",
obj_id,
).next()
del(obj["active_status"])
del(obj["id"])
return unicode_keys(obj)
def _get_build_platform(self, obj_id):
obj = self.jm.refdata_model.get_row_by_id(
"build_platform",
obj_id,
).next()
del(obj["active_status"])
del(obj["id"])
return unicode_keys(obj)
def _get_machine(self, obj_id):
obj = self.jm.refdata_model.get_row_by_id(
"machine",
obj_id,
).get_column_data("name")
return obj
def _get_product(self, obj_id):
obj = self.jm.refdata_model.get_row_by_id(
"product",
obj_id,
).get_column_data("name")
return obj
def _get_name(self, obj_id):
job_type = self.jm.refdata_model.get_row_by_id(
"job_type",
obj_id,
).next()
return job_type['name']
def _get_logs(self):
logs = self.jm.get_jobs_dhub().execute(
proc="jobs_test.selects.job_log_urls",
placeholders=[self.job_id],
key_column="id",
return_type='dict',
)
log_values = []
for log in logs.values():
del(log["active_status"])
del(log["id"])
del(log["job_id"])
log_values.append(unicode_keys(log))
return log_values
def _get_artifact(self):
artifact = self.jm.get_jobs_dhub().execute(
proc="jobs_test.selects.job_artifact",
placeholders=[self.job_id],
key_column="id",
return_type='dict',
)
if not len(artifact):
artifact = {}
else:
artifact = artifact[self.job_id]
del(artifact["active_status"])
del(artifact["id"])
del(artifact["job_id"])
return unicode_keys(artifact)
def unicode_keys(d):
return dict([(unicode(k), v) for k, v in d.items()])

View File

@ -31,6 +31,8 @@ def test_artifact_detail(webapp, eleven_jobs_processed, sample_artifacts, jm):
"name"
])
jm.disconnect()
def test_artifact_detail_not_found(webapp, jm):
"""
@ -44,6 +46,7 @@ def test_artifact_detail_not_found(webapp, jm):
)
assert resp.status_int == 404
jm.disconnect()
def test_artifact_detail_bad_project(webapp, jm):
"""
@ -58,3 +61,5 @@ def test_artifact_detail_bad_project(webapp, jm):
assert resp.status_int == 404
assert resp.json == {"detail": "No project with name foo"}
jm.disconnect()

View File

@ -25,6 +25,8 @@ def test_create_bug_job_map_no_auth(eleven_jobs_processed, jm):
assert resp.status_code == 403
jm.disconnect()
def test_create_bug_job_map(eleven_jobs_processed, mock_message_broker, jm):
"""
@ -52,6 +54,8 @@ def test_create_bug_job_map(eleven_jobs_processed, mock_message_broker, jm):
assert (bug_job_map_obj,) == jm.get_bug_job_map_list(0, 1)
jm.disconnect()
def test_create_bug_job_map_dup(eleven_jobs_processed, mock_message_broker, jm):
"""
@ -84,6 +88,7 @@ def test_create_bug_job_map_dup(eleven_jobs_processed, mock_message_broker, jm):
assert (bug_job_map_obj,) == jm.get_bug_job_map_list(0, 1)
jm.disconnect()
def test_bug_job_map_list(webapp, jm, eleven_jobs_processed):
"""
@ -107,6 +112,8 @@ def test_bug_job_map_list(webapp, jm, eleven_jobs_processed):
for i, v in enumerate(expected):
assert v == resp.json[i]
jm.disconnect()
def test_bug_job_map_detail(webapp, jm, eleven_jobs_processed):
"""
test retrieving a single bug_job_map entry
@ -129,6 +136,7 @@ def test_bug_job_map_detail(webapp, jm, eleven_jobs_processed):
assert resp.json == {"job_id": job_id, "bug_id": bug_id, "type": "manual"}
jm.disconnect()
def test_bug_job_map_delete(webapp, eleven_jobs_processed,
jm, mock_message_broker):
@ -160,6 +168,7 @@ def test_bug_job_map_delete(webapp, eleven_jobs_processed,
content = json.loads(resp.content)
assert content == {"message": "Bug job map deleted"}
jm.disconnect()
def test_bug_job_map_delete_no_auth(jm, eleven_jobs_processed):
"""
@ -184,3 +193,5 @@ def test_bug_job_map_delete_no_auth(jm, eleven_jobs_processed):
)
assert resp.status_code == 403
jm.disconnect()

View File

@ -104,6 +104,8 @@ def test_job_detail(webapp, eleven_jobs_processed, sample_artifacts, jm):
assert isinstance(resp.json, dict)
assert resp.json["id"] == job["id"]
jm.disconnect()
def test_job_detail_bad_project(webapp, eleven_jobs_processed, jm):
"""
@ -117,6 +119,7 @@ def test_job_detail_bad_project(webapp, eleven_jobs_processed, jm):
webapp.get(badurl, status=404)
jm.disconnect()
def test_job_detail_not_found(webapp, jm):
"""

View File

@ -17,6 +17,7 @@ def test_note_list(webapp, sample_notes, jm):
assert isinstance(resp.json, list)
note_list = resp.json
assert set(note_list[0].keys()) == set([
'note_timestamp',
'job_id',
@ -56,6 +57,8 @@ def test_note_list(webapp, sample_notes, jm):
"act": note_list
})
jm.disconnect()
def test_note_detail(webapp, sample_notes, jm):
"""
@ -83,6 +86,8 @@ def test_note_detail(webapp, sample_notes, jm):
'id'
])
jm.disconnect()
def test_note_detail_not_found(webapp, jm):
"""
@ -96,6 +101,7 @@ def test_note_detail_not_found(webapp, jm):
)
assert resp.status_int == 404
jm.disconnect()
def test_note_detail_bad_project(webapp, jm):
"""
@ -110,6 +116,7 @@ def test_note_detail_bad_project(webapp, jm):
assert resp.status_int == 404
assert resp.json == {"detail": "No project with name foo"}
jm.disconnect()
def test_create_note(webapp, eleven_jobs_processed, mock_message_broker, jm):
"""
@ -149,6 +156,7 @@ def test_create_note(webapp, eleven_jobs_processed, mock_message_broker, jm):
u'id': 1
}
jm.disconnect()
def test_create_note_no_auth(eleven_jobs_processed, jm):
"""
@ -169,6 +177,8 @@ def test_create_note_no_auth(eleven_jobs_processed, jm):
assert resp.status_code == 403
jm.disconnect()
def test_delete_note(webapp, sample_notes, mock_message_broker, jm):
"""
test deleting a single note via endpoint
@ -189,3 +199,5 @@ def test_delete_note(webapp, sample_notes, mock_message_broker, jm):
assert resp.status_code == 200, resp
assert len(new_notes) == len(notes)-1
jm.disconnect()

View File

@ -32,6 +32,8 @@ def test_objectstore_create(job_sample, jm):
assert stored_objs[0]['job_guid'] == job_sample["job"]["job_guid"]
jm.disconnect()
def test_objectstore_list(webapp, eleven_jobs_stored, jm):
"""

View File

@ -115,6 +115,7 @@ def test_resultset_list_empty_rs_still_show(webapp, initial_data,
assert resp.status_int == 200
assert len(resp.json['results']) == 10
jm.disconnect()
def test_resultset_list_filter_by_revision(webapp, eleven_jobs_processed, jm):
"""
@ -179,6 +180,8 @@ def test_resultset_list_filter_by_date(webapp, initial_data,
u'startdate': u'2013-08-10'}
)
jm.disconnect()
def test_resultset_list_without_jobs(webapp, initial_data,
sample_resultset, jm):
@ -207,6 +210,7 @@ def test_resultset_list_without_jobs(webapp, initial_data,
u'repository': u'test_treeherder'
}
jm.disconnect()
def test_resultset_detail(webapp, eleven_jobs_processed, jm):
"""
@ -281,6 +285,8 @@ def test_resultset_create(sample_resultset, jm, initial_data):
assert len(stored_objs) == 1
assert stored_objs[0]['revision_hash'] == sample_resultset[0]['revision_hash']
jm.disconnect()
def test_resultset_with_bad_secret(sample_resultset, jm, initial_data):
trsc = TreeherderResultSetCollection()

View File

@ -73,6 +73,7 @@ def sample_artifacts(jm, sample_data):
jm.load_job_data(jobs)
@pytest.fixture
def sample_notes(jm, sample_data, eleven_jobs_processed):
"""provide 11 jobs with job notes."""
@ -87,3 +88,4 @@ def sample_notes(jm, sample_data, eleven_jobs_processed):
"kellyclarkson",
"you look like a man-o-lantern"
)