Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions docs/testing.md
Original file line number Diff line number Diff line change
Expand Up @@ -96,4 +96,24 @@ Just the `test_ingest_pending_pulse_job` within the `/etl` tests
docker-compose run backend pytest tests/ -k test_ingest_pending_pulse_job
```

### Updating backend python test data

There are many parts to the backend data; this section will continue to be updated as we document the process.

For the `sample_data/` directory there is `tests/sample_data/transform.py`, which regenerates:

- `push_data.json`: list of commits
- `job_data.txt`: list of job data as returned from the TH jobs api
- `pulse_consumer/job_data.json`: specific data that pulse would have for related jobs and pushes
- `pulse_consumer/transformed_job_data.json`: what we transform the pulse data to

That will update the data used for `etl/` using recent live data from autoland.

There are a lot of task IDs, revisions, and expected fields to update in tests. Future work could be done to:

- create a revision list and reference it instead of raw revisions
- create an input file with a start date and an end date, and use that instead of hard-coded dates in many tests
- ensure variety of platforms, builds, tests, pass/fail, etc. are included
- push_data.json - adjust the dates to have multiple days (1st +1, 2nd +2, 3rd +3)

[eslint]: https://eslint.org
2 changes: 2 additions & 0 deletions schemas/pulse-job.yml
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,7 @@ properties:
description: |
fail: A failure
exception: An infrastructure error/exception
retry: Task has a known exception and is automatically retried
success: Build/Test executed without error or failure
canceled: The job was canceled by a user
unknown: When the job is not yet completed
Expand All @@ -155,6 +156,7 @@ properties:
- success
- fail
- exception
- retry
- canceled
- superseded
- unknown
Expand Down
72 changes: 37 additions & 35 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -321,22 +321,22 @@ def create(jobs):


@pytest.fixture
def test_job(eleven_job_blobs, create_jobs):
job = eleven_job_blobs[0]
def test_job(hundred_job_blobs, create_jobs):
job = hundred_job_blobs[0]
job["job"].update(
{"taskcluster_task_id": "V3SVuxO8TFy37En_6HcXLs", "taskcluster_retry_id": "0"}
)
return create_jobs([job])[0]


@pytest.fixture
def test_jobs(eleven_job_blobs_new_date, create_jobs):
return create_jobs(eleven_job_blobs_new_date)
def test_jobs(hundred_job_blobs_new_date, create_jobs):
return create_jobs(hundred_job_blobs_new_date)


@pytest.fixture
def test_two_jobs_tc_metadata(eleven_job_blobs_new_date, create_jobs):
job_1, job_2 = eleven_job_blobs_new_date[0:2]
def test_two_jobs_tc_metadata(hundred_job_blobs_new_date, create_jobs):
job_1, job_2 = hundred_job_blobs_new_date[0:2]
job_1["job"].update(
{
"status": "completed",
Expand All @@ -357,13 +357,13 @@ def test_two_jobs_tc_metadata(eleven_job_blobs_new_date, create_jobs):


@pytest.fixture
def test_job_2(eleven_job_blobs, create_jobs):
return create_jobs(eleven_job_blobs[0:2])[1]
def test_job_2(hundred_job_blobs, create_jobs):
return create_jobs(hundred_job_blobs[0:2])[1]


@pytest.fixture
def test_job_3(eleven_job_blobs, create_jobs):
return create_jobs(eleven_job_blobs[0:3])[2]
def test_job_3(hundred_job_blobs, create_jobs):
return create_jobs(hundred_job_blobs[0:3])[2]


@pytest.fixture
Expand Down Expand Up @@ -418,10 +418,11 @@ def try_push_stored(try_repository, sample_push):


@pytest.fixture
def eleven_job_blobs(sample_data, sample_push, test_repository, mock_log_parser):
def hundred_job_blobs(sample_data, sample_push, test_repository, mock_log_parser):
store_push_data(test_repository, sample_push)

num_jobs = 11
# NOTE: when generating new data, we appear to need more jobs to find similar jobs
num_jobs = 100
jobs = sample_data.job_data[0:num_jobs]

max_index = len(sample_push) - 1
Expand All @@ -448,7 +449,7 @@ def eleven_job_blobs(sample_data, sample_push, test_repository, mock_log_parser)


@pytest.fixture
def eleven_job_blobs_new_date(sample_data, sample_push, test_repository, mock_log_parser):
def hundred_job_blobs_new_date(sample_data, sample_push, test_repository, mock_log_parser):
# make unique revisions
counter = 0
for push in sample_push:
Expand Down Expand Up @@ -489,16 +490,16 @@ def eleven_job_blobs_new_date(sample_data, sample_push, test_repository, mock_lo

@pytest.fixture
def eleven_jobs_stored_new_date(
test_repository, failure_classifications, eleven_job_blobs_new_date
test_repository, failure_classifications, hundred_job_blobs_new_date
):
"""stores a list of 11 job samples"""
store_job_data(test_repository, eleven_job_blobs_new_date)
store_job_data(test_repository, hundred_job_blobs_new_date)


@pytest.fixture
def eleven_jobs_stored(test_repository, failure_classifications, eleven_job_blobs):
def eleven_jobs_stored(test_repository, failure_classifications, hundred_job_blobs):
"""stores a list of 11 job samples"""
store_job_data(test_repository, eleven_job_blobs)
store_job_data(test_repository, hundred_job_blobs)


@pytest.fixture
Expand Down Expand Up @@ -563,7 +564,10 @@ def failure_lines(test_job):
def failure_line_logs(test_job):
return create_failure_lines(
test_job,
[(test_line, {"action": "log", "test": None}), (test_line, {"subtest": "subtest2"})],
[
(test_line, {"action": "log", "test": None}),
(test_line, {"subtest": "subtest2"}),
],
)


Expand Down Expand Up @@ -757,7 +761,7 @@ def test_perf_data(test_perf_signature, eleven_jobs_stored):

perf_datum = perf_models.PerformanceDatum.objects.create(
value=10,
push_timestamp=job.push.time,
push_timestamp=job.push.time - datetime.timedelta(days=100),
job=job,
push=job.push,
repository=job.repository,
Expand Down Expand Up @@ -867,7 +871,9 @@ def _fetch_data(self, project):
}

monkeypatch.setattr(
treeherder.etl.files_bugzilla_map.FilesBugzillaMapProcess, "fetch_data", _fetch_data
treeherder.etl.files_bugzilla_map.FilesBugzillaMapProcess,
"fetch_data",
_fetch_data,
)


Expand Down Expand Up @@ -1146,7 +1152,7 @@ def bug_data(eleven_jobs_stored, test_repository, test_push, bugs):
bug_id = bugs[0].bugzilla_id
job_id = jobs[0].id
th_models.BugJobMap.create(job_id=job_id, bugzilla_id=bug_id)
query_string = f"?startday=2012-05-09&endday=2018-05-10&tree={test_repository.name}"
query_string = f"?startday=2025-02-28&endday=2025-03-03&tree={test_repository.name}"

return {
"tree": test_repository.name,
Expand Down Expand Up @@ -1187,23 +1193,19 @@ def test_run_data(bug_data):


@pytest.fixture
def group_data(transactional_db, eleven_job_blobs, create_jobs):
query_string = "?manifest=/test&date=2022-10-01"
def group_data(transactional_db, hundred_job_blobs, create_jobs):
query_string = "?manifest=/test&date=2025-03-01"

jt = []
jt.append(th_models.JobType.objects.create(name="test-windows11-64-24h2/opt-mochitest-plain-1"))
jt.append(th_models.JobType.objects.create(name="test-windows11-64-24h2/opt-mochitest-plain-2"))
jt.append(
th_models.JobType.objects.create(name="test-windows10-64-2004-qr/opt-mochitest-plain-1")
)
jt.append(
th_models.JobType.objects.create(name="test-windows10-64-2004-qr/opt-mochitest-plain-2")
)
jt.append(
th_models.JobType.objects.create(name="test-windows10-64-2004-qr/opt-mochitest-plain-swr-1")
th_models.JobType.objects.create(name="test-windows11-64-24h2/opt-mochitest-plain-swr-1")
)

g1 = th_models.Group.objects.create(name="/test")
for i in range(3):
job = eleven_job_blobs[i]
job = hundred_job_blobs[i]
job["job"].update(
{
"taskcluster_task_id": f"V3SVuxO8TFy37En_6HcXL{i}",
Expand All @@ -1213,23 +1215,23 @@ def group_data(transactional_db, eleven_job_blobs, create_jobs):
)
j = create_jobs([job])[0]

# when creating the job, we also create the joblog, we want the last job log entry
# when creating the job, we also create the joblog, we want the last entry
job_log = th_models.JobLog.objects.last()

th_models.GroupStatus.objects.create(status=1, duration=1, job_log=job_log, group=g1)

query_string = "?manifest=/test&startdate=2025-03-01"
return {
"date": j.submit_time,
"manifest": "/test",
"query_string": query_string,
"expected": {
"job_type_names": [
"test-windows10-64-2004-qr/opt-mochitest-plain",
"test-windows10-64-2004-qr/opt-mochitest-plain-swr",
"test-windows11-64-24h2/opt-mochitest-plain",
],
"manifests": [
{
"/test": [[0, "passed", 1, 2], [1, "passed", 1, 1]],
"/test": [[0, "passed", 1, 1]],
}
],
},
Expand Down
2 changes: 1 addition & 1 deletion tests/etl/test_job_ingestion.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def test_ingest_single_sample_job(
job = Job.objects.get(id=1)
# Ensure we don't inadvertently change the way we generate job-related hashes.
assert job.option_collection_hash == "32faaecac742100f7753f0c1d0aa0add01b4046b"
assert job.signature.signature == "d900aca1e93a9ef2d9e00c1877c838ea920abca1"
assert job.signature.signature == "6202a0ad903a317ad2220d84ef19a676544b5d66"


def test_ingest_all_sample_jobs(
Expand Down
53 changes: 37 additions & 16 deletions tests/etl/test_job_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,29 +120,50 @@ def test_ingest_pulse_jobs(
jl.process_job(job, "https://firefox-ci-tc.services.mozilla.com")

jobs = Job.objects.all()
assert len(jobs) == 5
assert len(jobs) == 30

assert [job.taskcluster_metadata for job in jobs]
assert set(TaskclusterMetadata.objects.values_list("task_id", flat=True)) == set(
[
"IYyscnNMTLuxzna7PNqUJQ",
"XJCbbRQ6Sp-UL1lL-tw5ng",
"ZsSzJQu3Q7q2MfehIBAzKQ",
"bIzVZt9jQQKgvQYD3a2HQw",
"AI3Nrr3gSDSpZ9E9aBA3rg",
"BAG7ifS1QbGCDwiOP7NklQ",
"CaK6NlfBSf6F-NAVrrKJDQ",
"CilZCnmiTKmagJe_h6Hq5A",
"FNT3BLiQRHO14NNgonjQQg",
"FcbIUoVbS4utxFES84wrPw",
"FclD6gA-TTGgvq_r9-LSDg",
"GPLk78m6Sz6TTLJFVca4Xw",
"GcvHP6HLSeO_rKYDN2y_Tg",
"I-Hg7bM4TUOq4JqnX0pt0g",
"I2Y-TBNcQPSJzsKlB95rfQ",
"M1ECjPJBTlmwJxZq5pWyvg",
"MKq8mMM-RIOxztXO5ng-_A",
"MrrbifzBQJefUbS2ym4Qag",
"ORYYNMhET0yxGMvel4Jujg",
"TqWDDGoWSbCH93RTTxPAWg",
"V8rtIDroRV-G9bzjJglS0A",
"VVa2amzMS-2cSDbig9RHsw",
"YIOK401yR2GvygIFcfPVBg",
"b_QCzMjVQmKPyO5Il0Jedw",
"bljbLRFdT4KGCWJ2_C6RsQ",
"c2dxYucCSMWPlTkb70r89g",
"cPe8y071Spat09dlAzCGug",
"cZ7gc9JYQa2UPEC_EIxIug",
"dB8R5AXORZeCpDfQlYUlow",
"e1YPllz6TMawISpugkRx1g",
"eJ9PG41tSaWzNU1uY7-uSQ",
"edzgzCphTAS-QN_TAnf7eA",
"ekQaeC_yR0K8jPKx28E7EA",
"ftXsRyOwRgeiiYHyITXUOA",
]
)

job_logs = JobLog.objects.filter(job_id=1)
assert job_logs.count() == 2
assert job_logs.count() == 1
logs_expected = [
{
"name": "errorsummary_json",
"url": "http://example.com/blobs/Mozilla-Inbound-Non-PGO/sha512/05c7f57df6583c6351c6b49e439e2678e0f43c2e5b66695ea7d096a7519e1805f441448b5ffd4cc3b80b8b2c74b244288fda644f55ed0e226ef4e25ba02ca466",
"parse_status": 0,
},
{
"name": "live_backing_log",
"url": "http://ftp.mozilla.org/pub/mozilla.org/spidermonkey/tinderbox-builds/mozilla-inbound-linux64/mozilla-inbound_linux64_spidermonkey-warnaserr-bm57-build1-build352.txt.gz",
"url": "https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/AI3Nrr3gSDSpZ9E9aBA3rg/runs/0/artifacts/public/logs/live_backing.log",
"parse_status": 0,
},
]
Expand Down Expand Up @@ -191,10 +212,10 @@ def test_ingest_pending_pulse_job(

job = jobs[0]
assert job.taskcluster_metadata
assert job.taskcluster_metadata.task_id == "IYyscnNMTLuxzna7PNqUJQ"
assert job.taskcluster_metadata.task_id == "AI3Nrr3gSDSpZ9E9aBA3rg"

# should not have processed any log or details for pending jobs
assert JobLog.objects.count() == 2
assert JobLog.objects.count() == 1


def test_ingest_pulse_jobs_bad_project(
Expand All @@ -214,7 +235,7 @@ def test_ingest_pulse_jobs_bad_project(
jl.process_job(pulse_job, "https://firefox-ci-tc.services.mozilla.com")

# length of pulse jobs is 30, so one will be skipped due to bad project
assert Job.objects.count() == 4
assert Job.objects.count() == 29


@responses.activate
Expand All @@ -228,7 +249,7 @@ def test_ingest_pulse_jobs_with_missing_push(pulse_jobs):
job["origin"]["revision"] = "1234567890123456789012345678901234567890"
responses.add(
responses.GET,
"https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/IYyscnNMTLuxzna7PNqUJQ",
"https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/AI3Nrr3gSDSpZ9E9aBA3rg",
json={},
content_type="application/json",
status=200,
Expand Down
6 changes: 3 additions & 3 deletions tests/intermittents_commenter/expected_comment.text
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@ This is the #1 most frequent failure this week.
* mozilla-central: 1

## Table
| |**no_variant**|
| |**spi-nw**|
|---|:-:|
|**linux1804-x86/debug**|1|
|**macosx1015-64-qr/debug**|1|

## For more details, see:
https://treeherder.mozilla.org/intermittent-failures/bugdetails?bug=1&startday=2012-05-09&endday=2018-05-10&tree=all
https://treeherder.mozilla.org/intermittent-failures/bugdetails?bug=1&startday=2022-05-09&endday=2025-05-10&tree=all
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,11 @@ This is the #1 most frequent failure this week.
* mozilla-central: 5

## Table
| |**headless**|**no_variant**|
|---|:-:|:-:|
|**linux1804-x86/debug**| |1|
|**linux1804-x86/opt**| |1|
|**mac1015-x86_64/debug**| |1|
|**mac1120-x86_64/debug**|1| |
|**windows7-32-x86/debug**|1| |
| |**headless**|**spi-nw**|**swr**|
|---|:-:|:-:|:-:|
|**linux1804-64-qr/debug**|2|1| |
|**macosx1015-64-qr/debug**| |1| |
|**macosx1470-64/debug**| | |1|

## For more details, see:
https://treeherder.mozilla.org/intermittent-failures/bugdetails?bug=1&startday=2012-05-09&endday=2018-05-10&tree=all
https://treeherder.mozilla.org/intermittent-failures/bugdetails?bug=1&startday=2022-05-09&endday=2025-05-10&tree=all
8 changes: 4 additions & 4 deletions tests/intermittents_commenter/test_commenter.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@

@responses.activate
def test_intermittents_commenter(bug_data, mock_test_variants_firefoxci_request):
startday = "2012-05-09"
endday = "2018-05-10"
startday = "2022-05-09"
endday = "2025-05-10"
alt_startday = startday
alt_endday = endday

Expand Down Expand Up @@ -46,8 +46,8 @@ def test_intermittents_commenter(bug_data, mock_test_variants_firefoxci_request)
def test_intermittents_commenter_with_failures(
bug_data_with_5_failures, mock_test_variants_firefoxci_request
):
startday = "2012-05-09"
endday = "2018-05-10"
startday = "2022-05-09"
endday = "2025-05-10"
alt_startday = startday
alt_endday = endday

Expand Down
Loading