• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1import logging
2from unittest.mock import AsyncMock, patch
3
4import pytest
5
6from pipeline_message import (
7    get_failed_test_summary_message,
8    get_problem_jobs,
9    get_trace_failures,
10    main,
11    process_problem_jobs,
12    search_job_log_for_errors,
13    sort_failed_tests_by_status,
14    unexpected_improvements,
15)
16
17
def test_get_problem_jobs():
    """Failed/canceled jobs are kept, while postmerge and performance stages are skipped."""
    all_jobs = [
        {"stage": stage, "status": status}
        for stage, status in (
            ("build", "failed"),
            ("test", "canceled"),
            ("postmerge", "failed"),
            ("performance", "failed"),
            ("deploy", "failed"),
        )
    ]

    result = get_problem_jobs(all_jobs)

    assert len(result) == 3
    # Only the non-postmerge/performance problem jobs survive, in order.
    for job, expected_stage in zip(result, ("build", "test", "deploy")):
        assert job["stage"] == expected_stage
33
34
def test_sort_failed_tests_by_status():
    """Each CSV row is bucketed by its status column, preserving input order."""
    rows = (
        "Test1,UnexpectedImprovement",
        "Test2,Fail",
        "Test3,Crash",
        "Test4,Timeout",
        "Test5,Fail",
        "Test6,UnexpectedImprovement",
    )
    failures_csv = "\n".join(rows) + "\n"

    buckets = sort_failed_tests_by_status(failures_csv)

    # Bucket sizes first, then the exact (ordered) contents.
    assert len(buckets["unexpected_improvements"]) == 2
    assert len(buckets["fails"]) == 2
    assert len(buckets["crashes"]) == 1
    assert len(buckets["timeouts"]) == 1

    assert buckets["unexpected_improvements"] == [
        "Test1,UnexpectedImprovement",
        "Test6,UnexpectedImprovement",
    ]
    assert buckets["fails"] == ["Test2,Fail", "Test5,Fail"]
    assert buckets["crashes"] == ["Test3,Crash"]
    assert buckets["timeouts"] == ["Test4,Timeout"]
58
59
def test_get_failed_test_summary_message():
    """The summary message wraps per-status counts inside a <summary> element."""
    failures_by_status = {
        "unexpected_improvements": [
            "test1 UnexpectedImprovement",
            "test2 UnexpectedImprovement",
        ],
        "fails": ["test3 Fail", "test4 Fail", "test5 Fail"],
        "crashes": ["test6 Crash"],
        "timeouts": [],
    }

    message = get_failed_test_summary_message(failures_by_status)

    # Counts reflect the buckets above; empty buckets ("timeouts") are not asserted.
    for fragment in (
        "<summary>",
        "2 improved tests",
        "3 failed tests",
        "1 crashed test",
        "</summary>",
    ):
        assert fragment in message
78
79
def test_unexpected_improvements():
    """unexpected_improvements() renders the improved-test count as a message fragment.

    Fix: dropped the unused local ``message = "<summary>"`` left over from an
    earlier version of this test — it was never read.
    """
    failed_test_array = {
        "unexpected_improvements": ["test_improvement_1", "test_improvement_2"],
        "fails": [],
        "crashes": [],
        "timeouts": [],
    }
    result = unexpected_improvements(failed_test_array)
    assert result == " 2 improved tests", f"Unexpected result: {result}"
90
91
@pytest.mark.asyncio
async def test_gitlab_api_failure():
    """main() degrades to an empty message when the GitLab API raises."""
    with patch(
        "pipeline_message.get_pipeline_status", new_callable=AsyncMock
    ) as mock_get_pipeline_status:
        mock_get_pipeline_status.side_effect = Exception("GitLab API not responding")
        message = await main("1234567")
    assert message == ""
98
99
@pytest.mark.asyncio
@patch("pipeline_message.get_pipeline_status", new_callable=AsyncMock)
async def test_no_message_when_pipeline_not_failed(mock_get_pipeline_status):
    """A successful pipeline produces no message at all."""
    mock_get_pipeline_status.return_value = "success"

    # pipeline_id "12345" in project "176"
    message = await main("12345", "176")
    assert (
        message == ""
    ), f"Expected no message for successful pipeline, but got: {message}"
114
115
@pytest.mark.asyncio
async def test_single_problem_job_not_summarized():
    """A lone problem job is linked directly, without a <summary> wrapper."""
    mock_session = AsyncMock()

    canceled_job = {
        "id": 1234,
        "name": "test-job",
        "web_url": "http://example.com/job/1234",
        "status": "canceled",
    }

    response = AsyncMock()
    response.status = 200
    response.text.return_value = ""  # Empty CSV response for test
    mock_session.get.return_value = response

    message = await process_problem_jobs(mock_session, "176", [canceled_job])

    # Single job: a markdown link, no collapsible summary section.
    assert "summary" not in message
    assert "[test-job](http://example.com/job/1234)" in message
138
139
@pytest.mark.asyncio
@patch("pipeline_message.get_project_json", new_callable=AsyncMock)
@patch("pipeline_message.aiohttp.ClientSession", autospec=True)
async def test_get_trace_failures_no_response(
    mock_client_session_cls, mock_get_project_json, caplog
):
    """A 404 for the artifacts page returns "" and logs the unreachable URL."""
    caplog.set_level(logging.DEBUG)
    project_path = "mesa"
    mock_get_project_json.return_value = {"path": project_path}

    not_found = AsyncMock()
    not_found.status = 404

    session = mock_client_session_cls.return_value
    session.get.return_value = not_found

    job = {"id": 12345678}
    url = await get_trace_failures(session, "176", job)

    assert url == ""

    # The failed lookup is reported at debug level with the full artifacts URL.
    expected_log_message = (
        f"No response from: https://mesa.pages.freedesktop.org/-/{project_path}"
        f"/-/jobs/{job['id']}/artifacts/results/summary/problems.html"
    )
    assert any(expected_log_message in record.message for record in caplog.records)
164
165
@pytest.mark.asyncio
@patch("pipeline_message.get_job_log", new_callable=AsyncMock)
async def test_search_job_log_for_errors(mock_get_job_log):
    """The scanner surfaces the meaningful "error_msg:" line from a noisy job log.

    The fixture log deliberately mixes in benign lines that contain the word
    "error" — compiler summaries ("3 errors generated."), CMake probes
    (strerror_r / WERROR tests), build paths (ErrorHandling.cpp.o), empty
    "error_msg : None" fields and runner ERROR footers — which must all be
    ignored; only "something useful" is asserted to survive.
    NOTE(review): the log contains raw ANSI-escape residue; keep it byte-exact.
    """
    session = AsyncMock()
    project_id = "176"
    job = {"id": 12345}

    job_log = r"""
error_msg: something useful
[0m15:41:36.102:                GL_KHR_no_error GL_KHR_texture_compression_astc_sliced_3d
1 error generated
3 errors generated.
-- Looking for strerror_r - found
-- Looking for strerror_s - not found
[49/176] Building CXX object lib/Support/CMakeFiles/LLVMSupport.dir/ErrorHandling.cpp.o
[127/2034] Building C object lib/Support/CMakeFiles/LLVMSupport.dir/regerror.c.o
-- Performing Test HAS_WERROR_GLOBAL_CTORS
-- Performing Test C_SUPPORTS_WERROR_UNGUARDED_AVAILABILITY_NEW - Success
-- Performing Test LLVM_LIBSTDCXX_SOFT_ERROR
error aborting
error_msg      : None
error_type     : Job
[0Ksection_end:1734694783:job_data
[0K
[0m11:39:43.438: [1mFinished executing LAVA job in the attempt #3 [0m
[0Ksection_end:1734694783:lava_submit
[0K
[0;31m[01:54] ERROR: lava_submit: ret code: 1 [0m

[0;31m[01:54] ERROR: unknown-section: ret code: 1 [0m
section_end:1734694783:step_script
[0Ksection_start:1734694783:after_script
[0K[0K[36;1mRunning after_script[0;m[0;m
[32;1mRunning after script...[0;m
[32;1m$ curl -L --retry 4 -f --retry-all-errors --retry-delay 60 -s "https://" | tar --warning=no-timestamp --zstd -x[0;m
zstd: /*stdin*\: unexpected end of file # noqa: W605
tar: Child returned status 1
tar: Error is not recoverable: exiting now
section_end:1734695025:after_script
[0K[0;33mWARNING: after_script failed, but job will continue unaffected: exit code 1[0;m
section_start:1734695025:upload_artifacts_on_failure
[0K[0K[36;1mUploading artifacts for failed job[0;m[0;m
[32;1mUploading artifacts...[0;m
results/: found 11 matching artifact files and directories[0;m
Uploading artifacts as "archive" to coordinator... 201 Created[0;m  id[0;m=68509685 responseStatus[0;m=201 Created token[0;m=glcbt-64
[32;1mUploading artifacts...[0;m
[0;33mWARNING: results/junit.xml: no matching files. Ensure that the artifact path is relative to the working directory (/builds/mesa/mesa)[0;m
[31;1mERROR: No files to upload                         [0;m
section_end:1734695027:upload_artifacts_on_failure
[0Ksection_start:1734695027:cleanup_file_variables
[0K[0K[36;1mCleaning up project directory and file based variables[0;m[0;m
section_end:1734695027:cleanup_file_variables
[0K[31;1mERROR: Job failed: exit code 1
[0;m
[0;m
    """

    mock_get_job_log.return_value = job_log

    error_message = await search_job_log_for_errors(session, project_id, job)
    assert "something useful" in error_message
227
228
@pytest.mark.asyncio
@patch("pipeline_message.get_job_log", new_callable=AsyncMock)
async def test_search_job_log_for_fatal_errors(mock_get_job_log):
    """A "fatal:" line in the job log is surfaced in the returned error message.

    The rest of the fixture log is routine artifact-upload / cleanup noise
    (including an "ERROR: No files to upload" footer) that must not mask the
    fatal line; only "something fatal" is asserted to appear.
    NOTE(review): the log contains raw ANSI-escape residue; keep it byte-exact.
    """
    session = AsyncMock()
    project_id = "176"
    job = {"id": 12345}

    job_log = r"""
[0m15:41:36.105: [15:41:31.951] fatal: something fatal
Uploading artifacts as "archive" to coordinator... 201 Created[0;m  id[0;m=68509685 responseStatus[0;m=201 Created token[0;m=glcbt-64
[32;1mUploading artifacts...[0;m
[0;33mWARNING: results/junit.xml: no matching files. Ensure that the artifact path is relative to the working directory (/builds/mesa/mesa)[0;m
[31;1mERROR: No files to upload                         [0;m
section_end:1734695027:upload_artifacts_on_failure
[0Ksection_start:1734695027:cleanup_file_variables
[0K[0K[36;1mCleaning up project directory and file based variables[0;m[0;m
section_end:1734695027:cleanup_file_variables
[0K[31;1mERROR: Job failed: exit code 1
[0;m
[0;m
    """

    mock_get_job_log.return_value = job_log

    error_message = await search_job_log_for_errors(session, project_id, job)
    assert "something fatal" in error_message
255
256
@pytest.mark.asyncio
@patch("pipeline_message.get_job_log", new_callable=AsyncMock)
async def test_search_job_log_for_errors_but_find_none(mock_get_job_log):
    """A log with only uninteresting error-like lines yields an empty message.

    The fixture is ordinary runner setup output plus deliberately useless
    error fragments (bare "ERROR:", " error", a blank error, a pytest
    AssertionError path) and the standard failed-upload / cleanup footer —
    none of which should be reported, so the result must be "".
    NOTE(review): the log contains raw ANSI-escape residue; keep it byte-exact.
    """
    session = AsyncMock()
    project_id = "176"
    job = {"id": 12345}

    job_log = r"""
[0KRunning with gitlab-runner 17.4.0 (b92ee590)[0;m
[0K  on fdo-equinix-m3l-30-placeholder_63 XmDXAt7xd, system ID: s_785ae19292ea[0;m
section_start:1734736110:prepare_executor
[0K[0K[36;1mPreparing the "docker" executor[0;m[0;m
[0KUsing Docker executor with image registry.freedesktop.org/mesa/mesa/debian
[0KAuthenticating with credentials from job payload (GitLab Registry)[0;m
[0KPulling docker image registry.freedesktop.org/mesa/mesa/debian/x86_64_pyuti
[0KUsing docker image sha256:ebc7b3fe89be4d390775303adddb33539c235a2663165d78d
[0Ksection_start:1734736124:prepare_script
[0K[0K[36;1mPreparing environment[0;m[0;m
Running on runner-xmdxat7xd-project-23076-concurrent-1 via fdo-equinix-m3l-30...
section_end:1734736125:prepare_script
[0Ksection_start:1734736125:get_sources
[0K[0K[36;1mGetting source from Git repository[0;m[0;m
[32;1m$ /host/bin/curl -s -L --cacert /host/ca-certificates.crt --retry 4 -f --retry-delay 60 https://gitlab.
Checking if the user of the pipeline is allowed...
Checking if the job's project is part of a well-known group...
Checking if the job is part of an official MR pipeline...
Thank you for contributing to freedesktop.org
Running pre-clone script: 'set -o xtrace
wget -q -O download-git-cache.sh https://gitlab.freedesktop.org/mesa/mesa/-/raw/0d43b4cba639b809ad0e08a065ce01846e262249/.gitlab-ci/download-git-cache.sh
bash download-git-cache.sh
rm download-git-cache.sh
[31;1m errors
[0K[31;1mERROR:
[31;1m error
[31;1m Here is a blank error:
/builds/mesa/mesa/bin/ci/test/test_pipeline_message.py:162: AssertionError
Uploading artifacts as "archive" to coordinator... 201 Created[0;m  id[0;m=68509685 responseStatus[0;m=201 Created token[0;m=glcbt-64
[32;1mUploading artifacts...[0;m
[0;33mWARNING: results/junit.xml: no matching files. Ensure that the artifact path is relative to the working directory (/builds/mesa/mesa)[0;m
[31;1mERROR: No files to upload                         [0;m
section_end:1734695027:upload_artifacts_on_failure
[0Ksection_start:1734695027:cleanup_file_variables
[0K[0K[36;1mCleaning up project directory and file based variables[0;m[0;m
section_end:1734695027:cleanup_file_variables
[0K[31;1mERROR: Job failed: exit code 1
[0;m
[0;m
    """

    mock_get_job_log.return_value = job_log

    error_message = await search_job_log_for_errors(session, project_id, job)
    assert error_message == "", f"Unexpected error message: {error_message}"
310