# Copyright (c) 2023 Nordic Semiconductor ASA
# SPDX-License-Identifier: Apache-2.0
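"""Tests for the Pytest harness class in twisterlib.harness."""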
from __future__ import annotations

import textwrap
from pathlib import Path
from unittest import mock

import pytest

from twisterlib.harness import Pytest
from twisterlib.platform import Platform
from twisterlib.testinstance import TestInstance
from twisterlib.testsuite import TestSuite


@pytest.fixture
def testinstance() -> TestInstance:
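    """Return a TestInstance configured for the pytest harness, with a mocked handler."""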
    testsuite = TestSuite('.', 'samples/hello', 'unit.test')
    testsuite.harness_config = {}
    testsuite.harness = 'pytest'
    testsuite.ignore_faults = False
    testsuite.sysbuild = False
    platform = Platform()

    testinstance = TestInstance(testsuite, platform, 'zephyr', 'outdir')
    testinstance.handler = mock.Mock()
    testinstance.handler.options = mock.Mock()
    testinstance.handler.options.verbose = 1
    testinstance.handler.options.fixture = ['fixture1:option1', 'fixture2']
    testinstance.handler.options.pytest_args = None
    testinstance.handler.options.extra_test_args = []
    testinstance.handler.type_str = 'native'
    return testinstance


@pytest.mark.parametrize('device_type', ['native', 'qemu'])
def test_pytest_command(testinstance: TestInstance, device_type):
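    """generate_command() should include the base pytest arguments for the given device type."""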
    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)

    testinstance.handler.type_str = device_type
    ref_command = [
        'pytest',
        'samples/hello/pytest',
        f'--build-dir={testinstance.build_dir}',
        f'--junit-xml={testinstance.build_dir}/report.xml',
        f'--device-type={device_type}',
        '--twister-fixture=fixture1:option1',
        '--twister-fixture=fixture2'
    ]

    command = pytest_harness.generate_command()
    for c in ref_command:
        assert c in command


def test_pytest_command_dut_scope(testinstance: TestInstance):
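    """A pytest_dut_scope entry in harness_config should be passed as --dut-scope."""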
    pytest_harness = Pytest()
    dut_scope = 'session'
    testinstance.testsuite.harness_config['pytest_dut_scope'] = dut_scope
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    assert f'--dut-scope={dut_scope}' in command


def test_pytest_command_extra_args(testinstance: TestInstance):
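    """pytest_args from harness_config should be appended to the generated command."""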
    pytest_harness = Pytest()
    pytest_args = ['-k test1', '-m mark1']
    testinstance.testsuite.harness_config['pytest_args'] = pytest_args
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    for c in pytest_args:
        assert c in command


def test_pytest_command_extra_test_args(testinstance: TestInstance):
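    """extra_test_args from the handler options should be joined into a single --extra-test-args option."""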
    pytest_harness = Pytest()
    extra_test_args = ['-stop_at=3', '-no-rt']
    testinstance.handler.options.extra_test_args = extra_test_args
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    assert f'--extra-test-args={extra_test_args[0]} {extra_test_args[1]}' in command


def test_pytest_command_extra_args_in_options(testinstance: TestInstance):
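    """pytest_args from harness_config should precede pytest_args passed on the command line."""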
    pytest_harness = Pytest()
    pytest_args_from_yaml = '--extra-option'
    pytest_args_from_cmd = ['-k', 'test_from_cmd']
    testinstance.testsuite.harness_config['pytest_args'] = [pytest_args_from_yaml]
    testinstance.handler.options.pytest_args = pytest_args_from_cmd
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    assert pytest_args_from_cmd[0] in command
    assert pytest_args_from_cmd[1] in command
    assert pytest_args_from_yaml in command
    assert command.index(pytest_args_from_yaml) < command.index(pytest_args_from_cmd[1])


@pytest.mark.parametrize(
    ('pytest_root', 'expected'),
    [
        (
            ['pytest/test_shell_help.py'],
            ['samples/hello/pytest/test_shell_help.py']
        ),
        (
            ['pytest/test_shell_help.py', 'pytest/test_shell_version.py', 'test_dir'],
            ['samples/hello/pytest/test_shell_help.py',
             'samples/hello/pytest/test_shell_version.py',
             'samples/hello/test_dir']
        ),
        (
            ['../shell/pytest/test_shell.py'],
            ['samples/shell/pytest/test_shell.py']
        ),
        (
            ['/tmp/test_temp.py'],
            ['/tmp/test_temp.py']
        ),
        (
            ['~/tmp/test_temp.py'],
            ['/home/joe/tmp/test_temp.py']
        ),
        (
            ['$ZEPHYR_BASE/samples/subsys/testsuite/pytest/shell/pytest'],
            ['/zephyr_base/samples/subsys/testsuite/pytest/shell/pytest']
        ),
        (
            ['pytest/test_shell_help.py::test_A', 'pytest/test_shell_help.py::test_B'],
            ['samples/hello/pytest/test_shell_help.py::test_A',
             'samples/hello/pytest/test_shell_help.py::test_B']
        ),
        (
            ['pytest/test_shell_help.py::test_A[param_a]'],
            ['samples/hello/pytest/test_shell_help.py::test_A[param_a]']
        )
    ],
    ids=[
        'one_file',
        'more_files',
        'relative_path',
        'absolute_path',
        'user_dir',
        'with_env_var',
        'subtests',
        'subtest_with_param'
    ]
)
def test_pytest_handle_source_list(testinstance: TestInstance, monkeypatch, pytest_root, expected):
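    """Relative pytest_root entries should resolve against the suite directory, with ~ and
    environment variables expanded, while absolute paths and test node IDs are preserved."""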
    monkeypatch.setenv('ZEPHYR_BASE', '/zephyr_base')
    monkeypatch.setenv('HOME', '/home/joe')
    testinstance.testsuite.harness_config['pytest_root'] = pytest_root
    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    for pytest_src in expected:
        assert pytest_src in command


def test_if_report_is_parsed(pytester, testinstance: TestInstance):
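    """A junit report with two passing tests should mark the harness, instance, and all testcases as passed."""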
    test_file_content = textwrap.dedent("""
        def test_1():
            pass
        def test_2():
            pass
    """)
    test_file = pytester.path / 'test_valid.py'
    test_file.write_text(test_file_content)
    report_file = Path('report.xml')
    result = pytester.runpytest(
        str(test_file),
        f'--junit-xml={report_file}'
    )
    result.assert_outcomes(passed=2)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file

    pytest_harness._update_test_status()

    assert pytest_harness.status == "passed"
    assert testinstance.status == "passed"
    assert len(testinstance.testcases) == 2
    for tc in testinstance.testcases:
        assert tc.status == "passed"


def test_if_report_with_error(pytester, testinstance: TestInstance):
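    """A report with failing tests should propagate a failed status, output, and reason to every testcase."""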
    test_file_content = textwrap.dedent("""
        def test_exp():
            raise Exception('Test error')
        def test_err():
            assert False
    """)
    test_file = pytester.path / 'test_error.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        f'--junit-xml={report_file}'
    )
    result.assert_outcomes(failed=2)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file

    pytest_harness._update_test_status()

    assert pytest_harness.status == "failed"
    assert testinstance.status == "failed"
    assert len(testinstance.testcases) == 2
    for tc in testinstance.testcases:
        assert tc.status == "failed"
        assert tc.output
        assert tc.reason
    assert testinstance.reason
    assert '2/2' in testinstance.reason


def test_if_report_with_skip(pytester, testinstance: TestInstance):
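    """A report in which every test is skipped should mark the harness, instance, and testcases as skipped."""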
    test_file_content = textwrap.dedent("""
        import pytest
        @pytest.mark.skip('Test skipped')
        def test_skip_1():
            pass
        def test_skip_2():
            pytest.skip('Skipped at runtime')
    """)
    test_file = pytester.path / 'test_skip.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        f'--junit-xml={report_file}'
    )
    result.assert_outcomes(skipped=2)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file

    pytest_harness._update_test_status()

    assert pytest_harness.status == "skipped"
    assert testinstance.status == "skipped"
    assert len(testinstance.testcases) == 2
    for tc in testinstance.testcases:
        assert tc.status == "skipped"


def test_if_report_with_filter(pytester, testinstance: TestInstance):
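    """When -k deselects a test, only the collected testcases should appear in the instance."""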
    test_file_content = textwrap.dedent("""
        def test_A():
            pass
        def test_B():
            pass
    """)
    test_file = pytester.path / 'test_filter.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        '-k', 'test_B',
        f'--junit-xml={report_file}'
    )
    result.assert_outcomes(passed=1)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file
    pytest_harness._update_test_status()
    assert pytest_harness.status == "passed"
    assert testinstance.status == "passed"
    assert len(testinstance.testcases) == 1


def test_if_report_with_no_collected(pytester, testinstance: TestInstance):
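    """When the filter collects no tests at all, the harness and instance should be marked as skipped."""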
    test_file_content = textwrap.dedent("""
        def test_A():
            pass
    """)
    test_file = pytester.path / 'test_no_collected.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        '-k', 'test_B',
        f'--junit-xml={report_file}'
    )
    result.assert_outcomes(passed=0)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file
    pytest_harness._update_test_status()
    assert pytest_harness.status == "skipped"
    assert testinstance.status == "skipped"