# Copyright (c) 2023 Nordic Semiconductor ASA
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import textwrap
from pathlib import Path
from unittest import mock

import pytest

from twisterlib.harness import Pytest
from twisterlib.platform import Platform
from twisterlib.testinstance import TestInstance
from twisterlib.testsuite import TestSuite


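# Minimal TestInstance built around a mocked handler, reused by the tests below.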
@pytest.fixture
def testinstance() -> TestInstance:
    testsuite = TestSuite('.', 'samples/hello', 'unit.test')
    testsuite.harness_config = {}
    testsuite.ignore_faults = False
    testsuite.sysbuild = False
    platform = Platform()

    testinstance = TestInstance(testsuite, platform, 'outdir')
    testinstance.handler = mock.Mock()
    testinstance.handler.options = mock.Mock()
    testinstance.handler.options.verbose = 1
    testinstance.handler.options.fixture = ['fixture1:option1', 'fixture2']
    testinstance.handler.options.pytest_args = None
    testinstance.handler.options.extra_test_args = []
    testinstance.handler.type_str = 'native'
    return testinstance


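# The generated command should contain the basic pytest invocation for both supported device types.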
@pytest.mark.parametrize('device_type', ['native', 'qemu'])
def test_pytest_command(testinstance: TestInstance, device_type):
    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)

    testinstance.handler.type_str = device_type
    ref_command = [
        'pytest',
        'samples/hello/pytest',
        f'--build-dir={testinstance.build_dir}',
        f'--junit-xml={testinstance.build_dir}/report.xml',
        f'--device-type={device_type}',
        '--twister-fixture=fixture1:option1',
        '--twister-fixture=fixture2'
    ]

    command = pytest_harness.generate_command()
    for c in ref_command:
        assert c in command


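# pytest_dut_scope from harness_config should be forwarded as --dut-scope.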
def test_pytest_command_dut_scope(testinstance: TestInstance):
    pytest_harness = Pytest()
    dut_scope = 'session'
    testinstance.testsuite.harness_config['pytest_dut_scope'] = dut_scope
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    assert f'--dut-scope={dut_scope}' in command


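# pytest_args from harness_config should be passed through to the pytest command unchanged.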
def test_pytest_command_extra_args(testinstance: TestInstance):
    pytest_harness = Pytest()
    pytest_args = ['-k test1', '-m mark1']
    testinstance.testsuite.harness_config['pytest_args'] = pytest_args
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    for c in pytest_args:
        assert c in command


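# extra_test_args from the handler options should be collapsed into a single --extra-test-args option.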
def test_pytest_command_extra_test_args(testinstance: TestInstance):
    pytest_harness = Pytest()
    extra_test_args = ['-stop_at=3', '-no-rt']
    testinstance.handler.options.extra_test_args = extra_test_args
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    assert f'--extra-test-args={extra_test_args[0]} {extra_test_args[1]}' in command


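# pytest_args given on the command line should be appended after those coming from the YAML harness_config.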
def test_pytest_command_extra_args_in_options(testinstance: TestInstance):
    pytest_harness = Pytest()
    pytest_args_from_yaml = '--extra-option'
    pytest_args_from_cmd = ['-k', 'test_from_cmd']
    testinstance.testsuite.harness_config['pytest_args'] = [pytest_args_from_yaml]
    testinstance.handler.options.pytest_args = pytest_args_from_cmd
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    assert pytest_args_from_cmd[0] in command
    assert pytest_args_from_cmd[1] in command
    assert pytest_args_from_yaml in command
    assert command.index(pytest_args_from_yaml) < command.index(pytest_args_from_cmd[1])


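# pytest_root entries should be resolved against the test suite directory, expanding '~' and environment variables.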
@pytest.mark.parametrize(
    ('pytest_root', 'expected'),
    [
        (
            ['pytest/test_shell_help.py'],
            ['samples/hello/pytest/test_shell_help.py']
        ),
        (
            ['pytest/test_shell_help.py', 'pytest/test_shell_version.py', 'test_dir'],
            ['samples/hello/pytest/test_shell_help.py',
             'samples/hello/pytest/test_shell_version.py',
             'samples/hello/test_dir']
        ),
        (
            ['../shell/pytest/test_shell.py'],
            ['samples/shell/pytest/test_shell.py']
        ),
        (
            ['/tmp/test_temp.py'],
            ['/tmp/test_temp.py']
        ),
        (
            ['~/tmp/test_temp.py'],
            ['/home/joe/tmp/test_temp.py']
        ),
        (
            ['$ZEPHYR_BASE/samples/subsys/testsuite/pytest/shell/pytest'],
            ['/zephyr_base/samples/subsys/testsuite/pytest/shell/pytest']
        ),
        (
            ['pytest/test_shell_help.py::test_A', 'pytest/test_shell_help.py::test_B'],
            ['samples/hello/pytest/test_shell_help.py::test_A',
             'samples/hello/pytest/test_shell_help.py::test_B']
        ),
        (
            ['pytest/test_shell_help.py::test_A[param_a]'],
            ['samples/hello/pytest/test_shell_help.py::test_A[param_a]']
        )
    ],
    ids=[
        'one_file',
        'more_files',
        'relative_path',
        'absolute_path',
        'user_dir',
        'with_env_var',
        'subtests',
        'subtest_with_param'
    ]
)
def test_pytest_handle_source_list(testinstance: TestInstance, monkeypatch, pytest_root, expected):
    monkeypatch.setenv('ZEPHYR_BASE', '/zephyr_base')
    monkeypatch.setenv('HOME', '/home/joe')
    testinstance.testsuite.harness_config['pytest_root'] = pytest_root
    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    for pytest_src in expected:
        assert pytest_src in command


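# A JUnit report with only passing tests should mark the harness, the instance and all test cases as passed.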
def test_if_report_is_parsed(pytester, testinstance: TestInstance):
    test_file_content = textwrap.dedent("""
        def test_1():
            pass
        def test_2():
            pass
    """)
    test_file = pytester.path / 'test_valid.py'
    test_file.write_text(test_file_content)
    report_file = Path('report.xml')
    result = pytester.runpytest(
        str(test_file),
        f'--junit-xml={report_file}'
    )
    result.assert_outcomes(passed=2)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file

    pytest_harness._update_test_status()

    assert pytest_harness.status == "passed"
    assert testinstance.status == "passed"
    assert len(testinstance.testcases) == 2
    for tc in testinstance.testcases:
        assert tc.status == "passed"


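# Failures should propagate to the harness, the instance and every test case, and the instance reason should summarize how many scenarios failed.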
def test_if_report_with_error(pytester, testinstance: TestInstance):
    test_file_content = textwrap.dedent("""
        def test_exp():
            raise Exception('Test error')
        def test_err():
            assert False
    """)
    test_file = pytester.path / 'test_error.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        f'--junit-xml={report_file}'
    )
    result.assert_outcomes(failed=2)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file

    pytest_harness._update_test_status()

    assert pytest_harness.status == "failed"
    assert testinstance.status == "failed"
    assert len(testinstance.testcases) == 2
    for tc in testinstance.testcases:
        assert tc.status == "failed"
        assert tc.output
        assert tc.reason
    assert testinstance.reason
    assert '2/2' in testinstance.reason


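# Tests skipped via marker or at runtime should result in a 'skipped' status everywhere.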
def test_if_report_with_skip(pytester, testinstance: TestInstance):
    test_file_content = textwrap.dedent("""
        import pytest
        @pytest.mark.skip('Test skipped')
        def test_skip_1():
            pass
        def test_skip_2():
            pytest.skip('Skipped at runtime')
    """)
    test_file = pytester.path / 'test_skip.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        f'--junit-xml={report_file}'
    )
    result.assert_outcomes(skipped=2)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file

    pytest_harness._update_test_status()

    assert pytest_harness.status == "skipped"
    assert testinstance.status == "skipped"
    assert len(testinstance.testcases) == 2
    for tc in testinstance.testcases:
        assert tc.status == "skipped"


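# Only test cases actually collected (after the -k filter) should end up in the test instance.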
def test_if_report_with_filter(pytester, testinstance: TestInstance):
    test_file_content = textwrap.dedent("""
        import pytest
        def test_A():
            pass
        def test_B():
            pass
    """)
    test_file = pytester.path / 'test_filter.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        '-k', 'test_B',
        f'--junit-xml={report_file}'
    )
    result.assert_outcomes(passed=1)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file
    pytest_harness._update_test_status()
    assert pytest_harness.status == "passed"
    assert testinstance.status == "passed"
    assert len(testinstance.testcases) == 1


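# When the filter collects no tests at all, the harness and the instance should be reported as skipped.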
def test_if_report_with_no_collected(pytester, testinstance: TestInstance):
    test_file_content = textwrap.dedent("""
        import pytest
        def test_A():
            pass
    """)
    test_file = pytester.path / 'test_filter.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        '-k', 'test_B',
        f'--junit-xml={report_file}'
    )
    result.assert_outcomes(passed=0)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file
    pytest_harness._update_test_status()
    assert pytest_harness.status == "skipped"
    assert testinstance.status == "skipped"