# Copyright (c) 2023 Nordic Semiconductor ASA
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import pytest
import textwrap

from unittest import mock
from pathlib import Path

from twisterlib.harness import Pytest
from twisterlib.testsuite import TestSuite
from twisterlib.testinstance import TestInstance
from twisterlib.platform import Platform


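# Minimal TestInstance built around a mocked handler, shared by the tests below.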
@pytest.fixture
def testinstance() -> TestInstance:
    testsuite = TestSuite('.', 'samples/hello', 'unit.test')
    testsuite.harness_config = {}
    testsuite.ignore_faults = False
    platform = Platform()

    testinstance = TestInstance(testsuite, platform, 'outdir')
    testinstance.handler = mock.Mock()
    testinstance.handler.options = mock.Mock()
    testinstance.handler.options.verbose = 1
    testinstance.handler.options.pytest_args = None
    testinstance.handler.type_str = 'native'
    return testinstance


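# The generated command should contain the basic pytest arguments for both native and qemu device types.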
@pytest.mark.parametrize('device_type', ['native', 'qemu'])
def test_pytest_command(testinstance: TestInstance, device_type):
    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)

    testinstance.handler.type_str = device_type
    ref_command = [
        'pytest',
        'samples/hello/pytest',
        f'--build-dir={testinstance.build_dir}',
        f'--junit-xml={testinstance.build_dir}/report.xml',
        f'--device-type={device_type}'
    ]

    command = pytest_harness.generate_command()
    for c in ref_command:
        assert c in command


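# 'pytest_dut_scope' from the harness config should be forwarded as --dut-scope.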
def test_pytest_command_dut_scope(testinstance: TestInstance):
    pytest_harness = Pytest()
    dut_scope = 'session'
    testinstance.testsuite.harness_config['pytest_dut_scope'] = dut_scope
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    assert f'--dut-scope={dut_scope}' in command


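# Extra pytest arguments defined in the harness config should be passed through unchanged.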
def test_pytest_command_extra_args(testinstance: TestInstance):
    pytest_harness = Pytest()
    pytest_args = ['-k test1', '-m mark1']
    testinstance.testsuite.harness_config['pytest_args'] = pytest_args
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    for c in pytest_args:
        assert c in command


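# Pytest args passed on the command line should take precedence over the ones from the testcase YAML.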
def test_pytest_command_extra_args_in_options(testinstance: TestInstance):
    pytest_harness = Pytest()
    pytest_args_from_yaml = '-k test_from_yaml'
    pytest_args_from_cmd = ['-k', 'test_from_cmd']
    testinstance.testsuite.harness_config['pytest_args'] = [pytest_args_from_yaml]
    testinstance.handler.options.pytest_args = pytest_args_from_cmd
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    assert pytest_args_from_cmd[0] in command
    assert pytest_args_from_cmd[1] in command
    assert pytest_args_from_yaml not in command


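# 'pytest_root' entries (files, directories, relative/absolute/user paths, env vars and node IDs)
# should be resolved against the test suite directory.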
@pytest.mark.parametrize(
    ('pytest_root', 'expected'),
    [
        (
            ['pytest/test_shell_help.py'],
            ['samples/hello/pytest/test_shell_help.py']
        ),
        (
            ['pytest/test_shell_help.py', 'pytest/test_shell_version.py', 'test_dir'],
            ['samples/hello/pytest/test_shell_help.py',
             'samples/hello/pytest/test_shell_version.py',
             'samples/hello/test_dir']
        ),
        (
            ['../shell/pytest/test_shell.py'],
            ['samples/shell/pytest/test_shell.py']
        ),
        (
            ['/tmp/test_temp.py'],
            ['/tmp/test_temp.py']
        ),
        (
            ['~/tmp/test_temp.py'],
            ['/home/joe/tmp/test_temp.py']
        ),
        (
            ['$ZEPHYR_BASE/samples/subsys/testsuite/pytest/shell/pytest'],
            ['/zephyr_base/samples/subsys/testsuite/pytest/shell/pytest']
        ),
        (
            ['pytest/test_shell_help.py::test_A', 'pytest/test_shell_help.py::test_B'],
            ['samples/hello/pytest/test_shell_help.py::test_A',
             'samples/hello/pytest/test_shell_help.py::test_B']
        ),
        (
            ['pytest/test_shell_help.py::test_A[param_a]'],
            ['samples/hello/pytest/test_shell_help.py::test_A[param_a]']
        )
    ],
    ids=[
        'one_file',
        'more_files',
        'relative_path',
        'absolute_path',
        'user_dir',
        'with_env_var',
        'subtests',
        'subtest_with_param'
    ]
)
def test_pytest_handle_source_list(testinstance: TestInstance, monkeypatch, pytest_root, expected):
    monkeypatch.setenv('ZEPHYR_BASE', '/zephyr_base')
    monkeypatch.setenv('HOME', '/home/joe')
    testinstance.testsuite.harness_config['pytest_root'] = pytest_root
    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    for pytest_src in expected:
        assert pytest_src in command


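# A JUnit report containing only passing tests should mark the harness, the instance and all test cases as passed.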
def test_if_report_is_parsed(pytester, testinstance: TestInstance):
    test_file_content = textwrap.dedent("""
        def test_1():
            pass
        def test_2():
            pass
    """)
    test_file = pytester.path / 'test_valid.py'
    test_file.write_text(test_file_content)
    report_file = Path('report.xml')
    result = pytester.runpytest(
        str(test_file),
        f'--junit-xml={str(report_file)}'
    )
    result.assert_outcomes(passed=2)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file

    pytest_harness._update_test_status()

    assert pytest_harness.state == "passed"
    assert testinstance.status == "passed"
    assert len(testinstance.testcases) == 2
    for tc in testinstance.testcases:
        assert tc.status == "passed"


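# Exceptions and failed assertions should be reported as failures, with output and reason populated.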
def test_if_report_with_error(pytester, testinstance: TestInstance):
    test_file_content = textwrap.dedent("""
        def test_exp():
            raise Exception('Test error')
        def test_err():
            assert False
    """)
    test_file = pytester.path / 'test_error.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        f'--junit-xml={str(report_file)}'
    )
    result.assert_outcomes(failed=2)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file

    pytest_harness._update_test_status()

    assert pytest_harness.state == "failed"
    assert testinstance.status == "failed"
    assert len(testinstance.testcases) == 2
    for tc in testinstance.testcases:
        assert tc.status == "failed"
        assert tc.output
        assert tc.reason
    assert testinstance.reason
    assert '2/2' in testinstance.reason


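# Tests skipped via a marker or at runtime should propagate a skipped status to the instance and test cases.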
def test_if_report_with_skip(pytester, testinstance: TestInstance):
    test_file_content = textwrap.dedent("""
        import pytest
        @pytest.mark.skip('Test skipped')
        def test_skip_1():
            pass
        def test_skip_2():
            pytest.skip('Skipped on runtime')
    """)
    test_file = pytester.path / 'test_skip.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        f'--junit-xml={str(report_file)}'
    )
    result.assert_outcomes(skipped=2)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file

    pytest_harness._update_test_status()

    assert pytest_harness.state == "skipped"
    assert testinstance.status == "skipped"
    assert len(testinstance.testcases) == 2
    for tc in testinstance.testcases:
        assert tc.status == "skipped"


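# When tests are filtered out with -k, only the collected test cases should end up in the instance.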
def test_if_report_with_filter(pytester, testinstance: TestInstance):
    test_file_content = textwrap.dedent("""
        import pytest
        def test_A():
            pass
        def test_B():
            pass
    """)
    test_file = pytester.path / 'test_filter.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        '-k', 'test_B',
        f'--junit-xml={str(report_file)}'
    )
    result.assert_outcomes(passed=1)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file
    pytest_harness._update_test_status()
    assert pytest_harness.state == "passed"
    assert testinstance.status == "passed"
    assert len(testinstance.testcases) == 1


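# If no tests are collected at all, the harness should report the instance as skipped.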
def test_if_report_with_no_collected(pytester, testinstance: TestInstance):
    test_file_content = textwrap.dedent("""
        import pytest
        def test_A():
            pass
    """)
    test_file = pytester.path / 'test_filter.py'
    test_file.write_text(test_file_content)
    report_file = pytester.path / 'report.xml'
    result = pytester.runpytest(
        str(test_file),
        '-k', 'test_B',
        f'--junit-xml={str(report_file)}'
    )
    result.assert_outcomes(passed=0)
    assert report_file.is_file()

    pytest_harness = Pytest()
    pytest_harness.configure(testinstance)
    pytest_harness.report_file = report_file
    pytest_harness._update_test_status()
    assert pytest_harness.state == "skipped"
    assert testinstance.status == "skipped"