Lines Matching +full:pytest +full:twister +full:harness
4 # SPDX-License-Identifier: Apache-2.0
5 # pylint: disable=line-too-long
13 import pytest
17 sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
19 from pylib.twister.twisterlib.platform import Simulator
37 @pytest.mark.parametrize(
38 "build_only, slow, harness, platform_type, platform_sim, device_testing,fixture, expected",
47 harness, argument
59 testsuite = class_testplan.testsuites.get('scripts/tests/twister/test_data/testsuites/tests/'
67 testsuite.harness = harness
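These matches come from a parametrized check of whether a test instance should only be built or also run. The real decision logic lives in twisterlib; the sketch below only mirrors the parametrize pattern, with the data tuples and the decision function invented for illustration:

    import pytest

    # Invented stand-in: a test is runnable unless a flag forces
    # build-only mode, and only for harnesses we know how to drive.
    def check_build_or_run(build_only, slow, harness, enable_slow=False):
        if build_only or (slow and not enable_slow):
            return False  # build only
        return harness in ('console', 'ztest', 'pytest')  # runnable

    @pytest.mark.parametrize(
        'build_only, slow, harness, expected',
        [
            (True,  False, 'console',  False),
            (False, True,  'console',  False),
            (False, False, 'console',  True),
            (False, False, 'keyboard', False),
        ]
    )
    def test_check_build_or_run(build_only, slow, harness, expected):
        assert check_build_or_run(build_only, slow, harness) == expected

Each tuple becomes its own collected test, so a single function covers every build/run combination.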
122 @pytest.mark.parametrize(
141 testcase = class_testplan.testsuites.get('scripts/tests/twister/test_data/testsuites/samples/'
157 testcase = class_testplan.testsuites.get('scripts/tests/twister/test_data/testsuites/samples/'
163 with pytest.raises(BuildError):
174 '(dt_compat_enabled("st,stm32-flash-controller") or' \
175 ' dt_compat_enabled("st,stm32h7-flash-controller")) and' \
176 ' dt_label_with_parent_compat_enabled("storage_partition", "fixed-partitions")',
181 ' dt_label_with_parent_compat_enabled("storage_partition", "fixed-partitions")) or' \
183 ' dt_label_with_parent_compat_enabled("slot1_ns_partition", "fixed-partitions"))',
193 @pytest.mark.parametrize("filter_expr, expected_stages", TESTDATA_PART_3)
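TESTDATA_PART_3 pairs each filter expression above with the build stages required to evaluate it: dt_* helpers need devicetree output, while CONFIG_ symbols need Kconfig output. A toy version of that stage detection, invented for illustration:

    def filter_stages(filter_expr):
        """Toy stage detection for a Twister-style filter string."""
        stages = set()
        if 'dt_' in filter_expr:
            stages.add('dts')
        if 'CONFIG_' in filter_expr:
            stages.add('kconfig')
        return sorted(stages)

    assert filter_stages('dt_compat_enabled("st,stm32-flash-controller")') == ['dts']
    assert filter_stages('CONFIG_FOO and dt_alias_exists("led0")') == ['dts', 'kconfig']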
200 @pytest.fixture(name='testinstance')
202 testsuite_path = 'scripts/tests/twister/test_data/testsuites'
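The testinstance fixture defined here is consumed throughout the file with indirect=True, which makes pytest route each parametrize value into the fixture as request.param rather than passing it straight to the test. A minimal self-contained sketch of that mechanism (fixture body invented):

    import pytest

    @pytest.fixture(name='testinstance')
    def fixture_testinstance(request):
        # With indirect=True, the parametrize dict arrives here.
        kind = request.param.get('testsuite_kind', 'tests')
        return {'kind': kind}  # stand-in for a real TestInstance

    @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'sample'}], indirect=True)
    def test_kind(testinstance):
        assert testinstance['kind'] == 'sample'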
222 @pytest.mark.parametrize('detailed_test_id', TESTDATA_1)
224 testsuite_path = 'scripts/tests/twister/test_data/testsuites/samples/test_app/sample_test.app'
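detailed_test_id controls whether an instance's name keeps the full path to the testsuite or only the short suite name; the real computation lives in twisterlib, so the rule below is a toy illustration only:

    def instance_name(testsuite_path, detailed_test_id):
        # Toy rule: detailed IDs keep the whole path, short IDs keep
        # only the final 'app.case' component.
        return testsuite_path if detailed_test_id else testsuite_path.rsplit('/', 1)[-1]

    path = 'scripts/tests/twister/test_data/testsuites/samples/test_app/sample_test.app'
    assert instance_name(path, True) == path
    assert instance_name(path, False) == 'sample_test.app'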
239 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'sample'}], indirect=True)
270 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'sample'}], indirect=True)
284 testsuite_path = 'scripts/tests/twister/test_data/testsuites/tests/test_a/test_a.check_1'
306 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'sample'}], indirect=True)
318 @pytest.mark.parametrize('reason, expected_reason', TESTDATA_2)
319 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
328 testinstance.testcases[-1].status = TwisterStatus.NONE
333 assert testinstance.testcases[-1].status == TwisterStatus.PASS
334 assert testinstance.testcases[-1].reason == expected_reason
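The assertions above reset the last testcase's status to NONE and then expect PASS plus a normalized reason afterwards, i.e. they exercise a helper that back-fills results for cases that never reported. A toy equivalent of that back-fill (class, names, and status strings invented):

    class Case:
        def __init__(self):
            self.status = 'none'
            self.reason = None

    def add_missing_case_status(cases, status, reason=None):
        # Only touch cases that never reported a result.
        for case in cases:
            if case.status == 'none':
                case.status = status
                case.reason = reason

    cases = [Case(), Case()]
    cases[0].status = 'passed'
    add_missing_case_status(cases, 'passed', 'Valid Reason')
    assert cases[-1].status == 'passed'
    assert cases[-1].reason == 'Valid Reason'
    assert cases[0].reason is None  # already-reported cases are left alone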
338 testsuite_path = 'scripts/tests/twister/test_data/testsuites/samples/test_app/sample_test.app'
365 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
382 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
392 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
407 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
424 (None, 'nonexistent harness', False),
430 @pytest.mark.parametrize(
431 'fixture, harness, expected_can_run',
433 ids=['improper harness', 'fixture not in list', 'no fixture specified', 'fixture in list']
439 harness, argument
442 testsuite_path = 'scripts/tests/twister/test_data/testsuites/samples/test_app/sample_test.app'
446 testsuite.harness = harness
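The four ids above ('improper harness', 'fixture not in list', 'no fixture specified', 'fixture in list') describe a can-run predicate: the harness must be one the runner supports, and any fixture the suite requires must be present on the device. A toy predicate covering those scenarios (the supported-harness set is invented):

    SUPPORTED_HARNESSES = {'console', 'ztest', 'pytest', 'test'}

    def can_run(harness, required_fixture, available_fixtures):
        if harness not in SUPPORTED_HARNESSES:
            return False  # 'improper harness'
        if required_fixture is None:
            return True   # 'no fixture specified'
        return required_fixture in available_fixtures

    assert not can_run('nonexistent harness', None, [])
    assert not can_run('console', 'gpio_loopback', [])
    assert can_run('console', None, [])
    assert can_run('console', 'gpio_loopback', ['gpio_loopback'])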
465 @pytest.mark.parametrize(
470 'non-qemu simulation with exec', 'unit testing', 'no handler']
472 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
515 False, mock.ANY, 'pytest',
521 False, mock.ANY, 'not pytest',
528 @pytest.mark.parametrize(
533 ids=['windows', 'build only', 'skip slow', 'pytest harness', 'sim', 'no sim', 'hardware map']
535 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
554 testinstance.testsuite.harness = testsuite_harness
577 @pytest.mark.parametrize('from_buildlog, expected_buildlog_filepath', TESTDATA_6)
578 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
606 @pytest.mark.parametrize('sysbuild, expected_error', TESTDATA_7)
607 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
627 with pytest.raises(expected_error) if expected_error else nullcontext():
639 @pytest.mark.parametrize('create_build_log, expected_error', TESTDATA_8)
640 @pytest.mark.parametrize('testinstance', [{'testsuite_kind': 'tests'}], indirect=True)
649 with pytest.raises(expected_error) if expected_error else nullcontext():
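The expression 'pytest.raises(expected_error) if expected_error else nullcontext()' used by both tests above picks a context manager at run time, so one parametrized test covers the error path and the success path alike. A standalone sketch of the idiom:

    from contextlib import nullcontext

    import pytest

    def parse_positive(value):
        if value <= 0:
            raise ValueError('must be positive')
        return value

    @pytest.mark.parametrize('value, expected_error', [(1, None), (-1, ValueError)])
    def test_parse_positive(value, expected_error):
        with pytest.raises(expected_error) if expected_error else nullcontext():
            assert parse_positive(value) == value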