#!/usr/bin/env python3
# Copyright (c) 2024 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
"""
Blackbox tests for twister's command line functions related to test filtering.
"""

import importlib
import mock
import os
import pytest
import sys
import json
import re

# pylint: disable=no-name-in-module
from conftest import ZEPHYR_BASE, TEST_DATA, testsuite_filename_mock
from twisterlib.testplan import TestPlan


class TestFilter:
    TESTDATA_1 = [
        (
            'x86',
            [
                r'(it8xxx2_evb/it81302bx).*?(FILTERED: Command line testsuite arch filter)',
            ],
        ),
        (
            'arm',
            [
                r'(it8xxx2_evb/it81302bx).*?(FILTERED: Command line testsuite arch filter)',
                r'(qemu_x86/atom).*?(FILTERED: Command line testsuite arch filter)',
                r'(hsdk/arc_hsdk).*?(FILTERED: Command line testsuite arch filter)',
            ]
        ),
        (
            'riscv',
            [
                r'(qemu_x86/atom).*?(FILTERED: Command line testsuite arch filter)',
                r'(hsdk/arc_hsdk).*?(FILTERED: Command line testsuite arch filter)',
            ]
        )
    ]
    TESTDATA_2 = [
        (
            'nxp',
            [
                r'(it8xxx2_evb/it81302bx).*?(FILTERED: Not a selected vendor platform)',
                r'(hsdk/arc_hsdk).*?(FILTERED: Not a selected vendor platform)',
                r'(qemu_x86).*?(FILTERED: Not a selected vendor platform)',
            ],
        ),
        (
            'intel',
            [
                r'(it8xxx2_evb/it81302bx).*?(FILTERED: Not a selected vendor platform)',
                r'(qemu_x86/atom).*?(FILTERED: Not a selected vendor platform)',
                r'(DEBUG\s+- adding intel_adl_crb)'
            ]
        ),
        (
            'ite',
            [
                r'(qemu_x86/atom).*?(FILTERED: Not a selected vendor platform)',
                r'(intel_adl_crb/alder_lake).*?(FILTERED: Not a selected vendor platform)',
                r'(hsdk/arc_hsdk).*?(FILTERED: Not a selected vendor platform)',
                r'(DEBUG\s+- adding it8xxx2_evb)'
            ]
        )
    ]

    @classmethod
    def setup_class(cls):
        apath = os.path.join(ZEPHYR_BASE, 'scripts', 'twister')
        cls.loader = importlib.machinery.SourceFileLoader('__main__', apath)
        cls.spec = importlib.util.spec_from_loader(cls.loader.name, cls.loader)
        cls.twister_module = importlib.util.module_from_spec(cls.spec)

    @classmethod
    def teardown_class(cls):
        pass

    @pytest.mark.parametrize(
        'tags, expected_test_count',
        [
            (['device', 'cpp'], 6),
            # dummy.agnostic.group1.subgroup1.a1_1_tests.assert
            # dummy.agnostic.group1.subgroup2.a2_2_tests.assert
            # dummy.agnostic.group2.a2_tests.assert1
            # dummy.agnostic.group2.a2_tests.assert2
            # dummy.agnostic.group2.a2_tests.assert3
            # dummy.agnostic.group2.a3_tests.assert1
            (['agnostic'], 1)  # dummy.device.group.assert
        ],
        ids=['no device, no cpp', 'no agnostic']
    )
    @mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
    def test_exclude_tag(self, out_path, tags, expected_test_count):
        test_platforms = ['qemu_x86', 'intel_adl_crb']
        path = os.path.join(TEST_DATA, 'tests', 'dummy')
        args = ['-i', '--outdir', out_path, '-T', path, '-y'] + \
            [val for pair in zip(
                ['--exclude-tag'] * len(tags), tags
            ) for val in pair] + \
            [val for pair in zip(
                ['-p'] * len(test_platforms), test_platforms
            ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        with open(os.path.join(out_path, 'testplan.json')) as f:
            j = json.load(f)
            filtered_j = [
                (ts['platform'], ts['name'], tc['identifier'])
                for ts in j['testsuites']
                for tc in ts['testcases'] if 'reason' not in tc
            ]

        assert len(filtered_j) == expected_test_count

        assert str(sys_exit.value) == '0'

    @mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
    def test_enable_slow(self, out_path):
        test_platforms = ['qemu_x86', 'intel_adl_crb']
        path = os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic')
        alt_config_root = os.path.join(TEST_DATA, 'alt-test-configs', 'dummy', 'agnostic')
        args = ['-i', '--outdir', out_path, '-T', path] + \
            ['--enable-slow'] + \
            ['--alt-config-root', alt_config_root] + \
            [val for pair in zip(
                ['-p'] * len(test_platforms), test_platforms
            ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        with open(os.path.join(out_path, 'testplan.json')) as f:
            j = json.load(f)
            filtered_j = [
                (ts['platform'], ts['name'], tc['identifier'])
                for ts in j['testsuites']
                for tc in ts['testcases'] if 'reason' not in tc
            ]

        assert str(sys_exit.value) == '0'

        assert len(filtered_j) == 6

    @mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
    def test_enable_slow_only(self, out_path):
        test_platforms = ['qemu_x86', 'intel_adl_crb']
        path = os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic')
        alt_config_root = os.path.join(TEST_DATA, 'alt-test-configs', 'dummy', 'agnostic')
        args = ['-i', '--outdir', out_path, '-T', path] + \
            ['--enable-slow-only'] + \
            ['--alt-config-root', alt_config_root] + \
            [val for pair in zip(
                ['-p'] * len(test_platforms), test_platforms
            ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        with open(os.path.join(out_path, 'testplan.json')) as f:
            j = json.load(f)
            filtered_j = [
                (ts['platform'], ts['name'], tc['identifier'])
                for ts in j['testsuites']
                for tc in ts['testcases'] if 'reason' not in tc
            ]

        assert str(sys_exit.value) == '0'

        assert len(filtered_j) == 4

    @pytest.mark.parametrize(
        'arch, expected',
        TESTDATA_1,
        ids=[
            'arch x86',
            'arch arm',
            'arch riscv'
        ],
    )
    @mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
    def test_arch(self, capfd, out_path, arch, expected):
        path = os.path.join(TEST_DATA, 'tests', 'no_filter')
        test_platforms = ['qemu_x86', 'hsdk', 'intel_adl_crb', 'it8xxx2_evb']
        args = ['--outdir', out_path, '-T', path, '-vv', '-ll', 'DEBUG'] + \
            ['--arch', arch] + \
            [val for pair in zip(
                ['-p'] * len(test_platforms), test_platforms
            ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        assert str(sys_exit.value) == '0'

        for line in expected:
            print(err)
            assert re.search(line, err)

    @pytest.mark.parametrize(
        'vendor, expected',
        TESTDATA_2,
        ids=[
            'vendor nxp',
            'vendor intel',
            'vendor ite'
        ],
    )
    @mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
    def test_vendor(self, capfd, out_path, vendor, expected):
        path = os.path.join(TEST_DATA, 'tests', 'no_filter')
        test_platforms = ['qemu_x86', 'hsdk', 'intel_adl_crb', 'it8xxx2_evb']
        args = ['--outdir', out_path, '-T', path, '-vv', '-ll', 'DEBUG'] + \
            ['--vendor', vendor] + \
            [val for pair in zip(
                ['-p'] * len(test_platforms), test_platforms
            ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        for line in expected:
            assert re.search(line, err)

        assert str(sys_exit.value) == '0'

    @pytest.mark.parametrize(
        'flag, expected_test_count',
        [
            (['--ignore-platform-key'], 2),
            ([], 1)
        ],
        ids=['ignore_platform_key', 'without ignore_platform_key']
    )
    @mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
    def test_ignore_platform_key(self, out_path, flag, expected_test_count):
        test_platforms = ['qemu_x86', 'qemu_x86_64']
        path = os.path.join(TEST_DATA, 'tests', 'platform_key')
        args = ['-i', '--outdir', out_path, '-T', path] + \
            flag + \
            [val for pair in zip(
                ['-p'] * len(test_platforms), test_platforms
            ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        with open(os.path.join(out_path, 'testplan.json')) as f:
            j = json.load(f)
            filtered_j = [
                (ts['platform'], ts['name'], tc['identifier'])
                for ts in j['testsuites']
                for tc in ts['testcases'] if 'reason' not in tc
            ]

        assert str(sys_exit.value) == '0'

        assert len(filtered_j) == expected_test_count