#!/usr/bin/env python3
# Copyright (c) 2023-2024 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
"""
Blackbox tests for twister's command line functions
"""

import importlib
import mock
import os
import pytest
import sys
import re

# pylint: disable=no-name-in-module
from conftest import (
    TEST_DATA,
    ZEPHYR_BASE,
    clear_log_in_test,
    sample_filename_mock,
    testsuite_filename_mock
)
from twisterlib.testplan import TestPlan


@mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
class TestPrintOuts:
    TESTDATA_1 = [
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            ['agnostic', 'subgrouped', 'even', 'odd']
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'device'),
            ['device']
        ),
    ]

    TESTDATA_2 = [
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            [
                'dummy.agnostic.group1.subgroup1.a1_1_tests.assert',
                'dummy.agnostic.group1.subgroup2.a1_2_tests.assert',
                'dummy.agnostic.group2.a2_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert2',
                'dummy.agnostic.group2.a3_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert3'
            ],
            '--no-detailed-test-id',
            ''
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            [
                'dummy.agnostic.group1.subgroup2.a1_2_tests.assert',
                'dummy.agnostic.group2.a2_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert2',
                'dummy.agnostic.group2.a3_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert3'
            ],
            '--no-detailed-test-id',
            'odd'
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            [],
            '--no-detailed-test-id',
            'odd even'
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            [
                'dummy.agnostic.group1.subgroup1.a1_1_tests.assert',
                'dummy.agnostic.group1.subgroup2.a1_2_tests.assert',
                'dummy.agnostic.group2.a2_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert2',
                'dummy.agnostic.group2.a3_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert3'
            ],
            '--no-detailed-test-id',
            'unknown_tag'
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            [
                'dummy.agnostic.group1.subgroup1.a1_1_tests.assert',
                'dummy.agnostic.group1.subgroup2.a1_2_tests.assert',
                'dummy.agnostic.group2.a2_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert2',
                'dummy.agnostic.group2.a3_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert3'
            ],
            '--detailed-test-id',
            ''
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            [
                'dummy.agnostic.group1.subgroup2.a1_2_tests.assert',
                'dummy.agnostic.group2.a2_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert2',
                'dummy.agnostic.group2.a3_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert3'
            ],
            '--detailed-test-id',
            'odd'
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            [],
            '--detailed-test-id',
            'odd even'
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            [
                'dummy.agnostic.group1.subgroup1.a1_1_tests.assert',
                'dummy.agnostic.group1.subgroup2.a1_2_tests.assert',
                'dummy.agnostic.group2.a2_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert2',
                'dummy.agnostic.group2.a3_tests.assert1',
                'dummy.agnostic.group2.a2_tests.assert3'
            ],
            '--detailed-test-id',
            'unknown_tag'
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'device'),
            [
                'dummy.device.group.d_tests.assert'
            ],
            '--no-detailed-test-id',
            ''
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'device'),
            [
                'dummy.device.group.d_tests.assert'
            ],
            '--detailed-test-id',
            ''
        ),
    ]

    TESTDATA_3 = [
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            'Testsuite\n' \
            '├── Samples\n' \
            '└── Tests\n' \
            '    └── dummy\n' \
            '        └── agnostic\n' \
            '            ├── dummy.agnostic.group1.subgroup1.a1_1_tests.assert\n' \
            '            ├── dummy.agnostic.group1.subgroup2.a1_2_tests.assert\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert1\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert2\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert3\n' \
            '            └── dummy.agnostic.group2.a3_tests.assert1\n',
            '--no-detailed-test-id',
            ''
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            'Testsuite\n' \
            '├── Samples\n' \
            '└── Tests\n' \
            '    └── dummy\n' \
            '        └── agnostic\n' \
            '            ├── dummy.agnostic.group1.subgroup2.a1_2_tests.assert\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert1\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert2\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert3\n' \
            '            └── dummy.agnostic.group2.a3_tests.assert1\n',
            '--no-detailed-test-id',
            'odd'
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            'Testsuite\n' \
            '├── Samples\n' \
            '└── Tests\n' \
            '    └── dummy\n' \
            '        └── agnostic\n' \
            '            ├── dummy.agnostic.group1.subgroup1.a1_1_tests.assert\n' \
            '            ├── dummy.agnostic.group1.subgroup2.a1_2_tests.assert\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert1\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert2\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert3\n' \
            '            └── dummy.agnostic.group2.a3_tests.assert1\n',
            '--detailed-test-id',
            ''
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            'Testsuite\n' \
            '├── Samples\n' \
            '└── Tests\n' \
            '    └── dummy\n' \
            '        └── agnostic\n' \
            '            ├── dummy.agnostic.group1.subgroup2.a1_2_tests.assert\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert1\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert2\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert3\n' \
            '            └── dummy.agnostic.group2.a3_tests.assert1\n',
            '--detailed-test-id',
            'odd'
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            'Testsuite\n' \
            '├── Samples\n' \
            '└── Tests\n',
            '--detailed-test-id',
            'odd even'
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'agnostic'),
            'Testsuite\n' \
            '├── Samples\n' \
            '└── Tests\n' \
            '    └── dummy\n' \
            '        └── agnostic\n' \
            '            ├── dummy.agnostic.group1.subgroup1.a1_1_tests.assert\n' \
            '            ├── dummy.agnostic.group1.subgroup2.a1_2_tests.assert\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert1\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert2\n' \
            '            ├── dummy.agnostic.group2.a2_tests.assert3\n' \
            '            └── dummy.agnostic.group2.a3_tests.assert1\n',
            '--detailed-test-id',
            'unknown_tag'
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'device'),
            'Testsuite\n'
            '├── Samples\n'
            '└── Tests\n'
            '    └── dummy\n'
            '        └── device\n'
            '            └── dummy.device.group.d_tests.assert\n',
            '--no-detailed-test-id',
            ''
        ),
        (
            os.path.join(TEST_DATA, 'tests', 'dummy', 'device'),
            'Testsuite\n'
            '├── Samples\n'
            '└── Tests\n'
            '    └── dummy\n'
            '        └── device\n'
            '            └── dummy.device.group.d_tests.assert\n',
            '--detailed-test-id',
            ''
        ),
    ]

    TESTDATA_4 = [
        (
            os.path.join(TEST_DATA, 'tests', 'dummy'),
            ['qemu_x86', 'qemu_x86_64', 'intel_adl_crb']
        )
    ]

    @classmethod
    def setup_class(cls):
        apath = os.path.join(ZEPHYR_BASE, 'scripts', 'twister')
        cls.loader = importlib.machinery.SourceFileLoader('__main__', apath)
        cls.spec = importlib.util.spec_from_loader(cls.loader.name, cls.loader)
        cls.twister_module = importlib.util.module_from_spec(cls.spec)

    @classmethod
    def teardown_class(cls):
        pass

    @pytest.mark.parametrize(
        'test_path, expected',
        TESTDATA_1,
        ids=[
            'tests/dummy/agnostic',
            'tests/dummy/device',
        ]
    )
    def test_list_tags(self, capfd, out_path, test_path, expected):
        args = ['--outdir', out_path, '-T', test_path, '--list-tags']

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        printed_tags = [tag.strip() for tag in out.split('- ')[1:]]

        assert all([tag in printed_tags for tag in expected])
        assert all([tag in expected for tag in printed_tags])

        assert str(sys_exit.value) == '0'

    @pytest.mark.parametrize(
        'test_path, expected, detailed_id, exclude_tags',
        TESTDATA_2,
        ids=[
            'tests/dummy/agnostic no_detailed_id',
            'tests/dummy/agnostic no_detailed_id excl_tag',
            'tests/dummy/agnostic no_detailed_id excl_all_tags',
            'tests/dummy/agnostic no_detailed_id no_excl_tag',
            'tests/dummy/agnostic detailed_id',
            'tests/dummy/agnostic detailed_id excl_tag',
            'tests/dummy/agnostic detailed_id excl_all_tags',
            'tests/dummy/agnostic detailed_id no_excl_tag',
            'tests/dummy/device',
            'tests/dummy/device detailed_id',
        ]
    )
    def test_list_tests(self, capfd, out_path, test_path, expected, detailed_id, exclude_tags):
        args = ['--outdir', out_path, '-T', test_path, '--list-tests', detailed_id]
        for tag in exclude_tags.split():
            args += ['--exclude-tag', tag]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        printed_tests = [test.strip() for test in out.split('- ')[1:]]
        if printed_tests:
            printed_tests[-1] = printed_tests[-1].split('\n')[0]

        assert all([test in printed_tests for test in expected])
        assert all([test in expected for test in printed_tests])

        assert str(sys_exit.value) == '0'

    @pytest.mark.parametrize(
        'test_path, expected, detailed_id, exclude_tags',
        TESTDATA_3,
        ids=[
            'tests/dummy/agnostic no_detailed_id',
            'tests/dummy/agnostic no_detailed_id excl_tag',
            'tests/dummy/agnostic detailed_id',
            'tests/dummy/agnostic detailed_id excl_tag',
            'tests/dummy/agnostic detailed_id excl_all_tags',
            'tests/dummy/agnostic detailed_id no_excl_tag',
            'tests/dummy/device',
            'tests/dummy/device detailed_id',
        ]
    )
    def test_tree(self, capfd, out_path, test_path, expected, detailed_id, exclude_tags):
        args = ['--outdir', out_path, '-T', test_path, '--test-tree', detailed_id]
        for tag in exclude_tags.split():
            args += ['--exclude-tag', tag]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        assert expected in out
        assert str(sys_exit.value) == '0'

    @pytest.mark.parametrize(
        'test_path, test_platforms',
        TESTDATA_4,
        ids=['tests']
    )
    def test_timestamps(self, capfd, out_path, test_path, test_platforms):

        # Interleave '-p' with each platform name, e.g. ['-p', 'qemu_x86', '-p', ...]
        args = ['-i', '--outdir', out_path, '-T', test_path, '--timestamps', '-v'] + \
               [val for pair in zip(
                   ['-p'] * len(test_platforms), test_platforms
               ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        assert str(sys_exit.value) == '0'

        info_regex = r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3} - (?:INFO|DEBUG|ERROR)'

        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        output = err.split('\n')

        # Will give false positives on lines with newlines inside of them
        err_lines = []
        for line in output:
            if line.strip():
                match = re.search(info_regex, line)
                if match is None:
                    err_lines.append(line)

        if err_lines:
            assert False, f'No timestamp found in lines: {err_lines}'

    @pytest.mark.parametrize(
        'flag',
        ['--abcd', '--1234', '-%', '-1']
    )
    def test_broken_parameter(self, capfd, flag):

        args = [flag]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        if flag == '-1':
            assert str(sys_exit.value) == '1'
        else:
            assert str(sys_exit.value) == '2'

    @pytest.mark.parametrize(
        'flag',
        ['--help', '-h']
    )
    def test_help(self, capfd, flag):
        args = [flag]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        assert str(sys_exit.value) == '0'

    @pytest.mark.parametrize(
        'test_path, test_platforms',
        TESTDATA_4,
        ids=['tests']
    )
    def test_force_color(self, capfd, out_path, test_path, test_platforms):

        args = ['-i', '--outdir', out_path, '-T', test_path, '--force-color'] + \
               [val for pair in zip(
                   ['-p'] * len(test_platforms), test_platforms
               ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        assert str(sys_exit.value) == '0'

    @mock.patch.object(TestPlan, 'SAMPLE_FILENAME', sample_filename_mock)
    def test_size(self, capfd, out_path):
        test_platforms = ['qemu_x86', 'intel_adl_crb']
        path = os.path.join(TEST_DATA, 'samples', 'hello_world')
        args = ['-i', '--outdir', out_path, '-T', path] + \
               [val for pair in zip(
                   ['-p'] * len(test_platforms), test_platforms
               ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        assert str(sys_exit.value) == '0'

        clear_log_in_test()
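        # Discard everything captured during the build step, so the assertions
        # below only see what the `--size` invocation prints.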
        capfd.readouterr()

        p = os.path.relpath(path, ZEPHYR_BASE)
        prev_path = os.path.join(out_path, 'qemu_x86_atom', p,
                                 'sample.basic.helloworld', 'zephyr', 'zephyr.elf')
        args = ['--size', prev_path]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as sys_exit:
            self.loader.exec_module(self.twister_module)

        assert str(sys_exit.value) == '0'

        out, err = capfd.readouterr()
        sys.stdout.write(out)
        sys.stderr.write(err)

        # The header and footer are the most stable parts of the report format.
        header_pattern = r'SECTION NAME\s+VMA\s+LMA\s+SIZE\s+HEX SZ\s+TYPE\s*\n'
        res = re.search(header_pattern, out)
        assert res, 'No stdout size report header found.'

        footer_pattern = r'Totals:\s+(?P<rom>[0-9]+)\s+bytes\s+\(ROM\),\s+' \
                         r'(?P<ram>[0-9]+)\s+bytes\s+\(RAM\)\s*\n'
        res = re.search(footer_pattern, out)
        assert res, 'No stdout size report footer found.'
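
# Usage sketch (not exercised by the suite itself): these blackbox tests load
# the real twister entry point in setup_class() and are meant to be collected
# by pytest together with the fixtures from the local conftest.py (out_path,
# clear_log_in_test, the *_filename_mock objects). Assuming a configured
# Zephyr workspace with ZEPHYR_BASE set and twister's requirements installed,
# a single case can be run with, for example:
#
#   pytest <this file> -k "test_list_tags" -vv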