1#!/usr/bin/env python3
2# Copyright (c) 2023 Google LLC
3#
4# SPDX-License-Identifier: Apache-2.0
5"""
6Tests for runner.py classes
7"""
8
9import errno
10import os
11import pathlib
12import re
13import subprocess
14import sys
15from collections import deque
16from contextlib import nullcontext
17from typing import List
18from unittest import mock
19
20import pytest
21import yaml
22from elftools.elf.sections import SymbolTableSection
23from twisterlib.error import BuildError
24from twisterlib.harness import Pytest
25from twisterlib.runner import CMake, ExecutionCounter, FilterBuilder, ProjectBuilder, TwisterRunner
26from twisterlib.statuses import TwisterStatus
27
28from . import ZEPHYR_BASE
29
30
@pytest.fixture
def mocked_instance(tmp_path):
    """Return a mocked TestInstance with an empty testsuite source dir,
    a non-sysbuild platform with no extra binaries, and a real build_dir
    created under pytest's tmp_path.
    """
    instance = mock.Mock()
    testsuite = mock.Mock()
    # Plain assignment: the original annotated assignments on mock
    # attributes (e.g. `testsuite.source_dir: str = ''`) are misleading -
    # the annotation is ignored at runtime and implies static typing that
    # a Mock attribute does not have.
    testsuite.source_dir = ''
    instance.testsuite = testsuite
    platform = mock.Mock()
    platform.sysbuild = False
    platform.binaries = []
    instance.platform = platform
    build_dir = tmp_path / 'build_dir'
    os.makedirs(build_dir)
    instance.build_dir = str(build_dir)
    return instance
45
46
@pytest.fixture
def mocked_env():
    """Return a mocked environment whose options request verbosity level 2."""
    options = mock.Mock()
    options.verbose = 2
    env = mock.Mock()
    env.options = options
    return env
54
55
@pytest.fixture
def mocked_jobserver():
    """Return a bare mocked jobserver."""
    return mock.Mock()
60
61
@pytest.fixture
def project_builder(mocked_instance, mocked_env, mocked_jobserver) -> ProjectBuilder:
    """Return a ProjectBuilder wired to the mocked instance, env and jobserver."""
    return ProjectBuilder(mocked_instance, mocked_env, mocked_jobserver)
66
67
@pytest.fixture
def runners(project_builder: ProjectBuilder) -> dict:
    """
    Create runners.yaml file in build_dir/zephyr directory and return file
    content as dict.
    """
    zephyr_dir = os.path.join(project_builder.instance.build_dir, 'zephyr')
    os.makedirs(zephyr_dir)
    # 'hex_file' deliberately carries an absolute path so the sanitization
    # tests have something to strip; the other entries stay relative.
    content: dict = {
        'config': {
            'elf_file': 'zephyr.elf',
            'hex_file': os.path.join(zephyr_dir, 'zephyr.elf'),
            'bin_file': 'zephyr.bin',
        }
    }
    yaml_path = os.path.join(zephyr_dir, 'runners.yaml')
    with open(yaml_path, 'w') as outfile:
        yaml.dump(content, outfile)

    return content
88
89
@mock.patch("os.path.exists")
def test_projectbuilder_cmake_assemble_args_single(m):
    """cmake_assemble_args() must merge base args, handler args, conf and
    overlay files, and extra cmake args into one -D argument list."""
    # Causes the additional_overlay_path to be appended
    m.return_value = True

    class MockHandler:
        # Minimal stand-in exposing only the attributes that are read.
        pass

    handler = MockHandler()
    handler.args = ["handler_arg1", "handler_arg2"]
    handler.ready = True

    result = ProjectBuilder.cmake_assemble_args(
        ["basearg1", "CONFIG_t=\"test\"", "SNIPPET_t=\"test\""],
        handler,
        ["a.conf;b.conf", "c.conf"],
        ["extra_overlay.conf"],
        ["x.overlay;y.overlay", "z.overlay"],
        ["cmake1=foo", "cmake2=bar"],
        "/builddir/",
    )

    expected = [
        "-DCONFIG_t=\"test\"",
        "-Dcmake1=foo", "-Dcmake2=bar",
        "-Dbasearg1", "-DSNIPPET_t=test",
        "-Dhandler_arg1", "-Dhandler_arg2",
        "-DCONF_FILE=a.conf;b.conf;c.conf",
        "-DDTC_OVERLAY_FILE=x.overlay;y.overlay;z.overlay",
        "-DOVERLAY_CONFIG=extra_overlay.conf "
        "/builddir/twister/testsuite_extra.conf",
    ]
    assert result == expected
120
121
def test_if_default_binaries_are_taken_properly(project_builder: ProjectBuilder):
    """Without platform or runners binaries the default zephyr images are used."""
    project_builder.instance.sysbuild = False

    expected_names = ['zephyr.hex', 'zephyr.bin', 'zephyr.elf', 'zephyr.exe']
    default_binaries = [os.path.join('zephyr', name) for name in expected_names]

    binaries = project_builder._get_binaries()

    assert sorted(binaries) == sorted(default_binaries)
132
133
def test_if_binaries_from_platform_are_taken_properly(project_builder: ProjectBuilder):
    """Binaries declared by the platform are returned relative to zephyr/."""
    platform_binaries = ['spi_image.bin']
    project_builder.platform.binaries = platform_binaries
    project_builder.instance.sysbuild = False
    # 'binary' instead of 'bin' to avoid shadowing the builtin bin().
    platform_binaries_expected = [
        os.path.join('zephyr', binary) for binary in platform_binaries
    ]
    binaries = project_builder._get_binaries()
    assert sorted(binaries) == sorted(platform_binaries_expected)
141
142
def test_if_binaries_from_runners_are_taken_properly(runners, project_builder: ProjectBuilder):
    """Runners binaries stay as-is when absolute, otherwise go under zephyr/."""
    runners_binaries = list(runners['config'].values())
    # 'binary' instead of 'bin' to avoid shadowing the builtin bin();
    # also keeps the comprehension within a readable line length.
    runners_binaries_expected = [
        binary if os.path.isabs(binary) else os.path.join('zephyr', binary)
        for binary in runners_binaries
    ]
    binaries = project_builder._get_binaries_from_runners()
    assert sorted(binaries) == sorted(runners_binaries_expected)
148
149
def test_if_runners_file_is_sanitized_properly(runners, project_builder: ProjectBuilder):
    """_sanitize_runners_file() must strip all absolute paths from runners.yaml."""
    runners_file_path = os.path.join(project_builder.instance.build_dir, 'zephyr', 'runners.yaml')
    with open(runners_file_path, 'r') as file:
        unsanitized_runners_content = yaml.safe_load(file)
    unsanitized_runners_binaries = list(unsanitized_runners_content['config'].values())
    # 'path' instead of 'bin' to avoid shadowing the builtin bin().
    abs_paths = [path for path in unsanitized_runners_binaries if os.path.isabs(path)]
    # Precondition: the fixture must provide at least one absolute path,
    # otherwise the sanitization below would be vacuously "successful".
    assert len(abs_paths) > 0

    project_builder._sanitize_runners_file()

    with open(runners_file_path, 'r') as file:
        sanitized_runners_content = yaml.safe_load(file)
    sanitized_runners_binaries = list(sanitized_runners_content['config'].values())
    abs_paths = [path for path in sanitized_runners_binaries if os.path.isabs(path)]
    assert len(abs_paths) == 0
165
166
def test_if_zephyr_base_is_sanitized_properly(project_builder: ProjectBuilder):
    """ZEPHYR_BASE prefixes written into CMakeCache.txt must get stripped."""
    expected = os.path.join('sanitized', 'path')
    unsanitized = os.path.join(os.path.realpath(ZEPHYR_BASE), expected)
    cache_path = os.path.join(project_builder.instance.build_dir, 'CMakeCache.txt')
    with open(cache_path, 'w') as cache_file:
        cache_file.write(unsanitized)

    project_builder._sanitize_zephyr_base_from_files()

    with open(cache_path, 'r') as cache_file:
        assert cache_file.read() == expected
179
180
def test_executioncounter(capfd):
    """Check that ExecutionCounter.summary() prints the expected counter tree
    and that assigned counter values read back unchanged."""
    ec = ExecutionCounter(total=12)

    # Populate every counter the summary tree reports on.
    ec.cases = 25
    ec.skipped_cases = 6
    ec.error = 2
    ec.iteration = 2
    ec.done = 9
    ec.passed = 6
    ec.filtered_configs = 3
    ec.filtered_runtime = 1
    ec.filtered_static = 2
    ec.failed = 1

    ec.summary()

    # Re-emit the captured output so it remains visible in pytest's report.
    out, err = capfd.readouterr()
    sys.stdout.write(out)
    sys.stderr.write(err)

    # Expected rendering of the summary tree for the values set above.
    assert (
"├── Total test suites: 12\n"
"├── Processed test suites: 9\n"
"│   ├── Filtered test suites: 3\n"
"│   │   ├── Filtered test suites (static): 2\n"
"│   │   └── Filtered test suites (at runtime): 1\n"
"│   └── Selected test suites: 6\n"
"│       ├── Skipped test suites: 0\n"
"│       ├── Passed test suites: 6\n"
"│       ├── Built only test suites: 0\n"
"│       ├── Failed test suites: 1\n"
"│       └── Errors in test suites: 2\n"
"└── Total test cases: 25\n"
"    ├── Filtered test cases: 0\n"
"    └── Selected test cases: 25\n"
"        ├── Passed test cases: 0\n"
"        ├── Skipped test cases: 6\n"
"        ├── Built only test cases: 0\n"
"        ├── Blocked test cases: 0\n"
"        ├── Failed test cases: 0\n"
"        └── Errors in test cases: 0\n"
    ) in out

    # The values assigned above must read back unchanged.
    assert ec.cases == 25
    assert ec.skipped_cases == 6
    assert ec.error == 2
    assert ec.iteration == 2
    assert ec.done == 9
    assert ec.passed == 6
    assert ec.filtered_configs == 3
    assert ec.filtered_runtime == 1
    assert ec.filtered_static == 2
    assert ec.failed == 1
234
235
def test_cmake_parse_generated(mocked_jobserver):
    """parse_generated() on a fresh CMake object yields empty filter data."""
    cmake = CMake(
        mock.Mock(),                       # testsuite
        mock.Mock(),                       # platform
        os.path.join('source', 'dir'),
        os.path.join('build', 'dir'),
        mocked_jobserver,
    )

    assert cmake.parse_generated() == {}
    assert cmake.defconfig == {}
249
250
# TESTDATA_1_1: host platforms simulated via sys.platform. Note: the
# parentheses do not create tuples - entries are plain strings.
TESTDATA_1_1 = [
    ('linux'),
    ('nt')
]
# TESTDATA_1_2 fields:
# (return_code, is_instance_run, p_out, expect_returncode,
#  expect_writes, expected_status, expected_reason,
#  expected_change_skip, expected_add_missing)
TESTDATA_1_2 = [
    (0, False, 'dummy out',
     True, True, TwisterStatus.NOTRUN, None, False, True),
    (0, True, '',
     False, False, TwisterStatus.PASS, None, False, False),
    (1, True, 'ERROR: region `FLASH\' overflowed by 123 MB',
     True,  True, TwisterStatus.SKIP, 'FLASH overflow', True, False),
    (1, True, 'Error: Image size (99 B) + trailer (1 B) exceeds requested size',
     True, True, TwisterStatus.SKIP, 'imgtool overflow', True, False),
    (1, True, 'mock.ANY',
     True, True, TwisterStatus.ERROR, 'Build failure', False, False)
]
267
@pytest.mark.parametrize(
    'return_code, is_instance_run, p_out, expect_returncode,' \
    ' expect_writes, expected_status, expected_reason,' \
    ' expected_change_skip, expected_add_missing',
    TESTDATA_1_2,
    ids=['no error, no instance run', 'no error, instance run',
         'error - region overflow', 'error - image size exceed', 'error']
)
@pytest.mark.parametrize('sys_platform', TESTDATA_1_1)
def test_cmake_run_build(
    sys_platform,
    return_code,
    is_instance_run,
    p_out,
    expect_returncode,
    expect_writes,
    expected_status,
    expected_reason,
    expected_change_skip,
    expected_add_missing
):
    """Check CMake.run_build(): the returned dict, the resulting instance
    status/reason, and which popen implementation is used per host platform."""
    # Fake build subprocess: fixed exit code and canned stdout bytes.
    process_mock = mock.Mock(
        returncode=return_code,
        communicate=mock.Mock(
            return_value=(p_out.encode(sys.getdefaultencoding()), None)
        )
    )

    def mock_popen(*args, **kwargs):
        # Every popen call hands back the same fake process.
        return process_mock

    testsuite_mock = mock.Mock()
    platform_mock = mock.Mock()
    platform_mock.name = '<platform name>'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')
    jobserver_mock = mock.Mock(
        popen=mock.Mock(side_effect=mock_popen)
    )
    instance_mock = mock.Mock(add_missing_case_status=mock.Mock())
    instance_mock.build_time = 0
    instance_mock.run = is_instance_run
    instance_mock.status = TwisterStatus.NONE
    instance_mock.reason = None

    cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
                  jobserver_mock)
    cmake.cwd = os.path.join('dummy', 'working', 'dir')
    cmake.instance = instance_mock
    cmake.options = mock.Mock()
    cmake.options.overflow_as_errors = False

    cmake_path = os.path.join('dummy', 'cmake')

    popen_mock = mock.Mock(side_effect=mock_popen)
    change_mock = mock.Mock()

    # sys.platform is patched to steer run_build() between the jobserver's
    # popen ('linux') and plain subprocess.Popen (any other value).
    with mock.patch('sys.platform', sys_platform), \
         mock.patch('shutil.which', return_value=cmake_path), \
         mock.patch('twisterlib.runner.change_skip_to_error_if_integration',
                    change_mock), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('subprocess.Popen', popen_mock):
        result = cmake.run_build(args=['arg1', 'arg2'])

    # run_build() is expected to return None when there is nothing to report.
    expected_results = {}
    if expect_returncode:
        expected_results['returncode'] = return_code
    if expected_results == {}:
        expected_results = None

    assert expected_results == result

    # On 'linux' the jobserver popen is used; elsewhere subprocess.Popen.
    popen_caller = cmake.jobserver.popen if sys_platform == 'linux' else \
                   popen_mock
    popen_caller.assert_called_once_with(
        [os.path.join('dummy', 'cmake'), 'arg1', 'arg2'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=os.path.join('dummy', 'working', 'dir')
    )

    assert cmake.instance.status == expected_status
    assert cmake.instance.reason == expected_reason

    if expected_change_skip:
        change_mock.assert_called_once()

    if expected_add_missing:
        cmake.instance.add_missing_case_status.assert_called_once_with(
            TwisterStatus.NOTRUN, 'Test was built only'
        )
360
361
# TESTDATA_2_1: host platforms simulated via sys.platform. Note: the
# parentheses do not create tuples - entries are plain strings.
TESTDATA_2_1 = [
    ('linux'),
    ('nt')
]
# TESTDATA_2_2 fields:
# (error_warns, f_stages,
#  return_code, is_instance_run, p_out, expect_returncode,
#  expect_filter, expect_writes, expected_status, expected_reason,
#  expected_cmd)
TESTDATA_2_2 = [
    (True, ['dummy_stage_1', 'ds2'],
     0, False, '',
     True, True, False,
     TwisterStatus.NONE, None,
     [os.path.join('dummy', 'cmake'),
      '-B' + os.path.join('build', 'dir'), '-DTC_RUNID=1', '-DTC_NAME=testcase',
      '-DSB_CONFIG_COMPILER_WARNINGS_AS_ERRORS=y',
      '-DEXTRA_GEN_EDT_ARGS=--edtlib-Werror', '-Gdummy_generator',
      f'-DPython3_EXECUTABLE={pathlib.Path(sys.executable).as_posix()}',
      '-DZEPHYR_TOOLCHAIN_VARIANT=zephyr',
      '-S' + os.path.join('source', 'dir'),
      'arg1', 'arg2',
      '-DBOARD=<platform name>',
      '-DSNIPPET=dummy snippet 1;ds2',
      '-DMODULES=dummy_stage_1,ds2',
      '-Pzephyr_base/cmake/package_helper.cmake']),
    (False, [],
     1, True, 'ERROR: region `FLASH\' overflowed by 123 MB',
     True, False, True,
     TwisterStatus.ERROR, 'CMake build failure',
     [os.path.join('dummy', 'cmake'),
      '-B' + os.path.join('build', 'dir'), '-DTC_RUNID=1', '-DTC_NAME=testcase',
      '-DSB_CONFIG_COMPILER_WARNINGS_AS_ERRORS=n',
      '-DEXTRA_GEN_EDT_ARGS=', '-Gdummy_generator',
      f'-DPython3_EXECUTABLE={pathlib.Path(sys.executable).as_posix()}',
      '-DZEPHYR_TOOLCHAIN_VARIANT=zephyr',
      '-Szephyr_base/share/sysbuild',
      '-DAPP_DIR=' + os.path.join('source', 'dir'),
      'arg1', 'arg2',
      '-DBOARD=<platform name>',
      '-DSNIPPET=dummy snippet 1;ds2']),
]
399
@pytest.mark.parametrize(
    'error_warns, f_stages,' \
    ' return_code, is_instance_run, p_out, expect_returncode,' \
    ' expect_filter, expect_writes, expected_status, expected_reason,' \
    ' expected_cmd',
    TESTDATA_2_2,
    ids=['filter_stages with success', 'no stages with error']
)
@pytest.mark.parametrize('sys_platform', TESTDATA_2_1)
def test_cmake_run_cmake(
    sys_platform,
    error_warns,
    f_stages,
    return_code,
    is_instance_run,
    p_out,
    expect_returncode,
    expect_filter,
    expect_writes,
    expected_status,
    expected_reason,
    expected_cmd
):
    """Check CMake.run_cmake(): the exact cmake command line, the returned
    dict, and the propagation of status/reason to instance and testcases."""
    # Fake cmake subprocess: fixed exit code and canned stdout bytes.
    process_mock = mock.Mock(
        returncode=return_code,
        communicate=mock.Mock(
            return_value=(p_out.encode(sys.getdefaultencoding()), None)
        )
    )

    def mock_popen(*args, **kwargs):
        # Every popen call hands back the same fake process.
        return process_mock

    testsuite_mock = mock.Mock()
    testsuite_mock.sysbuild = True
    platform_mock = mock.Mock()
    platform_mock.name = '<platform name>'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')
    jobserver_mock = mock.Mock(
        popen=mock.Mock(side_effect=mock_popen)
    )
    instance_mock = mock.Mock(add_missing_case_status=mock.Mock())
    instance_mock.run = is_instance_run
    instance_mock.run_id = 1
    instance_mock.build_time = 0
    instance_mock.status = TwisterStatus.NONE
    instance_mock.reason = None
    instance_mock.toolchain = 'zephyr'
    instance_mock.testsuite = mock.Mock()
    instance_mock.testsuite.name = 'testcase'
    instance_mock.testsuite.required_snippets = ['dummy snippet 1', 'ds2']
    instance_mock.testcases = [mock.Mock(), mock.Mock()]
    instance_mock.testcases[0].status = TwisterStatus.NONE
    instance_mock.testcases[1].status = TwisterStatus.NONE

    cmake = CMake(testsuite_mock, platform_mock, source_dir, build_dir,
                  jobserver_mock)
    cmake.cwd = os.path.join('dummy', 'working', 'dir')
    cmake.instance = instance_mock
    cmake.options = mock.Mock()
    cmake.options.disable_warnings_as_errors = not error_warns
    cmake.options.overflow_as_errors = False
    cmake.env = mock.Mock()
    cmake.env.generator = 'dummy_generator'

    cmake_path = os.path.join('dummy', 'cmake')

    popen_mock = mock.Mock(side_effect=mock_popen)
    change_mock = mock.Mock()

    # sys.platform is patched to steer run_cmake() between the jobserver's
    # popen ('linux') and plain subprocess.Popen; canonical_zephyr_base is
    # pinned so the expected command lines are host-independent.
    with mock.patch('sys.platform', sys_platform), \
         mock.patch('shutil.which', return_value=cmake_path), \
         mock.patch('twisterlib.runner.change_skip_to_error_if_integration',
                    change_mock), \
         mock.patch('twisterlib.runner.canonical_zephyr_base',
                    'zephyr_base'), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('subprocess.Popen', popen_mock):
        result = cmake.run_cmake(args=['arg1', 'arg2'], filter_stages=f_stages)

    # run_cmake() is expected to return None when there is nothing to report.
    expected_results = {}
    if expect_returncode:
        expected_results['returncode'] = return_code
    if expect_filter:
        expected_results['filter'] = {}
    if expected_results == {}:
        expected_results = None

    assert expected_results == result

    # On 'linux' the jobserver popen is used; elsewhere subprocess.Popen.
    popen_caller = cmake.jobserver.popen if sys_platform == 'linux' else \
                   popen_mock
    popen_caller.assert_called_once_with(
        expected_cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=os.path.join('dummy', 'working', 'dir')
    )

    assert cmake.instance.status == expected_status
    assert cmake.instance.reason == expected_reason

    # The instance status must be mirrored onto every testcase.
    for tc in cmake.instance.testcases:
        assert tc.status == cmake.instance.status
505
506
# TESTDATA_3 fields:
# (platform_name, filter_stages, sysbuild,
#  do_find_cache, west_flash_options, edt_exists,
#  parse_results, testsuite_filter,
#  expected_defconfig_path, expected_edt_pickle_path,
#  expected_defconfig, expected_cmakecache, expected_filter_data,
#  expected_edt,
#  expected_logs, expected_return)
TESTDATA_3 = [
    ('unit_testing', [], False, True, None, True, None, True,
     None, None, {}, {}, None, None, [], {}),
    (
        'other', [], True,
        True, ['dummy', 'west', 'options'], True,
        None, True,
        os.path.join('domain', 'build', 'dir', 'zephyr', '.config'),
        os.path.join('domain', 'build', 'dir', 'zephyr', 'edt.pickle'),
        {'CONFIG_FOO': 'no'},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'CONFIG_FOO': 'no', 'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [f'Loaded sysbuild domain data from' \
         f' {os.path.join("build", "dir", "domains.yaml")}'],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): True}
    ),
    (
        'other', ['kconfig'], True,
        True, ['dummy', 'west', 'options'], True,
        'Dummy parse results', True,
        os.path.join('build', 'dir', 'zephyr', '.config'),
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {'CONFIG_FOO': 'no'},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'CONFIG_FOO': 'no', 'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], False,
        False, None, True,
        'Dummy parse results', True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], False,
        True, None, False,
        'Dummy parse results', True,
        None,
        None,
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        None,
        [],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        'Dummy parse results', True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): False}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        None, True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {os.path.join('other', 'zephyr', 'dummy.testsuite.name'): True}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        'Dummy parse results', False,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        [],
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1}
    ),
    (
        'other', ['other'], False,
        True, None, True,
        SyntaxError, True,
        None,
        os.path.join('build', 'dir', 'zephyr', 'edt.pickle'),
        {},
        {'dummy cache elem': 1},
        {'ARCH': 'dummy arch', 'PLATFORM': 'other', 'env_dummy': True,
         'dummy cache elem': 1},
        b'dummy edt pickle contents',
        ['Failed processing testsuite.yaml'],
        SyntaxError
    ),
]
624
@pytest.mark.parametrize(
    'platform_name, filter_stages, sysbuild,' \
    ' do_find_cache, west_flash_options, edt_exists,' \
    ' parse_results, testsuite_filter,' \
    ' expected_defconfig_path, expected_edt_pickle_path,' \
    ' expected_defconfig, expected_cmakecache, expected_filter_data,' \
    ' expected_edt,' \
    ' expected_logs, expected_return',
    TESTDATA_3,
    ids=['unit testing', 'domain', 'kconfig', 'no cache', 'no edt',
         'parse result', 'no parse result', 'no testsuite filter', 'parse err']
)
def test_filterbuilder_parse_generated(
    caplog,
    mocked_jobserver,
    platform_name,
    filter_stages,
    sysbuild,
    do_find_cache,
    west_flash_options,
    edt_exists,
    parse_results,
    testsuite_filter,
    expected_defconfig_path,
    expected_edt_pickle_path,
    expected_defconfig,
    expected_cmakecache,
    expected_filter_data,
    expected_edt,
    expected_logs,
    expected_return
):
    """Check FilterBuilder.parse_generated(): defconfig/cmake-cache parsing,
    edt pickle loading, filter expression evaluation and error propagation."""
    def mock_domains_from_file(*args, **kwargs):
        # Default sysbuild domain whose build dir differs from build_dir.
        dom = mock.Mock()
        dom.build_dir = os.path.join('domain', 'build', 'dir')
        res = mock.Mock(get_default_domain=mock.Mock(return_value=dom))
        return res

    def mock_cmakecache_from_file(*args, **kwargs):
        # Simulate a missing CMakeCache.txt when do_find_cache is False.
        if not do_find_cache:
            raise FileNotFoundError(errno.ENOENT, 'Cache not found')
        cache_elem = mock.Mock()
        cache_elem.name = 'dummy cache elem'
        cache_elem.value = 1
        cache = [cache_elem]
        return cache

    def mock_open(filepath, *args, **kwargs):
        # Only the expected defconfig and edt pickle paths are mocked;
        # any other path raises, catching unexpected file accesses.
        if filepath == expected_defconfig_path:
            rd = 'I am not a proper line\n' \
                 'CONFIG_FOO="no"'
        elif filepath == expected_edt_pickle_path:
            rd = b'dummy edt pickle contents'
        else:
            raise FileNotFoundError(errno.ENOENT,
                                    f'File {filepath} not mocked.')
        return mock.mock_open(read_data=rd)()

    def mock_parser(filter, filter_data, edt):
        # Verify the filter data handed to the expression parser, then
        # either raise the configured exception or return canned results.
        assert filter_data == expected_filter_data
        if isinstance(parse_results, type) and \
           issubclass(parse_results, Exception):
            raise parse_results
        return parse_results

    def mock_pickle(datafile):
        # The pickle loader must receive the mocked edt pickle contents.
        assert datafile.read() == expected_edt
        return mock.Mock()

    testsuite_mock = mock.Mock()
    testsuite_mock.name = 'dummy.testsuite.name'
    testsuite_mock.filter = testsuite_filter
    platform_mock = mock.Mock()
    platform_mock.name = platform_name
    platform_mock.arch = 'dummy arch'
    source_dir = os.path.join('source', 'dir')
    build_dir = os.path.join('build', 'dir')

    fb = FilterBuilder(testsuite_mock, platform_mock, source_dir, build_dir,
                       mocked_jobserver)
    instance_mock = mock.Mock()
    instance_mock.sysbuild = 'sysbuild' if sysbuild else None
    instance_mock.toolchain = 'zephyr'
    fb.instance = instance_mock
    fb.env = mock.Mock()
    fb.env.options = mock.Mock()
    fb.env.options.west_flash = west_flash_options
    fb.env.options.device_testing = True

    environ_mock = {'env_dummy': True}

    # pytest.raises is entered only when parse_results is an exception
    # class; otherwise nullcontext() leaves err as None.
    with mock.patch('twisterlib.runner.Domains.from_file',
                    mock_domains_from_file), \
         mock.patch('twisterlib.runner.CMakeCache.from_file',
                    mock_cmakecache_from_file), \
         mock.patch('builtins.open', mock_open), \
         mock.patch('expr_parser.parse', mock_parser), \
         mock.patch('pickle.load', mock_pickle), \
         mock.patch('os.path.exists', return_value=edt_exists), \
         mock.patch('os.environ', environ_mock), \
         pytest.raises(expected_return) if \
             isinstance(parse_results, type) and \
             issubclass(parse_results, Exception) else nullcontext() as err:
        result = fb.parse_generated(filter_stages)

    if err:
        # The expected exception was raised; nothing further to verify.
        assert True
        return

    assert all([log in caplog.text for log in expected_logs])

    assert fb.defconfig == expected_defconfig

    assert fb.cmake_cache == expected_cmakecache

    assert result == expected_return
741
742
# TESTDATA_4 fields: (inline_logs, read_exception, expected_logs)
TESTDATA_4 = [
    (False, False, [f"see: {os.path.join('dummy', 'path', 'dummy_file.log')}"]),
    (True, False, [os.path.join('dummy', 'path', 'dummy_file.log'),
                    'file contents',
                    os.path.join('dummy', 'path', 'dummy_file.log')]),
    (True, True, [os.path.join('dummy', 'path', 'dummy_file.log'),
                   'Unable to read log data ([Errno 2] ERROR: dummy_file.log)',
                   os.path.join('dummy', 'path', 'dummy_file.log')]),
]
752
@pytest.mark.parametrize(
    'inline_logs, read_exception, expected_logs',
    TESTDATA_4,
    ids=['basic', 'inline logs', 'inline logs+read_exception']
)
def test_projectbuilder_log_info(
    caplog,
    mocked_jobserver,
    inline_logs,
    read_exception,
    expected_logs
):
    """log_info() either references the log path or inlines its contents,
    reporting a read failure when inlining goes wrong."""
    filename = 'dummy_file.log'

    def fake_open(name, *args, **kwargs):
        # Either fail with ENOENT or hand back canned file contents.
        if read_exception:
            raise OSError(errno.ENOENT, f'ERROR: {os.path.basename(name)}')
        return mock.mock_open(read_data='file contents')()

    def fake_realpath(name, *args, **kwargs):
        return os.path.join('path', name)

    def fake_abspath(name, *args, **kwargs):
        return os.path.join('dummy', name)

    pb = ProjectBuilder(mock.Mock(), mock.Mock(), mocked_jobserver)

    with mock.patch('builtins.open', fake_open), \
         mock.patch('os.path.realpath', fake_realpath), \
         mock.patch('os.path.abspath', fake_abspath):
        pb.log_info(filename, inline_logs)

    for expected in expected_logs:
        assert expected in caplog.text
788
789
# TESTDATA_5 fields:
# (valgrind_log_exists, handler_log_exists, device_log_exists,
#  instance_reason, handler_log_getsize, device_log_getsize, expected_log)
TESTDATA_5 = [
    (True, False, False, "Valgrind error", 0, 0, 'build_dir/valgrind.log'),
    (True, False, False, "Error", 0, 0, 'build_dir/build.log'),
    (False, True, False, None, 1024, 0, 'build_dir/handler.log'),
    (False, True, False, None, 0, 0, 'build_dir/build.log'),
    (False, False, True, None, 0, 1024, 'build_dir/device.log'),
    (False, False, True, None, 0, 0, 'build_dir/build.log'),
    (False, False, False, None, 0, 0, 'build_dir/build.log'),
]
799
@pytest.mark.parametrize(
    'valgrind_log_exists, handler_log_exists, device_log_exists,' \
    ' instance_reason, handler_log_getsize, device_log_getsize, expected_log',
    TESTDATA_5,
    ids=['valgrind log', 'valgrind log unused',
         'handler log', 'handler log unused',
         'device log', 'device log unused',
         'no logs']
)
def test_projectbuilder_log_info_file(
    caplog,
    mocked_jobserver,
    valgrind_log_exists,
    handler_log_exists,
    device_log_exists,
    instance_reason,
    handler_log_getsize,
    device_log_getsize,
    expected_log
):
    """log_info_file() must pick the expected log file for the given
    combination of existing logs, sizes and instance reason."""
    # Table-driven fakes for os.path.exists / os.path.getsize.
    exists_map = {
        'build_dir/handler.log': handler_log_exists,
        'build_dir/valgrind.log': valgrind_log_exists,
        'build_dir/device.log': device_log_exists,
    }
    size_map = {
        'build_dir/handler.log': handler_log_getsize,
        'build_dir/device.log': device_log_getsize,
    }

    def fake_exists(name, *args, **kwargs):
        return exists_map.get(name, False)

    def fake_getsize(name, *args, **kwargs):
        return size_map.get(name, 0)

    instance_mock = mock.Mock()
    instance_mock.reason = instance_reason
    instance_mock.build_dir = 'build_dir'

    pb = ProjectBuilder(instance_mock, mock.Mock(), mocked_jobserver)

    log_info_mock = mock.Mock()

    with mock.patch('os.path.exists', fake_exists), \
         mock.patch('os.path.getsize', fake_getsize), \
         mock.patch('twisterlib.runner.ProjectBuilder.log_info', log_info_mock):
        pb.log_info_file(None)

    log_info_mock.assert_called_with(expected_log, mock.ANY)
851
852
# Each TESTDATA_6 entry is a 20-tuple whose fields match, in order, the
# parameter names of test_projectbuilder_process below:
#   message,
#   instance_status, instance_reason, instance_run, instance_handler_ready,
#   options_cmake_only,
#   options_coverage, options_prep_artifacts, options_runtime_artifacts,
#   cmake_res, build_res, metrics_res,
#   pipeline_runtime_error, determine_testcases_build_error,
#   expected_logs, resulting_message,
#   expected_status, expected_reason, expected_skipped, expected_missing
# The comment on each tuple repeats the matching pytest id.
TESTDATA_6 = [
    (  # 'filter, failed'
        {'op': 'filter'},
        TwisterStatus.FAIL,
        'Failed',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.FAIL,
        'Failed',
        0,
        None
    ),
    (  # 'filter, cmake res'
        {'op': 'filter'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': { 'dummy instance name': True }},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['filtering dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.FILTER,
        'runtime filter',
        1,
        (TwisterStatus.FILTER,)
    ),
    (  # 'filter, no cmake res'
        {'op': 'filter'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': { 'another dummy instance name': True }},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cmake', 'test': mock.ANY},
        TwisterStatus.PASS,
        mock.ANY,
        0,
        None
    ),
    (  # 'cmake, failed'
        {'op': 'cmake'},
        TwisterStatus.ERROR,
        'dummy error',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.ERROR,
        'dummy error',
        0,
        None
    ),
    (  # 'cmake, cmake_only, no status'
        {'op': 'cmake'},
        TwisterStatus.NONE,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.NOTRUN,
        mock.ANY,
        0,
        None
    ),
    (  # 'cmake, cmake_only'
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        'success',
        mock.ANY,
        0,
        None
    ),
    (  # 'cmake, no cmake_only, cmake res'
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': {'dummy instance name': True}},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['filtering dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.FILTER,
        'runtime filter',
        1,
        (TwisterStatus.FILTER,) # this is a tuple
    ),
    (  # 'cmake, no cmake_only, no cmake res'
        {'op': 'cmake'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'filter': {}},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'build', 'test': mock.ANY},
        'success',
        mock.ANY,
        0,
        None
    ),
    (  # 'build, no build res'
        {'op': 'build'},
        mock.ANY,
        None,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        None,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.ERROR,
        'Build Failure',
        0,
        None
    ),
    (  # 'build, skipped'
        {'op': 'build'},
        TwisterStatus.SKIP,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'gather_metrics', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        (TwisterStatus.SKIP, mock.ANY)
    ),
    (  # 'build, blocked'
        {'op': 'build'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'dummy': 'dummy'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.PASS,
        mock.ANY,
        0,
        (TwisterStatus.BLOCK, mock.ANY)
    ),
    (  # 'build, determine testcases'
        {'op': 'build'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'gather_metrics', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (  # 'build, determine testcases Error'
        {'op': 'build'},
        'success',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},
        mock.ANY,
        mock.ANY,
        BuildError,
        ['build test: dummy instance name',
         'Determine test cases for test instance: dummy instance name'],
        {'op': 'report', 'test': mock.ANY},
        TwisterStatus.ERROR,
        'Determine Testcases Error!',
        0,
        None
    ),
    (  # 'gather metrics, run and ready handler'
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        True,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},  # metrics_res
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'run', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),  # 'gather metrics, run and ready handler'
    (  # 'gather metrics'
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},  # metrics_res
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),  # 'gather metrics'
    (  # 'build ok, gather metrics fail'
        {'op': 'gather_metrics'},
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        {'returncode': 0},  # build_res
        {'returncode': 1},  # metrics_res
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'report', 'test': mock.ANY},
        'error',
        'Build Failure at gather_metrics.',
        0,
        None
    ),  # 'build ok, gather metrics fail',
    (  # 'run'
        {'op': 'run'},
        'success',
        'OK',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        None,
        mock.ANY,
        ['run test: dummy instance name',
         'run status: dummy instance name success'],
        {'op': 'coverage', 'test': mock.ANY, 'status': 'success', 'reason': 'OK'},
        'success',
        'OK',
        0,
        None
    ),
    (  # 'run, Pipeline Runtime Error'
        {'op': 'run'},
        TwisterStatus.FAIL,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        RuntimeError,
        mock.ANY,
        ['run test: dummy instance name',
         'run status: dummy instance name failed',
         'RuntimeError: Pipeline Error!'],
        None,
        TwisterStatus.FAIL,
        mock.ANY,
        0,
        None
    ),
    (  # 'report, prep artifacts for testing'
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        True,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'device', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (  # 'report, runtime artifact cleanup pass, status passed'
        {'op': 'report'},
        TwisterStatus.PASS,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'pass',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'passed', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (  # 'report, runtime artifact cleanup all'
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'all',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        {'op': 'cleanup', 'mode': 'all', 'test': mock.ANY},
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (  # 'report, no message put'
        {'op': 'report'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        False,
        False,
        'other',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (  # 'cleanup, device'
        {'op': 'cleanup', 'mode': 'device'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (  # 'cleanup, mode passed'
        {'op': 'cleanup', 'mode': 'passed'},
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (  # 'cleanup, mode all'
        {'op': 'cleanup', 'mode': 'all'},
        mock.ANY,
        'Valgrind error',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
    (  # 'cleanup, mode all, cmake build failure'
        {'op': 'cleanup', 'mode': 'all'},
        mock.ANY,
        'CMake build failure',
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        mock.ANY,
        [],
        None,
        mock.ANY,
        mock.ANY,
        0,
        None
    ),
]
1433
@pytest.mark.parametrize(
    'message,' \
    ' instance_status, instance_reason, instance_run, instance_handler_ready,' \
    ' options_cmake_only,' \
    ' options_coverage, options_prep_artifacts, options_runtime_artifacts,' \
    ' cmake_res, build_res, metrics_res,' \
    ' pipeline_runtime_error, determine_testcases_build_error,' \
    ' expected_logs, resulting_message,' \
    ' expected_status, expected_reason, expected_skipped, expected_missing',
    TESTDATA_6,
    ids=[
        'filter, failed', 'filter, cmake res', 'filter, no cmake res',
        'cmake, failed', 'cmake, cmake_only, no status', 'cmake, cmake_only',
        'cmake, no cmake_only, cmake res', 'cmake, no cmake_only, no cmake res',
        'build, no build res', 'build, skipped', 'build, blocked',
        'build, determine testcases', 'build, determine testcases Error',
        'gather metrics, run and ready handler', 'gather metrics',
        'build ok, gather metrics fail',
        'run', 'run, Pipeline Runtime Error',
        'report, prep artifacts for testing',
        'report, runtime artifact cleanup pass, status passed',
        'report, runtime artifact cleanup all', 'report, no message put',
        'cleanup, device', 'cleanup, mode passed', 'cleanup, mode all',
        'cleanup, mode all, cmake build failure'
    ]
)
def test_projectbuilder_process(
    caplog,
    mocked_jobserver,
    tmp_path,
    message,
    instance_status,
    instance_reason,
    instance_run,
    instance_handler_ready,
    options_cmake_only,
    options_coverage,
    options_prep_artifacts,
    options_runtime_artifacts,
    cmake_res,
    build_res,
    metrics_res,
    pipeline_runtime_error,
    determine_testcases_build_error,
    expected_logs,
    resulting_message,
    expected_status,
    expected_reason,
    expected_skipped,
    expected_missing
):
    """Exercise ProjectBuilder.process() for a single pipeline operation.

    Each TESTDATA_6 case feeds one 'op' message into process() with a fully
    mocked builder, then checks the emitted logs, the follow-up message
    appended to the processing queue, the resulting instance status/reason,
    the runtime-filter counter, and any missing-case status recorded.
    """
    # Simulates a pipeline failure: raises from the queue's append() when the
    # testcase supplies an exception type for pipeline_runtime_error.
    def mock_processing_queue_append(msg):
        if isinstance(pipeline_runtime_error, type) and \
           issubclass(pipeline_runtime_error, Exception):
            raise RuntimeError('Pipeline Error!')

    # Simulates determine_testcases() failing with a BuildError when the
    # testcase supplies an exception type for determine_testcases_build_error.
    def mock_determine_testcases(res):
        if isinstance(determine_testcases_build_error, type) and \
           issubclass(determine_testcases_build_error, Exception):
            raise BuildError('Determine Testcases Error!')

    instance_mock = mock.Mock()
    instance_mock.name = 'dummy instance name'
    instance_mock.status = instance_status
    instance_mock.reason = instance_reason
    instance_mock.run = instance_run
    instance_mock.handler = mock.Mock()
    instance_mock.handler.ready = instance_handler_ready
    instance_mock.testsuite.harness = 'test'
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options = mock.Mock()
    pb.options.coverage = options_coverage
    pb.options.prep_artifacts_for_testing = options_prep_artifacts
    pb.options.runtime_artifact_cleanup = options_runtime_artifacts
    pb.options.cmake_only = options_cmake_only
    pb.options.outdir = tmp_path
    pb.options.log_file = None
    pb.options.log_level = "DEBUG"

    # Stub out all build stages so process() only exercises its dispatch logic.
    pb.cmake = mock.Mock(return_value=cmake_res)
    pb.build = mock.Mock(return_value=build_res)
    pb.determine_testcases = mock.Mock(side_effect=mock_determine_testcases)

    pb.report_out = mock.Mock()
    pb.cleanup_artifacts = mock.Mock()
    pb.cleanup_device_testing_artifacts = mock.Mock()
    pb.run = mock.Mock()
    pb.gather_metrics = mock.Mock(return_value=metrics_res)

    processing_queue_mock = mock.Mock(append=mock.Mock(side_effect=mock_processing_queue_append))
    processing_ready_mock = mock.Mock()
    # NOTE(review): __enter__/__exit__ given to Mock() become instance
    # attributes; a `with lock_mock:` statement would bypass them (dunders are
    # looked up on the type) — presumably process() invokes them explicitly or
    # only uses acquire()/release(). Confirm against ProjectBuilder.process().
    lock_mock = mock.Mock(
        __enter__=mock.Mock(return_value=(mock.Mock(), mock.Mock())),
        __exit__=mock.Mock(return_value=None)
    )
    results_mock = mock.Mock()
    results_mock.filtered_runtime = 0

    pb.process(processing_queue_mock, processing_ready_mock, message, lock_mock, results_mock)

    assert all([log in caplog.text for log in expected_logs])

    if resulting_message:
        processing_queue_mock.append.assert_called_with(resulting_message)

    assert pb.instance.status == expected_status
    assert pb.instance.reason == expected_reason
    assert results_mock.filtered_runtime_increment.call_args_list == [mock.call()] * expected_skipped

    if expected_missing:
        pb.instance.add_missing_case_status.assert_called_with(*expected_missing)
1547
1548
# Each TESTDATA_7 entry is (detailed_id, symbols_names, added_tcs):
#  - detailed_id: value for testsuite.detailed_test_id; in these cases True
#    pairs with testcase names prefixed by the testsuite id ('dummy.test_id.')
#  - symbols_names: ELF symbol names presented to determine_testcases()
#  - added_tcs: testcase names expected to be registered
# The comment on each tuple repeats the matching pytest id.
TESTDATA_7 = [
    (  # 'two hits, one miss'
        True,
        [
            'z_ztest_unit_test__dummy_suite1_name__dummy_test_name1',
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'no match'
        ],
        [
            'dummy.test_id.dummy_suite1_name.dummy_name1',
            'dummy.test_id.dummy_suite2_name.dummy_name2'
        ]
    ),
    (  # 'two hits short id'
        False,
        [
            'z_ztest_unit_test__dummy_suite1_name__dummy_test_name1',
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'no match'
        ],
        [
            'dummy_suite1_name.dummy_name1',
            'dummy_suite2_name.dummy_name2'
        ]
    ),
    (  # 'demangle' — C++-mangled (_ZN...) symbol names
        True,
        [
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'z_ztest_unit_test__bad_suite3_name_no_test',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name4E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_bad_name1E',
            '_ZN12_GLOBAL__N_1L51z_ztest_unit_test_dummy_suite3_name__test_bad_name2E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name5E',
            '_ZN15foobarnamespaceL54z_ztest_unit_test__dummy_suite3_name__test_dummy_name6E',
        ],
        [
           'dummy.test_id.dummy_suite2_name.dummy_name2',
           'dummy.test_id.dummy_suite3_name.dummy_name4',
           'dummy.test_id.dummy_suite3_name.bad_name1E',
           'dummy.test_id.dummy_suite3_name.dummy_name5',
           'dummy.test_id.dummy_suite3_name.dummy_name6',
        ]
    ),
    (  # 'demangle short id'
        True,
        [
            'z_ztest_unit_test__dummy_suite2_name__test_dummy_name2',
            'z_ztest_unit_test__bad_suite3_name_no_test',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name4E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_bad_name1E',
            '_ZN12_GLOBAL__N_1L51z_ztest_unit_test_dummy_suite3_name__test_bad_name2E',
            '_ZN12_GLOBAL__N_1L54z_ztest_unit_test__dummy_suite3_name__test_dummy_name5E',
            '_ZN15foobarnamespaceL54z_ztest_unit_test__dummy_suite3_name__test_dummy_name6E',
        ],
        [
           'dummy_suite2_name.dummy_name2',
           'dummy_suite3_name.dummy_name4',
           'dummy_suite3_name.bad_name1E',
           'dummy_suite3_name.dummy_name5',
           'dummy_suite3_name.dummy_name6',
        ]
    ),
    (  # 'nothing'
        True,
        ['no match'],
        []
    ),
]
1618
@pytest.mark.parametrize(
    'detailed_id, symbols_names, added_tcs',
    TESTDATA_7,
    ids=['two hits, one miss', 'two hits short id', 'demangle', 'demangle short id', 'nothing']
)
def test_projectbuilder_determine_testcases(
    mocked_jobserver,
    mocked_env,
    detailed_id,
    symbols_names,
    added_tcs
):
    """Verify testcase discovery from mocked ELF symbol tables."""
    # Mock() treats the `name` kwarg specially, so the symbol name has to be
    # attached via configure_mock() after construction.
    symbol_mocks = []
    for symbol_name in symbols_names:
        symbol = mock.Mock()
        symbol.configure_mock(name=symbol_name)
        symbol_mocks.append(symbol)

    symtab_mock = mock.Mock(spec=SymbolTableSection)
    symtab_mock.iter_symbols = mock.Mock(return_value=symbol_mocks)

    elf_mock = mock.Mock()
    elf_mock().iter_sections = mock.Mock(return_value=[symtab_mock])

    results_mock = mock.Mock()

    instance_mock = mock.Mock()
    instance_mock.testcases = []
    instance_mock.testsuite.id = 'dummy.test_id'
    instance_mock.testsuite.ztest_suite_names = []
    instance_mock.testsuite.detailed_test_id = detailed_id
    instance_mock.compose_case_name = mock.Mock(side_effect=iter(added_tcs))

    pb = ProjectBuilder(instance_mock, mocked_env, mocked_jobserver)

    with mock.patch('twisterlib.runner.ELFFile', elf_mock), \
         mock.patch('builtins.open', mock.mock_open()):
        pb.determine_testcases(results_mock)

    expected_calls = [mock.call(name=tc_name) for tc_name in added_tcs]
    pb.instance.add_testcase.assert_has_calls(expected_calls)
    pb.instance.testsuite.add_testcase.assert_has_calls(expected_calls)
1662
1663
# Each TESTDATA_8 entry is (additional_keep, runtime_artifact_cleanup,
# expected_files) — the extra files to preserve, the cleanup mode, and the
# names expected to remain after cleanup_artifacts().
TESTDATA_8 = [
    (  # 'additional keep'
        ['addition.al'],
        'dummy',
        ['addition.al', '.config', 'zephyr']
    ),
    (  # 'all cleanup'
        [],
        'all',
        ['.config', 'zephyr', 'testsuite_extra.conf', 'twister']
    ),
]
1676
@pytest.mark.parametrize(
    'additional_keep, runtime_artifact_cleanup, expected_files',
    TESTDATA_8,
    ids=['additional keep', 'all cleanup']
)
def test_projectbuilder_cleanup_artifacts(
    tmpdir,
    mocked_jobserver,
    additional_keep,
    runtime_artifact_cleanup,
    expected_files
):
    """cleanup_artifacts() should leave only the expected files behind.

    Layout created under tmpdir:
    ┣ twister
    ┃ ┗ testsuite_extra.conf
    ┣ dummy_dir
    ┃ ┗ dummy.del
    ┣ dummy_link_dir -> zephyr
    ┣ zephyr
    ┃ ┗ .config
    ┗ addition.al
    """
    twister_subdir = tmpdir.mkdir('twister')
    twister_subdir.join('testsuite_extra.conf').write_text('dummy', 'utf-8')

    tmpdir.mkdir('dummy_dir').join('dummy.del').write_text('dummy', 'utf-8')

    zephyr_subdir = tmpdir.mkdir('zephyr')
    zephyr_subdir.join('.config').write_text('dummy', 'utf-8')

    # Symlink into the build tree, expected to be removed by cleanup.
    os.symlink(zephyr_subdir, tmpdir.join('dummy_link_dir'))

    tmpdir.join('addition.al').write_text('dummy', 'utf-8')

    instance_mock = mock.Mock()
    instance_mock.build_dir = tmpdir
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options = mock.Mock(runtime_artifact_cleanup=runtime_artifact_cleanup)

    pb.cleanup_artifacts(additional_keep)

    remaining = sorted(entry.name for entry in pathlib.Path(tmpdir).glob('**/*'))
    assert remaining == sorted(expected_files)
1728
1729
def test_projectbuilder_cleanup_device_testing_artifacts(
    caplog,
    mocked_jobserver
):
    """Device-testing cleanup should keep binaries plus runners.yaml and
    sanitize the remaining files."""
    build_directory = os.path.join('build', 'dir')
    binary_paths = [os.path.join('zephyr', 'file.bin')]

    instance_mock = mock.Mock()
    instance_mock.sysbuild = False
    instance_mock.build_dir = build_directory
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb._get_binaries = mock.Mock(return_value=binary_paths)
    pb.cleanup_artifacts = mock.Mock()
    pb._sanitize_files = mock.Mock()

    pb.cleanup_device_testing_artifacts()

    assert f'Cleaning up for Device Testing {build_directory}' in caplog.text

    expected_keep = [
        os.path.join('zephyr', 'file.bin'),
        os.path.join('zephyr', 'runners.yaml')
    ]
    pb.cleanup_artifacts.assert_called_once_with(expected_keep)
    pb._sanitize_files.assert_called_once()
1756
1757
# Each TESTDATA_9 entry is (platform_binaries, runner_binaries,
# expected_binaries) — binaries declared by the platform, binaries found in
# runners.yaml, and the merged set _get_binaries() should return.
TESTDATA_9 = [
    (  # 'default' — no explicit binaries, fall back to the standard names
        None,
        [],
        [os.path.join('zephyr', 'zephyr.hex'),
         os.path.join('zephyr', 'zephyr.bin'),
         os.path.join('zephyr', 'zephyr.elf'),
         os.path.join('zephyr', 'zephyr.exe')]
    ),
    (  # 'valid'
        [os.path.join('dummy.bin'), os.path.join('dummy.hex')],
        [os.path.join('dir2', 'dummy.elf')],
        [os.path.join('zephyr', 'dummy.bin'),
         os.path.join('zephyr', 'dummy.hex'),
         os.path.join('dir2', 'dummy.elf')]
    ),
]
1775
@pytest.mark.parametrize(
    'platform_binaries, runner_binaries, expected_binaries',
    TESTDATA_9,
    ids=['default', 'valid']
)
def test_projectbuilder_get_binaries(
    mocked_jobserver,
    platform_binaries,
    runner_binaries,
    expected_binaries
):
    """_get_binaries() should merge platform and runners.yaml binaries."""
    instance_mock = mock.Mock()
    instance_mock.build_dir = os.path.join('build', 'dir')
    # No sysbuild domains in these scenarios.
    instance_mock.domains.get_domains.return_value = []
    instance_mock.platform = mock.Mock()
    instance_mock.platform.binaries = platform_binaries
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb._get_binaries_from_runners = mock.Mock(return_value=runner_binaries)

    found_binaries = pb._get_binaries()

    # Same elements on both sides, order-insensitive.
    for binary in found_binaries:
        assert binary in expected_binaries
    for binary in expected_binaries:
        assert binary in found_binaries
1804
1805
# Each TESTDATA_10 entry is (domain, runners_content, expected_binaries) —
# the optional sysbuild domain, the parsed runners.yaml content (None means
# the file does not exist), and the binaries expected to be returned.
TESTDATA_10 = [
    (None, None, []),  # 'no file'
    (None, {'dummy': 'dummy'}, []),  # 'no config'
    (   None,  # 'valid'
        {
            'config': {
                'elf_file': '/absolute/path/dummy.elf',
                'bin_file': 'path/dummy.bin'
            }
        },
        ['/absolute/path/dummy.elf', os.path.join('zephyr', 'path/dummy.bin')]
    ),
    (   'test_domain',  # 'with domain'
        {
            'config': {
                'elf_file': '/absolute/path/dummy.elf',
                'bin_file': 'path/dummy.bin'
            }
        },
        ['/absolute/path/dummy.elf', os.path.join('test_domain', 'zephyr', 'path/dummy.bin')]
    ),
]
1828
@pytest.mark.parametrize(
    'domain, runners_content, expected_binaries',
    TESTDATA_10,
    ids=['no file', 'no config', 'valid', 'with domain']
)
def test_projectbuilder_get_binaries_from_runners(
    mocked_jobserver,
    domain,
    runners_content,
    expected_binaries
):
    """_get_binaries_from_runners() should read binaries from runners.yaml."""
    expected_yaml_path = os.path.join('build', 'dir', domain if domain else '',
                                      'zephyr', 'runners.yaml')

    def fake_exists(fname):
        # The builder must probe exactly the expected runners.yaml location.
        assert fname == expected_yaml_path
        return runners_content is not None

    instance_mock = mock.Mock()
    instance_mock.build_dir = os.path.join('build', 'dir')
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    with mock.patch('os.path.exists', fake_exists), \
         mock.patch('builtins.open', mock.mock_open()), \
         mock.patch('yaml.load', return_value=runners_content):
        found_binaries = pb._get_binaries_from_runners(domain) if domain \
                         else pb._get_binaries_from_runners()

    # Same elements on both sides, order-insensitive.
    for binary in found_binaries:
        assert binary in expected_binaries
    for binary in expected_binaries:
        assert binary in found_binaries
1861
1862
def test_projectbuilder_sanitize_files(mocked_jobserver):
    """_sanitize_files() should delegate to both sanitizing helpers."""
    pb = ProjectBuilder(mock.Mock(), mock.Mock(), mocked_jobserver)
    pb._sanitize_runners_file = mock.Mock()
    pb._sanitize_zephyr_base_from_files = mock.Mock()

    pb._sanitize_files()

    pb._sanitize_runners_file.assert_called_once()
    pb._sanitize_zephyr_base_from_files.assert_called_once()
1875
1876
1877
# Each TESTDATA_11 entry is (runners_text, expected_write_text) — the raw
# runners.yaml content (None means the file does not exist) and the sanitized
# text expected to be written back (None means no write should happen).
TESTDATA_11 = [
    (None, None),  # 'no file'
    ('dummy: []', None),  # 'no config'
    (  # 'valid' — absolute build-dir path is reduced to the bare file name
"""
config:
  elf_file: relative/path/dummy.elf
  hex_file: /absolute/path/build_dir/zephyr/dummy.hex
""",
"""
config:
  elf_file: relative/path/dummy.elf
  hex_file: dummy.hex
"""
    ),
]
1894
@pytest.mark.parametrize(
    'runners_text, expected_write_text',
    TESTDATA_11,
    ids=['no file', 'no config', 'valid']
)
def test_projectbuilder_sanitize_runners_file(
    mocked_jobserver,
    runners_text,
    expected_write_text
):
    """_sanitize_runners_file() should strip the build dir from file paths."""
    instance_mock = mock.Mock()
    instance_mock.build_dir = '/absolute/path/build_dir'
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    # The runners.yaml file "exists" whenever there is text to read.
    with mock.patch('os.path.exists', lambda fname: runners_text is not None), \
         mock.patch('builtins.open',
                    mock.mock_open(read_data=runners_text)) as open_mock:
        pb._sanitize_runners_file()

    if expected_write_text is None:
        open_mock().write.assert_not_called()
    else:
        open_mock().write.assert_called_with(expected_write_text)
1923
1924
# Each TESTDATA_12 entry is (text_mocks, expected_write_texts) — a map of
# build-dir-relative file name to a mock_open preloaded with that file's
# content, and a map of the same names to the sanitized text expected to be
# written back (the canonical Zephyr base path removed).
TESTDATA_12 = [
    (  # 'CMakeCache file'
        {
            'CMakeCache.txt': mock.mock_open(
                read_data='canonical/zephyr/base/dummy.file: ERROR'
            )
        },
        {
            'CMakeCache.txt': 'dummy.file: ERROR'
        }
    ),
    (  # 'runners.yaml file'
        {
            os.path.join('zephyr', 'runners.yaml'): mock.mock_open(
                read_data='There was canonical/zephyr/base/dummy.file here'
            )
        },
        {
            os.path.join('zephyr', 'runners.yaml'): 'There was dummy.file here'
        }
    ),
]
1947
@pytest.mark.parametrize(
    'text_mocks, expected_write_texts',
    TESTDATA_12,
    ids=['CMakeCache file', 'runners.yaml file']
)
def test_projectbuilder_sanitize_zephyr_base_from_files(
    mocked_jobserver,
    text_mocks,
    expected_write_texts
):
    """Removing the canonical Zephyr base path from build-dir file contents."""
    build_dir_prefix = 'canonical/zephyr/base/build_dir/'

    def fake_exists(fname):
        # Only the files prepared in text_mocks exist inside the build dir.
        if not fname.startswith(build_dir_prefix):
            return False
        return fname[len(build_dir_prefix):] in text_mocks

    def fake_open(fname, *args, **kwargs):
        # Hand out the prepared mock_open for the build-dir-relative path.
        if not fname.startswith(build_dir_prefix):
            raise FileNotFoundError(errno.ENOENT, f'File {fname} not found.')
        return text_mocks[fname[len(build_dir_prefix):]]()

    instance_mock = mock.Mock()
    instance_mock.build_dir = build_dir_prefix
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)

    with mock.patch('os.path.exists', fake_exists), \
         mock.patch('builtins.open', fake_open), \
         mock.patch('twisterlib.runner.canonical_zephyr_base',
                    'canonical/zephyr/base'):
        pb._sanitize_zephyr_base_from_files()

    for relative_path, file_handler in text_mocks.items():
        file_handler().write.assert_called_with(expected_write_texts[relative_path])
1984
1985
# Test data for test_projectbuilder_report_out():
# (status, verbose, cmake_only, ready_run, expected_logs, expected_out).
# Each expected log entry is ONE string built via implicit concatenation
# of the adjacent literals (no separators are inserted between them).
TESTDATA_13 = [
    (
        TwisterStatus.ERROR, True, True, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               ERROR dummy reason (cmake)'],
        None
    ),
    (
        TwisterStatus.FAIL, False, False, False,
        ['ERROR     dummy platform' \
         '            dummy.testsuite.name' \
         '                               FAILED: dummy reason'],
        'INFO    - Total complete:   20/  25  80%' \
        '  built (not run):    0, filtered:    3, failed:    3, error:    1'
    ),
    (
        TwisterStatus.SKIP, True, False, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               SKIPPED (dummy reason)'],
        None
    ),
    (
        TwisterStatus.FILTER, False, False, False,
        [],
        'INFO    - Total complete:   20/  25  80%' \
        '  built (not run):    0, filtered:    4, failed:    2, error:    1'
    ),
    (
        TwisterStatus.PASS, True, False, True,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               PASSED' \
         ' (dummy handler type: dummy dut, 60.000s <zephyr>)'],
        None
    ),
    (
        TwisterStatus.PASS, True, False, False,
        ['INFO      20/25 dummy platform' \
         '            dummy.testsuite.name' \
         '                               PASSED (build <zephyr>)'],
        None
    ),
    (
        'unknown status', False, False, False,
        ['Unknown status = unknown status'],
        'INFO    - Total complete:   20/  25  80%'
        '  built (not run):    0, filtered:    3, failed:    2, error:    1\r'
    )
]
2037
@pytest.mark.parametrize(
    'status, verbose, cmake_only, ready_run, expected_logs, expected_out',
    TESTDATA_13,
    ids=['verbose error cmake only', 'failed', 'verbose skipped', 'filtered',
         'verbose passed ready run', 'verbose passed', 'unknown status']
)
def test_projectbuilder_report_out(
    capfd,
    caplog,
    mocked_jobserver,
    status,
    verbose,
    cmake_only,
    ready_run,
    expected_logs,
    expected_out
):
    """Verify ProjectBuilder.report_out() emits the expected per-instance
    log lines (via caplog) and, where applicable, the running-totals line
    on stdout (via capfd) for different instance statuses and verbosity.
    """
    instance_mock = mock.Mock()
    instance_mock.handler.type_str = 'dummy handler type'
    instance_mock.handler.seed = 123
    instance_mock.handler.ready = ready_run
    instance_mock.run = ready_run
    instance_mock.dut = 'dummy dut'
    instance_mock.execution_time = 60
    instance_mock.platform.name = 'dummy platform'
    instance_mock.status = status
    instance_mock.reason = 'dummy reason'
    instance_mock.toolchain = 'zephyr'
    instance_mock.testsuite.name = 'dummy.testsuite.name'
    # 25 testcases in the suite; the instance's own case list has one
    # SKIP case so skipped-case accounting is exercised too.
    skip_mock_tc = mock.Mock(status=TwisterStatus.SKIP, reason=None)
    skip_mock_tc.name = 'mocked_testcase_to_skip'
    unknown_mock_tc = mock.Mock(status=mock.Mock(value='dummystatus'), reason=None)
    unknown_mock_tc.name = 'mocked_testcase_unknown'
    instance_mock.testsuite.testcases = [unknown_mock_tc for _ in range(25)]
    instance_mock.testcases = [unknown_mock_tc for _ in range(24)] + \
                              [skip_mock_tc]
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options.verbose = verbose
    pb.options.cmake_only = cmake_only
    pb.options.seed = 123
    pb.log_info_file = mock.Mock()

    # Stand-in for the shared ExecutionCounter: plain attributes plus
    # hand-written *_increment() closures that mutate them in place.
    results_mock = mock.Mock(
        total = 25,
        done = 19,
        passed = 17,
        notrun = 0,
        failed = 2,
        filtered_configs = 3,
        filtered_runtime = 0,
        filtered_static = 0,
        error = 1,
        cases = 0,
        filtered_cases = 0,
        skipped_cases = 4,
        failed_cases = 0,
        error_cases = 0,
        blocked_cases = 0,
        passed_cases = 0,
        none_cases = 0,
        started_cases = 0
    )
    results_mock.iteration = 1
    def results_done_increment(value=1, decrement=False):
        results_mock.done += value * (-1 if decrement else 1)
    results_mock.done_increment = results_done_increment
    def filtered_configs_increment(value=1, decrement=False):
        results_mock.filtered_configs += value * (-1 if decrement else 1)
    results_mock.filtered_configs_increment = filtered_configs_increment
    def filtered_static_increment(value=1, decrement=False):
        results_mock.filtered_static += value * (-1 if decrement else 1)
    results_mock.filtered_static_increment = filtered_static_increment
    def filtered_runtime_increment(value=1, decrement=False):
        results_mock.filtered_runtime += value * (-1 if decrement else 1)
    results_mock.filtered_runtime_increment = filtered_runtime_increment
    def failed_increment(value=1, decrement=False):
        results_mock.failed += value * (-1 if decrement else 1)
    results_mock.failed_increment = failed_increment
    def notrun_increment(value=1, decrement=False):
        results_mock.notrun += value * (-1 if decrement else 1)
    results_mock.notrun_increment = notrun_increment

    pb.report_out(results_mock)

    # All 25 suite testcases must have been counted exactly once.
    assert results_mock.cases_increment.call_args_list == [mock.call(25)]

    # Strip 7-bit ANSI C1 escape sequences (colours) from the log text.
    trim_actual_log = re.sub(
        r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])',
        '',
        caplog.text
    )
    # Drop the volatile "twister:runner.py:<line>" location prefixes.
    trim_actual_log = re.sub(r'twister:runner.py:\d+', '', trim_actual_log)

    assert all([log in trim_actual_log for log in expected_logs])

    # NOTE(review): this print() is itself captured by capfd, so the
    # cleaned log text becomes part of `out` below — confirm the substring
    # assert on expected_out cannot false-positive on it.
    print(trim_actual_log)
    if expected_out:
        out, err = capfd.readouterr()
        # Re-emit so the captured output is still visible on failure.
        sys.stdout.write(out)
        sys.stderr.write(err)

        # Remove 7b ANSI C1 escape sequences (colours)
        out = re.sub(
            r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])',
            '',
            out
        )

        assert expected_out in out
2149
2150
def test_projectbuilder_cmake_assemble_args():
    """cmake_assemble_args() should merge extra args, handler args and the
    conf/overlay file lists into a single '-D...' CMake argument list."""
    handler_mock = mock.Mock(ready=True, args=['dummy_handler'])
    build_directory = os.path.join('build', 'dir')

    # Pretend every referenced file exists so all sources are included.
    with mock.patch('os.path.exists', return_value=True):
        assembled = ProjectBuilder.cmake_assemble_args(
            ['CONFIG_FOO=y', 'DUMMY_EXTRA="yes"'],
            handler_mock,
            ['extrafile1.conf', 'extrafile2.conf'],
            ['extra_overlay_conf'],
            ['overlay1.dtc', 'overlay2.dtc'],
            ['CMAKE1="yes"', 'CMAKE2=n'],
            build_directory,
        )

    extra_conf_path = os.path.join(
        'build', 'dir', 'twister', 'testsuite_extra.conf'
    )
    assert assembled == [
        '-DCONFIG_FOO=y',
        '-DCMAKE1="yes"',
        '-DCMAKE2=n',
        '-DDUMMY_EXTRA=yes',
        '-Ddummy_handler',
        '-DCONF_FILE=extrafile1.conf;extrafile2.conf',
        '-DDTC_OVERLAY_FILE=overlay1.dtc;overlay2.dtc',
        f'-DOVERLAY_CONFIG=extra_overlay_conf {extra_conf_path}',
    ]
2181
2182
def test_projectbuilder_cmake(mocked_jobserver):
    """Verify cmake() assembles CMake args from the testsuite, instance and
    options, forwards them (with the filter) to run_cmake(), and returns
    run_cmake()'s result.

    Fix: the test previously declared no parameters, so the bare name
    ``mocked_jobserver`` referenced the module-level fixture *function*
    rather than the fixture value; it is now requested as a fixture, in
    line with the other ProjectBuilder tests in this file.
    """
    instance_mock = mock.Mock()
    instance_mock.handler = 'dummy handler'
    instance_mock.build_dir = os.path.join('build', 'dir')
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.build_dir = 'build_dir'
    pb.testsuite.extra_args = ['some', 'args']
    pb.testsuite.extra_conf_files = ['some', 'files1']
    pb.testsuite.extra_overlay_confs = ['some', 'files2']
    pb.testsuite.extra_dtc_overlay_files = ['some', 'files3']
    pb.options.extra_args = ['other', 'args']
    pb.cmake_assemble_args = mock.Mock(return_value=['dummy'])
    cmake_res_mock = mock.Mock()
    pb.run_cmake = mock.Mock(return_value=cmake_res_mock)

    res = pb.cmake(['dummy filter'])

    assert res == cmake_res_mock
    # cmake() must pass the testsuite/instance/options data through verbatim.
    pb.cmake_assemble_args.assert_called_once_with(
        pb.testsuite.extra_args,
        pb.instance.handler,
        pb.testsuite.extra_conf_files,
        pb.testsuite.extra_overlay_confs,
        pb.testsuite.extra_dtc_overlay_files,
        pb.options.extra_args,
        pb.instance.build_dir
    )
    pb.run_cmake.assert_called_once_with(['dummy'], ['dummy filter'])
2213
2214
def test_projectbuilder_build(mocked_jobserver):
    """build() should call run_build() with '--build <build_dir>' and
    return its result unchanged."""
    env_mock = mock.Mock()
    instance_mock = mock.Mock()
    instance_mock.testsuite.harness = 'test'

    builder = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    builder.build_dir = 'build_dir'
    expected_result = {'dummy': 'dummy'}
    builder.run_build = mock.Mock(return_value=expected_result)

    assert builder.build() == expected_result
    builder.run_build.assert_called_once_with(['--build', 'build_dir'])
2229
2230
# Test data for test_projectbuilder_run():
# (ready, type_str, seed, platform_name, platform_arch, defconfig, harness,
#  expect_duts, expect_parse_generated, expect_seed, expect_extra_test_args,
#  expect_pytest, expect_handle) — see the parametrize declaration below.
TESTDATA_14 = [
    (
        True,
        'device',
        234,
        'native_sim',
        'posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_SIM': 'y'},
        'pytest',
        True,
        True,
        True,
        True,
        True,
        False
    ),
    (
        True,
        'not device',
        None,
        'native_sim',
        'not posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_SIM': 'y'},
        'not pytest',
        False,
        False,
        False,
        False,
        False,
        True
    ),
    (
        False,
        'device',
        234,
        'native_sim',
        'posix',
        {'CONFIG_FAKE_ENTROPY_NATIVE_SIM': 'y'},
        'pytest',
        False,
        False,
        False,
        False,
        False,
        False
    ),
]
2278
@pytest.mark.parametrize(
    'ready, type_str, seed, platform_name, platform_arch, defconfig, harness,' \
    ' expect_duts, expect_parse_generated, expect_seed,' \
    ' expect_extra_test_args, expect_pytest, expect_handle',
    TESTDATA_14,
    ids=['pytest full', 'not pytest minimal', 'not ready']
)
def test_projectbuilder_run(
    mocked_jobserver,
    ready,
    type_str,
    seed,
    platform_name,
    platform_arch,
    defconfig,
    harness,
    expect_duts,
    expect_parse_generated,
    expect_seed,
    expect_extra_test_args,
    expect_pytest,
    expect_handle
):
    """Verify ProjectBuilder.run() wires up the handler (duts, seed, extra
    test args), parses generated files when appropriate, and dispatches to
    either the Pytest harness or the generic handler path.
    """
    pytest_mock = mock.Mock(spec=Pytest)
    harness_mock = mock.Mock()

    def mock_harness(name):
        # HarnessImporter.get_harness replacement: only the 'Pytest' name
        # yields the Pytest-spec'd mock; all others get a generic harness.
        if name == 'Pytest':
            return pytest_mock
        else:
            return harness_mock

    instance_mock = mock.Mock()
    instance_mock.handler.get_test_timeout = mock.Mock(return_value=60)
    instance_mock.handler.seed = 123
    instance_mock.handler.ready = ready
    instance_mock.handler.type_str = type_str
    instance_mock.handler.duts = [mock.Mock(name='dummy dut')]
    instance_mock.platform.name = platform_name
    instance_mock.platform.arch = platform_arch
    instance_mock.testsuite.harness = harness
    env_mock = mock.Mock()

    pb = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    pb.options.extra_test_args = ['dummy_arg1', 'dummy_arg2']
    pb.duts = ['another dut']
    pb.options.seed = seed
    pb.defconfig = defconfig
    pb.parse_generated = mock.Mock()

    with mock.patch('twisterlib.runner.HarnessImporter.get_harness',
                    mock_harness):
        pb.run()

    # The builder's dut list should replace the handler's own when ready.
    if expect_duts:
        assert pb.instance.handler.duts == ['another dut']

    if expect_parse_generated:
        pb.parse_generated.assert_called_once()

    # The CLI seed should override the handler's pre-set seed.
    if expect_seed:
        assert pb.instance.handler.seed == seed

    if expect_extra_test_args:
        assert pb.instance.handler.extra_test_args == ['dummy_arg1',
                                                       'dummy_arg2']

    # Pytest harness runs via pytest_run(timeout); others via handle().
    if expect_pytest:
        pytest_mock.pytest_run.assert_called_once_with(60)

    if expect_handle:
        pb.instance.handler.handle.assert_called_once_with(harness_mock)
2351
2352
# Test data for test_projectbuilder_gather_metrics():
# (enable_size_report, cmake_only, expect_calc_size, expect_zeroes)
TESTDATA_15 = [
    (False, False, False, True),
    (True, False, True, False),
    (False, True, False, True),
    (True, True, False, True),
]
2359
@pytest.mark.parametrize(
    'enable_size_report, cmake_only, expect_calc_size, expect_zeroes',
    TESTDATA_15,
    ids=['none', 'size_report', 'cmake', 'size_report+cmake']
)
def test_projectbuilder_gather_metrics(
    mocked_jobserver,
    enable_size_report,
    cmake_only,
    expect_calc_size,
    expect_zeroes
):
    """gather_metrics() should calculate sizes only when a size report is
    requested and the build was not cmake-only; otherwise the RAM/ROM
    metrics are zeroed out."""
    env_mock = mock.Mock()
    instance_mock = mock.Mock()
    instance_mock.metrics = {}

    builder = ProjectBuilder(instance_mock, env_mock, mocked_jobserver)
    builder.options.enable_size_report = enable_size_report
    builder.options.create_rom_ram_report = False
    builder.options.cmake_only = cmake_only
    builder.calc_size = mock.Mock()

    builder.gather_metrics(instance_mock)

    if expect_calc_size:
        builder.calc_size.assert_called_once()

    if expect_zeroes:
        zeroed_keys = ('used_ram', 'used_rom', 'available_rom', 'available_ram')
        assert all(instance_mock.metrics[key] == 0 for key in zeroed_keys)
2392
2393
# Test data for test_projectbuilder_calc_size():
# (status, platform_type, expect_warnings, expect_calcs, expect_zeroes)
TESTDATA_16 = [
    (TwisterStatus.ERROR, mock.ANY, False, False, False),
    (TwisterStatus.FAIL, mock.ANY, False, False, False),
    (TwisterStatus.SKIP, mock.ANY, False, False, False),
    (TwisterStatus.FILTER, 'native', False, False, True),
    (TwisterStatus.PASS, 'qemu', False, False, True),
    (TwisterStatus.FILTER, 'unit', False, False, True),
    (TwisterStatus.FILTER, 'mcu', True, True, False),
    (TwisterStatus.PASS, 'frdm_k64f', False, True, False),
]
2404
@pytest.mark.parametrize(
    'status, platform_type, expect_warnings, expect_calcs, expect_zeroes',
    TESTDATA_16,
    ids=[x[0] + (', ' + x[1]) if x[1] != mock.ANY else '' for x in TESTDATA_16]
)
def test_projectbuilder_calc_size(
    status,
    platform_type,
    expect_warnings,
    expect_calcs,
    expect_zeroes
):
    """Verify ProjectBuilder.calc_size() behaviour per status/platform type:
    real hardware gets calculate_sizes(), simulated platforms get zeroed
    metrics, and failed/skipped/errored instances are left untouched.
    calc_size() is exercised as an unbound method with a mock instance.
    """
    size_calc_mock = mock.Mock()

    instance_mock = mock.Mock()
    instance_mock.status = status
    instance_mock.platform.type = platform_type
    instance_mock.metrics = {}
    instance_mock.calculate_sizes = mock.Mock(return_value=size_calc_mock)

    from_buildlog = True

    ProjectBuilder.calc_size(instance_mock, from_buildlog)

    if expect_calcs:
        instance_mock.calculate_sizes.assert_called_once_with(
            from_buildlog=from_buildlog,
            generate_warning=expect_warnings
        )

        # Metrics should be populated straight from the size calculator.
        assert instance_mock.metrics['used_ram'] == \
               size_calc_mock.get_used_ram()
        assert instance_mock.metrics['used_rom'] == \
               size_calc_mock.get_used_rom()
        assert instance_mock.metrics['available_rom'] == \
               size_calc_mock.get_available_rom()
        assert instance_mock.metrics['available_ram'] == \
               size_calc_mock.get_available_ram()

    if expect_zeroes:
        assert instance_mock.metrics['used_ram'] == 0
        assert instance_mock.metrics['used_rom'] == 0
        assert instance_mock.metrics['available_rom'] == 0
        assert instance_mock.metrics['available_ram'] == 0

    # handler_time is recorded whenever any metrics were written at all.
    if expect_calcs or expect_zeroes:
        assert instance_mock.metrics['handler_time'] == \
               instance_mock.execution_time
    else:
        assert instance_mock.metrics == {}
2455
2456
# Test data for test_twisterrunner_run():
# (platform, os_name, options, jobclient_from_environ, expected_jobs,
#  expected_jobserver)
TESTDATA_17 = [
    ('linux', 'posix', {'jobs': 4}, True, 32, 'GNUMakeJobClient'),
    ('linux', 'posix', {'build_only': True}, False, 16, 'GNUMakeJobServer'),
    ('linux', '???', {}, False, 8, 'JobClient'),
    ('linux', '???', {'jobs': 4}, False, 4, 'JobClient'),
]
2463
@pytest.mark.parametrize(
    'platform, os_name, options, jobclient_from_environ, expected_jobs,' \
    ' expected_jobserver',
    TESTDATA_17,
    ids=['GNUMakeJobClient', 'GNUMakeJobServer',
         'JobClient', 'Jobclient+options']
)
def test_twisterrunner_run(
    caplog,
    platform,
    os_name,
    options,
    jobclient_from_environ,
    expected_jobs,
    expected_jobserver
):
    """Verify TwisterRunner.run() selects the right jobserver type and job
    count, copies metrics/execution time from the processed instances back
    onto the runner's instances, and resets the error counter for retries.
    """
    def mock_client_from_environ(jobs):
        # Simulate GNUMakeJobClient.from_environ(): return a 32-job client
        # when the environment 'provides' one, otherwise None so run()
        # falls back to GNUMakeJobServer/JobClient.
        if jobclient_from_environ:
            jobclient_mock = mock.Mock(jobs=32)
            jobclient_mock.name = 'GNUMakeJobClient'
            return jobclient_mock
        return None

    instances = {'dummy instance': mock.Mock(metrics={'k': 'v'})}
    suites = [mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.options.retry_failed = 2
    tr.options.retry_interval = 10
    tr.options.retry_build_errors = True
    tr.options.jobs = None
    tr.options.build_only = None
    for k, v in options.items():
        setattr(tr.options, k, v)
    tr.update_counting_before_pipeline = mock.Mock()
    tr.execute = mock.Mock()
    tr.show_brief = mock.Mock()

    # Calling these mocks returns a stable child mock, so `m()` acts as a
    # shared singleton instance wherever run() instantiates the class.
    gnumakejobserver_mock = mock.Mock()
    gnumakejobserver_mock().name='GNUMakeJobServer'
    jobclient_mock = mock.Mock()
    jobclient_mock().name='JobClient'

    # Fake the BaseManager-provided structures: an empty work queue and a
    # 'ready' dict holding the post-processing version of the instance.
    processing_queue = deque()
    processing_ready = {}
    processing_instance = mock.Mock(
        metrics={'k': 'v2'},
        execution_time=30
    )
    processing_instance.name='dummy instance'
    processing_ready[processing_instance.name] = processing_instance
    manager_mock = mock.Mock()
    manager_mock().deque = mock.Mock(return_value=processing_queue)
    manager_mock().get_dict = mock.Mock(return_value=processing_ready)

    # ExecutionCounter stand-in (results_mock() is the shared instance).
    results_mock = mock.Mock()
    results_mock().error = 1
    results_mock().iteration = 0
    results_mock().failed = 2
    results_mock().total = 9
    results_mock().filtered_static = 0
    results_mock().skipped = 0

    def iteration_increment(value=1, decrement=False):
        results_mock().iteration += value * (-1 if decrement else 1)
    results_mock().iteration_increment = iteration_increment

    with mock.patch('twisterlib.runner.ExecutionCounter', results_mock), \
         mock.patch('twisterlib.runner.BaseManager', manager_mock), \
         mock.patch('twisterlib.runner.GNUMakeJobClient.from_environ',
                    mock_client_from_environ), \
         mock.patch('twisterlib.runner.GNUMakeJobServer',
                    gnumakejobserver_mock), \
         mock.patch('twisterlib.runner.JobClient', jobclient_mock), \
         mock.patch('multiprocessing.cpu_count', return_value=8), \
         mock.patch('sys.platform', platform), \
         mock.patch('time.sleep', mock.Mock()), \
         mock.patch('os.name', os_name):
        tr.run()

    assert f'JOBS: {expected_jobs}' in caplog.text

    assert tr.jobserver.name == expected_jobserver

    # Metrics and execution time come back from the processed instance.
    assert tr.instances['dummy instance'].metrics == {
        'k': 'v2',
        'handler_time': 30
    }

    # run() should have reset the error counter before retrying.
    assert results_mock().error == 0
2555
2556
def test_twisterrunner_update_counting_before_pipeline():
    """Verify update_counting_before_pipeline() accounts statically
    filtered, errored and quarantine-skipped instances (and their
    testcases) in the result counters, while runtime-filtered and passed
    instances are left for the pipeline itself.

    The seven hand-written ``*_increment`` closures have been collapsed
    into a single local factory to remove the copy-paste duplication.
    """
    instances = {
        'dummy1': mock.Mock(
            status=TwisterStatus.FILTER,
            reason='runtime filter',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy2': mock.Mock(
            status=TwisterStatus.FILTER,
            reason='static filter',
            testsuite=mock.Mock(
                testcases=[mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock()]
            )
        ),
        'dummy3': mock.Mock(
            status=TwisterStatus.ERROR,
            reason='error',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy4': mock.Mock(
            status=TwisterStatus.PASS,
            reason='OK',
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        ),
        'dummy5': mock.Mock(
            status=TwisterStatus.SKIP,
            reason="Quarantine",
            testsuite=mock.Mock(
                testcases=[mock.Mock()]
            )
        )
    }
    suites = [mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.results = mock.Mock(
        total = 0,
        done = 0,
        passed = 0,
        failed = 0,
        filtered_configs = 0,
        filtered_runtime = 0,
        filtered_static = 0,
        error = 0,
        cases = 0,
        filtered_cases = 0,
        skipped = 0,
        skipped_cases = 0,
        failed_cases = 0,
        error_cases = 0,
        blocked_cases = 0,
        passed_cases = 0,
        none_cases = 0,
        started_cases = 0
    )

    def make_increment(attr):
        # Emulate an ExecutionCounter's <attr>_increment() helper by
        # adjusting the plain attribute on the results mock in place.
        def _increment(value=1, decrement=False):
            setattr(
                tr.results, attr,
                getattr(tr.results, attr) + value * (-1 if decrement else 1)
            )
        return _increment

    for counter in ('filtered_configs', 'filtered_static', 'error', 'cases',
                    'filtered_cases', 'skipped', 'skipped_cases'):
        setattr(tr.results, f'{counter}_increment', make_increment(counter))

    tr.update_counting_before_pipeline()

    # Only the static filter (dummy2) is counted here, not the runtime one.
    assert tr.results.filtered_static == 1
    assert tr.results.filtered_configs == 1
    assert tr.results.filtered_cases == 4
    assert tr.results.cases == 5
    assert tr.results.error == 1
    assert tr.results.skipped == 1
    assert tr.results.skipped_cases == 1
2650
2651
def test_twisterrunner_show_brief(caplog):
    """show_brief() should log a one-line summary of selected scenarios,
    configurations and filtering statistics."""
    env_mock = mock.Mock()
    instances = {f'dummy{idx}': mock.Mock() for idx in range(1, 6)}
    suites = [mock.Mock(), mock.Mock()]

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.results = mock.Mock(
        filtered_static=3,
        filtered_configs=4,
        skipped_cases=0,
        cases=0,
        error=0
    )

    tr.show_brief()

    expected_log = (
        '2 test scenarios (5 configurations) selected,'
        ' 4 configurations filtered (3 by static filter, 1 at runtime).'
    )
    assert expected_log in caplog.text
2678
2679
# Test data for test_twisterrunner_add_tasks_to_queue():
# (build_only, test_only, retry_build_errors, expected_pipeline_elements)
TESTDATA_18 = [
    (False, False, False, [{'op': 'cmake', 'test': mock.ANY}]),
    (False, False, True, [{'op': 'filter', 'test': mock.ANY},
                          {'op': 'cmake', 'test': mock.ANY}]),
    (False, True, True, [{'op': 'run', 'test': mock.ANY},
                         {'op': 'run', 'test': mock.ANY}]),
    (False, True, False, [{'op': 'run', 'test': mock.ANY}]),
    (True, True, False, [{'op': 'cmake', 'test': mock.ANY}]),
    (True, True, True, [{'op': 'filter', 'test': mock.ANY},
                        {'op': 'cmake', 'test': mock.ANY}]),
    (True, False, True, [{'op': 'filter', 'test': mock.ANY},
                         {'op': 'cmake', 'test': mock.ANY}]),
    (True, False, False, [{'op': 'cmake', 'test': mock.ANY}]),
]
2694
@pytest.mark.parametrize(
    'build_only, test_only, retry_build_errors, expected_pipeline_elements',
    TESTDATA_18,
    ids=['none', 'retry', 'test+retry', 'test', 'build+test',
         'build+test+retry', 'build+retry', 'build']
)
def test_twisterrunner_add_tasks_to_queue(
    build_only,
    test_only,
    retry_build_errors,
    expected_pipeline_elements
):
    """Verify add_tasks_to_queue() enqueues the correct first pipeline
    stage ('cmake', 'filter' or 'run') for each instance, depending on
    build/test mode and whether build errors are retried.

    Cleanup: removed leftover debug print()s and renamed the helper's
    ``filter`` parameter, which shadowed the builtin.
    """
    def mock_get_cmake_filter_stages(testsuite_filter, keys):
        # Echo the filter expression so queued stages are easy to trace
        # back to the instance that produced them.
        return [testsuite_filter]

    instances = {
        'dummy1': mock.Mock(run=True, retries=0, status=TwisterStatus.PASS, build_dir="/tmp"),
        'dummy2': mock.Mock(run=True, retries=0, status=TwisterStatus.SKIP, build_dir="/tmp"),
        'dummy3': mock.Mock(run=True, retries=0, status=TwisterStatus.FILTER, build_dir="/tmp"),
        'dummy4': mock.Mock(run=True, retries=0, status=TwisterStatus.ERROR, build_dir="/tmp"),
        'dummy5': mock.Mock(run=True, retries=0, status=TwisterStatus.FAIL, build_dir="/tmp")
    }
    instances['dummy4'].testsuite.filter = 'some'
    instances['dummy5'].testsuite.filter = 'full'
    suites = [mock.Mock(), mock.Mock()]
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    tr.get_cmake_filter_stages = mock.Mock(
        side_effect=mock_get_cmake_filter_stages
    )
    tr.results = mock.Mock(iteration=0)

    processing_queue_mock = mock.Mock()

    tr.add_tasks_to_queue(
        processing_queue_mock,
        build_only,
        test_only,
        retry_build_errors
    )

    # In build-only mode every instance's run flag must be cleared.
    assert all(
        [build_only != instance.run for instance in instances.values()]
    )

    # The failed instance's filter is always re-evaluated; the errored
    # one only when build errors are being retried.
    tr.get_cmake_filter_stages.assert_any_call('full', mock.ANY)
    if retry_build_errors:
        tr.get_cmake_filter_stages.assert_any_call('some', mock.ANY)

    assert processing_queue_mock.append.call_args_list == \
           [mock.call(el) for el in expected_pipeline_elements]
2750
2751
# Platforms exercised by test_twisterrunner_pipeline_mgr. (Plain strings:
# the single-parameter parametrize takes one value per entry.)
TESTDATA_19 = [
    'linux',
    'nt',
]
2756
@pytest.mark.parametrize(
    'platform',
    TESTDATA_19,
)
def test_twisterrunner_pipeline_mgr(mocked_jobserver, platform):
    """Verify pipeline_mgr() processes queued tasks via ProjectBuilder
    until the queue is exhausted, and acquires a jobserver job slot only
    on Linux.
    """
    counter = 0
    def mock_pop():
        # Serve five tasks, then raise IndexError (empty deque) so the
        # manager's processing loop terminates.
        nonlocal counter
        counter += 1
        if counter > 5:
            raise IndexError
        return {'test': mock.Mock(required_applications=[])}

    instances = {}
    suites = []
    env_mock = mock.Mock()

    tr = TwisterRunner(instances, suites, env=env_mock)
    # get_job() is used as a context manager; nullcontext() satisfies that.
    tr.jobserver = mock.Mock(
        get_job=mock.Mock(
            return_value=nullcontext()
        )
    )

    processing_queue_mock = mock.Mock()
    processing_queue_mock.pop = mock.Mock(side_effect=mock_pop)
    processing_ready_mock = mock.Mock()
    lock_mock = mock.Mock()
    results_mock = mock.Mock()

    with mock.patch('sys.platform', platform), \
         mock.patch('twisterlib.runner.ProjectBuilder',\
                    return_value=mock.Mock()) as pb:
        tr.pipeline_mgr(processing_queue_mock, processing_ready_mock, lock_mock, results_mock)

    # One ProjectBuilder.process() call per successfully popped task.
    assert len(pb().process.call_args_list) == 5

    if platform == 'linux':
        tr.jobserver.get_job.assert_called_once()
2796
2797
def test_twisterrunner_execute(caplog):
    """execute() should start `jobs` worker processes and, when a
    KeyboardInterrupt occurs while joining them, terminate every worker
    and log the interruption."""
    join_calls = 0

    def interrupt_on_fourth_join():
        # Let three joins succeed, then simulate Ctrl-C on the fourth.
        nonlocal join_calls
        join_calls += 1
        if join_calls > 3:
            raise KeyboardInterrupt()

    env_mock = mock.Mock()
    tr = TwisterRunner({}, [], env=env_mock)
    tr.add_tasks_to_queue = mock.Mock()
    tr.jobs = 5

    process_mock = mock.Mock()
    process_mock().join = mock.Mock(side_effect=interrupt_on_fourth_join)
    process_mock().exitcode = 0
    pipeline_mock = mock.Mock()
    done_mock = mock.Mock()

    with mock.patch('twisterlib.runner.Process', process_mock):
        tr.execute(pipeline_mock, done_mock)

    assert 'Execution interrupted' in caplog.text

    assert len(process_mock().start.call_args_list) == 5
    assert len(process_mock().join.call_args_list) == 4
    assert len(process_mock().terminate.call_args_list) == 5
2828
2829
2830
# Test data for test_twisterrunner_get_cmake_filter_stages():
# (filter expression, expected CMake stages needed to evaluate it)
TESTDATA_20 = [
    ('', []),
    ('not ARCH in ["x86", "arc"]', ['full']),
    ('dt_dummy(x, y)', ['dts']),
    ('not CONFIG_FOO', ['kconfig']),
    ('dt_dummy and CONFIG_FOO', ['dts', 'kconfig']),
]
2838
@pytest.mark.parametrize(
    'filter, expected_result',
    TESTDATA_20,
    ids=['none', 'full', 'dts', 'kconfig', 'dts+kconfig']
)
def test_twisterrunner_get_cmake_filter_stages(filter, expected_result):
    """get_cmake_filter_stages() should map a filter expression onto the
    CMake stages (dts/kconfig/full) required to evaluate it."""
    stages = TwisterRunner.get_cmake_filter_stages(filter, ['not', 'and'])

    assert sorted(stages) == sorted(expected_result)
2848
2849
@pytest.mark.parametrize(
    'required_apps, processing_ready_keys, expected_result',
    [
        (['app1', 'app2'], ['app1', 'app2'], True),  # all apps ready
        (['app1', 'app2', 'app3'], ['app1', 'app2'], False),  # some apps missing
        ([], [], True),  # no required apps
        (['app1'], [], False),  # single app missing
    ],
    ids=['all_ready', 'some_missing', 'no_apps', 'single_missing']
)
def test_twisterrunner_are_required_apps_ready(required_apps, processing_ready_keys, expected_result):
    """_are_required_apps_ready() should report True only when every
    required application already has an entry in the processing-ready map."""
    env_mock = mock.Mock()
    runner = TwisterRunner({}, [], env=env_mock)

    instance_mock = mock.Mock()
    instance_mock.required_applications = required_apps

    ready_map = {name: mock.Mock() for name in processing_ready_keys}

    assert runner._are_required_apps_ready(instance_mock, ready_map) \
           is expected_result
2875
2876
@pytest.mark.parametrize(
    'app_statuses, expected_result',
    [
        ([TwisterStatus.PASS, TwisterStatus.PASS], True),  # all passed
        ([TwisterStatus.NOTRUN, TwisterStatus.NOTRUN], True),  # all notrun
        ([TwisterStatus.PASS, TwisterStatus.NOTRUN], True),  # mixed pass/notrun
        ([TwisterStatus.PASS, TwisterStatus.FAIL], False),  # one failed
        ([TwisterStatus.ERROR], False),  # single error
    ],
    ids=['all_pass', 'all_notrun', 'mixed_pass_notrun', 'one_fail', 'single_error']
)
def test_twisterrunner_are_all_required_apps_success(app_statuses, expected_result):
    """_are_all_required_apps_success is True only if every required app ended PASS/NOTRUN."""
    runner = TwisterRunner({}, [], env=mock.Mock())

    # Name the required apps app1..appN, one per status in the scenario.
    fake_instance = mock.Mock()
    fake_instance.required_applications = [
        f'app{idx + 1}' for idx in range(len(app_statuses))
    ]

    ready = {}
    for idx, app_status in enumerate(app_statuses):
        app_name = f'app{idx + 1}'
        app_mock = mock.Mock()
        app_mock.status = app_status
        app_mock.reason = f"Reason for {app_name}"
        ready[app_name] = app_mock

    assert runner._are_all_required_apps_success(fake_instance, ready) is expected_result
2908
2909
@pytest.mark.parametrize(
    'required_apps, ready_apps, expected_result, expected_actions',
    [
        ([], {}, True,
         {'requeue': False, 'skip': False, 'build_dirs': 0}),
        (['app1'], {}, False,
         {'requeue': True, 'skip': False, 'build_dirs': 0}),
        (['app1', 'app2'], {'app1': TwisterStatus.PASS}, False,
         {'requeue': True, 'skip': False, 'build_dirs': 0}),
        (['app1'], {'app1': TwisterStatus.FAIL}, False,
         {'requeue': False, 'skip': True, 'build_dirs': 0}),
        (['app1', 'app2'], {'app1': TwisterStatus.PASS, 'app2': TwisterStatus.NOTRUN}, True,
         {'requeue': False, 'skip': False, 'build_dirs': 2}),
    ],
    ids=['no_apps', 'not_ready_single_job', 'not_ready_multi_job',
         'apps_failed', 'apps_success']
)
def test_twisterrunner_are_required_apps_processed(required_apps, ready_apps,
                                                   expected_result, expected_actions):
    """are_required_apps_processed: requeue while apps are pending, skip the test
    when a required app failed, and collect build dirs once all apps succeed."""
    # The runner's instances dict knows every required app (for build-dir lookup).
    known_instances = {
        app_name: mock.Mock(build_dir=f'/path/to/{app_name}')
        for app_name in required_apps
    }

    runner = TwisterRunner(known_instances, [], env=mock.Mock())
    runner.jobs = 1

    test_instance = mock.Mock()
    test_instance.required_applications = list(required_apps)
    test_instance.required_build_dirs = []

    # Skip scenarios inspect the instance's testcases, so provide one.
    if expected_actions['skip']:
        test_instance.testcases = [mock.Mock()]

    ready = {}
    for app_name, app_status in ready_apps.items():
        app_mock = mock.Mock()
        app_mock.status = app_status
        app_mock.reason = f"Reason for {app_name}"
        app_mock.build_dir = f'/path/to/{app_name}'
        ready[app_name] = app_mock

    queue = deque()
    task = {'test': test_instance}

    outcome = runner.are_required_apps_processed(test_instance, queue, ready, task)

    assert outcome is expected_result

    if expected_actions['requeue']:
        # The original task must be put back on the queue, unchanged.
        assert len(queue) == 1
        assert queue[0] == task

    if expected_actions['skip']:
        assert test_instance.status == TwisterStatus.SKIP
        assert test_instance.reason == "Required application failed"
        assert test_instance.required_applications == []
        assert test_instance.testcases[0].status == TwisterStatus.SKIP
        # A 'report' task must have been enqueued for the skipped instance.
        assert any(item.get('op') == 'report' for item in queue)

    assert len(test_instance.required_build_dirs) == expected_actions['build_dirs']
    if expected_actions['build_dirs'] > 0:
        assert test_instance.required_applications == []
2979